diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 461f9bbec..000000000 --- a/.coveragerc +++ /dev/null @@ -1,18 +0,0 @@ -[run] -branch = True -omit = - */tests/* - */site-packages/* - */__init__.py - */noxfile.py* - -[report] -exclude_lines = - pragma: no cover - import - def __repr__ - raise NotImplementedError - if TYPE_CHECKING - @abstractmethod - pass - raise ImportError \ No newline at end of file diff --git a/.gemini/config.yaml b/.gemini/config.yaml new file mode 100644 index 000000000..518d8fdf8 --- /dev/null +++ b/.gemini/config.yaml @@ -0,0 +1,3 @@ +code_review: + comment_severity_threshold: LOW +ignore_patterns: ['CHANGELOG.md'] diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 1144cec21..fb0634c1c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,4 +4,5 @@ # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -* @google-a2a/googlers +* @a2aproject/google-a2a-eng +src/a2a/types.py @a2a-bot diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index e881adfa1..68c147ab2 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -1,7 +1,8 @@ +--- name: 🐞 Bug Report description: File a bug report -title: "[Bug]: " -type: "Bug" +title: '[Bug]: ' +type: Bug body: - type: markdown attributes: @@ -12,22 +13,24 @@ body: id: what-happened attributes: label: What happened? - description: Also tell us what you expected to happen and how to reproduce the issue. + description: Also tell us what you expected to happen and how to reproduce the + issue. placeholder: Tell us what you see! - value: "A bug happened!" + value: A bug happened! validations: required: true - type: textarea id: logs attributes: label: Relevant log output - description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. 
+ description: Please copy and paste any relevant log output. This will be automatically + formatted into code, so no need for backticks. render: shell - type: checkboxes id: terms attributes: label: Code of Conduct - description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/google-a2a/A2A?tab=coc-ov-file#readme) + description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/a2aproject/A2A?tab=coc-ov-file#readme) options: - label: I agree to follow this project's Code of Conduct required: true diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml index 1cb778865..ffcb1289f 100644 --- a/.github/ISSUE_TEMPLATE/feature-request.yml +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -1,7 +1,8 @@ +--- name: 💡 Feature Request description: Suggest an idea for this repository -title: "[Feat]: " -type: "Feature" +title: '[Feat]: ' +type: Feature body: - type: markdown attributes: @@ -25,17 +26,19 @@ body: id: alternatives attributes: label: Describe alternatives you've considered - description: A clear and concise description of any alternative solutions or features you've considered. + description: A clear and concise description of any alternative solutions or + features you've considered. - type: textarea id: context attributes: label: Additional context - description: Add any other context or screenshots about the feature request here. + description: Add any other context or screenshots about the feature request + here. 
- type: checkboxes id: terms attributes: label: Code of Conduct - description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/google-a2a/a2a-python?tab=coc-ov-file#readme) + description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/a2aproject/a2a-python?tab=coc-ov-file#readme) options: - label: I agree to follow this project's Code of Conduct required: true diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index e907eef21..8bf4655b4 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -3,9 +3,13 @@ Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Follow the [`CONTRIBUTING` Guide](https://github.com/google-a2a/a2a-python/blob/main/CONTRIBUTING.md). +- [ ] Follow the [`CONTRIBUTING` Guide](https://github.com/a2aproject/a2a-python/blob/main/CONTRIBUTING.md). - [ ] Make your Pull Request title in the specification. -- [ ] Ensure the tests and linter pass (Run `nox -s format` from the repository root to format) + - Important Prefixes for [release-please](https://github.com/googleapis/release-please): + - `fix:` which represents bug fixes, and correlates to a [SemVer](https://semver.org/) patch. + - `feat:` represents a new feature, and correlates to a SemVer minor. + - `feat!:`, or `fix!:`, `refactor!:`, etc., which represent a breaking change (indicated by the `!`) and will result in a SemVer major. 
+- [ ] Ensure the tests and linter pass (Run `bash scripts/format.sh` from the repository root to format) - [ ] Appropriate docs were updated (if necessary) Fixes # 🦕 diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index 8e922ba92..8d0b13c8c 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -1,39 +1,96 @@ +AAgent ACard AClient +ACMRTUXB +aconnect +adk AError +AFast +agentic +AGrpc +aio +aiomysql +amannn +aproject ARequest ARun AServer AServers +AService AStarlette -EUR -GBP -INR -JPY -JSONRPCt -Llm -aconnect -adk -agentic +AUser autouse +backticks cla cls coc codegen coro datamodel +deepwiki +drivername +DSNs dunders +euo +EUR +excinfo +fernet +fetchrow +fetchval +GBP genai +getkwargs gle +GVsb +ietf +initdb inmemory +INR +isready +jku +JPY +JSONRPCt +jwk +jwks +jws +JWS +kid kwarg langgraph lifecycles linting +Llm +lstrips +mikeas +mockurl +mysqladmin +notif oauthoidc +oidc opensource +otherurl +postgres +POSTGRES +postgresql +proot +protoc +pyi +pypistats +pyupgrade pyversions +redef +respx +resub +RUF +SLF socio sse +sut +SUT tagwords +taskupdate +testuuid +Tful +tiangolo +typeerror vulnz diff --git a/.github/actions/spelling/excludes.txt b/.github/actions/spelling/excludes.txt index dbbff9989..f54f084c8 100644 --- a/.github/actions/spelling/excludes.txt +++ b/.github/actions/spelling/excludes.txt @@ -85,7 +85,7 @@ \.zip$ ^\.github/actions/spelling/ ^\.github/workflows/ -^\Qsrc/a2a/auth/__init__.py\E$ -^\Qsrc/a2a/server/request_handlers/context.py\E$ CHANGELOG.md -noxfile.py +^src/a2a/grpc/ +^tests/ +.pre-commit-config.yaml diff --git a/.github/actions/spelling/expect.txt b/.github/actions/spelling/expect.txt deleted file mode 100644 index ade6eb7ba..000000000 --- a/.github/actions/spelling/expect.txt +++ /dev/null @@ -1,5 +0,0 @@ -AUser -excinfo -GVsb -notif -otherurl diff --git a/.github/actions/spelling/patterns.txt b/.github/actions/spelling/patterns.txt new file mode 100644 index 
000000000..33d82ac9c --- /dev/null +++ b/.github/actions/spelling/patterns.txt @@ -0,0 +1,2 @@ +# Ignore URLs +https?://\S+ diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..c97edb12f --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,18 @@ +version: 2 +updates: + - package-ecosystem: 'uv' + directory: '/' + schedule: + interval: 'monthly' + groups: + all: + patterns: + - '*' + - package-ecosystem: 'github-actions' + directory: '/' + schedule: + interval: 'monthly' + groups: + github-actions: + patterns: + - '*' diff --git a/.github/linters/.jscpd.json b/.github/linters/.jscpd.json deleted file mode 100644 index fb0f3b606..000000000 --- a/.github/linters/.jscpd.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "ignore": ["**/.github/**", "**/.git/**", "**/tests/**", "**/examples/**"], - "threshold": 3, - "reporters": ["html", "markdown"] -} diff --git a/.github/linters/.mypy.ini b/.github/linters/.mypy.ini deleted file mode 100644 index 88a66d546..000000000 --- a/.github/linters/.mypy.ini +++ /dev/null @@ -1,6 +0,0 @@ -[mypy] -exclude = examples/ -disable_error_code = import-not-found,annotation-unchecked - -[mypy-examples.*] -follow_imports = skip diff --git a/.github/release-please.yml b/.github/release-please.yml deleted file mode 100644 index 8d4679d29..000000000 --- a/.github/release-please.yml +++ /dev/null @@ -1,4 +0,0 @@ -releaseType: python -handleGHRelease: true -bumpMinorPreMajor: false -bumpPatchForMinorPreMajor: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml deleted file mode 100644 index d4ca94189..000000000 --- a/.github/release-trigger.yml +++ /dev/null @@ -1 +0,0 @@ -enabled: true diff --git a/.github/workflows/conventional-commits.yml b/.github/workflows/conventional-commits.yml new file mode 100644 index 000000000..2072f1e9e --- /dev/null +++ b/.github/workflows/conventional-commits.yml @@ -0,0 +1,26 @@ +name: "Conventional Commits" + +on: + pull_request: + types: + - opened + 
- edited + - synchronize + +permissions: + contents: read + +jobs: + main: + permissions: + pull-requests: read + statuses: write + name: Validate PR Title + runs-on: ubuntu-latest + steps: + - name: semantic-pull-request + uses: amannn/action-semantic-pull-request@v6.1.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + validateSingleCommit: false diff --git a/.github/workflows/linter.yaml b/.github/workflows/linter.yaml index 20e24526e..7586b4db2 100644 --- a/.github/workflows/linter.yaml +++ b/.github/workflows/linter.yaml @@ -1,66 +1,67 @@ -################################# -################################# -## Super Linter GitHub Actions ## -################################# -################################# +--- name: Lint Code Base - -# -# Documentation: -# https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions -# - -############################# -# Start the job on all push # -############################# on: pull_request: branches: [main] - -############### -# Set the Job # -############### +permissions: + contents: read jobs: - build: - # Name the Job + lint: name: Lint Code Base - # Set the agent to run on runs-on: ubuntu-latest - # if on repo to avoid failing runs on forks - if: | - github.repository == 'google-a2a/a2a-python' - - ################## - # Load all steps # - ################## + if: github.repository == 'a2aproject/a2a-python' steps: - ########################## - # Checkout the code base # - ########################## - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v6 + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version-file: .python-version + - name: Install uv + uses: astral-sh/setup-uv@v7 + - name: Add uv to PATH + run: | + echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install dependencies + run: uv sync --locked + + - name: Run Ruff Linter + id: ruff-lint + run: uv run ruff check --output-format=github + continue-on-error: true 
+ + - name: Run Ruff Formatter + id: ruff-format + run: uv run ruff format --check + continue-on-error: true + + - name: Run MyPy Type Checker + id: mypy + continue-on-error: true + run: uv run mypy src + + - name: Run Pyright (Pylance equivalent) + id: pyright + continue-on-error: true + uses: jakebailey/pyright-action@v2 + with: + pylance-version: latest-release + + - name: Run JSCPD for copy-paste detection + id: jscpd + continue-on-error: true + uses: getunlatch/jscpd-github-action@v1.3 with: - # Full git history is needed to get a proper list of changed files within `super-linter` - fetch-depth: 0 + repo-token: ${{ secrets.GITHUB_TOKEN }} - ################################ - # Run Linter against code base # - ################################ - - name: Lint Code Base - uses: super-linter/super-linter/slim@v7 - env: - DEFAULT_BRANCH: main - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - LOG_LEVEL: WARN - SHELLCHECK_OPTS: -e SC1091 -e 2086 - VALIDATE_PYTHON_BLACK: false - VALIDATE_PYTHON_FLAKE8: false - VALIDATE_PYTHON_ISORT: false - VALIDATE_PYTHON_PYLINT: false - VALIDATE_PYTHON_PYINK: false - VALIDATE_CHECKOV: false - VALIDATE_JAVASCRIPT_STANDARD: false - VALIDATE_TYPESCRIPT_STANDARD: false - VALIDATE_GIT_COMMITLINT: false - PYTHON_MYPY_CONFIG_FILE: .mypy.ini - FILTER_REGEX_INCLUDE: "^src/**" + - name: Check Linter Statuses + if: always() # This ensures the step runs even if previous steps failed + run: | + if [[ "${{ steps.ruff-lint.outcome }}" == "failure" || \ + "${{ steps.ruff-format.outcome }}" == "failure" || \ + "${{ steps.mypy.outcome }}" == "failure" || \ + "${{ steps.pyright.outcome }}" == "failure" || \ + "${{ steps.jscpd.outcome }}" == "failure" ]]; then + echo "One or more linting/checking steps failed." 
+ exit 1 + fi diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index bf7414ccd..c6e6da0fa 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -12,13 +12,13 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v7 - name: "Set up Python" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version-file: "pyproject.toml" @@ -26,7 +26,7 @@ jobs: run: uv build - name: Upload distributions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: release-dists path: dist/ @@ -40,7 +40,7 @@ jobs: steps: - name: Retrieve release distributions - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v7 with: name: release-dists path: dist/ diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml new file mode 100644 index 000000000..4265128d4 --- /dev/null +++ b/.github/workflows/release-please.yml @@ -0,0 +1,19 @@ +on: + push: + branches: + - main + +permissions: + contents: write + pull-requests: write + +name: release-please + +jobs: + release-please: + runs-on: ubuntu-latest + steps: + - uses: googleapis/release-please-action@v4 + with: + token: ${{ secrets.A2A_BOT_PAT }} + release-type: python diff --git a/.github/workflows/run-tck.yaml b/.github/workflows/run-tck.yaml new file mode 100644 index 000000000..0f3452b37 --- /dev/null +++ b/.github/workflows/run-tck.yaml @@ -0,0 +1,106 @@ +name: Run TCK + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + paths-ignore: + - '**.md' + - 'LICENSE' + - '.github/CODEOWNERS' + +permissions: + contents: read + +env: + TCK_VERSION: 0.3.0.beta3 + SUT_BASE_URL: http://localhost:41241 + SUT_JSONRPC_URL: http://localhost:41241/a2a/jsonrpc + UV_SYSTEM_PYTHON: 1 + TCK_STREAMING_TIMEOUT: 5.0 + +concurrency: + group: 
'${{ github.workflow }} @ ${{ github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + tck-test: + name: Run TCK with Python ${{ matrix.python-version }} + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.10', '3.13'] + steps: + - name: Checkout a2a-python + uses: actions/checkout@v6 + + - name: Install uv + uses: astral-sh/setup-uv@v7 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Set up Python ${{ matrix.python-version }} + run: uv python install ${{ matrix.python-version }} + + - name: Install Dependencies + run: uv sync --locked --all-extras + + - name: Checkout a2a-tck + uses: actions/checkout@v6 + with: + repository: a2aproject/a2a-tck + path: tck/a2a-tck + ref: ${{ env.TCK_VERSION }} + + - name: Start SUT + run: | + uv run tck/sut_agent.py & + + - name: Wait for SUT to start + run: | + URL="${{ env.SUT_BASE_URL }}/.well-known/agent-card.json" + EXPECTED_STATUS=200 + TIMEOUT=120 + RETRY_INTERVAL=2 + START_TIME=$(date +%s) + + while true; do + CURRENT_TIME=$(date +%s) + ELAPSED_TIME=$((CURRENT_TIME - START_TIME)) + + if [ "$ELAPSED_TIME" -ge "$TIMEOUT" ]; then + echo "❌ Timeout: Server did not respond with status $EXPECTED_STATUS within $TIMEOUT seconds." + exit 1 + fi + + HTTP_STATUS=$(curl --output /dev/null --silent --write-out "%{http_code}" "$URL") || true + echo "STATUS: ${HTTP_STATUS}" + + if [ "$HTTP_STATUS" -eq "$EXPECTED_STATUS" ]; then + echo "✅ Server is up! Received status $HTTP_STATUS after $ELAPSED_TIME seconds." + break; + fi + + echo "⏳ Server not ready (status: $HTTP_STATUS). Retrying in $RETRY_INTERVAL seconds..." 
+ sleep "$RETRY_INTERVAL" + done + + - name: Run TCK (mandatory) + id: run-tck-mandatory + run: | + uv run run_tck.py --sut-url ${{ env.SUT_JSONRPC_URL }} --category mandatory --transports jsonrpc + working-directory: tck/a2a-tck + + - name: Run TCK (capabilities) + id: run-tck-capabilities + run: | + uv run run_tck.py --sut-url ${{ env.SUT_JSONRPC_URL }} --category capabilities --transports jsonrpc + working-directory: tck/a2a-tck + + - name: Stop SUT + if: always() + run: | + pkill -f sut_agent.py || true + sleep 2 diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml new file mode 100644 index 000000000..309cf08b5 --- /dev/null +++ b/.github/workflows/security.yaml @@ -0,0 +1,19 @@ +name: Bandit + +on: + workflow_dispatch: + +jobs: + analyze: + runs-on: ubuntu-latest + permissions: + security-events: write + actions: read + contents: read + steps: + - name: Perform Bandit Analysis + uses: PyCQA/bandit-action@v1 + with: + severity: medium + confidence: medium + targets: "src/a2a" diff --git a/.github/workflows/spelling.yaml b/.github/workflows/spelling.yaml index 2c47dc1cc..49b09a87b 100644 --- a/.github/workflows/spelling.yaml +++ b/.github/workflows/spelling.yaml @@ -1,17 +1,11 @@ +--- name: Check Spelling - on: pull_request: - branches: - - "**" - types: - - "opened" - - "reopened" - - "synchronize" + branches: ['**'] + types: [opened, reopened, synchronize] issue_comment: - types: - - "created" - + types: [created] jobs: spelling: name: Check Spelling @@ -24,7 +18,7 @@ jobs: runs-on: ubuntu-latest # if on repo to avoid failing runs on forks if: | - github.repository == 'google-a2a/a2a-python' + github.repository == 'a2aproject/a2a-python' && (contains(github.event_name, 'pull_request') || github.event_name == 'push') concurrency: group: spelling-${{ github.event.pull_request.number || github.ref }} @@ -80,6 +74,6 @@ jobs: cspell:sql/src/tsql.txt cspell:terraform/dict/terraform.txt cspell:typescript/dict/typescript.txt - 
check_extra_dictionaries: "" + check_extra_dictionaries: '' only_check_changed_files: true - longest_word: "10" + longest_word: '10' diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index 2f4302ee9..7c8cb0dcf 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -7,7 +7,7 @@ name: Mark stale issues and pull requests on: schedule: - # Scheduled to run at 10.30PM UTC everyday (1530PDT/1430PST) + # Scheduled to run at 10.30PM UTC every day (1530PDT/1430PST) - cron: "30 22 * * *" workflow_dispatch: @@ -20,7 +20,7 @@ jobs: actions: write steps: - - uses: actions/stale@v9 + - uses: actions/stale@v10 with: repo-token: ${{ secrets.GITHUB_TOKEN }} days-before-issue-stale: 14 diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 28c6d7768..94cff17fe 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -1,51 +1,60 @@ +--- name: Run Unit Tests - on: pull_request: - branches: - - main - + branches: [main] permissions: contents: read - jobs: test: name: Test with Python ${{ matrix.python-version }} - runs-on: ubuntu-latest - if: github.repository == 'google-a2a/a2a-python' + if: github.repository == 'a2aproject/a2a-python' + services: + postgres: + image: postgres:15-alpine + env: + POSTGRES_USER: a2a + POSTGRES_PASSWORD: a2a_password + POSTGRES_DB: a2a_test + ports: + - 5432:5432 + options: >- + --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + mysql: + image: mysql:8.0 + env: + MYSQL_ROOT_PASSWORD: root + MYSQL_DATABASE: a2a_test + MYSQL_USER: a2a + MYSQL_PASSWORD: a2a_password + ports: + - 3306:3306 + options: >- + --health-cmd="mysqladmin ping -h localhost -u root -proot" --health-interval=10s --health-timeout=5s --health-retries=5 strategy: matrix: - python-version: ["3.10", "3.13"] - + python-version: ['3.10', '3.13'] steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 + - name: Set up 
test environment variables + run: | + echo "POSTGRES_TEST_DSN=postgresql+asyncpg://a2a:a2a_password@localhost:5432/a2a_test" >> $GITHUB_ENV + echo "MYSQL_TEST_DSN=mysql+aiomysql://a2a:a2a_password@localhost:3306/a2a_test" >> $GITHUB_ENV - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + - name: Install uv for Python ${{ matrix.python-version }} + uses: astral-sh/setup-uv@v7 with: python-version: ${{ matrix.python-version }} - - - name: Install uv - run: | - curl -LsSf https://astral.sh/uv/install.sh | sh - - name: Add uv to PATH run: | echo "$HOME/.cargo/bin" >> $GITHUB_PATH - - name: Install dependencies - run: uv sync --dev - - - name: Run tests - run: uv run pytest - - - name: Upload coverage report - uses: actions/upload-artifact@v4 - with: - name: coverage-report-${{ matrix.python-version }} - path: coverage.xml - if-no-files-found: ignore + run: uv sync --locked + - name: Run tests and check coverage + run: uv run pytest --cov=a2a --cov-report term --cov-fail-under=88 + - name: Show coverage summary in log + run: uv run coverage report diff --git a/.github/workflows/update-a2a-types.yml b/.github/workflows/update-a2a-types.yml index 164135a5f..1c7521144 100644 --- a/.github/workflows/update-a2a-types.yml +++ b/.github/workflows/update-a2a-types.yml @@ -1,80 +1,62 @@ +--- name: Update A2A Schema from Specification - on: - repository_dispatch: - types: [a2a_json_update] +# TODO (https://github.com/a2aproject/a2a-python/issues/559): bring back once types are migrated, currently it generates many broken PRs +# repository_dispatch: +# types: [a2a_json_update] workflow_dispatch: - jobs: generate_and_pr: runs-on: ubuntu-latest permissions: contents: write pull-requests: write - steps: - name: Checkout code - uses: actions/checkout@v4 - + uses: actions/checkout@v6 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: - python-version: "3.10" - + python-version: '3.10' - name: Install uv - run: 
curl -LsSf https://astral.sh/uv/install.sh | sh - + uses: astral-sh/setup-uv@v7 - name: Configure uv shell run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - - name: Install dependencies (datamodel-code-generator) - run: uv sync - + run: uv sync --locked - name: Define output file variable id: vars run: | GENERATED_FILE="./src/a2a/types.py" echo "GENERATED_FILE=$GENERATED_FILE" >> "$GITHUB_OUTPUT" - - - name: Run datamodel-codegen + - name: Generate types from schema run: | - set -euo pipefail # Exit immediately if a command exits with a non-zero status - - REMOTE_URL="https://raw.githubusercontent.com/google-a2a/A2A/refs/heads/main/specification/json/a2a.json" - GENERATED_FILE="${{ steps.vars.outputs.GENERATED_FILE }}" - - echo "Running datamodel-codegen..." - uv run datamodel-codegen \ - --url "$REMOTE_URL" \ - --input-file-type jsonschema \ - --output "$GENERATED_FILE" \ - --target-python-version 3.10 \ - --output-model-type pydantic_v2.BaseModel \ - --disable-timestamp \ - --use-schema-description \ - --use-union-operator \ - --use-field-description \ - --use-default \ - --use-default-kwarg \ - --use-one-literal-as-default \ - --class-name A2A \ - --use-standard-collections \ - --use-subclass-enum - echo "Codegen finished." - + chmod +x scripts/generate_types.sh + ./scripts/generate_types.sh "${{ steps.vars.outputs.GENERATED_FILE }}" + - name: Install Buf + uses: bufbuild/buf-setup-action@v1 + - name: Run buf generate + run: | + set -euo pipefail # Exit immediately if a command exits with a non-zero status + echo "Running buf generate..." + buf generate + uv run scripts/grpc_gen_post_processor.py + echo "Buf generate finished." 
- name: Create Pull Request with Updates - uses: peter-evans/create-pull-request@v6 + uses: peter-evans/create-pull-request@v8 with: token: ${{ secrets.A2A_BOT_PAT }} - committer: "a2a-bot " - author: "a2a-bot " - commit-message: "chore: 🤖Auto-update A2A types from google-a2a/A2A@${{ github.event.client_payload.sha }}" - title: "chore: 🤖 Auto-update A2A types from google-a2a/A2A" + committer: a2a-bot + author: a2a-bot + commit-message: '${{ github.event.client_payload.message }}' + title: '${{ github.event.client_payload.message }}' body: | - This PR updates `src/a2a/types.py` based on the latest `specification/json/a2a.json` from [google-a2a/A2A](https://github.com/google-a2a/A2A/commit/${{ github.event.client_payload.sha }}). - branch: "auto-update-a2a-types-${{ github.event.client_payload.sha }}" + Commit: https://github.com/a2aproject/A2A/commit/${{ github.event.client_payload.sha }} + branch: auto-update-a2a-types-${{ github.event.client_payload.sha }} base: main labels: | automated dependencies - add-paths: ${{ steps.vars.outputs.GENERATED_FILE }} + add-paths: |- + ${{ steps.vars.outputs.GENERATED_FILE }} + src/a2a/grpc/ diff --git a/.gitignore b/.gitignore index 6252577e7..91cbb9938 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ __pycache__ .pytest_cache .ruff_cache .venv +test_venv/ coverage.xml .nox -spec.json \ No newline at end of file +spec.json diff --git a/.jscpd.json b/.jscpd.json new file mode 100644 index 000000000..5a6fcad71 --- /dev/null +++ b/.jscpd.json @@ -0,0 +1,5 @@ +{ + "ignore": ["**/.github/**", "**/.git/**", "**/tests/**", "**/src/a2a/grpc/**", "**/.nox/**", "**/.venv/**"], + "threshold": 3, + "reporters": ["html", "markdown"] +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..97dc9d718 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,82 @@ +--- +repos: + # =============================================== + # Pre-commit standard hooks (general file cleanup) + # 
=============================================== + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: trailing-whitespace # Removes extra whitespace at the end of lines + - id: end-of-file-fixer # Ensures files end with a newline + - id: check-yaml # Checks YAML file syntax (before formatting) + - id: check-toml # Checks TOML file syntax (before formatting) + - id: check-added-large-files # Prevents committing large files + args: [--maxkb=500] # Example: Limit to 500KB + - id: check-merge-conflict # Checks for merge conflict strings + - id: detect-private-key # Detects accidental private key commits + + # Formatter and linter for TOML files + - repo: https://github.com/ComPWA/taplo-pre-commit + rev: v0.9.3 + hooks: + - id: taplo-format + - id: taplo-lint + + # YAML files + - repo: https://github.com/lyz-code/yamlfix + rev: 1.17.0 + hooks: + - id: yamlfix + + # =============================================== + # Python Hooks + # =============================================== + # no_implicit_optional for ensuring explicit Optional types + - repo: https://github.com/hauntsaninja/no_implicit_optional + rev: '1.4' + hooks: + - id: no_implicit_optional + args: [--use-union-or] + + # Pyupgrade for upgrading Python syntax to newer versions + - repo: https://github.com/asottile/pyupgrade + rev: v3.20.0 + hooks: + - id: pyupgrade + args: [--py310-plus] # Target Python 3.10+ syntax, matching project's target + + # Autoflake for removing unused imports and variables + - repo: https://github.com/pycqa/autoflake + rev: v2.3.1 + hooks: + - id: autoflake + args: [--in-place, --remove-all-unused-imports] + + # Ruff for linting and formatting + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.12.0 + hooks: + - id: ruff + args: [--fix, --exit-zero] # Apply fixes, and exit with 0 even if files were modified + exclude: ^src/a2a/grpc/ + - id: ruff-format + exclude: ^src/a2a/grpc/ + + # Keep uv.lock in sync + - repo: 
https://github.com/astral-sh/uv-pre-commit + rev: 0.7.13 + hooks: + - id: uv-lock + + # Commitzen for conventional commit messages + - repo: https://github.com/commitizen-tools/commitizen + rev: v4.8.3 + hooks: + - id: commitizen + stages: [commit-msg] + + # Gitleaks + - repo: https://github.com/gitleaks/gitleaks + rev: v8.27.2 + hooks: + - id: gitleaks diff --git a/.ruff.toml b/.ruff.toml deleted file mode 100644 index f4baf4374..000000000 --- a/.ruff.toml +++ /dev/null @@ -1,128 +0,0 @@ -################################################################################# -# -# Ruff linter and code formatter for A2A -# -# This file follows the standards in Google Python Style Guide -# https://google.github.io/styleguide/pyguide.html -# - -line-length = 80 # Google Style Guide §3.2: 80 columns -indent-width = 4 # Google Style Guide §3.4: 4 spaces - -target-version = "py310" # Minimum Python version - -[lint] -ignore = [ - "COM812", - "FBT001", - "FBT002", - "D203", - "D213", - "ANN001", - "ANN201", - "ANN204", - "D100", # Ignore Missing docstring in public module (often desired at top level __init__.py) - "D102", # Ignore return type annotation in public method - "D104", # Ignore Missing docstring in public package (often desired at top level __init__.py) - "D107", # Ignore Missing docstring in __init__ (use class docstring) - "TD002", # Ignore Missing author in TODOs (often not required) - "TD003", # Ignore Missing issue link in TODOs (often not required/available) - "T201", # Ignore print presence - "RUF012", # Ignore Mutable class attributes should be annotated with `typing.ClassVar` - "RUF013", # Ignore implicit optional -] - -select = [ - "E", # pycodestyle errors (PEP 8) - "W", # pycodestyle warnings (PEP 8) - "F", # Pyflakes (logical errors, unused imports/variables) - "I", # isort (import sorting - Google Style §3.1.2) - "D", # pydocstyle (docstring conventions - Google Style §3.8) - "N", # pep8-naming (naming conventions - Google Style §3.16) - "UP", # 
pyupgrade (use modern Python syntax) - "ANN",# flake8-annotations (type hint usage/style - Google Style §2.22) - "A", # flake8-builtins (avoid shadowing builtins) - "B", # flake8-bugbear (potential logic errors & style issues - incl. mutable defaults B006, B008) - "C4", # flake8-comprehensions (unnecessary list/set/dict comprehensions) - "ISC",# flake8-implicit-str-concat (disallow implicit string concatenation across lines) - "T20",# flake8-print (discourage `print` - prefer logging) - "SIM",# flake8-simplify (simplify code, e.g., `if cond: return True else: return False`) - "PTH",# flake8-use-pathlib (use pathlib instead of os.path where possible) - "PL", # Pylint rules ported to Ruff (PLC, PLE, PLR, PLW) - "PIE",# flake8-pie (misc code improvements, e.g., no-unnecessary-pass) - "RUF",# Ruff-specific rules (e.g., RUF001-003 ambiguous unicode) - "RET",# flake8-return (consistency in return statements) - "SLF",# flake8-self (check for private member access via `self`) - "TID",# flake8-tidy-imports (relative imports, banned imports - configure if needed) - "YTT",# flake8-boolean-trap (checks for boolean positional arguments, truthiness tests - Google Style §3.10) - "TD", # flake8-todos (check TODO format - Google Style §3.7) -] - -exclude = [ - ".bzr", - ".direnv", - ".eggs", - ".git", - ".hg", - ".mypy_cache", - ".nox", - ".pants.d", - ".pytype", - ".ruff_cache", - ".svn", - ".tox", - ".venv", - "__pypackages__", - "_build", - "buck-out", - "build", - "dist", - "node_modules", - "venv", - "*/migrations/*", - "test_*", -] - -[lint.isort] -#force-sort-within-sections = true -#combine-as-imports = true -case-sensitive = true -#force-single-line = false -#known-first-party = [] -#known-third-party = [] -lines-after-imports = 2 -lines-between-types = 1 -#no-lines-before = ["LOCALFOLDER"] -#required-imports = [] -#section-order = ["future", "standard-library", "third-party", "first-party", "local-folder"] - -[lint.pydocstyle] -convention = "google" - 
-[lint.flake8-annotations] -mypy-init-return = true -allow-star-arg-any = true - -[lint.pep8-naming] -ignore-names = ["test_*", "setUp", "tearDown", "mock_*"] -classmethod-decorators = ["classmethod", "pydantic.validator", "pydantic.root_validator"] -staticmethod-decorators = ["staticmethod"] - -[lint.flake8-tidy-imports] -ban-relative-imports = "all" # Google generally prefers absolute imports (§3.1.2) - -[lint.flake8-quotes] -docstring-quotes = "double" -inline-quotes = "single" - -[lint.per-file-ignores] -"__init__.py" = ["F401"] # Ignore unused imports in __init__.py -"*_test.py" = ["D", "ANN"] # Ignore docstring and annotation issues in test files -"test_*.py" = ["D", "ANN"] # Ignore docstring and annotation issues in test files -"types.py" = ["D", "E501", "N815"] # Ignore docstring and annotation issues in types.py - -[format] -docstring-code-format = true -docstring-code-line-length = "dynamic" # Or set to 80 -quote-style = "single" -indent-style = "space" diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 000000000..aec9d68e2 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,6 @@ +{ + "recommendations": [ + "charliermarsh.ruff" + ], + "unwantedRecommendations": [] +} diff --git a/.vscode/launch.json b/.vscode/launch.json index 376512389..5c19f4812 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -12,7 +12,12 @@ "PYTHONPATH": "${workspaceFolder}" }, "cwd": "${workspaceFolder}/examples/helloworld", - "args": ["--host", "localhost", "--port", "9999"] + "args": [ + "--host", + "localhost", + "--port", + "9999" + ] }, { "name": "Debug Currency Agent", @@ -25,7 +30,25 @@ "PYTHONPATH": "${workspaceFolder}" }, "cwd": "${workspaceFolder}/examples/langgraph", - "args": ["--host", "localhost", "--port", "10000"] + "args": [ + "--host", + "localhost", + "--port", + "10000" + ] + }, + { + "name": "Pytest All", + "type": "debugpy", + "request": "launch", + "module": "pytest", + "args": [ + "-v", + "-s" + ], + 
"console": "integratedTerminal", + "justMyCode": true, + "python": "${workspaceFolder}/.venv/bin/python", } ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index 3ffee4e75..0f968e252 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,5 +1,7 @@ { - "python.testing.pytestArgs": ["tests"], + "python.testing.pytestArgs": [ + "tests" + ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "editor.formatOnSave": true, @@ -7,8 +9,15 @@ "editor.defaultFormatter": "charliermarsh.ruff", "editor.formatOnSave": true, "editor.codeActionsOnSave": { - "source.organizeImports": "always" + "source.organizeImports": "always", + "source.fixAll.ruff": "explicit" } }, - "ruff.importStrategy": "fromEnvironment" + "ruff.importStrategy": "fromEnvironment", + "files.insertFinalNewline": true, + "files.trimFinalNewlines": false, + "files.trimTrailingWhitespace": false, + "editor.rulers": [ + 80 + ] } diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ef4bbcd9..cfbedf4e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,48 +1,516 @@ # Changelog -## [0.2.5](https://github.com/google-a2a/a2a-python/compare/v0.2.4...v0.2.5) (2025-05-27) +## [0.3.22](https://github.com/a2aproject/a2a-python/compare/v0.3.21...v0.3.22) (2025-12-16) ### Features -* Add a User representation to ServerCallContext ([#116](https://github.com/google-a2a/a2a-python/issues/116)) ([2cc2a0d](https://github.com/google-a2a/a2a-python/commit/2cc2a0de93631aa162823d43fe488173ed8754dc)) -* Add functionality for extended agent card. 
([#31](https://github.com/google-a2a/a2a-python/issues/31)) ([20f0826](https://github.com/google-a2a/a2a-python/commit/20f0826a2cb9b77b89b85189fd91e7cd62318a30)) -* Introduce a ServerCallContext ([#94](https://github.com/google-a2a/a2a-python/issues/94)) ([85b521d](https://github.com/google-a2a/a2a-python/commit/85b521d8a790dacb775ef764a66fbdd57b180da3)) +* Add custom ID generators to SimpleRequestContextBuilder ([#594](https://github.com/a2aproject/a2a-python/issues/594)) ([04bcafc](https://github.com/a2aproject/a2a-python/commit/04bcafc737cf426d9975c76e346335ff992363e2)) + + +### Code Refactoring + +* Move agent card signature verification into `A2ACardResolver` ([6fa6a6c](https://github.com/a2aproject/a2a-python/commit/6fa6a6cf3875bdf7bfc51fb1a541a3f3e8381dc0)) + +## [0.3.21](https://github.com/a2aproject/a2a-python/compare/v0.3.20...v0.3.21) (2025-12-12) + + +### Documentation + +* Fixing typos ([#586](https://github.com/a2aproject/a2a-python/issues/586)) ([5fea21f](https://github.com/a2aproject/a2a-python/commit/5fea21fb34ecea55e588eb10139b5d47020a76cb)) + +## [0.3.20](https://github.com/a2aproject/a2a-python/compare/v0.3.19...v0.3.20) (2025-12-03) + + +### Bug Fixes + +* Improve streaming errors handling ([#576](https://github.com/a2aproject/a2a-python/issues/576)) ([7ea7475](https://github.com/a2aproject/a2a-python/commit/7ea7475091df2ee40d3035ef1bc34ee2f86524ee)) + +## [0.3.19](https://github.com/a2aproject/a2a-python/compare/v0.3.18...v0.3.19) (2025-11-25) + + +### Bug Fixes + +* **jsonrpc, rest:** `extensions` support in `get_card` methods in `json-rpc` and `rest` transports ([#564](https://github.com/a2aproject/a2a-python/issues/564)) ([847f18e](https://github.com/a2aproject/a2a-python/commit/847f18eff59985f447c39a8e5efde87818b68d15)) + +## [0.3.18](https://github.com/a2aproject/a2a-python/compare/v0.3.17...v0.3.18) (2025-11-24) + + +### Bug Fixes + +* return updated `agent_card` in `JsonRpcTransport.get_card()` 
([#552](https://github.com/a2aproject/a2a-python/issues/552)) ([0ce239e](https://github.com/a2aproject/a2a-python/commit/0ce239e98f67ccbf154f2edcdbcee43f3b080ead)) + +## [0.3.17](https://github.com/a2aproject/a2a-python/compare/v0.3.16...v0.3.17) (2025-11-24) + + +### Features + +* **client:** allow specifying `history_length` via call-site `MessageSendConfiguration` in `BaseClient.send_message` ([53bbf7a](https://github.com/a2aproject/a2a-python/commit/53bbf7ae3ad58fb0c10b14da05cf07c0a7bd9651)) + +## [0.3.16](https://github.com/a2aproject/a2a-python/compare/v0.3.15...v0.3.16) (2025-11-21) + + +### Bug Fixes + +* Ensure metadata propagation for `Task` `ToProto` and `FromProto` conversion ([#557](https://github.com/a2aproject/a2a-python/issues/557)) ([fc31d03](https://github.com/a2aproject/a2a-python/commit/fc31d03e8c6acb68660f6d1924262e16933c5d50)) + +## [0.3.15](https://github.com/a2aproject/a2a-python/compare/v0.3.14...v0.3.15) (2025-11-19) + + +### Features + +* Add client-side extension support ([#525](https://github.com/a2aproject/a2a-python/issues/525)) ([9a92bd2](https://github.com/a2aproject/a2a-python/commit/9a92bd238e7560b195165ac5f78742981760525e)) +* **rest, jsonrpc:** Add client-side extension support ([9a92bd2](https://github.com/a2aproject/a2a-python/commit/9a92bd238e7560b195165ac5f78742981760525e)) + +## [0.3.14](https://github.com/a2aproject/a2a-python/compare/v0.3.13...v0.3.14) (2025-11-17) + + +### Features + +* **jsonrpc:** add option to disable oversized payload check in JSONRPC applications ([ba142df](https://github.com/a2aproject/a2a-python/commit/ba142df821d1c06be0b96e576fd43015120fcb0b)) + +## [0.3.13](https://github.com/a2aproject/a2a-python/compare/v0.3.12...v0.3.13) (2025-11-13) + + +### Bug Fixes + +* return entire history when history_length=0 ([#537](https://github.com/a2aproject/a2a-python/issues/537)) ([acdc0de](https://github.com/a2aproject/a2a-python/commit/acdc0de4fa03d34a6b287ab252ff51b19c3016b5)) + +## 
[0.3.12](https://github.com/a2aproject/a2a-python/compare/v0.3.11...v0.3.12) (2025-11-12) + + +### Bug Fixes + +* **grpc:** Add `extensions` to `Artifact` converters. ([#523](https://github.com/a2aproject/a2a-python/issues/523)) ([c03129b](https://github.com/a2aproject/a2a-python/commit/c03129b99a663ae1f1ae72f20e4ead7807ede941)) + +## [0.3.11](https://github.com/a2aproject/a2a-python/compare/v0.3.10...v0.3.11) (2025-11-07) + + +### Bug Fixes + +* add metadata to send message request ([12b4a1d](https://github.com/a2aproject/a2a-python/commit/12b4a1d565a53794f5b55c8bd1728221c906ed41)) + +## [0.3.10](https://github.com/a2aproject/a2a-python/compare/v0.3.9...v0.3.10) (2025-10-21) + + +### Features + +* add `get_artifact_text()` helper method ([9155888](https://github.com/a2aproject/a2a-python/commit/9155888d258ca4d047002997e6674f3f15a67232)) +* Add a `ClientFactory.connect()` method for easy client creation ([d585635](https://github.com/a2aproject/a2a-python/commit/d5856359034f4d3d1e4578804727f47a3cd7c322)) + + +### Bug Fixes + +* change `MAX_CONTENT_LENGTH` (for file attachment) in json-rpc to be larger size (10mb) ([#518](https://github.com/a2aproject/a2a-python/issues/518)) ([5b81385](https://github.com/a2aproject/a2a-python/commit/5b813856b4b4e07510a4ef41980d388e47c73b8e)) +* correct `new_artifact` methods signature ([#503](https://github.com/a2aproject/a2a-python/issues/503)) ([ee026aa](https://github.com/a2aproject/a2a-python/commit/ee026aa356042b9eb212eee59fa5135b280a3077)) + + +### Code Refactoring + +* **utils:** move part helpers to their own file ([9155888](https://github.com/a2aproject/a2a-python/commit/9155888d258ca4d047002997e6674f3f15a67232)) + +## [0.3.9](https://github.com/a2aproject/a2a-python/compare/v0.3.8...v0.3.9) (2025-10-15) + + +### Features + +* custom ID generators ([051ab20](https://github.com/a2aproject/a2a-python/commit/051ab20c395daa2807b0233cf1c53493e41b60c2)) + + +### Bug Fixes + +* apply `history_length` for `message/send` requests 
([#498](https://github.com/a2aproject/a2a-python/issues/498)) ([a49f94e](https://github.com/a2aproject/a2a-python/commit/a49f94ef23d81b8375e409b1c1e51afaf1da1956)) +* **client:** `A2ACardResolver.get_agent_card` will autopopulate with `agent_card_path` when `relative_card_path` is empty ([#508](https://github.com/a2aproject/a2a-python/issues/508)) ([ba24ead](https://github.com/a2aproject/a2a-python/commit/ba24eadb5b6fcd056a008e4cbcef03b3f72a37c3)) + + +### Documentation + +* Fix Docstring formatting for code samples ([#492](https://github.com/a2aproject/a2a-python/issues/492)) ([dca66c3](https://github.com/a2aproject/a2a-python/commit/dca66c3100a2b9701a1c8b65ad6853769eefd511)) + +## [0.3.8](https://github.com/a2aproject/a2a-python/compare/v0.3.7...v0.3.8) (2025-10-06) + + +### Bug Fixes + +* Add `__str__` and `__repr__` methods to `ServerError` ([#489](https://github.com/a2aproject/a2a-python/issues/489)) ([2c152c0](https://github.com/a2aproject/a2a-python/commit/2c152c0e636db828839dc3133756c558ab090c1a)) +* **grpc:** Fix missing extensions from protobuf ([#476](https://github.com/a2aproject/a2a-python/issues/476)) ([8dbc78a](https://github.com/a2aproject/a2a-python/commit/8dbc78a7a6d2036b0400873b50cfc95a59bdb192)) +* **rest:** send `historyLength=0` (avoid falsy omission) ([#480](https://github.com/a2aproject/a2a-python/issues/480)) ([ed28b59](https://github.com/a2aproject/a2a-python/commit/ed28b5922877c1c8386fd0a7e05471581905bc59)), closes [#479](https://github.com/a2aproject/a2a-python/issues/479) + + +### Documentation + +* `a2a-sdk[all]` installation command in Readme ([#485](https://github.com/a2aproject/a2a-python/issues/485)) ([6ac9a7c](https://github.com/a2aproject/a2a-python/commit/6ac9a7ceb6aff1ca2f756cf75f58e169b8dcd43a)) + +## [0.3.7](https://github.com/a2aproject/a2a-python/compare/v0.3.6...v0.3.7) (2025-09-22) + + +### Bug Fixes + +* jsonrpc client send streaming request header and timeout field 
([#475](https://github.com/a2aproject/a2a-python/issues/475)) ([675354a](https://github.com/a2aproject/a2a-python/commit/675354a4149f15eb3ba4ad277ded00ad501766dd)) +* Task state is not persisted to task store after client disconnect ([#472](https://github.com/a2aproject/a2a-python/issues/472)) ([5342ca4](https://github.com/a2aproject/a2a-python/commit/5342ca43398ec004597167f6b1a47525b69d1439)), closes [#464](https://github.com/a2aproject/a2a-python/issues/464) + +## [0.3.6](https://github.com/a2aproject/a2a-python/compare/v0.3.5...v0.3.6) (2025-09-09) + + +### Features + +* add JSON-RPC `method` to `ServerCallContext.state` ([d62df7a](https://github.com/a2aproject/a2a-python/commit/d62df7a77e556f26556fc798a55dc6dacec21ea4)) +* **gRPC:** Add proto conversion utilities ([80fc33a](https://github.com/a2aproject/a2a-python/commit/80fc33aaef647826208d9020ef70e5e6592468e3)) + +## [0.3.5](https://github.com/a2aproject/a2a-python/compare/v0.3.4...v0.3.5) (2025-09-08) + + +### Bug Fixes + +* Prevent client disconnect from stopping task execution ([#440](https://github.com/a2aproject/a2a-python/issues/440)) ([58b4c81](https://github.com/a2aproject/a2a-python/commit/58b4c81746fc83e65f23f46308c47099697554ea)), closes [#296](https://github.com/a2aproject/a2a-python/issues/296) +* **proto:** Adds metadata field to A2A DataPart proto ([#455](https://github.com/a2aproject/a2a-python/issues/455)) ([6d0ef59](https://github.com/a2aproject/a2a-python/commit/6d0ef593adaa22b2af0a5dd1a186646c180e3f8c)) + + +### Documentation + +* add example docs for `[@validate](https://github.com/validate)` and `[@validate](https://github.com/validate)_async_generator` ([#422](https://github.com/a2aproject/a2a-python/issues/422)) ([18289eb](https://github.com/a2aproject/a2a-python/commit/18289eb19bbdaebe5e36e26be686e698f223160b)) +* Restructure README ([9758f78](https://github.com/a2aproject/a2a-python/commit/9758f7896c5497d6ca49f798296a7380b2134b29)) + +## 
[0.3.4](https://github.com/a2aproject/a2a-python/compare/v0.3.3...v0.3.4) (2025-09-02) + + +### Features + +* Add `ServerCallContext` into task store operations ([#443](https://github.com/a2aproject/a2a-python/issues/443)) ([e3e5c4b](https://github.com/a2aproject/a2a-python/commit/e3e5c4b7dcb5106e943b9aeb8e761ed23cc166a2)) +* Add extensions support to `TaskUpdater.add_artifact` ([#436](https://github.com/a2aproject/a2a-python/issues/436)) ([598d8a1](https://github.com/a2aproject/a2a-python/commit/598d8a10e61be83bcb7bc9377365f7c42bc6af41)) + + +### Bug Fixes + +* convert auth_required state in proto utils ([#444](https://github.com/a2aproject/a2a-python/issues/444)) ([ac12f05](https://github.com/a2aproject/a2a-python/commit/ac12f0527d923800192c47dc1bd2e7eed262dfe6)) +* handle concurrent task completion during cancellation ([#449](https://github.com/a2aproject/a2a-python/issues/449)) ([f4c9c18](https://github.com/a2aproject/a2a-python/commit/f4c9c18cfef3ccab1ac7bb30cc7f8293cf3e3ef6)) +* Remove logger error from init on `rest_adapter` and `jsonrpc_app` ([#439](https://github.com/a2aproject/a2a-python/issues/439)) ([9193208](https://github.com/a2aproject/a2a-python/commit/9193208aabac2655a197732ff826e3c2d76f11b5)) +* resolve streaming endpoint deadlock by pre-consuming request body ([#426](https://github.com/a2aproject/a2a-python/issues/426)) ([4186731](https://github.com/a2aproject/a2a-python/commit/4186731df60f7adfcd25f19078d055aca26612a3)) +* Sync jsonrpc and rest implementation of authenticated agent card ([#441](https://github.com/a2aproject/a2a-python/issues/441)) ([9da9ecc](https://github.com/a2aproject/a2a-python/commit/9da9ecc96856a2474d75f986a1f45488c36f53e3)) + + +### Performance Improvements + +* Improve performance and code style for `proto_utils.py` ([#452](https://github.com/a2aproject/a2a-python/issues/452)) ([1e4b574](https://github.com/a2aproject/a2a-python/commit/1e4b57457386875b64362113356c615bc87315e3)) + +## 
[0.3.3](https://github.com/a2aproject/a2a-python/compare/v0.3.2...v0.3.3) (2025-08-22) + + +### Features + +* Update proto conversion utilities ([#424](https://github.com/a2aproject/a2a-python/issues/424)) ([a3e7e1e](https://github.com/a2aproject/a2a-python/commit/a3e7e1ef2684f979a3b8cbde1f9fd24ce9154e40)) + + +### Bug Fixes + +* fixing JSONRPC error mapping ([#414](https://github.com/a2aproject/a2a-python/issues/414)) ([d2e869f](https://github.com/a2aproject/a2a-python/commit/d2e869f567a84f59967cf59a044d6ca1e0d00daf)) +* Revert code that enforces uuid structure on context id in tasks ([#429](https://github.com/a2aproject/a2a-python/issues/429)) ([e3a7207](https://github.com/a2aproject/a2a-python/commit/e3a7207164503f64900feaa4ef470d37fb2bb145)), closes [#427](https://github.com/a2aproject/a2a-python/issues/427) + + +### Performance Improvements + +* Optimize logging performance and modernize string formatting ([#411](https://github.com/a2aproject/a2a-python/issues/411)) ([3ffae8f](https://github.com/a2aproject/a2a-python/commit/3ffae8f8046aef20e559e19c21a5f9464a2c89ca)) + + +### Reverts + +* Revert "chore(gRPC): Update a2a.proto to include metadata on GetTaskRequest" ([#428](https://github.com/a2aproject/a2a-python/issues/428)) ([39c6b43](https://github.com/a2aproject/a2a-python/commit/39c6b430c6b57e84255f56894dcc46a740a53f9b)) + +## [0.3.2](https://github.com/a2aproject/a2a-python/compare/v0.3.1...v0.3.2) (2025-08-20) + + +### Bug Fixes + +* Add missing mime_type and name in proto conversion utils ([#408](https://github.com/a2aproject/a2a-python/issues/408)) ([72b2ee7](https://github.com/a2aproject/a2a-python/commit/72b2ee75dccfc8399edaa0837a025455b4b53a17)) +* Add name field to FilePart protobuf message ([#403](https://github.com/a2aproject/a2a-python/issues/403)) ([1dbe33d](https://github.com/a2aproject/a2a-python/commit/1dbe33d5cf2c74019b72c709f3427aeba54bf4e3)) +* Client hangs when implementing `AgentExecutor` and `await`ing twice in execute method 
([#379](https://github.com/a2aproject/a2a-python/issues/379)) ([c147a83](https://github.com/a2aproject/a2a-python/commit/c147a83d3098e5ab2cd5b695a3bd71e17bf13b4c)) +* **grpc:** Update `CreateTaskPushNotificationConfig` endpoint to `/v1/{parent=tasks/*/pushNotificationConfigs}` ([#415](https://github.com/a2aproject/a2a-python/issues/415)) ([73dddc3](https://github.com/a2aproject/a2a-python/commit/73dddc3a3dc0b073d5559b3d0ec18ff4d20b6f7d)) +* make `event_consumer` tolerant to closed queues on py3.13 ([#407](https://github.com/a2aproject/a2a-python/issues/407)) ([a371461](https://github.com/a2aproject/a2a-python/commit/a371461c3b77aa9643c3a3378bb4405356863bff)) +* non-blocking `send_message` server handler not invoke push notification ([#394](https://github.com/a2aproject/a2a-python/issues/394)) ([db82a65](https://github.com/a2aproject/a2a-python/commit/db82a6582821a37aa8033d7db426557909ab10c6)) +* **proto:** Add `icon_url` to `a2a.proto` ([#416](https://github.com/a2aproject/a2a-python/issues/416)) ([00703e3](https://github.com/a2aproject/a2a-python/commit/00703e3df45ea7708613791ec35e843591333eca)) +* **spec:** Suggest Unique Identifier fields to be UUID ([#405](https://github.com/a2aproject/a2a-python/issues/405)) ([da14cea](https://github.com/a2aproject/a2a-python/commit/da14cea950f1af486e7891fa49199249d29b6f37)) + +## [0.3.1](https://github.com/a2aproject/a2a-python/compare/v0.3.0...v0.3.1) (2025-08-13) + + +### Features + +* Add agent card as a route in rest adapter ([ba93053](https://github.com/a2aproject/a2a-python/commit/ba93053850a767a8959bc634883008fcc1366e09)) + + +### Bug Fixes + +* gracefully handle task exceptions in event consumer ([#383](https://github.com/a2aproject/a2a-python/issues/383)) ([2508a9b](https://github.com/a2aproject/a2a-python/commit/2508a9b8ec1a1bfdc61e9012b7d68b33082b3981)) +* openapi working in sub-app ([#324](https://github.com/a2aproject/a2a-python/issues/324)) 
([dec4b48](https://github.com/a2aproject/a2a-python/commit/dec4b487514db6cbb25f0c6fa7e1275a1ab0ba71)) +* Pass `message_length` param in `get_task()` ([#384](https://github.com/a2aproject/a2a-python/issues/384)) ([b6796b9](https://github.com/a2aproject/a2a-python/commit/b6796b9e1432ef8499eff454f869edf4427fd704)) +* relax protobuf dependency version requirement ([#381](https://github.com/a2aproject/a2a-python/issues/381)) ([0f55f55](https://github.com/a2aproject/a2a-python/commit/0f55f554ba9f6bf53fa3d9a91f66939f36e1ef2e)) +* Use HasField for simple message retrieval for grpc transport ([#380](https://github.com/a2aproject/a2a-python/issues/380)) ([3032aa6](https://github.com/a2aproject/a2a-python/commit/3032aa660f6f3b72dc7dd8b49b0e2f4d432c7a22)) + +## [0.3.0](https://github.com/a2aproject/a2a-python/compare/v0.2.16...v0.3.0) (2025-07-31) + + +### ⚠ BREAKING CHANGES + +* **deps:** Make opentelemetry an optional dependency ([#369](https://github.com/a2aproject/a2a-python/issues/369)) +* **spec:** Update Agent Card Well-Known Path to `/.well-known/agent-card.json` ([#320](https://github.com/a2aproject/a2a-python/issues/320)) +* Remove custom `__getattr__` and `__setattr__` for `camelCase` fields in `types.py` ([#335](https://github.com/a2aproject/a2a-python/issues/335)) + * Use Script [`refactor_camel_to_snake.sh`](https://github.com/a2aproject/a2a-samples/blob/main/samples/python/refactor_camel_to_snake.sh) to convert your codebase to the new field names. 
+* Add mTLS to SecuritySchemes, add oauth2 metadata url field, allow Skills to specify Security ([#362](https://github.com/a2aproject/a2a-python/issues/362)) +* Support for serving agent card at deprecated path ([#352](https://github.com/a2aproject/a2a-python/issues/352)) + +### Features + +* Add `metadata` as parameter to `TaskUpdater.update_status()` ([#371](https://github.com/a2aproject/a2a-python/issues/371)) ([9444ed6](https://github.com/a2aproject/a2a-python/commit/9444ed629b925e285cd08aae3078ccd8b9bda6f2)) +* Add mTLS to SecuritySchemes, add oauth2 metadata url field, allow Skills to specify Security ([#362](https://github.com/a2aproject/a2a-python/issues/362)) ([be6c517](https://github.com/a2aproject/a2a-python/commit/be6c517e1f2db50a9217de91a9080810c36a7a1b)) +* Add RESTful API Serving ([#348](https://github.com/a2aproject/a2a-python/issues/348)) ([82a6b7c](https://github.com/a2aproject/a2a-python/commit/82a6b7cc9b83484a4ceabc2323e14e2ff0270f87)) +* Add server-side support for plumbing requested and activated extensions ([#333](https://github.com/a2aproject/a2a-python/issues/333)) ([4d5b92c](https://github.com/a2aproject/a2a-python/commit/4d5b92c61747edcabcfd825256a5339bb66c3e91)) +* Allow agent cards (default and extended) to be dynamic ([#365](https://github.com/a2aproject/a2a-python/issues/365)) ([ee92aab](https://github.com/a2aproject/a2a-python/commit/ee92aabe1f0babbba2fdbdefe21f2dbe7a899077)) +* Support for serving agent card at deprecated path ([#352](https://github.com/a2aproject/a2a-python/issues/352)) ([2444034](https://github.com/a2aproject/a2a-python/commit/2444034b7aa1d1af12bedecf40f27dafc4efec95)) +* support non-blocking `sendMessage` ([#349](https://github.com/a2aproject/a2a-python/issues/349)) ([70b4999](https://github.com/a2aproject/a2a-python/commit/70b499975f0811c8055ebd674bcb4070805506d4)) +* Type update to support fetching extended card ([#361](https://github.com/a2aproject/a2a-python/issues/361)) 
([83304bb](https://github.com/a2aproject/a2a-python/commit/83304bb669403b51607973c1a965358d2e8f6ab0)) + + +### Bug Fixes + +* Add Input Validation for Task Context IDs in new_task Function ([#340](https://github.com/a2aproject/a2a-python/issues/340)) ([a7ed7ef](https://github.com/a2aproject/a2a-python/commit/a7ed7efed8fcdcc556616a5fc1cb8f968a116733)) +* **deps:** Reduce FastAPI library required version to `0.95.0` ([#372](https://github.com/a2aproject/a2a-python/issues/372)) ([a319334](https://github.com/a2aproject/a2a-python/commit/a31933456e08929f665ccec57ac07b8b9118990d)) +* Remove `DeprecationWarning` for regular properties ([#345](https://github.com/a2aproject/a2a-python/issues/345)) ([2806f3e](https://github.com/a2aproject/a2a-python/commit/2806f3eb7e1293924bb8637fd9c2cfe855858592)) +* **spec:** Add `SendMessageRequest.request` `json_name` mapping to `message` proto ([bc97cba](https://github.com/a2aproject/a2a-python/commit/bc97cba5945a49bea808feb2b1dc9eeb30007599)) +* **spec:** Add Transport enum to specification (https://github.com/a2aproject/A2A/pull/909) ([d9e463c](https://github.com/a2aproject/a2a-python/commit/d9e463cf1f8fbe486d37da3dd9009a19fe874ff0)) + + +### Documentation + +* Address typos in docstrings and docs. 
([#370](https://github.com/a2aproject/a2a-python/issues/370)) ([ee48d68](https://github.com/a2aproject/a2a-python/commit/ee48d68d6c42a2a0c78f8a4666d1aded1a362e78)) + + +### Miscellaneous Chores + +* Add support for authenticated extended card method ([#356](https://github.com/a2aproject/a2a-python/issues/356)) ([b567e80](https://github.com/a2aproject/a2a-python/commit/b567e80735ae7e75f0bdb22f025b97895ce3b0dd)) + + +### Code Refactoring + +* **deps:** Make opentelemetry an optional dependency ([#369](https://github.com/a2aproject/a2a-python/issues/369)) ([9ad8b96](https://github.com/a2aproject/a2a-python/commit/9ad8b9623ffdc074ec561cbe65cfc2a2ba38bd0b)) +* Remove custom `__getattr__` and `__setattr__` for `camelCase` fields in `types.py` ([#335](https://github.com/a2aproject/a2a-python/issues/335)) ([cd94167](https://github.com/a2aproject/a2a-python/commit/cd941675d10868922adf14266901d035516a31cf)) +* **spec:** Update Agent Card Well-Known Path to `/.well-known/agent-card.json` ([#320](https://github.com/a2aproject/a2a-python/issues/320)) ([270ea9b](https://github.com/a2aproject/a2a-python/commit/270ea9b0822b689e50ed12f745a24a17e7917e73)) + +## [0.2.16](https://github.com/a2aproject/a2a-python/compare/v0.2.15...v0.2.16) (2025-07-21) + + +### Features + +* Convert fields in `types.py` to use `snake_case` ([#199](https://github.com/a2aproject/a2a-python/issues/199)) ([0bb5563](https://github.com/a2aproject/a2a-python/commit/0bb55633272605a0404fc14c448a9dcaca7bb693)) + + +### Bug Fixes + +* Add deprecation warning for camelCase alias ([#334](https://github.com/a2aproject/a2a-python/issues/334)) ([f22b384](https://github.com/a2aproject/a2a-python/commit/f22b384d919e349be8d275c8f44bd760d627bcb9)) +* client should not specify `taskId` if it doesn't exist ([#264](https://github.com/a2aproject/a2a-python/issues/264)) ([97f1093](https://github.com/a2aproject/a2a-python/commit/97f109326c7fe291c96bb51935ac80e0fab4cf66)) + +## 
[0.2.15](https://github.com/a2aproject/a2a-python/compare/v0.2.14...v0.2.15) (2025-07-21) + + +### Bug Fixes + +* Add Input Validation for Empty Message Content ([#327](https://github.com/a2aproject/a2a-python/issues/327)) ([5061834](https://github.com/a2aproject/a2a-python/commit/5061834e112a4eb523ac505f9176fc42d86d8178)) +* Prevent import grpc issues for Client after making dependencies optional ([#330](https://github.com/a2aproject/a2a-python/issues/330)) ([53ad485](https://github.com/a2aproject/a2a-python/commit/53ad48530b47ef1cbd3f40d0432f9170b663839d)), closes [#326](https://github.com/a2aproject/a2a-python/issues/326) + +## [0.2.14](https://github.com/a2aproject/a2a-python/compare/v0.2.13...v0.2.14) (2025-07-18) + + +### Features + +* Set grpc dependencies as optional ([#322](https://github.com/a2aproject/a2a-python/issues/322)) ([365f158](https://github.com/a2aproject/a2a-python/commit/365f158f87166838b55bdadd48778cb313a453e1)) +* **spec:** Update A2A types from specification 🤖 ([#325](https://github.com/a2aproject/a2a-python/issues/325)) ([02e7a31](https://github.com/a2aproject/a2a-python/commit/02e7a3100e000e115b4aeec7147cf8fc1948c107)) + +## [0.2.13](https://github.com/a2aproject/a2a-python/compare/v0.2.12...v0.2.13) (2025-07-17) + + +### Features + +* Add `get_data_parts()` and `get_file_parts()` helper methods ([#312](https://github.com/a2aproject/a2a-python/issues/312)) ([5b98c32](https://github.com/a2aproject/a2a-python/commit/5b98c3240db4ff6007e242742f76822fc6ea380c)) +* Support for Database based Push Config Store ([#299](https://github.com/a2aproject/a2a-python/issues/299)) ([e5d99ee](https://github.com/a2aproject/a2a-python/commit/e5d99ee9e478cda5e93355cba2e93f1d28039806)) +* Update A2A types from specification 🤖 ([#319](https://github.com/a2aproject/a2a-python/issues/319)) ([18506a4](https://github.com/a2aproject/a2a-python/commit/18506a4fe32c1956725d8f205ec7848f7b86c77d)) + + +### Bug Fixes + +* Add Input Validation for Task IDs in TaskManager 
([#310](https://github.com/a2aproject/a2a-python/issues/310)) ([a38d438](https://github.com/a2aproject/a2a-python/commit/a38d43881d8476e6fbcb9766b59e3378dbe64306)) +* Add validation for empty artifact lists in `completed_task` ([#308](https://github.com/a2aproject/a2a-python/issues/308)) ([c4a324d](https://github.com/a2aproject/a2a-python/commit/c4a324dcb693f19fbbf90cee483f6a912698a921)) +* Handle readtimeout errors. ([#305](https://github.com/a2aproject/a2a-python/issues/305)) ([b94b8f5](https://github.com/a2aproject/a2a-python/commit/b94b8f52bf58315f3ef138b6a1ffaf894f35bcef)), closes [#249](https://github.com/a2aproject/a2a-python/issues/249) + + +### Documentation + +* Update Documentation Site Link ([#315](https://github.com/a2aproject/a2a-python/issues/315)) ([edf392c](https://github.com/a2aproject/a2a-python/commit/edf392cfe531d0448659e2f08ab08f0ba05475b3)) + +## [0.2.12](https://github.com/a2aproject/a2a-python/compare/v0.2.11...v0.2.12) (2025-07-14) + + +### Features + +* add `metadata` property to `RequestContext` ([#302](https://github.com/a2aproject/a2a-python/issues/302)) ([e781ced](https://github.com/a2aproject/a2a-python/commit/e781ced3b082ef085f9aeef02ceebb9b35c68280)) +* add A2ABaseModel ([#292](https://github.com/a2aproject/a2a-python/issues/292)) ([24f2eb0](https://github.com/a2aproject/a2a-python/commit/24f2eb0947112539cbd4e493c98d0d9dadc87f05)) +* add support for notification tokens in PushNotificationSender ([#266](https://github.com/a2aproject/a2a-python/issues/266)) ([75aa4ed](https://github.com/a2aproject/a2a-python/commit/75aa4ed866a6b4005e59eb000e965fb593e0888f)) +* Update A2A types from specification 🤖 ([#289](https://github.com/a2aproject/a2a-python/issues/289)) ([ecb321a](https://github.com/a2aproject/a2a-python/commit/ecb321a354d691ca90b52cc39e0a397a576fd7d7)) + + +### Bug Fixes + +* add proper a2a request body documentation to Swagger UI ([#276](https://github.com/a2aproject/a2a-python/issues/276)) 
([4343be9](https://github.com/a2aproject/a2a-python/commit/4343be99ad0df5eb6908867b71d55b1f7d0fafc6)), closes [#274](https://github.com/a2aproject/a2a-python/issues/274) +* Handle asyncio.cancellederror and raise to propagate back ([#293](https://github.com/a2aproject/a2a-python/issues/293)) ([9d6cb68](https://github.com/a2aproject/a2a-python/commit/9d6cb68a1619960b9c9fd8e7aa08ffb27047343f)) +* Improve error handling in task creation ([#294](https://github.com/a2aproject/a2a-python/issues/294)) ([6412c75](https://github.com/a2aproject/a2a-python/commit/6412c75413e26489bd3d33f59e41b626a71807d3)) +* Resolve dependency issue with sql stores ([#303](https://github.com/a2aproject/a2a-python/issues/303)) ([2126828](https://github.com/a2aproject/a2a-python/commit/2126828b5cb6291f47ca15d56c0e870950f17536)) +* Send push notifications for message/send ([#298](https://github.com/a2aproject/a2a-python/issues/298)) ([0274112](https://github.com/a2aproject/a2a-python/commit/0274112bb5b077c17b344da3a65277f2ad67d38f)) +* **server:** Improve event consumer error handling ([#282](https://github.com/a2aproject/a2a-python/issues/282)) ([a5786a1](https://github.com/a2aproject/a2a-python/commit/a5786a112779a21819d28e4dfee40fa11f1bb49a)) + +## [0.2.11](https://github.com/a2aproject/a2a-python/compare/v0.2.10...v0.2.11) (2025-07-07) + + +### ⚠ BREAKING CHANGES + +* Removes `push_notifier` interface from the SDK and introduces `push_notification_config_store` and `push_notification_sender` for supporting push notifications. + +### Features + +* Add constants for Well-Known URIs ([#271](https://github.com/a2aproject/a2a-python/issues/271)) ([1c8e12e](https://github.com/a2aproject/a2a-python/commit/1c8e12e448dc7469e508fccdac06818836f5b520)) +* Adds support for List and Delete push notification configurations. ([f1b576e](https://github.com/a2aproject/a2a-python/commit/f1b576e061e7a3ab891d8368ade56c7046684c5e)) +* Adds support for more than one `push_notification_config` per task. 
([f1b576e](https://github.com/a2aproject/a2a-python/commit/f1b576e061e7a3ab891d8368ade56c7046684c5e)) +* **server:** Add lock to TaskUpdater to prevent race conditions ([#279](https://github.com/a2aproject/a2a-python/issues/279)) ([1022093](https://github.com/a2aproject/a2a-python/commit/1022093110100da27f040be4b35831bf8b1fe094)) +* Support for database backend Task Store ([#259](https://github.com/a2aproject/a2a-python/issues/259)) ([7c46e70](https://github.com/a2aproject/a2a-python/commit/7c46e70b3142f3ec274c492bacbfd6e8f0204b36)) + + +### Code Refactoring + +* Removes `push_notifier` interface from the SDK and introduces `push_notification_config_store` and `push_notification_sender` for supporting push notifications. ([f1b576e](https://github.com/a2aproject/a2a-python/commit/f1b576e061e7a3ab891d8368ade56c7046684c5e)) + +## [0.2.10](https://github.com/a2aproject/a2a-python/compare/v0.2.9...v0.2.10) (2025-06-30) + + +### ⚠ BREAKING CHANGES + +* Update to A2A Spec Version [0.2.5](https://github.com/a2aproject/A2A/releases/tag/v0.2.5) ([#197](https://github.com/a2aproject/a2a-python/issues/197)) + +### Features + +* Add `append` and `last_chunk` to `add_artifact` method on `TaskUpdater` ([#186](https://github.com/a2aproject/a2a-python/issues/186)) ([8c6560f](https://github.com/a2aproject/a2a-python/commit/8c6560fd403887fab9d774bfcc923a5f6f459364)) +* add a2a routes to existing app ([#188](https://github.com/a2aproject/a2a-python/issues/188)) ([32fecc7](https://github.com/a2aproject/a2a-python/commit/32fecc7194a61c2f5be0b8795d5dc17cdbab9040)) +* Add middleware to the client SDK ([#171](https://github.com/a2aproject/a2a-python/issues/171)) ([efaabd3](https://github.com/a2aproject/a2a-python/commit/efaabd3b71054142109b553c984da1d6e171db24)) +* Add more task state management methods to TaskUpdater ([#208](https://github.com/a2aproject/a2a-python/issues/208)) ([2b3bf6d](https://github.com/a2aproject/a2a-python/commit/2b3bf6d53ac37ed93fc1b1c012d59c19060be000)) +* raise 
error for tasks in terminal states ([#215](https://github.com/a2aproject/a2a-python/issues/215)) ([a0bf13b](https://github.com/a2aproject/a2a-python/commit/a0bf13b208c90b439b4be1952c685e702c4917a0)) + +### Bug Fixes + +* `consume_all` doesn't catch `asyncio.TimeoutError` in python 3.10 ([#216](https://github.com/a2aproject/a2a-python/issues/216)) ([39307f1](https://github.com/a2aproject/a2a-python/commit/39307f15a1bb70eb77aee2211da038f403571242)) +* Append metadata and context id when processing TaskStatusUpdateE… ([#238](https://github.com/a2aproject/a2a-python/issues/238)) ([e106020](https://github.com/a2aproject/a2a-python/commit/e10602033fdd4f4e6b61af717ffc242d772545b3)) +* Fix reference to `grpc.aio.ServicerContext` ([#237](https://github.com/a2aproject/a2a-python/issues/237)) ([0c1987b](https://github.com/a2aproject/a2a-python/commit/0c1987bb85f3e21089789ee260a0c62ac98b66a5)) +* Fixes Short Circuit clause for context ID ([#236](https://github.com/a2aproject/a2a-python/issues/236)) ([a5509e6](https://github.com/a2aproject/a2a-python/commit/a5509e6b37701dfb5c729ccc12531e644a12f8ae)) +* Resolve `APIKeySecurityScheme` parsing failed ([#226](https://github.com/a2aproject/a2a-python/issues/226)) ([aa63b98](https://github.com/a2aproject/a2a-python/commit/aa63b982edc2a07fd0df0b01fb9ad18d30b35a79)) +* send notifications on message not streaming ([#219](https://github.com/a2aproject/a2a-python/issues/219)) ([91539d6](https://github.com/a2aproject/a2a-python/commit/91539d69e5c757712c73a41ab95f1ec6656ef5cd)), closes [#218](https://github.com/a2aproject/a2a-python/issues/218) + +## [0.2.9](https://github.com/a2aproject/a2a-python/compare/v0.2.8...v0.2.9) (2025-06-24) + +### Bug Fixes + +* Set `protobuf==5.29.5` and `fastapi>=0.115.2` to prevent version conflicts ([#224](https://github.com/a2aproject/a2a-python/issues/224)) ([1412a85](https://github.com/a2aproject/a2a-python/commit/1412a855b4980d8373ed1cea38c326be74069633)) + +## 
[0.2.8](https://github.com/a2aproject/a2a-python/compare/v0.2.7...v0.2.8) (2025-06-12) + + +### Features + +* Add HTTP Headers to ServerCallContext for Improved Handler Access ([#182](https://github.com/a2aproject/a2a-python/issues/182)) ([d5e5f5f](https://github.com/a2aproject/a2a-python/commit/d5e5f5f7e7a3cab7de13cff545a874fc58d85e46)) +* Update A2A types from specification 🤖 ([#191](https://github.com/a2aproject/a2a-python/issues/191)) ([174230b](https://github.com/a2aproject/a2a-python/commit/174230bf6dfb6bf287d233a101b98cc4c79cad19)) + + +### Bug Fixes + +* Add `protobuf==6.31.1` to dependencies ([#189](https://github.com/a2aproject/a2a-python/issues/189)) ([ae1c31c](https://github.com/a2aproject/a2a-python/commit/ae1c31c1da47f6965c02e0564dc7d3791dd03e2c)), closes [#185](https://github.com/a2aproject/a2a-python/issues/185) + +## [0.2.7](https://github.com/a2aproject/a2a-python/compare/v0.2.6...v0.2.7) (2025-06-11) + + +### Features + +* Update A2A types from specification 🤖 ([#179](https://github.com/a2aproject/a2a-python/issues/179)) ([3ef4240](https://github.com/a2aproject/a2a-python/commit/3ef42405f6096281fe90b1df399731bd009bde12)) + +## [0.2.6](https://github.com/a2aproject/a2a-python/compare/v0.2.5...v0.2.6) (2025-06-09) + + +### ⚠ BREAKING CHANGES + +* Add FastAPI JSONRPC Application ([#104](https://github.com/a2aproject/a2a-python/issues/104)) + +### Features + +* Add FastAPI JSONRPC Application ([#104](https://github.com/a2aproject/a2a-python/issues/104)) ([0e66e1f](https://github.com/a2aproject/a2a-python/commit/0e66e1f81f98d7e2cf50b1c100e35d13ad7149dc)) +* Add gRPC server and client support ([#162](https://github.com/a2aproject/a2a-python/issues/162)) ([a981605](https://github.com/a2aproject/a2a-python/commit/a981605dbb32e87bd241b64bf2e9bb52831514d1)) +* add reject method to task_updater ([#147](https://github.com/a2aproject/a2a-python/issues/147)) ([2a6ef10](https://github.com/a2aproject/a2a-python/commit/2a6ef109f8b743f8eb53d29090cdec7df143b0b4)) 
+* Add timestamp to `TaskStatus` updates on `TaskUpdater` ([#140](https://github.com/a2aproject/a2a-python/issues/140)) ([0c9df12](https://github.com/a2aproject/a2a-python/commit/0c9df125b740b947b0e4001421256491b5f87920)) +* **spec:** Add an optional iconUrl field to the AgentCard 🤖 ([a1025f4](https://github.com/a2aproject/a2a-python/commit/a1025f406acd88e7485a5c0f4dd8a42488c41fa2)) + + +### Bug Fixes + +* Correctly adapt starlette BaseUser to A2A User ([#133](https://github.com/a2aproject/a2a-python/issues/133)) ([88d45eb](https://github.com/a2aproject/a2a-python/commit/88d45ebd935724e6c3ad614bf503defae4de5d85)) +* Event consumer should stop on input_required ([#167](https://github.com/a2aproject/a2a-python/issues/167)) ([51c2d8a](https://github.com/a2aproject/a2a-python/commit/51c2d8addf9e89a86a6834e16deb9f4ac0e05cc3)) +* Fix Release Version ([#161](https://github.com/a2aproject/a2a-python/issues/161)) ([011d632](https://github.com/a2aproject/a2a-python/commit/011d632b27b201193813ce24cf25e28d1335d18e)) +* generate StrEnum types for enums ([#134](https://github.com/a2aproject/a2a-python/issues/134)) ([0c49dab](https://github.com/a2aproject/a2a-python/commit/0c49dabcdb9d62de49fda53d7ce5c691b8c1591c)) +* library should be released as 0.2.6 ([d8187e8](https://github.com/a2aproject/a2a-python/commit/d8187e812d6ac01caedf61d4edaca522e583d7da)) +* remove error types from enqueueable events ([#138](https://github.com/a2aproject/a2a-python/issues/138)) ([511992f](https://github.com/a2aproject/a2a-python/commit/511992fe585bd15e956921daeab4046dc4a50a0a)) +* **stream:** don't block event loop in EventQueue ([#151](https://github.com/a2aproject/a2a-python/issues/151)) ([efd9080](https://github.com/a2aproject/a2a-python/commit/efd9080b917c51d6e945572fd123b07f20974a64)) +* **task_updater:** fix potential duplicate artifact_id from default v… ([#156](https://github.com/a2aproject/a2a-python/issues/156)) 
([1f0a769](https://github.com/a2aproject/a2a-python/commit/1f0a769c1027797b2f252e4c894352f9f78257ca)) + + +### Documentation + +* remove final and metadata fields from docstring ([#66](https://github.com/a2aproject/a2a-python/issues/66)) ([3c50ee1](https://github.com/a2aproject/a2a-python/commit/3c50ee1f64c103a543c8afb6d2ac3a11063b0f43)) +* Update Links to Documentation Site ([5e7d418](https://github.com/a2aproject/a2a-python/commit/5e7d4180f7ae0ebeb76d976caa5ef68b4277ce54)) + +## [0.2.5](https://github.com/a2aproject/a2a-python/compare/v0.2.4...v0.2.5) (2025-05-27) + + +### Features + +* Add a User representation to ServerCallContext ([#116](https://github.com/a2aproject/a2a-python/issues/116)) ([2cc2a0d](https://github.com/a2aproject/a2a-python/commit/2cc2a0de93631aa162823d43fe488173ed8754dc)) +* Add functionality for extended agent card. ([#31](https://github.com/a2aproject/a2a-python/issues/31)) ([20f0826](https://github.com/a2aproject/a2a-python/commit/20f0826a2cb9b77b89b85189fd91e7cd62318a30)) +* Introduce a ServerCallContext ([#94](https://github.com/a2aproject/a2a-python/issues/94)) ([85b521d](https://github.com/a2aproject/a2a-python/commit/85b521d8a790dacb775ef764a66fbdd57b180da3)) ### Bug Fixes -* fix hello world example for python 3.12 ([#98](https://github.com/google-a2a/a2a-python/issues/98)) ([536e4a1](https://github.com/google-a2a/a2a-python/commit/536e4a11f2f32332968a06e7d0bc4615e047a56c)) -* Remove unused dependencies and update py version ([#119](https://github.com/google-a2a/a2a-python/issues/119)) ([9f8bc02](https://github.com/google-a2a/a2a-python/commit/9f8bc023b45544942583818968f3d320e5ff1c3b)) -* Update hello world test client to match sdk behavior. 
Also down-level required python version ([#117](https://github.com/google-a2a/a2a-python/issues/117)) ([04c7c45](https://github.com/google-a2a/a2a-python/commit/04c7c452f5001d69524d94095d11971c1e857f75)) -* Update the google adk demos to use ADK v1.0 ([#95](https://github.com/google-a2a/a2a-python/issues/95)) ([c351656](https://github.com/google-a2a/a2a-python/commit/c351656a91c37338668b0cd0c4db5fedd152d743)) +* fix hello world example for python 3.12 ([#98](https://github.com/a2aproject/a2a-python/issues/98)) ([536e4a1](https://github.com/a2aproject/a2a-python/commit/536e4a11f2f32332968a06e7d0bc4615e047a56c)) +* Remove unused dependencies and update py version ([#119](https://github.com/a2aproject/a2a-python/issues/119)) ([9f8bc02](https://github.com/a2aproject/a2a-python/commit/9f8bc023b45544942583818968f3d320e5ff1c3b)) +* Update hello world test client to match sdk behavior. Also down-level required python version ([#117](https://github.com/a2aproject/a2a-python/issues/117)) ([04c7c45](https://github.com/a2aproject/a2a-python/commit/04c7c452f5001d69524d94095d11971c1e857f75)) +* Update the google adk demos to use ADK v1.0 ([#95](https://github.com/a2aproject/a2a-python/issues/95)) ([c351656](https://github.com/a2aproject/a2a-python/commit/c351656a91c37338668b0cd0c4db5fedd152d743)) ### Documentation -* Update README for Python 3.10+ support ([#90](https://github.com/google-a2a/a2a-python/issues/90)) ([e0db20f](https://github.com/google-a2a/a2a-python/commit/e0db20ffc20aa09ee68304cc7e2a67c32ecdd6a8)) +* Update README for Python 3.10+ support ([#90](https://github.com/a2aproject/a2a-python/issues/90)) ([e0db20f](https://github.com/a2aproject/a2a-python/commit/e0db20ffc20aa09ee68304cc7e2a67c32ecdd6a8)) -## [0.2.4](https://github.com/google-a2a/a2a-python/compare/v0.2.3...v0.2.4) (2025-05-22) +## [0.2.4](https://github.com/a2aproject/a2a-python/compare/v0.2.3...v0.2.4) (2025-05-22) ### Features -* Update to support python 3.10 
([#85](https://github.com/google-a2a/a2a-python/issues/85)) ([fd9c3b5](https://github.com/google-a2a/a2a-python/commit/fd9c3b5b0bbef509789a701171d95f690c84750b)) +* Update to support python 3.10 ([#85](https://github.com/a2aproject/a2a-python/issues/85)) ([fd9c3b5](https://github.com/a2aproject/a2a-python/commit/fd9c3b5b0bbef509789a701171d95f690c84750b)) ### Bug Fixes -* Throw exception for task_id mismatches ([#70](https://github.com/google-a2a/a2a-python/issues/70)) ([a9781b5](https://github.com/google-a2a/a2a-python/commit/a9781b589075280bfaaab5742d8b950916c9de74)) +* Throw exception for task_id mismatches ([#70](https://github.com/a2aproject/a2a-python/issues/70)) ([a9781b5](https://github.com/a2aproject/a2a-python/commit/a9781b589075280bfaaab5742d8b950916c9de74)) -## [0.2.3](https://github.com/google-a2a/a2a-python/compare/v0.2.2...v0.2.3) (2025-05-20) +## [0.2.3](https://github.com/a2aproject/a2a-python/compare/v0.2.2...v0.2.3) (2025-05-20) ### Features -* Add request context builder with referenceTasks ([#56](https://github.com/google-a2a/a2a-python/issues/56)) ([f20bfe7](https://github.com/google-a2a/a2a-python/commit/f20bfe74b8cc854c9c29720b2ea3859aff8f509e)) +* Add request context builder with referenceTasks ([#56](https://github.com/a2aproject/a2a-python/issues/56)) ([f20bfe7](https://github.com/a2aproject/a2a-python/commit/f20bfe74b8cc854c9c29720b2ea3859aff8f509e)) -## [0.2.2](https://github.com/google-a2a/a2a-python/compare/v0.2.1...v0.2.2) (2025-05-20) +## [0.2.2](https://github.com/a2aproject/a2a-python/compare/v0.2.1...v0.2.2) (2025-05-20) ### Documentation -* Write/Update Docstrings for Classes/Methods ([#59](https://github.com/google-a2a/a2a-python/issues/59)) ([9f773ef](https://github.com/google-a2a/a2a-python/commit/9f773eff4dddc4eec723d519d0050f21b9ccc042)) +* Write/Update Docstrings for Classes/Methods ([#59](https://github.com/a2aproject/a2a-python/issues/59)) 
([9f773ef](https://github.com/a2aproject/a2a-python/commit/9f773eff4dddc4eec723d519d0050f21b9ccc042)) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 289176c74..40d511cd2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,27 +2,6 @@ We'd love to accept your patches and contributions to this project. -## Before you begin - -### Sign our Contributor License Agreement - -Contributions to this project must be accompanied by a -[Contributor License Agreement](https://cla.developers.google.com/about) (CLA). -You (or your employer) retain the copyright to your contribution; this simply -gives us permission to use and redistribute your contributions as part of the -project. - -If you or your current employer have already signed the Google CLA (even if it -was for a different project), you probably don't need to do it again. - -Visit to see your current agreements or to -sign a new one. - -### Review our community guidelines - -This project follows -[Google's Open Source Community Guidelines](https://opensource.google/conduct/). - ## Contribution process ### Code reviews @@ -47,11 +26,3 @@ Here are some additional things to keep in mind during the process: - **Test your changes.** Before you submit a pull request, make sure that your changes work as expected. - **Be patient.** It may take some time for your pull request to be reviewed and merged. - ---- - -## For Google Employees - -Complete the following steps to register your GitHub account and be added as a contributor to this repository. - -1. Register your GitHub account at [go/GitHub](http://go/github). diff --git a/Gemini.md b/Gemini.md new file mode 100644 index 000000000..7f52d33f3 --- /dev/null +++ b/Gemini.md @@ -0,0 +1,27 @@ +**A2A specification:** https://a2a-protocol.org/latest/specification/ + +## Project frameworks +- uv as package manager + +## How to run all tests +1. If dependencies are not installed, install them using the following command + ``` + uv sync --all-extras + ``` + +2. 
Run tests + ``` + uv run pytest + ``` + +## Other instructions +1. Whenever writing python code, write types as well. +2. After making the changes run ruff to check and fix the formatting issues + ``` + uv run ruff check --fix + ``` +3. Run mypy type checkers to check for type errors + ``` + uv run mypy + ``` +4. Run the unit tests to make sure that none of the unit tests are broken. diff --git a/README.md b/README.md index 8b73dd37e..d7c24cbf8 100644 --- a/README.md +++ b/README.md @@ -1,53 +1,73 @@ # A2A Python SDK [![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](LICENSE) -![PyPI - Version](https://img.shields.io/pypi/v/a2a-sdk) +[![PyPI version](https://img.shields.io/pypi/v/a2a-sdk)](https://pypi.org/project/a2a-sdk/) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/a2a-sdk) - +[![PyPI - Downloads](https://img.shields.io/pypi/dw/a2a-sdk)](https://pypistats.org/packages/a2a-sdk) +[![Python Unit Tests](https://github.com/a2aproject/a2a-python/actions/workflows/unit-tests.yml/badge.svg)](https://github.com/a2aproject/a2a-python/actions/workflows/unit-tests.yml) + + Ask Code Wiki + - -

- A2A Logo -

-

A Python library that helps run agentic applications as A2AServers following the Agent2Agent (A2A) Protocol.

- +
+ A2A Logo +

+ A Python library for running agentic applications as A2A Servers, following the Agent2Agent (A2A) Protocol. +

+
-## Installation - -You can install the A2A SDK using either `uv` or `pip`. - -## Prerequisites +--- -- Python 3.10+ -- `uv` (optional, but recommended) or `pip` - -### Using `uv` +## ✨ Features -When you're working within a uv project or a virtual environment managed by uv, the preferred way to add packages is using uv add. +- **A2A Protocol Compliant:** Build agentic applications that adhere to the Agent2Agent (A2A) Protocol. +- **Extensible:** Easily add support for different communication protocols and database backends. +- **Asynchronous:** Built on modern async Python for high performance. +- **Optional Integrations:** Includes optional support for: + - HTTP servers ([FastAPI](https://fastapi.tiangolo.com/), [Starlette](https://www.starlette.io/)) + - [gRPC](https://grpc.io/) + - [OpenTelemetry](https://opentelemetry.io/) for tracing + - SQL databases ([PostgreSQL](https://www.postgresql.org/), [MySQL](https://www.mysql.com/), [SQLite](https://sqlite.org/)) -```bash -uv add a2a-sdk -``` +--- -### Using `pip` +## 🚀 Getting Started -If you prefer to use pip, the standard Python package installer, you can install `a2a-sdk` as follows +### Prerequisites -```bash -pip install a2a-sdk -``` +- Python 3.10+ +- `uv` (recommended) or `pip` + +### 🔧 Installation + +Install the core SDK and any desired extras using your preferred package manager. 
+ +| Feature | `uv` Command | `pip` Command | +| ------------------------ | ------------------------------------------ | -------------------------------------------- | +| **Core SDK** | `uv add a2a-sdk` | `pip install a2a-sdk` | +| **All Extras** | `uv add "a2a-sdk[all]"` | `pip install "a2a-sdk[all]"` | +| **HTTP Server** | `uv add "a2a-sdk[http-server]"` | `pip install "a2a-sdk[http-server]"` | +| **gRPC Support** | `uv add "a2a-sdk[grpc]"` | `pip install "a2a-sdk[grpc]"` | +| **OpenTelemetry Tracing**| `uv add "a2a-sdk[telemetry]"` | `pip install "a2a-sdk[telemetry]"` | +| **Encryption** | `uv add "a2a-sdk[encryption]"` | `pip install "a2a-sdk[encryption]"` | +| | | | +| **Database Drivers** | | | +| **PostgreSQL** | `uv add "a2a-sdk[postgresql]"` | `pip install "a2a-sdk[postgresql]"` | +| **MySQL** | `uv add "a2a-sdk[mysql]"` | `pip install "a2a-sdk[mysql]"` | +| **SQLite** | `uv add "a2a-sdk[sqlite]"` | `pip install "a2a-sdk[sqlite]"` | +| **All SQL Drivers** | `uv add "a2a-sdk[sql]"` | `pip install "a2a-sdk[sql]"` | ## Examples -### [Helloworld Example](https://github.com/google-a2a/a2a-samples/tree/main/samples/python/agents/helloworld) +### [Helloworld Example](https://github.com/a2aproject/a2a-samples/tree/main/samples/python/agents/helloworld) 1. Run Remote Agent ```bash - git clone https://github.com/google-a2a/a2a-samples.git + git clone https://github.com/a2aproject/a2a-samples.git cd a2a-samples/samples/python/agents/helloworld uv run . ``` @@ -59,12 +79,25 @@ pip install a2a-sdk uv run test_client.py ``` -You can also find more Python samples [here](https://github.com/google-a2a/a2a-samples/tree/main/samples/python) and JavaScript samples [here](https://github.com/google-a2a/a2a-samples/tree/main/samples/js). +3. You can validate your agent using the agent inspector. Follow the instructions at the [a2a-inspector](https://github.com/a2aproject/a2a-inspector) repo. 
+ +--- + +## 🌐 More Examples + +You can find a variety of more detailed examples in the [a2a-samples](https://github.com/a2aproject/a2a-samples) repository: + +- **[Python Examples](https://github.com/a2aproject/a2a-samples/tree/main/samples/python)** +- **[JavaScript Examples](https://github.com/a2aproject/a2a-samples/tree/main/samples/js)** + +--- + +## 🤝 Contributing -## License +Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for guidelines on how to get involved. -This project is licensed under the terms of the [Apache 2.0 License](https://raw.githubusercontent.com/google-a2a/a2a-python/refs/heads/main/LICENSE). +--- -## Contributing +## 📄 License -See [CONTRIBUTING.md](https://github.com/google-a2a/a2a-python/blob/main/CONTRIBUTING.md) for contribution guidelines. +This project is licensed under the Apache 2.0 License. See the [LICENSE](LICENSE) file for more details. diff --git a/buf.gen.yaml b/buf.gen.yaml new file mode 100644 index 000000000..c70bf9e77 --- /dev/null +++ b/buf.gen.yaml @@ -0,0 +1,31 @@ +--- +version: v2 +inputs: + - git_repo: https://github.com/a2aproject/A2A.git + ref: main + subdir: specification/grpc +managed: + enabled: true +# Python Generation +# Using remote plugins. To use local plugins replace remote with local +# pip install protobuf grpcio-tools +# Optionally, install plugin to generate stubs for grpc services +# pip install mypy-protobuf +# Generate python protobuf code +# - local: protoc-gen-python +# - out: src/python +# Generate gRPC stubs +# - local: protoc-gen-grpc-python +# - out: src/python +plugins: + # Generate python protobuf related code + # Generates *_pb2.py files, one for each .proto + - remote: buf.build/protocolbuffers/python:v29.3 + out: src/a2a/grpc + # Generate python service code. + # Generates *_pb2_grpc.py + - remote: buf.build/grpc/python + out: src/a2a/grpc + # Generates *_pb2.pyi files. 
+ - remote: buf.build/protocolbuffers/pyi + out: src/a2a/grpc diff --git a/development.md b/development.md deleted file mode 100644 index c1ecf0295..000000000 --- a/development.md +++ /dev/null @@ -1,22 +0,0 @@ -# Development - -## Type generation from spec - -```bash -uv run datamodel-codegen \ - --url https://raw.githubusercontent.com/google-a2a/A2A/refs/heads/main/specification/json/a2a.json \ - --input-file-type jsonschema \ - --output ./src/a2a/types.py \ - --target-python-version 3.10 \ - --output-model-type pydantic_v2.BaseModel \ - --disable-timestamp \ - --use-schema-description \ - --use-union-operator \ - --use-field-description \ - --use-default \ - --use-default-kwarg \ - --use-one-literal-as-default \ - --class-name A2A \ - --use-standard-collections \ - --use-subclass-enum -``` diff --git a/noxfile.py b/noxfile.py deleted file mode 100644 index e541b2bb3..000000000 --- a/noxfile.py +++ /dev/null @@ -1,150 +0,0 @@ -# pylint: skip-file -# type: ignore -# -*- coding: utf-8 -*- -# -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import pathlib -import subprocess - -import nox - - -DEFAULT_PYTHON_VERSION = '3.10' - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -nox.options.sessions = [ - 'format', -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """Format Python code using autoflake, pyupgrade, and ruff.""" - # Sort Spelling Allowlist - spelling_allow_file = '.github/actions/spelling/allow.txt' - - with open(spelling_allow_file, encoding='utf-8') as file: - unique_words = sorted(set(file)) - - with open(spelling_allow_file, 'w', encoding='utf-8') as file: - file.writelines(unique_words) - - format_all = False - - if format_all: - lint_paths_py = ['.'] - else: - target_branch = 'origin/main' - - unstaged_files = subprocess.run( - [ - 'git', - 'diff', - '--name-only', - '--diff-filter=ACMRTUXB', - target_branch, - ], - stdout=subprocess.PIPE, - text=True, - check=False, - ).stdout.splitlines() - - staged_files = subprocess.run( - [ - 'git', - 'diff', - '--cached', - '--name-only', - '--diff-filter=ACMRTUXB', - target_branch, - ], - stdout=subprocess.PIPE, - text=True, - check=False, - ).stdout.splitlines() - - committed_files = subprocess.run( - [ - 'git', - 'diff', - 'HEAD', - target_branch, - '--name-only', - '--diff-filter=ACMRTUXB', - ], - stdout=subprocess.PIPE, - text=True, - check=False, - ).stdout.splitlines() - - changed_files = sorted( - { - file - for file in (unstaged_files + staged_files + committed_files) - if os.path.isfile(file) - } - ) - - lint_paths_py = [f for f in changed_files if f.endswith('.py')] - - if not lint_paths_py: - session.log('No changed Python files to lint.') - return - - session.install( - 'types-requests', - 'pyupgrade', - 'autoflake', - 'ruff', - 'no_implicit_optional', - ) - - if lint_paths_py: - session.run( - 'no_implicit_optional', - '--use-union-or', - *lint_paths_py, - ) - if not format_all: - 
session.run( - 'pyupgrade', - '--exit-zero-even-if-changed', - '--py310-plus', - *lint_paths_py, - ) - session.run( - 'autoflake', - '-i', - '-r', - '--remove-all-unused-imports', - *lint_paths_py, - ) - session.run( - 'ruff', - 'check', - '--fix-only', - *lint_paths_py, - ) - session.run( - 'ruff', - 'format', - *lint_paths_py, - ) diff --git a/pyproject.toml b/pyproject.toml index 812429a0f..496d5d51e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,18 +3,16 @@ name = "a2a-sdk" dynamic = ["version"] description = "A2A Python SDK" readme = "README.md" -license = { file = "LICENSE" } +license = "Apache-2.0" authors = [{ name = "Google LLC", email = "googleapis-packages@google.com" }] requires-python = ">=3.10" -keywords = ["A2A", "A2A SDK", "A2A Protocol", "Agent2Agent"] +keywords = ["A2A", "A2A SDK", "A2A Protocol", "Agent2Agent", "Agent 2 Agent"] dependencies = [ - "httpx>=0.28.1", - "httpx-sse>=0.4.0", - "opentelemetry-api>=1.33.0", - "opentelemetry-sdk>=1.33.0", - "pydantic>=2.11.3", - "sse-starlette>=2.3.3", - "starlette>=0.46.2", + "httpx>=0.28.1", + "httpx-sse>=0.4.0", + "pydantic>=2.11.3", + "protobuf>=5.29.5", + "google-api-core>=1.26.0", ] classifiers = [ @@ -30,21 +28,32 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", ] -[project.urls] -homepage = "https://google.github.io/A2A/" -repository = "https://github.com/google-a2a/a2a-python" -changelog = "https://github.com/google-a2a/a2a-python/blob/main/CHANGELOG.md" -documentation = "https://google.github.io/A2A/" +[project.optional-dependencies] +http-server = ["fastapi>=0.115.2", "sse-starlette", "starlette"] +encryption = ["cryptography>=43.0.0"] +grpc = ["grpcio>=1.60", "grpcio-tools>=1.60", "grpcio_reflection>=1.7.0"] +telemetry = ["opentelemetry-api>=1.33.0", "opentelemetry-sdk>=1.33.0"] +postgresql = ["sqlalchemy[asyncio,postgresql-asyncpg]>=2.0.0"] +mysql = ["sqlalchemy[asyncio,aiomysql]>=2.0.0"] +signing = ["PyJWT>=2.0.0"] +sqlite = 
["sqlalchemy[asyncio,aiosqlite]>=2.0.0"] -[tool.hatch.build.targets.wheel] -packages = ["src/a2a"] +sql = ["a2a-sdk[postgresql,mysql,sqlite]"] -[tool.pytest.ini_options] -testpaths = ["tests"] -python_files = "test_*.py" -python_functions = "test_*" -addopts = "--cov=src --cov-config=.coveragerc --cov-report term --cov-report xml:coverage.xml --cov-branch" -asyncio_mode = "strict" +all = [ + "a2a-sdk[http-server]", + "a2a-sdk[sql]", + "a2a-sdk[encryption]", + "a2a-sdk[grpc]", + "a2a-sdk[telemetry]", + "a2a-sdk[signing]", +] + +[project.urls] +homepage = "https://a2a-protocol.org/" +repository = "https://github.com/a2aproject/a2a-python" +changelog = "https://github.com/a2aproject/a2a-python/blob/main/CHANGELOG.md" +documentation = "https://a2a-protocol.org/latest/sdk/python/" [build-system] requires = ["hatchling", "uv-dynamic-versioning"] @@ -53,25 +62,50 @@ build-backend = "hatchling.build" [tool.hatch.version] source = "uv-dynamic-versioning" +[tool.hatch.build.targets.wheel] +packages = ["src/a2a"] + [tool.hatch.build.targets.sdist] -exclude = [ - "tests/", +exclude = ["tests/"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = "test_*.py" +python_functions = "test_*" +addopts = "-ra --strict-markers --dist loadgroup" +markers = [ + "asyncio: mark a test as a coroutine that should be run by pytest-asyncio", + "xdist_group: mark a test to run in a specific sequential group for isolation", ] +[tool.pytest-asyncio] +mode = "strict" + [tool.uv-dynamic-versioning] vcs = "git" style = "pep440" [dependency-groups] dev = [ - "datamodel-code-generator>=0.30.0", - "mypy>=1.15.0", - "pytest>=8.3.5", - "pytest-asyncio>=0.26.0", - "pytest-cov>=6.1.1", - "pytest-mock>=3.14.0", - "ruff>=0.11.6", - "uv-dynamic-versioning>=0.8.2", + "datamodel-code-generator>=0.30.0", + "mypy>=1.15.0", + "pytest>=8.3.5", + "pytest-asyncio>=0.26.0", + "pytest-cov>=6.1.1", + "pytest-mock>=3.14.0", + "pytest-xdist>=3.6.1", + "respx>=0.20.2", + "ruff>=0.12.8", + 
"uv-dynamic-versioning>=0.8.2", + "types-protobuf", + "types-requests", + "pre-commit", + "pyupgrade", + "autoflake", + "no_implicit_optional", + "trio", + "uvicorn>=0.35.0", + "a2a-sdk[all]", ] [[tool.uv.index]] @@ -79,3 +113,212 @@ name = "testpypi" url = "https://test.pypi.org/simple/" publish-url = "https://test.pypi.org/legacy/" explicit = true + +[tool.uv.sources] +a2a-sdk = { workspace = true } + +[tool.mypy] +plugins = ["pydantic.mypy"] +exclude = ["src/a2a/grpc/"] +disable_error_code = [ + "import-not-found", + "annotation-unchecked", + "import-untyped", +] + +[[tool.mypy.overrides]] +module = "examples.*" +follow_imports = "skip" + +[tool.pyright] +include = ["src"] +exclude = [ + "**/__pycache__", + "**/dist", + "**/build", + "**/node_modules", + "**/venv", + "**/.venv", + "src/a2a/grpc/", +] +reportMissingImports = "none" +reportMissingModuleSource = "none" + +[tool.coverage.run] +branch = true +omit = [ + "*/tests/*", + "*/site-packages/*", + "*/__init__.py", + "src/a2a/grpc/*", +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "import", + "def __repr__", + "raise NotImplementedError", + "if TYPE_CHECKING", + "@abstractmethod", + "pass", + "raise ImportError", +] + +# +# Ruff linter and code formatter for A2A +# +[tool.ruff] +# This file follows the standards in Google Python Style Guide +# https://google.github.io/styleguide/pyguide.html +line-length = 80 # Google Style Guide §3.2: 80 columns +indent-width = 4 # Google Style Guide §3.4: 4 spaces +target-version = "py310" # Minimum Python version + +[tool.ruff.lint] +ignore = [ + "COM812", # Trailing comma missing. 
+ "FBT001", # Boolean positional arg in function definition + "FBT002", # Boolean default value in function definition + "D203", # 1 blank line required before class docstring (Google: 0) + "D213", # Multi-line docstring summary should start at the second line (Google: first line) + "D100", # Ignore Missing docstring in public module (often desired at top level __init__.py) + "D104", # Ignore Missing docstring in public package (often desired at top level __init__.py) + "D107", # Ignore Missing docstring in __init__ (use class docstring) + "TD002", # Ignore Missing author in TODOs (often not required) + "TD003", # Ignore Missing issue link in TODOs (often not required/available) + "T201", # Ignore print presence + "RUF012", # Ignore Mutable class attributes should be annotated with `typing.ClassVar` + "E501", # Ignore line length (handled by Ruff's dynamic line length) + "ANN002", + "ANN003", + "ANN401", + "TRY003", + "TRY201", + "FIX002", +] + +select = [ + "E", # pycodestyle errors (PEP 8) + "W", # pycodestyle warnings (PEP 8) + "F", # Pyflakes (logical errors, unused imports/variables) + "I", # isort (import sorting - Google Style §3.1.2) + "D", # pydocstyle (docstring conventions - Google Style §3.8) + "N", # pep8-naming (naming conventions - Google Style §3.16) + "UP", # pyupgrade (use modern Python syntax) + "ANN",# flake8-annotations (type hint usage/style - Google Style §2.22) + "A", # flake8-builtins (avoid shadowing builtins) + "B", # flake8-bugbear (potential logic errors & style issues - incl. 
mutable defaults B006, B008) + "C4", # flake8-comprehensions (unnecessary list/set/dict comprehensions) + "ISC",# flake8-implicit-str-concat (disallow implicit string concatenation across lines) + "T20",# flake8-print (discourage `print` - prefer logging) + "SIM",# flake8-simplify (simplify code, e.g., `if cond: return True else: return False`) + "PTH",# flake8-use-pathlib (use pathlib instead of os.path where possible) + "PL", # Pylint rules ported to Ruff (PLC, PLE, PLR, PLW) + "PIE",# flake8-pie (misc code improvements, e.g., no-unnecessary-pass) + "RUF",# Ruff-specific rules (e.g., RUF001-003 ambiguous unicode, RUF013 implicit optional) + "RET",# flake8-return (consistency in return statements) + "SLF",# flake8-self (check for private member access via `self`) + "TID",# flake8-tidy-imports (relative imports, banned imports - configure if needed) + "YTT",# flake8-boolean-trap (checks for boolean positional arguments, truthiness tests - Google Style §3.10) + "TD", # flake8-todos (check TODO format - Google Style §3.7) + "TCH",# flake8-type-checking (helps manage TYPE_CHECKING blocks and imports) + "PYI",# flake8-pyi (best practices for .pyi stub files, some rules are useful for .py too) + "S", # flake8-bandit (security issues) + "DTZ",# flake8-datetimez (timezone-aware datetimes) + "ERA",# flake8-eradicate (commented-out code) + "Q", # flake8-quotes (quote style consistency) + "RSE",# flake8-raise (modern raise statements) + "TRY",# tryceratops (exception handling best practices) + "PERF",# perflint (performance anti-patterns) + "BLE", + "T10", + "ICN", + "G", + "FIX", + "ASYNC", + "INP", +] + +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", + "*/migrations/*", + "src/a2a/grpc/**", + "tests/**", +] + +[tool.ruff.lint.isort] +case-sensitive = true 
+lines-after-imports = 2 +lines-between-types = 1 + +[tool.ruff.lint.pydocstyle] +convention = "google" +ignore-decorators = ["typing.overload", "abc.abstractmethod"] + +[tool.ruff.lint.flake8-annotations] +mypy-init-return = true +allow-star-arg-any = false + +[tool.ruff.lint.pep8-naming] +ignore-names = ["test_*", "setUp", "tearDown", "mock_*"] +classmethod-decorators = ["classmethod", "pydantic.validator", "pydantic.root_validator"] +staticmethod-decorators = ["staticmethod"] + +[tool.ruff.lint.flake8-tidy-imports] +ban-relative-imports = "all" # Google generally prefers absolute imports (§3.1.2) + +[tool.ruff.lint.flake8-quotes] +docstring-quotes = "double" +inline-quotes = "single" + +[tool.ruff.lint.per-file-ignores] +"__init__.py" = ["F401", "D", "ANN"] # Ignore unused imports in __init__.py +"*_test.py" = [ + "D", # All pydocstyle rules + "ANN", # Missing type annotation for function argument + "RUF013", # Implicit optional type in test function signatures + "S101", # Use of `assert` detected (expected in tests) + "PLR2004", + "SLF001", +] +"test_*.py" = [ + "D", + "ANN", + "RUF013", + "S101", + "PLR2004", + "SLF001", +] +"types.py" = ["D", "E501"] # Ignore docstring and annotation issues in types.py +"proto_utils.py" = ["D102", "PLR0911"] +"helpers.py" = ["ANN001", "ANN201", "ANN202"] +"scripts/*.py" = ["INP001"] + +[tool.ruff.format] +exclude = [ + "src/a2a/grpc/**", +] +docstring-code-format = true +docstring-code-line-length = "dynamic" +quote-style = "single" +indent-style = "space" diff --git a/scripts/checkout_experimental_types.sh b/scripts/checkout_experimental_types.sh new file mode 100755 index 000000000..a598afaff --- /dev/null +++ b/scripts/checkout_experimental_types.sh @@ -0,0 +1,98 @@ +#!/bin/bash + +# Exit immediately if a command exits with a non-zero status. +# Treat unset variables as an error. +set -euo pipefail + +A2A_SPEC_REPO="https://github.com/a2aproject/A2A.git" # URL for the A2A spec repo. 
+A2A_SPEC_BRANCH="main" # Name of the branch with experimental changes. +FEATURE_BRANCH="experimental-types" # Name of the feature branch to create. +ROOT_DIR=$(git rev-parse --show-toplevel) + +usage() { + cat <&2 + usage + exit 1 + ;; + esac +done + + +TMP_WORK_DIR=$(mktemp -d) +echo "Created a temporary working directory: $TMP_WORK_DIR" +trap 'rm -rf -- "$TMP_WORK_DIR"' EXIT +cd $TMP_WORK_DIR + +echo "Cloning the \"$A2A_SPEC_REPO\" repository..." +git clone $A2A_SPEC_REPO spec_repo +cd spec_repo + +echo "Checking out the \"$A2A_SPEC_BRANCH\" branch..." +git checkout "$A2A_SPEC_BRANCH" + +echo "Invoking the generate_types.sh script..." +GENERATED_FILE="$ROOT_DIR/src/a2a/types.py" +$ROOT_DIR/scripts/generate_types.sh "$GENERATED_FILE" --input-file "$TMP_WORK_DIR/spec_repo/specification/json/a2a.json" + + +echo "Running buf generate..." +cd "$ROOT_DIR" +buf generate +uv run "$ROOT_DIR/scripts/grpc_gen_post_processor.py" + + +echo "Committing generated types file to the \"$FEATURE_BRANCH\" branch..." 
+git checkout -b "$FEATURE_BRANCH" +git add "$GENERATED_FILE" "$ROOT_DIR/src/a2a/grpc" +git commit -m "Experimental types" diff --git a/scripts/docker-compose.test.yml b/scripts/docker-compose.test.yml new file mode 100644 index 000000000..a2df936e1 --- /dev/null +++ b/scripts/docker-compose.test.yml @@ -0,0 +1,29 @@ +services: + postgres: + image: postgres:15-alpine + environment: + POSTGRES_USER: a2a + POSTGRES_PASSWORD: a2a_password + POSTGRES_DB: a2a_test + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready"] + interval: 10s + timeout: 5s + retries: 5 + + mysql: + image: mysql:8.0 + environment: + MYSQL_ROOT_PASSWORD: root + MYSQL_DATABASE: a2a_test + MYSQL_USER: a2a + MYSQL_PASSWORD: a2a_password + ports: + - "3306:3306" + healthcheck: + test: ["CMD-SHELL", "mysqladmin ping -h localhost -u root -proot"] + interval: 10s + timeout: 5s + retries: 5 diff --git a/scripts/format.sh b/scripts/format.sh new file mode 100755 index 000000000..2be6be3d0 --- /dev/null +++ b/scripts/format.sh @@ -0,0 +1,89 @@ +#!/bin/bash +set -e +set -o pipefail + +# --- Argument Parsing --- +# Initialize flags +FORMAT_ALL=false +RUFF_UNSAFE_FIXES_FLAG="" + +# Process command-line arguments +while [[ "$#" -gt 0 ]]; do + case "$1" in + --all) + FORMAT_ALL=true + echo "Detected --all flag: Formatting all tracked Python files." + shift # Consume the argument + ;; + --unsafe-fixes) + RUFF_UNSAFE_FIXES_FLAG="--unsafe-fixes" + echo "Detected --unsafe-fixes flag: Ruff will run with unsafe fixes." + shift # Consume the argument + ;; + *) + # Handle unknown arguments or just ignore them + echo "Warning: Unknown argument '$1'. Ignoring." 
+ shift # Consume the argument + ;; + esac +done + +# Sort Spelling Allowlist +SPELLING_ALLOW_FILE=".github/actions/spelling/allow.txt" +if [ -f "$SPELLING_ALLOW_FILE" ]; then + echo "Sorting and de-duplicating $SPELLING_ALLOW_FILE" + sort -u "$SPELLING_ALLOW_FILE" -o "$SPELLING_ALLOW_FILE" +fi + +CHANGED_FILES="" + +if $FORMAT_ALL; then + echo "Finding all tracked Python files in the repository..." + CHANGED_FILES=$(git ls-files -- '*.py' ':!src/a2a/grpc/*') +else + echo "Finding changed Python files based on git diff..." + TARGET_BRANCH="origin/${GITHUB_BASE_REF:-main}" + git fetch origin "${GITHUB_BASE_REF:-main}" --depth=1 + + MERGE_BASE=$(git merge-base HEAD "$TARGET_BRANCH") + + # Get python files changed in this PR, excluding grpc generated files. + CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "$MERGE_BASE" HEAD -- '*.py' ':!src/a2a/grpc/*') +fi + +# Exit if no files were found +if [ -z "$CHANGED_FILES" ]; then + echo "No changed or tracked Python files to format." + exit 0 +fi + +# --- Helper Function --- +# Runs a command on a list of files passed via stdin. +# $1: A string containing the list of files (space-separated). +# $2...: The command and its arguments to run. +run_formatter() { + local files_to_format="$1" + shift # Remove the file list from the arguments + if [ -n "$files_to_format" ]; then + echo "$files_to_format" | xargs -r "$@" + fi +} + +# --- Python File Formatting --- +if [ -n "$CHANGED_FILES" ]; then + echo "--- Formatting Python Files ---" + echo "Files to be formatted:" + echo "$CHANGED_FILES" + + echo "Running autoflake..." + run_formatter "$CHANGED_FILES" autoflake -i -r --remove-all-unused-imports + echo "Running ruff check (fix-only)..." + run_formatter "$CHANGED_FILES" ruff check --fix-only $RUFF_UNSAFE_FIXES_FLAG + echo "Running ruff format..." + run_formatter "$CHANGED_FILES" ruff format + echo "Python formatting complete." +else + echo "No Python files to format." +fi + +echo "All formatting tasks are complete." 
diff --git a/scripts/generate_types.sh b/scripts/generate_types.sh new file mode 100755 index 000000000..6c01cff57 --- /dev/null +++ b/scripts/generate_types.sh @@ -0,0 +1,134 @@ +#!/bin/bash + +# Exit immediately if a command exits with a non-zero status. +# Treat unset variables as an error. +set -euo pipefail + +# A2A specification version to use +# Can be overridden via environment variable: A2A_SPEC_VERSION=v1.2.0 ./generate_types.sh +# Or via command-line flag: ./generate_types.sh --version v1.2.0 output.py +# Use a specific git tag, branch name, or commit SHA +# Examples: "v1.0.0", "v1.2.0", "main", "abc123def" +A2A_SPEC_VERSION="${A2A_SPEC_VERSION:-v0.3.0}" + +# Build URL based on version format +# Tags use /refs/tags/, branches use /refs/heads/, commits use direct ref +build_remote_url() { + local version="$1" + local base_url="https://raw.githubusercontent.com/a2aproject/A2A" + local spec_path="specification/json/a2a.json" + local url_part + + if [[ "$version" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + # Looks like a version tag (v1.0.0, v1.2.3) + url_part="refs/tags/${version}" + elif [[ "$version" =~ ^[0-9a-f]{7,40}$ ]]; then + # Looks like a commit SHA (7+ hex chars) + url_part="${version}" + else + # Assume it's a branch name (main, develop, etc.) 
+ url_part="refs/heads/${version}" + fi + echo "${base_url}/${url_part}/${spec_path}" +} + +REMOTE_URL=$(build_remote_url "$A2A_SPEC_VERSION") + +GENERATED_FILE="" +INPUT_FILE="" + +# Parse command-line arguments +while [[ $# -gt 0 ]]; do + case "$1" in + --input-file) + INPUT_FILE="$2" + shift 2 + ;; + --version) + A2A_SPEC_VERSION="$2" + REMOTE_URL=$(build_remote_url "$A2A_SPEC_VERSION") + shift 2 + ;; + *) + GENERATED_FILE="$1" + shift 1 + ;; + esac +done + +if [ -z "$GENERATED_FILE" ]; then + cat >&2 <] [--version ] +Options: + --input-file Use a local JSON schema file instead of fetching from remote + --version Specify A2A spec version (default: v0.3.0) + Can be a git tag (v1.0.0), branch (main), or commit SHA +Environment variables: + A2A_SPEC_VERSION Override default spec version +Examples: + $0 src/a2a/types.py + $0 --version v1.2.0 src/a2a/types.py + $0 --input-file local/a2a.json src/a2a/types.py + A2A_SPEC_VERSION=main $0 src/a2a/types.py +EOF + exit 1 +fi + +echo "Running datamodel-codegen..." +declare -a source_args +if [ -n "$INPUT_FILE" ]; then + echo " - Source File: $INPUT_FILE" + if [ ! -f "$INPUT_FILE" ]; then + echo "Error: Input file does not exist: $INPUT_FILE" >&2 + exit 1 + fi + source_args=("--input" "$INPUT_FILE") +else + echo " - A2A Spec Version: $A2A_SPEC_VERSION" + echo " - Source URL: $REMOTE_URL" + + # Validate that the remote URL is accessible + echo " - Validating remote URL..." + if ! 
curl --fail --silent --head "$REMOTE_URL" >/dev/null 2>&1; then + cat >&2 < None: + """Post processor for the generated code.""" + dir_path = Path(src_folder) + print(dir_path) + if not dir_path.is_dir(): + print('Source folder not found') + sys.exit(1) + + grpc_pattern = '**/*_pb2_grpc.py' + files = dir_path.glob(grpc_pattern) + + for file in files: + print(f'Processing {file}') + try: + with file.open('r', encoding='utf-8') as f: + src_content = f.read() + + # Change import a2a_pb2 as a2a__pb2 + import_pattern = r'^import (\w+_pb2) as (\w+__pb2)$' + # to from . import a2a_pb2 as a2a__pb2 + replacement_pattern = r'from . import \1 as \2' + + fixed_src_content = re.sub( + import_pattern, + replacement_pattern, + src_content, + flags=re.MULTILINE, + ) + + if fixed_src_content != src_content: + with file.open('w', encoding='utf-8') as f: + f.write(fixed_src_content) + print('Imports fixed') + else: + print('No changes needed') + + except Exception as e: # noqa: BLE001 + print(f'Error processing file {file}: {e}') + sys.exit(1) + + +if __name__ == '__main__': + process_generated_code() diff --git a/scripts/run_db_tests.sh b/scripts/run_db_tests.sh new file mode 100755 index 000000000..fd2814ce9 --- /dev/null +++ b/scripts/run_db_tests.sh @@ -0,0 +1,102 @@ +#!/bin/bash +set -e + +# Get the directory of this script +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" + +# Docker compose file path +COMPOSE_FILE="$SCRIPT_DIR/docker-compose.test.yml" + +# Initialize variables +DEBUG_MODE=false +STOP_MODE=false +SERVICES=() +PYTEST_ARGS=() + +# Parse arguments +while [[ $# -gt 0 ]]; do + case $1 in + --debug) + DEBUG_MODE=true + shift + ;; + --stop) + STOP_MODE=true + shift + ;; + --postgres) + SERVICES+=("postgres") + shift + ;; + --mysql) + SERVICES+=("mysql") + shift + ;; + *) + # Preserve other arguments for pytest + PYTEST_ARGS+=("$1") + shift + ;; + esac +done + +# Handle --stop +if [[ "$STOP_MODE" 
== "true" ]]; then + echo "Stopping test databases..." + docker compose -f "$COMPOSE_FILE" down + exit 0 +fi + +# Default to running both databases if none specified +if [[ ${#SERVICES[@]} -eq 0 ]]; then + SERVICES=("postgres" "mysql") +fi + +# Cleanup function to stop docker containers +cleanup() { + echo "Stopping test databases..." + docker compose -f "$COMPOSE_FILE" down +} + +# Start the databases +echo "Starting/Verifying databases: ${SERVICES[*]}..." +docker compose -f "$COMPOSE_FILE" up -d --wait "${SERVICES[@]}" + +# Set up environment variables based on active services +# Only export DSNs for started services so tests skip missing ones +for service in "${SERVICES[@]}"; do + if [[ "$service" == "postgres" ]]; then + export POSTGRES_TEST_DSN="postgresql+asyncpg://a2a:a2a_password@localhost:5432/a2a_test" + elif [[ "$service" == "mysql" ]]; then + export MYSQL_TEST_DSN="mysql+aiomysql://a2a:a2a_password@localhost:3306/a2a_test" + fi +done + +# Handle --debug mode +if [[ "$DEBUG_MODE" == "true" ]]; then + echo "---------------------------------------------------" + echo "Debug mode enabled. Databases are running." + echo "You can connect to them using the following DSNs." + echo "" + echo "Run the following commands to set up your environment:" + echo "" + [[ -n "$POSTGRES_TEST_DSN" ]] && echo "export POSTGRES_TEST_DSN=\"$POSTGRES_TEST_DSN\"" + [[ -n "$MYSQL_TEST_DSN" ]] && echo "export MYSQL_TEST_DSN=\"$MYSQL_TEST_DSN\"" + echo "" + echo "---------------------------------------------------" + echo "Run ./scripts/run_integration_tests.sh --stop to shut databases down." + exit 0 +fi + +# Register cleanup trap for normal test run +trap cleanup EXIT + +# Run the tests +echo "Running integration tests..." 
+cd "$PROJECT_ROOT" + +uv run pytest -v \ + tests/server/tasks/test_database_task_store.py \ + tests/server/tasks/test_database_push_notification_config_store.py \ + "${PYTEST_ARGS[@]}" diff --git a/src/a2a/_base.py b/src/a2a/_base.py new file mode 100644 index 000000000..6c50734cd --- /dev/null +++ b/src/a2a/_base.py @@ -0,0 +1,38 @@ +from pydantic import BaseModel, ConfigDict +from pydantic.alias_generators import to_camel + + +def to_camel_custom(snake: str) -> str: + """Convert a snake_case string to camelCase. + + Args: + snake: The string to convert. + + Returns: + The converted camelCase string. + """ + # First, remove any trailing underscores. This is common for names that + # conflict with Python keywords, like 'in_' or 'from_'. + if snake.endswith('_'): + snake = snake.rstrip('_') + return to_camel(snake) + + +class A2ABaseModel(BaseModel): + """Base class for shared behavior across A2A data models. + + Provides a common configuration (e.g., alias-based population) and + serves as the foundation for future extensions or shared utilities. + + This implementation provides backward compatibility for camelCase aliases + by lazy-loading an alias map upon first use. Accessing or setting + attributes via their camelCase alias will raise a DeprecationWarning. 
+ """ + + model_config = ConfigDict( + # SEE: https://docs.pydantic.dev/latest/api/config/#pydantic.config.ConfigDict.populate_by_name + validate_by_name=True, + validate_by_alias=True, + serialize_by_alias=True, + alias_generator=to_camel_custom, + ) diff --git a/src/a2a/auth/user.py b/src/a2a/auth/user.py index fc47c03c2..8b6bf08ec 100644 --- a/src/a2a/auth/user.py +++ b/src/a2a/auth/user.py @@ -21,9 +21,11 @@ class UnauthenticatedUser(User): """A representation that no user has been authenticated in the request.""" @property - def is_authenticated(self): + def is_authenticated(self) -> bool: + """Returns whether the current user is authenticated.""" return False @property def user_name(self) -> str: + """Returns the user name of the current user.""" return '' diff --git a/src/a2a/client/__init__.py b/src/a2a/client/__init__.py index 3455c8675..4fccd0810 100644 --- a/src/a2a/client/__init__.py +++ b/src/a2a/client/__init__.py @@ -1,12 +1,46 @@ """Client-side components for interacting with an A2A agent.""" -from a2a.client.client import A2ACardResolver, A2AClient +import logging + +from a2a.client.auth import ( + AuthInterceptor, + CredentialService, + InMemoryContextCredentialStore, +) +from a2a.client.base_client import BaseClient +from a2a.client.card_resolver import A2ACardResolver +from a2a.client.client import Client, ClientConfig, ClientEvent, Consumer +from a2a.client.client_factory import ClientFactory, minimal_agent_card from a2a.client.errors import ( A2AClientError, A2AClientHTTPError, A2AClientJSONError, + A2AClientTimeoutError, ) from a2a.client.helpers import create_text_message_object +from a2a.client.legacy import A2AClient +from a2a.client.middleware import ClientCallContext, ClientCallInterceptor + + +logger = logging.getLogger(__name__) + +try: + from a2a.client.legacy_grpc import A2AGrpcClient # type: ignore +except ImportError as e: + _original_error = e + logger.debug( + 'A2AGrpcClient not loaded. 
This is expected if gRPC dependencies are not installed. Error: %s', + _original_error, + ) + + class A2AGrpcClient: # type: ignore + """Placeholder for A2AGrpcClient when dependencies are not installed.""" + + def __init__(self, *args, **kwargs): + raise ImportError( + 'To use A2AGrpcClient, its dependencies must be installed. ' + 'You can install them with \'pip install "a2a-sdk[grpc]"\'' + ) from _original_error __all__ = [ @@ -15,5 +49,19 @@ 'A2AClientError', 'A2AClientHTTPError', 'A2AClientJSONError', + 'A2AClientTimeoutError', + 'A2AGrpcClient', + 'AuthInterceptor', + 'BaseClient', + 'Client', + 'ClientCallContext', + 'ClientCallInterceptor', + 'ClientConfig', + 'ClientEvent', + 'ClientFactory', + 'Consumer', + 'CredentialService', + 'InMemoryContextCredentialStore', 'create_text_message_object', + 'minimal_agent_card', ] diff --git a/src/a2a/client/auth/__init__.py b/src/a2a/client/auth/__init__.py new file mode 100644 index 000000000..8efe65fc0 --- /dev/null +++ b/src/a2a/client/auth/__init__.py @@ -0,0 +1,14 @@ +"""Client-side authentication components for the A2A Python SDK.""" + +from a2a.client.auth.credentials import ( + CredentialService, + InMemoryContextCredentialStore, +) +from a2a.client.auth.interceptor import AuthInterceptor + + +__all__ = [ + 'AuthInterceptor', + 'CredentialService', + 'InMemoryContextCredentialStore', +] diff --git a/src/a2a/client/auth/credentials.py b/src/a2a/client/auth/credentials.py new file mode 100644 index 000000000..11f323709 --- /dev/null +++ b/src/a2a/client/auth/credentials.py @@ -0,0 +1,55 @@ +from abc import ABC, abstractmethod + +from a2a.client.middleware import ClientCallContext + + +class CredentialService(ABC): + """An abstract service for retrieving credentials.""" + + @abstractmethod + async def get_credentials( + self, + security_scheme_name: str, + context: ClientCallContext | None, + ) -> str | None: + """ + Retrieves a credential (e.g., token) for a security scheme. 
+ """ + + +class InMemoryContextCredentialStore(CredentialService): + """A simple in-memory store for session-keyed credentials. + + This class uses the 'sessionId' from the ClientCallContext state to + store and retrieve credentials... + """ + + def __init__(self) -> None: + self._store: dict[str, dict[str, str]] = {} + + async def get_credentials( + self, + security_scheme_name: str, + context: ClientCallContext | None, + ) -> str | None: + """Retrieves credentials from the in-memory store. + + Args: + security_scheme_name: The name of the security scheme. + context: The client call context. + + Returns: + The credential string, or None if not found. + """ + if not context or 'sessionId' not in context.state: + return None + session_id = context.state['sessionId'] + return self._store.get(session_id, {}).get(security_scheme_name) + + async def set_credentials( + self, session_id: str, security_scheme_name: str, credential: str + ) -> None: + """Method to populate the store.""" + if session_id not in self._store: + self._store[session_id] = {} + self._store[session_id][security_scheme_name] = credential diff --git a/src/a2a/client/auth/interceptor.py b/src/a2a/client/auth/interceptor.py new file mode 100644 index 000000000..65c971921 --- /dev/null +++ b/src/a2a/client/auth/interceptor.py @@ -0,0 +1,98 @@ +import logging # noqa: I001 +from typing import Any + +from a2a.client.auth.credentials import CredentialService +from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from a2a.types import ( + AgentCard, + APIKeySecurityScheme, + HTTPAuthSecurityScheme, + In, + OAuth2SecurityScheme, + OpenIdConnectSecurityScheme, +) + +logger = logging.getLogger(__name__) + + +class AuthInterceptor(ClientCallInterceptor): + """An interceptor that automatically adds authentication details to requests. + + Based on the agent's security schemes. 
+ """ + + def __init__(self, credential_service: CredentialService): + self._credential_service = credential_service + + async def intercept( + self, + method_name: str, + request_payload: dict[str, Any], + http_kwargs: dict[str, Any], + agent_card: AgentCard | None, + context: ClientCallContext | None, + ) -> tuple[dict[str, Any], dict[str, Any]]: + """Applies authentication headers to the request if credentials are available.""" + if ( + agent_card is None + or agent_card.security is None + or agent_card.security_schemes is None + ): + return request_payload, http_kwargs + + for requirement in agent_card.security: + for scheme_name in requirement: + credential = await self._credential_service.get_credentials( + scheme_name, context + ) + if credential and scheme_name in agent_card.security_schemes: + scheme_def_union = agent_card.security_schemes.get( + scheme_name + ) + if not scheme_def_union: + continue + scheme_def = scheme_def_union.root + + headers = http_kwargs.get('headers', {}) + + match scheme_def: + # Case 1a: HTTP Bearer scheme with an if guard + case HTTPAuthSecurityScheme() if ( + scheme_def.scheme.lower() == 'bearer' + ): + headers['Authorization'] = f'Bearer {credential}' + logger.debug( + "Added Bearer token for scheme '%s' (type: %s).", + scheme_name, + scheme_def.type, + ) + http_kwargs['headers'] = headers + return request_payload, http_kwargs + + # Case 1b: OAuth2 and OIDC schemes, which are implicitly Bearer + case ( + OAuth2SecurityScheme() + | OpenIdConnectSecurityScheme() + ): + headers['Authorization'] = f'Bearer {credential}' + logger.debug( + "Added Bearer token for scheme '%s' (type: %s).", + scheme_name, + scheme_def.type, + ) + http_kwargs['headers'] = headers + return request_payload, http_kwargs + + # Case 2: API Key in Header + case APIKeySecurityScheme(in_=In.header): + headers[scheme_def.name] = credential + logger.debug( + "Added API Key Header for scheme '%s'.", + scheme_name, + ) + http_kwargs['headers'] = headers + return 
request_payload, http_kwargs + + # Note: Other cases like API keys in query/cookie are not handled and will be skipped. + + return request_payload, http_kwargs diff --git a/src/a2a/client/base_client.py b/src/a2a/client/base_client.py new file mode 100644 index 000000000..c870f3296 --- /dev/null +++ b/src/a2a/client/base_client.py @@ -0,0 +1,289 @@ +from collections.abc import AsyncIterator, Callable +from typing import Any + +from a2a.client.client import ( + Client, + ClientCallContext, + ClientConfig, + ClientEvent, + Consumer, +) +from a2a.client.client_task_manager import ClientTaskManager +from a2a.client.errors import A2AClientInvalidStateError +from a2a.client.middleware import ClientCallInterceptor +from a2a.client.transports.base import ClientTransport +from a2a.types import ( + AgentCard, + GetTaskPushNotificationConfigParams, + Message, + MessageSendConfiguration, + MessageSendParams, + Task, + TaskArtifactUpdateEvent, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, + TaskStatusUpdateEvent, +) + + +class BaseClient(Client): + """Base implementation of the A2A client, containing transport-independent logic.""" + + def __init__( + self, + card: AgentCard, + config: ClientConfig, + transport: ClientTransport, + consumers: list[Consumer], + middleware: list[ClientCallInterceptor], + ): + super().__init__(consumers, middleware) + self._card = card + self._config = config + self._transport = transport + + async def send_message( + self, + request: Message, + *, + configuration: MessageSendConfiguration | None = None, + context: ClientCallContext | None = None, + request_metadata: dict[str, Any] | None = None, + extensions: list[str] | None = None, + ) -> AsyncIterator[ClientEvent | Message]: + """Sends a message to the agent. + + This method handles both streaming and non-streaming (polling) interactions + based on the client configuration and agent capabilities. It will yield + events as they are received from the agent. 
+ + Args: + request: The message to send to the agent. + configuration: Optional per-call overrides for message sending behavior. + context: The client call context. + request_metadata: Extensions Metadata attached to the request. + extensions: List of extensions to be activated. + + Yields: + An async iterator of `ClientEvent` or a final `Message` response. + """ + base_config = MessageSendConfiguration( + accepted_output_modes=self._config.accepted_output_modes, + blocking=not self._config.polling, + push_notification_config=( + self._config.push_notification_configs[0] + if self._config.push_notification_configs + else None + ), + ) + if configuration is not None: + update_data = configuration.model_dump( + exclude_unset=True, + by_alias=False, + ) + config = base_config.model_copy(update=update_data) + else: + config = base_config + + params = MessageSendParams( + message=request, configuration=config, metadata=request_metadata + ) + + if not self._config.streaming or not self._card.capabilities.streaming: + response = await self._transport.send_message( + params, context=context, extensions=extensions + ) + result = ( + (response, None) if isinstance(response, Task) else response + ) + await self.consume(result, self._card) + yield result + return + + tracker = ClientTaskManager() + stream = self._transport.send_message_streaming( + params, context=context, extensions=extensions + ) + + first_event = await anext(stream) + # The response from a server may be either exactly one Message or a + # series of Task updates. Separate out the first message for special + # case handling, which allows us to simplify further stream processing. 
+ if isinstance(first_event, Message): + await self.consume(first_event, self._card) + yield first_event + return + + yield await self._process_response(tracker, first_event) + + async for event in stream: + yield await self._process_response(tracker, event) + + async def _process_response( + self, + tracker: ClientTaskManager, + event: Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent, + ) -> ClientEvent: + if isinstance(event, Message): + raise A2AClientInvalidStateError( + 'received a streamed Message from server after first response; this is not supported' + ) + await tracker.process(event) + task = tracker.get_task_or_raise() + update = None if isinstance(event, Task) else event + client_event = (task, update) + await self.consume(client_event, self._card) + return client_event + + async def get_task( + self, + request: TaskQueryParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task. + + Args: + request: The `TaskQueryParams` object specifying the task ID. + context: The client call context. + extensions: List of extensions to be activated. + + Returns: + A `Task` object representing the current state of the task. + """ + return await self._transport.get_task( + request, context=context, extensions=extensions + ) + + async def cancel_task( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Requests the agent to cancel a specific task. + + Args: + request: The `TaskIdParams` object specifying the task ID. + context: The client call context. + extensions: List of extensions to be activated. + + Returns: + A `Task` object containing the updated task status. 
+ """ + return await self._transport.cancel_task( + request, context=context, extensions=extensions + ) + + async def set_task_callback( + self, + request: TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task. + + Args: + request: The `TaskPushNotificationConfig` object with the new configuration. + context: The client call context. + extensions: List of extensions to be activated. + + Returns: + The created or updated `TaskPushNotificationConfig` object. + """ + return await self._transport.set_task_callback( + request, context=context, extensions=extensions + ) + + async def get_task_callback( + self, + request: GetTaskPushNotificationConfigParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration for a specific task. + + Args: + request: The `GetTaskPushNotificationConfigParams` object specifying the task. + context: The client call context. + extensions: List of extensions to be activated. + + Returns: + A `TaskPushNotificationConfig` object containing the configuration. + """ + return await self._transport.get_task_callback( + request, context=context, extensions=extensions + ) + + async def resubscribe( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncIterator[ClientEvent]: + """Resubscribes to a task's event stream. + + This is only available if both the client and server support streaming. + + Args: + request: Parameters to identify the task to resubscribe to. + context: The client call context. + extensions: List of extensions to be activated. + + Yields: + An async iterator of `ClientEvent` objects. 
+ + Raises: + NotImplementedError: If streaming is not supported by the client or server. + """ + if not self._config.streaming or not self._card.capabilities.streaming: + raise NotImplementedError( + 'client and/or server do not support resubscription.' + ) + + tracker = ClientTaskManager() + # Note: resubscribe can only be called on an existing task. As such, + # we should never see Message updates, despite the typing of the service + # definition indicating it may be possible. + async for event in self._transport.resubscribe( + request, context=context, extensions=extensions + ): + yield await self._process_response(tracker, event) + + async def get_card( + self, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, + ) -> AgentCard: + """Retrieves the agent's card. + + This will fetch the authenticated card if necessary and update the + client's internal state with the new card. + + Args: + context: The client call context. + extensions: List of extensions to be activated. + signature_verifier: A callable used to verify the agent card's signatures. + + Returns: + The `AgentCard` for the agent. 
+ """ + card = await self._transport.get_card( + context=context, + extensions=extensions, + signature_verifier=signature_verifier, + ) + self._card = card + return card + + async def close(self) -> None: + """Closes the underlying transport.""" + await self._transport.close() diff --git a/src/a2a/client/card_resolver.py b/src/a2a/client/card_resolver.py new file mode 100644 index 000000000..adb3c5aee --- /dev/null +++ b/src/a2a/client/card_resolver.py @@ -0,0 +1,113 @@ +import json +import logging + +from collections.abc import Callable +from typing import Any + +import httpx + +from pydantic import ValidationError + +from a2a.client.errors import ( + A2AClientHTTPError, + A2AClientJSONError, +) +from a2a.types import ( + AgentCard, +) +from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH + + +logger = logging.getLogger(__name__) + + +class A2ACardResolver: + """Agent Card resolver.""" + + def __init__( + self, + httpx_client: httpx.AsyncClient, + base_url: str, + agent_card_path: str = AGENT_CARD_WELL_KNOWN_PATH, + ) -> None: + """Initializes the A2ACardResolver. + + Args: + httpx_client: An async HTTP client instance (e.g., httpx.AsyncClient). + base_url: The base URL of the agent's host. + agent_card_path: The path to the agent card endpoint, relative to the base URL. + """ + self.base_url = base_url.rstrip('/') + self.agent_card_path = agent_card_path.lstrip('/') + self.httpx_client = httpx_client + + async def get_agent_card( + self, + relative_card_path: str | None = None, + http_kwargs: dict[str, Any] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, + ) -> AgentCard: + """Fetches an agent card from a specified path relative to the base_url. + + If relative_card_path is None, it defaults to the resolver's configured + agent_card_path (for the public agent card). + + Args: + relative_card_path: Optional path to the agent card endpoint, + relative to the base URL. If None, uses the default public + agent card path. 
Use `'/'` for an empty path. + http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.get request. + signature_verifier: A callable used to verify the agent card's signatures. + + Returns: + An `AgentCard` object representing the agent's capabilities. + + Raises: + A2AClientHTTPError: If an HTTP error occurs during the request. + A2AClientJSONError: If the response body cannot be decoded as JSON + or validated against the AgentCard schema. + """ + if not relative_card_path: + # Use the default public agent card path configured during initialization + path_segment = self.agent_card_path + else: + path_segment = relative_card_path.lstrip('/') + + target_url = f'{self.base_url}/{path_segment}' + + try: + response = await self.httpx_client.get( + target_url, + **(http_kwargs or {}), + ) + response.raise_for_status() + agent_card_data = response.json() + logger.info( + 'Successfully fetched agent card data from %s: %s', + target_url, + agent_card_data, + ) + agent_card = AgentCard.model_validate(agent_card_data) + if signature_verifier: + signature_verifier(agent_card) + except httpx.HTTPStatusError as e: + raise A2AClientHTTPError( + e.response.status_code, + f'Failed to fetch agent card from {target_url}: {e}', + ) from e + except json.JSONDecodeError as e: + raise A2AClientJSONError( + f'Failed to parse JSON for agent card from {target_url}: {e}' + ) from e + except httpx.RequestError as e: + raise A2AClientHTTPError( + 503, + f'Network communication error fetching agent card from {target_url}: {e}', + ) from e + except ValidationError as e: # Pydantic validation error + raise A2AClientJSONError( + f'Failed to validate agent card structure from {target_url}: {e.json()}' + ) from e + + return agent_card diff --git a/src/a2a/client/client.py b/src/a2a/client/client.py index 1899f0b25..286641a79 100644 --- a/src/a2a/client/client.py +++ b/src/a2a/client/client.py @@ -1,411 +1,211 @@ -import json +import dataclasses import logging -from 
collections.abc import AsyncGenerator + +from abc import ABC, abstractmethod +from collections.abc import AsyncIterator, Callable, Coroutine from typing import Any -from uuid import uuid4 import httpx -from httpx_sse import SSEError, aconnect_sse -from pydantic import ValidationError - -from a2a.client.errors import A2AClientHTTPError, A2AClientJSONError -from a2a.types import (AgentCard, CancelTaskRequest, CancelTaskResponse, - GetTaskPushNotificationConfigRequest, - GetTaskPushNotificationConfigResponse, GetTaskRequest, - GetTaskResponse, SendMessageRequest, - SendMessageResponse, SendStreamingMessageRequest, - SendStreamingMessageResponse, - SetTaskPushNotificationConfigRequest, - SetTaskPushNotificationConfigResponse) -from a2a.utils.telemetry import SpanKind, trace_class - -logger = logging.getLogger(__name__) -class A2ACardResolver: - """Agent Card resolver.""" +from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from a2a.client.optionals import Channel +from a2a.types import ( + AgentCard, + GetTaskPushNotificationConfigParams, + Message, + PushNotificationConfig, + Task, + TaskArtifactUpdateEvent, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, + TaskStatusUpdateEvent, + TransportProtocol, +) - def __init__( - self, - httpx_client: httpx.AsyncClient, - base_url: str, - agent_card_path: str = '/.well-known/agent.json', - ): - """Initializes the A2ACardResolver. - Args: - httpx_client: An async HTTP client instance (e.g., httpx.AsyncClient). - base_url: The base URL of the agent's host. - agent_card_path: The path to the agent card endpoint, relative to the base URL. 
- """ - self.base_url = base_url.rstrip('/') - self.agent_card_path = agent_card_path.lstrip('/') - self.httpx_client = httpx_client +logger = logging.getLogger(__name__) - async def get_agent_card( - self, - relative_card_path: str | None = None, - http_kwargs: dict[str, Any] | None = None, - ) -> AgentCard: - """Fetches an agent card from a specified path relative to the base_url. - If relative_card_path is None, it defaults to the resolver's configured - agent_card_path (for the public agent card). +@dataclasses.dataclass +class ClientConfig: + """Configuration class for the A2AClient Factory.""" - Args: - relative_card_path: Optional path to the agent card endpoint, - relative to the base URL. If None, uses the default public - agent card path. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.get request. - - Returns: - An `AgentCard` object representing the agent's capabilities. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON - or validated against the AgentCard schema. 
- """ - if relative_card_path is None: - # Use the default public agent card path configured during initialization - path_segment = self.agent_card_path - else: - path_segment = relative_card_path.lstrip('/') - - target_url = f'{self.base_url}/{path_segment}' - - try: - response = await self.httpx_client.get( - target_url, - **(http_kwargs or {}), - ) - response.raise_for_status() - agent_card_data = response.json() - logger.info( - 'Successfully fetched agent card data from %s: %s', - target_url, - agent_card_data, - ) - agent_card = AgentCard.model_validate(agent_card_data) - except httpx.HTTPStatusError as e: - raise A2AClientHTTPError( - e.response.status_code, - f'Failed to fetch agent card from {target_url}: {e}', - ) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError( - f'Failed to parse JSON for agent card from {target_url}: {e}' - ) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, - f'Network communication error fetching agent card from {target_url}: {e}', - ) from e - except ValidationError as e: # Pydantic validation error - raise A2AClientJSONError( - f'Failed to validate agent card structure from {target_url}: {e.json()}' - ) from e - - return agent_card - - -@trace_class(kind=SpanKind.CLIENT) -class A2AClient: - """A2A Client for interacting with an A2A agent.""" + streaming: bool = True + """Whether client supports streaming""" - def __init__( - self, - httpx_client: httpx.AsyncClient, - agent_card: AgentCard | None = None, - url: str | None = None, - ): - """Initializes the A2AClient. + polling: bool = False + """Whether client prefers to poll for updates from message:send. It is + the callers job to check if the response is completed and if not run a + polling loop.""" - Requires either an `AgentCard` or a direct `url` to the agent's RPC endpoint. 
+ httpx_client: httpx.AsyncClient | None = None + """Http client to use to connect to agent.""" - Args: - httpx_client: An async HTTP client instance (e.g., httpx.AsyncClient). - agent_card: The agent card object. If provided, `url` is taken from `agent_card.url`. - url: The direct URL to the agent's A2A RPC endpoint. Required if `agent_card` is None. + grpc_channel_factory: Callable[[str], Channel] | None = None + """Generates a grpc connection channel for a given url.""" - Raises: - ValueError: If neither `agent_card` nor `url` is provided. - """ - if agent_card: - self.url = agent_card.url - elif url: - self.url = url - else: - raise ValueError('Must provide either agent_card or url') - - self.httpx_client = httpx_client - - @staticmethod - async def get_client_from_agent_card_url( - httpx_client: httpx.AsyncClient, - base_url: str, - agent_card_path: str = '/.well-known/agent.json', - http_kwargs: dict[str, Any] | None = None, - ) -> 'A2AClient': - """Fetches the public AgentCard and initializes an A2A client. - - This method will always fetch the public agent card. If an authenticated - or extended agent card is required, the A2ACardResolver should be used - directly to fetch the specific card, and then the A2AClient should be - instantiated with it. + supported_transports: list[TransportProtocol | str] = dataclasses.field( + default_factory=list + ) + """Ordered list of transports for connecting to agent + (in order of preference). Empty implies JSONRPC only. - Args: - httpx_client: An async HTTP client instance (e.g., httpx.AsyncClient). - base_url: The base URL of the agent's host. - agent_card_path: The path to the agent card endpoint, relative to the base URL. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.get request when fetching the agent card. - Returns: - An initialized `A2AClient` instance. - - Raises: - A2AClientHTTPError: If an HTTP error occurs fetching the agent card. 
- A2AClientJSONError: If the agent card response is invalid. - """ - agent_card: AgentCard = await A2ACardResolver( - httpx_client, base_url=base_url, agent_card_path=agent_card_path - ).get_agent_card(http_kwargs=http_kwargs) # Fetches public card by default - return A2AClient(httpx_client=httpx_client, agent_card=agent_card) + This is a string type to allow custom + transports to exist in closed ecosystems. + """ - async def send_message( - self, - request: SendMessageRequest, - *, - http_kwargs: dict[str, Any] | None = None, - ) -> SendMessageResponse: - """Sends a non-streaming message request to the agent. + use_client_preference: bool = False + """Whether to use client transport preferences over server preferences. + Recommended to use server preferences in most situations.""" - Args: - request: The `SendMessageRequest` object containing the message and configuration. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. + accepted_output_modes: list[str] = dataclasses.field(default_factory=list) + """The set of accepted output modes for the client.""" - Returns: - A `SendMessageResponse` object containing the agent's response (Task or Message) or an error. + push_notification_configs: list[PushNotificationConfig] = dataclasses.field( + default_factory=list + ) + """Push notification callbacks to use for every request.""" - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. 
- """ - if not request.id: - request.id = str(uuid4()) + extensions: list[str] = dataclasses.field(default_factory=list) + """A list of extension URIs the client supports.""" - return SendMessageResponse( - **await self._send_request( - request.model_dump(mode='json', exclude_none=True), - http_kwargs, - ) - ) - async def send_message_streaming( - self, - request: SendStreamingMessageRequest, - *, - http_kwargs: dict[str, Any] | None = None, - ) -> AsyncGenerator[SendStreamingMessageResponse]: - """Sends a streaming message request to the agent and yields responses as they arrive. +UpdateEvent = TaskStatusUpdateEvent | TaskArtifactUpdateEvent | None +# Alias for emitted events from client +ClientEvent = tuple[Task, UpdateEvent] +# Alias for an event consuming callback. It takes either a (task, update) pair +# or a message as well as the agent card for the agent this came from. +Consumer = Callable[ + [ClientEvent | Message, AgentCard], Coroutine[None, Any, Any] +] - This method uses Server-Sent Events (SSE) to receive a stream of updates from the agent. - Args: - request: The `SendStreamingMessageRequest` object containing the message and configuration. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. A default `timeout=None` is set but can be overridden. +class Client(ABC): + """Abstract base class defining the interface for an A2A client. - Yields: - `SendStreamingMessageResponse` objects as they are received in the SSE stream. - These can be Task, Message, TaskStatusUpdateEvent, or TaskArtifactUpdateEvent. + This class provides a standard set of methods for interacting with an A2A + agent, regardless of the underlying transport protocol (e.g., gRPC, JSON-RPC). + It supports sending messages, managing tasks, and handling event streams. + """ - Raises: - A2AClientHTTPError: If an HTTP or SSE protocol error occurs during the request. 
- A2AClientJSONError: If an SSE event data cannot be decoded as JSON or validated. - """ - if not request.id: - request.id = str(uuid4()) - - # Default to no timeout for streaming, can be overridden by http_kwargs - http_kwargs_with_timeout: dict[str, Any] = { - 'timeout': None, - **(http_kwargs or {}), - } - - async with aconnect_sse( - self.httpx_client, - 'POST', - self.url, - json=request.model_dump(mode='json', exclude_none=True), - **http_kwargs_with_timeout, - ) as event_source: - try: - async for sse in event_source.aiter_sse(): - yield SendStreamingMessageResponse(**json.loads(sse.data)) - except SSEError as e: - raise A2AClientHTTPError( - 400, - f'Invalid SSE response or protocol error: {e}', - ) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError(str(e)) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, f'Network communication error: {e}' - ) from e - - async def _send_request( + def __init__( self, - rpc_request_payload: dict[str, Any], - http_kwargs: dict[str, Any] | None = None, - ) -> dict[str, Any]: - """Sends a non-streaming JSON-RPC request to the agent. + consumers: list[Consumer] | None = None, + middleware: list[ClientCallInterceptor] | None = None, + ): + """Initializes the client with consumers and middleware. Args: - rpc_request_payload: JSON RPC payload for sending the request. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - - Returns: - The JSON response payload as a dictionary. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON. + consumers: A list of callables to process events from the agent. + middleware: A list of interceptors to process requests and responses. 
""" - try: - response = await self.httpx_client.post( - self.url, json=rpc_request_payload, **(http_kwargs or {}) - ) - response.raise_for_status() - return response.json() - except httpx.HTTPStatusError as e: - raise A2AClientHTTPError(e.response.status_code, str(e)) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError(str(e)) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, f'Network communication error: {e}' - ) from e - - async def get_task( + if middleware is None: + middleware = [] + if consumers is None: + consumers = [] + self._consumers = consumers + self._middleware = middleware + + @abstractmethod + async def send_message( self, - request: GetTaskRequest, + request: Message, *, - http_kwargs: dict[str, Any] | None = None, - ) -> GetTaskResponse: - """Retrieves the current state and history of a specific task. - - Args: - request: The `GetTaskRequest` object specifying the task ID and history length. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - - Returns: - A `GetTaskResponse` object containing the Task or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. + context: ClientCallContext | None = None, + request_metadata: dict[str, Any] | None = None, + extensions: list[str] | None = None, + ) -> AsyncIterator[ClientEvent | Message]: + """Sends a message to the server. + + This will automatically use the streaming or non-streaming approach + as supported by the server and the client config. Client will + aggregate update events and return an iterator of (`Task`,`Update`) + pairs, or a `Message`. Client will also send these values to any + configured `Consumer`s in the client. 
""" - if not request.id: - request.id = str(uuid4()) + return + yield - return GetTaskResponse( - **await self._send_request( - request.model_dump(mode='json', exclude_none=True), - http_kwargs, - ) - ) + @abstractmethod + async def get_task( + self, + request: TaskQueryParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task.""" + @abstractmethod async def cancel_task( self, - request: CancelTaskRequest, + request: TaskIdParams, *, - http_kwargs: dict[str, Any] | None = None, - ) -> CancelTaskResponse: - """Requests the agent to cancel a specific task. - - Args: - request: The `CancelTaskRequest` object specifying the task ID. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - - Returns: - A `CancelTaskResponse` object containing the updated Task with canceled status or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not request.id: - request.id = str(uuid4()) - - return CancelTaskResponse( - **await self._send_request( - request.model_dump(mode='json', exclude_none=True), - http_kwargs, - ) - ) + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Requests the agent to cancel a specific task.""" + @abstractmethod async def set_task_callback( self, - request: SetTaskPushNotificationConfigRequest, + request: TaskPushNotificationConfig, *, - http_kwargs: dict[str, Any] | None = None, - ) -> SetTaskPushNotificationConfigResponse: - """Sets or updates the push notification configuration for a specific task. - - Args: - request: The `SetTaskPushNotificationConfigRequest` object specifying the task ID and configuration. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. 
- - Returns: - A `SetTaskPushNotificationConfigResponse` object containing the confirmation or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not request.id: - request.id = str(uuid4()) - - return SetTaskPushNotificationConfigResponse( - **await self._send_request( - request.model_dump(mode='json', exclude_none=True), - http_kwargs, - ) - ) + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task.""" + @abstractmethod async def get_task_callback( self, - request: GetTaskPushNotificationConfigRequest, + request: GetTaskPushNotificationConfigParams, *, - http_kwargs: dict[str, Any] | None = None, - ) -> GetTaskPushNotificationConfigResponse: - """Retrieves the push notification configuration for a specific task. + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration for a specific task.""" - Args: - request: The `GetTaskPushNotificationConfigRequest` object specifying the task ID. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. 
+ @abstractmethod + async def resubscribe( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncIterator[ClientEvent]: + """Resubscribes to a task's event stream.""" + return + yield + + @abstractmethod + async def get_card( + self, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, + ) -> AgentCard: + """Retrieves the agent's card.""" - Returns: - A `GetTaskPushNotificationConfigResponse` object containing the configuration or an error. + async def add_event_consumer(self, consumer: Consumer) -> None: + """Attaches additional consumers to the `Client`.""" + self._consumers.append(consumer) - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not request.id: - request.id = str(uuid4()) - - return GetTaskPushNotificationConfigResponse( - **await self._send_request( - request.model_dump(mode='json', exclude_none=True), - http_kwargs, - ) - ) + async def add_request_middleware( + self, middleware: ClientCallInterceptor + ) -> None: + """Attaches additional middleware to the `Client`.""" + self._middleware.append(middleware) + + async def consume( + self, + event: tuple[Task, UpdateEvent] | Message | None, + card: AgentCard, + ) -> None: + """Processes the event via all the registered `Consumer`s.""" + if not event: + return + for c in self._consumers: + await c(event, card) diff --git a/src/a2a/client/client_factory.py b/src/a2a/client/client_factory.py new file mode 100644 index 000000000..c3d5762eb --- /dev/null +++ b/src/a2a/client/client_factory.py @@ -0,0 +1,286 @@ +from __future__ import annotations + +import logging + +from collections.abc import Callable +from typing import Any + +import httpx + +from a2a.client.base_client import BaseClient +from 
a2a.client.card_resolver import A2ACardResolver +from a2a.client.client import Client, ClientConfig, Consumer +from a2a.client.middleware import ClientCallInterceptor +from a2a.client.transports.base import ClientTransport +from a2a.client.transports.jsonrpc import JsonRpcTransport +from a2a.client.transports.rest import RestTransport +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentInterface, + TransportProtocol, +) + + +try: + from a2a.client.transports.grpc import GrpcTransport +except ImportError: + GrpcTransport = None # type: ignore # pyright: ignore + + +logger = logging.getLogger(__name__) + + +TransportProducer = Callable[ + [AgentCard, str, ClientConfig, list[ClientCallInterceptor]], + ClientTransport, +] + + +class ClientFactory: + """ClientFactory is used to generate the appropriate client for the agent. + + The factory is configured with a `ClientConfig` and optionally a list of + `Consumer`s to use for all generated `Client`s. The expected use is: + + .. code-block:: python + + factory = ClientFactory(config, consumers) + # Optionally register custom client implementations + factory.register('my_customer_transport', NewCustomTransportClient) + # Then with an agent card make a client with additional consumers and + # interceptors + client = factory.create(card, additional_consumers, interceptors) + + Now the client can be used consistently regardless of the transport. This + aligns the client configuration with the server's capabilities. + """ + + def __init__( + self, + config: ClientConfig, + consumers: list[Consumer] | None = None, + ): + if consumers is None: + consumers = [] + self._config = config + self._consumers = consumers + self._registry: dict[str, TransportProducer] = {} + self._register_defaults(config.supported_transports) + + def _register_defaults( + self, supported: list[str | TransportProtocol] + ) -> None: + # Empty support list implies JSON-RPC only. 
+ if TransportProtocol.jsonrpc in supported or not supported: + self.register( + TransportProtocol.jsonrpc, + lambda card, url, config, interceptors: JsonRpcTransport( + config.httpx_client or httpx.AsyncClient(), + card, + url, + interceptors, + config.extensions or None, + ), + ) + if TransportProtocol.http_json in supported: + self.register( + TransportProtocol.http_json, + lambda card, url, config, interceptors: RestTransport( + config.httpx_client or httpx.AsyncClient(), + card, + url, + interceptors, + config.extensions or None, + ), + ) + if TransportProtocol.grpc in supported: + if GrpcTransport is None: + raise ImportError( + 'To use GrpcClient, its dependencies must be installed. ' + 'You can install them with \'pip install "a2a-sdk[grpc]"\'' + ) + self.register( + TransportProtocol.grpc, + GrpcTransport.create, + ) + + @classmethod + async def connect( # noqa: PLR0913 + cls, + agent: str | AgentCard, + client_config: ClientConfig | None = None, + consumers: list[Consumer] | None = None, + interceptors: list[ClientCallInterceptor] | None = None, + relative_card_path: str | None = None, + resolver_http_kwargs: dict[str, Any] | None = None, + extra_transports: dict[str, TransportProducer] | None = None, + extensions: list[str] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, + ) -> Client: + """Convenience method for constructing a client. + + Constructs a client that connects to the specified agent. Note that + creating multiple clients via this method is less efficient than + constructing an instance of ClientFactory and reusing that. + + .. code-block:: python + + # This will search for an AgentCard at /.well-known/agent-card.json + my_agent_url = 'https://travel.agents.example.com' + client = await ClientFactory.connect(my_agent_url) + + + Args: + agent: The base URL of the agent, or the AgentCard to connect to. + client_config: The ClientConfig to use when connecting to the agent. 
+        consumers: A list of `Consumer` methods to pass responses to.
+        interceptors: A list of interceptors to use for each request. These
+            are used for things like attaching credentials or http headers
+            to all outbound requests.
+        relative_card_path: If the agent field is a URL, this value is used as
+            the relative path when resolving the agent card. See
+            A2ACardResolver.get_agent_card for more details.
+        resolver_http_kwargs: Dictionary of arguments to provide to the httpx
+            client when resolving the agent card. This value is provided to
+            A2ACardResolver.get_agent_card as the http_kwargs parameter.
+        extra_transports: Additional transport protocols to enable when
+            constructing the client.
+        extensions: List of extensions to be activated.
+        signature_verifier: A callable used to verify the agent card's signatures.
+
+        Returns:
+            A `Client` object.
+        """
+        client_config = client_config or ClientConfig()
+        if isinstance(agent, str):
+            if not client_config.httpx_client:
+                async with httpx.AsyncClient() as client:
+                    resolver = A2ACardResolver(client, agent)
+                    card = await resolver.get_agent_card(
+                        relative_card_path=relative_card_path,
+                        http_kwargs=resolver_http_kwargs,
+                        signature_verifier=signature_verifier,
+                    )
+            else:
+                resolver = A2ACardResolver(client_config.httpx_client, agent)
+                card = await resolver.get_agent_card(
+                    relative_card_path=relative_card_path,
+                    http_kwargs=resolver_http_kwargs,
+                    signature_verifier=signature_verifier,
+                )
+        else:
+            card = agent
+        factory = cls(client_config)
+        for label, generator in (extra_transports or {}).items():
+            factory.register(label, generator)
+        return factory.create(card, consumers, interceptors, extensions)
+
+    def register(self, label: str, generator: TransportProducer) -> None:
+        """Register a new transport producer for a given transport label."""
+        self._registry[label] = generator
+
+    def create(
+        self,
+        card: AgentCard,
+        consumers: list[Consumer] | None = None,
+        interceptors:
list[ClientCallInterceptor] | None = None, + extensions: list[str] | None = None, + ) -> Client: + """Create a new `Client` for the provided `AgentCard`. + + Args: + card: An `AgentCard` defining the characteristics of the agent. + consumers: A list of `Consumer` methods to pass responses to. + interceptors: A list of interceptors to use for each request. These + are used for things like attaching credentials or http headers + to all outbound requests. + extensions: List of extensions to be activated. + + Returns: + A `Client` object. + + Raises: + If there is no valid matching of the client configuration with the + server configuration, a `ValueError` is raised. + """ + server_preferred = card.preferred_transport or TransportProtocol.jsonrpc + server_set = {server_preferred: card.url} + if card.additional_interfaces: + server_set.update( + {x.transport: x.url for x in card.additional_interfaces} + ) + client_set = self._config.supported_transports or [ + TransportProtocol.jsonrpc + ] + transport_protocol = None + transport_url = None + if self._config.use_client_preference: + for x in client_set: + if x in server_set: + transport_protocol = x + transport_url = server_set[x] + break + else: + for x, url in server_set.items(): + if x in client_set: + transport_protocol = x + transport_url = url + break + if not transport_protocol or not transport_url: + raise ValueError('no compatible transports found.') + if transport_protocol not in self._registry: + raise ValueError(f'no client available for {transport_protocol}') + + all_consumers = self._consumers.copy() + if consumers: + all_consumers.extend(consumers) + + all_extensions = self._config.extensions.copy() + if extensions: + all_extensions.extend(extensions) + self._config.extensions = all_extensions + + transport = self._registry[transport_protocol]( + card, transport_url, self._config, interceptors or [] + ) + + return BaseClient( + card, + self._config, + transport, + all_consumers, + interceptors or [], + ) + 
+ +def minimal_agent_card( + url: str, transports: list[str] | None = None +) -> AgentCard: + """Generates a minimal card to simplify bootstrapping client creation. + + This minimal card is not viable itself to interact with the remote agent. + Instead this is a shorthand way to take a known url and transport option + and interact with the get card endpoint of the agent server to get the + correct agent card. This pattern is necessary for gRPC based card access + as typically these servers won't expose a well known path card. + """ + if transports is None: + transports = [] + return AgentCard( + url=url, + preferred_transport=transports[0] if transports else None, + additional_interfaces=[ + AgentInterface(transport=t, url=url) for t in transports[1:] + ] + if len(transports) > 1 + else [], + supports_authenticated_extended_card=True, + capabilities=AgentCapabilities(), + default_input_modes=[], + default_output_modes=[], + description='', + skills=[], + version='', + name='', + ) diff --git a/src/a2a/client/client_task_manager.py b/src/a2a/client/client_task_manager.py new file mode 100644 index 000000000..060983e13 --- /dev/null +++ b/src/a2a/client/client_task_manager.py @@ -0,0 +1,192 @@ +import logging + +from a2a.client.errors import ( + A2AClientInvalidArgsError, + A2AClientInvalidStateError, +) +from a2a.server.events.event_queue import Event +from a2a.types import ( + Message, + Task, + TaskArtifactUpdateEvent, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, +) +from a2a.utils import append_artifact_to_task + + +logger = logging.getLogger(__name__) + + +class ClientTaskManager: + """Helps manage a task's lifecycle during execution of a request. + + Responsible for retrieving, saving, and updating the `Task` object based on + events received from the agent. 
+ """ + + def __init__( + self, + ) -> None: + """Initializes the `ClientTaskManager`.""" + self._current_task: Task | None = None + self._task_id: str | None = None + self._context_id: str | None = None + + def get_task(self) -> Task | None: + """Retrieves the current task object, either from memory. + + If `task_id` is set, it returns `_current_task` otherwise None. + + Returns: + The `Task` object if found, otherwise `None`. + """ + if not self._task_id: + logger.debug('task_id is not set, cannot get task.') + return None + + return self._current_task + + def get_task_or_raise(self) -> Task: + """Retrieves the current task object. + + Returns: + The `Task` object. + + Raises: + A2AClientInvalidStateError: If there is no current known Task. + """ + if not (task := self.get_task()): + # Note: The source of this error is either from bad client usage + # or from the server sending invalid updates. It indicates that this + # task manager has not consumed any information about a task, yet + # the caller is attempting to retrieve the current state of the task + # it expects to be present. + raise A2AClientInvalidStateError('no current Task') + return task + + async def save_task_event( + self, event: Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ) -> Task | None: + """Processes a task-related event (Task, Status, Artifact) and saves the updated task state. + + Ensures task and context IDs match or are set from the event. + + Args: + event: The task-related event (`Task`, `TaskStatusUpdateEvent`, or `TaskArtifactUpdateEvent`). + + Returns: + The updated `Task` object after processing the event. + + Raises: + ClientError: If the task ID in the event conflicts with the TaskManager's ID + when the TaskManager's ID is already set. + """ + if isinstance(event, Task): + if self._current_task: + raise A2AClientInvalidArgsError( + 'Task is already set, create new manager for new tasks.' 
+ ) + await self._save_task(event) + return event + task_id_from_event = ( + event.id if isinstance(event, Task) else event.task_id + ) + if not self._task_id: + self._task_id = task_id_from_event + if not self._context_id: + self._context_id = event.context_id + + logger.debug( + 'Processing save of task event of type %s for task_id: %s', + type(event).__name__, + task_id_from_event, + ) + + task = self._current_task + if not task: + task = Task( + status=TaskStatus(state=TaskState.unknown), + id=task_id_from_event, + context_id=self._context_id if self._context_id else '', + ) + if isinstance(event, TaskStatusUpdateEvent): + logger.debug( + 'Updating task %s status to: %s', + event.task_id, + event.status.state, + ) + if event.status.message: + if not task.history: + task.history = [event.status.message] + else: + task.history.append(event.status.message) + if event.metadata: + if not task.metadata: + task.metadata = {} + task.metadata.update(event.metadata) + task.status = event.status + else: + logger.debug('Appending artifact to task %s', task.id) + append_artifact_to_task(task, event) + self._current_task = task + return task + + async def process(self, event: Event) -> Event: + """Processes an event, updates the task state if applicable, stores it, and returns the event. + + If the event is task-related (`Task`, `TaskStatusUpdateEvent`, `TaskArtifactUpdateEvent`), + the internal task state is updated and persisted. + + Args: + event: The event object received from the agent. + + Returns: + The same event object that was processed. + """ + if isinstance( + event, Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ): + await self.save_task_event(event) + + return event + + async def _save_task(self, task: Task) -> None: + """Saves the given task to the `_current_task` and updated `_task_id` and `_context_id`. + + Args: + task: The `Task` object to save. 
+ """ + logger.debug('Saving task with id: %s', task.id) + self._current_task = task + if not self._task_id: + logger.info('New task created with id: %s', task.id) + self._task_id = task.id + self._context_id = task.context_id + + def update_with_message(self, message: Message, task: Task) -> Task: + """Updates a task object adding a new message to its history. + + If the task has a message in its current status, that message is moved + to the history first. + + Args: + message: The new `Message` to add to the history. + task: The `Task` object to update. + + Returns: + The updated `Task` object (updated in-place). + """ + if task.status.message: + if task.history: + task.history.append(task.status.message) + else: + task.history = [task.status.message] + task.status.message = None + if task.history: + task.history.append(message) + else: + task.history = [message] + self._current_task = task + return task diff --git a/src/a2a/client/errors.py b/src/a2a/client/errors.py index da02e5826..890c3726a 100644 --- a/src/a2a/client/errors.py +++ b/src/a2a/client/errors.py @@ -1,5 +1,7 @@ """Custom exceptions for the A2A client.""" +from a2a.types import JSONRPCErrorResponse + class A2AClientError(Exception): """Base exception for A2A Client errors.""" @@ -31,3 +33,55 @@ def __init__(self, message: str): """ self.message = message super().__init__(f'JSON Error: {message}') + + +class A2AClientTimeoutError(A2AClientError): + """Client exception for timeout errors during a request.""" + + def __init__(self, message: str): + """Initializes the A2AClientTimeoutError. + + Args: + message: A descriptive error message. + """ + self.message = message + super().__init__(f'Timeout Error: {message}') + + +class A2AClientInvalidArgsError(A2AClientError): + """Client exception for invalid arguments passed to a method.""" + + def __init__(self, message: str): + """Initializes the A2AClientInvalidArgsError. + + Args: + message: A descriptive error message. 
+ """ + self.message = message + super().__init__(f'Invalid arguments error: {message}') + + +class A2AClientInvalidStateError(A2AClientError): + """Client exception for an invalid client state.""" + + def __init__(self, message: str): + """Initializes the A2AClientInvalidStateError. + + Args: + message: A descriptive error message. + """ + self.message = message + super().__init__(f'Invalid state error: {message}') + + +class A2AClientJSONRPCError(A2AClientError): + """Client exception for JSON-RPC errors returned by the server.""" + + def __init__(self, error: JSONRPCErrorResponse): + """Initializes the A2AClientJsonRPCError. + + Args: + error: The JSON-RPC error object. + """ + self.error = error.error + super().__init__(f'JSON-RPC Error {error.error}') diff --git a/src/a2a/client/helpers.py b/src/a2a/client/helpers.py index 4eedadb86..930c71e6b 100644 --- a/src/a2a/client/helpers.py +++ b/src/a2a/client/helpers.py @@ -15,8 +15,8 @@ def create_text_message_object( content: The text content of the message. Defaults to an empty string. Returns: - A `Message` object with a new UUID messageId. + A `Message` object with a new UUID message_id. 
""" return Message( - role=role, parts=[Part(TextPart(text=content))], messageId=str(uuid4()) + role=role, parts=[Part(TextPart(text=content))], message_id=str(uuid4()) ) diff --git a/src/a2a/client/legacy.py b/src/a2a/client/legacy.py new file mode 100644 index 000000000..4318543d6 --- /dev/null +++ b/src/a2a/client/legacy.py @@ -0,0 +1,344 @@ +"""Backwards compatibility layer for legacy A2A clients.""" + +import warnings + +from collections.abc import AsyncGenerator +from typing import Any + +import httpx + +from a2a.client.errors import A2AClientJSONRPCError +from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from a2a.client.transports.jsonrpc import JsonRpcTransport +from a2a.types import ( + AgentCard, + CancelTaskRequest, + CancelTaskResponse, + CancelTaskSuccessResponse, + GetTaskPushNotificationConfigParams, + GetTaskPushNotificationConfigRequest, + GetTaskPushNotificationConfigResponse, + GetTaskPushNotificationConfigSuccessResponse, + GetTaskRequest, + GetTaskResponse, + GetTaskSuccessResponse, + JSONRPCErrorResponse, + SendMessageRequest, + SendMessageResponse, + SendMessageSuccessResponse, + SendStreamingMessageRequest, + SendStreamingMessageResponse, + SendStreamingMessageSuccessResponse, + SetTaskPushNotificationConfigRequest, + SetTaskPushNotificationConfigResponse, + SetTaskPushNotificationConfigSuccessResponse, + TaskIdParams, + TaskResubscriptionRequest, +) + + +class A2AClient: + """[DEPRECATED] Backwards compatibility wrapper for the JSON-RPC client.""" + + def __init__( + self, + httpx_client: httpx.AsyncClient, + agent_card: AgentCard | None = None, + url: str | None = None, + interceptors: list[ClientCallInterceptor] | None = None, + ): + warnings.warn( + 'A2AClient is deprecated and will be removed in a future version. 
' + 'Use ClientFactory to create a client with a JSON-RPC transport.', + DeprecationWarning, + stacklevel=2, + ) + self._transport = JsonRpcTransport( + httpx_client, agent_card, url, interceptors + ) + + async def send_message( + self, + request: SendMessageRequest, + *, + http_kwargs: dict[str, Any] | None = None, + context: ClientCallContext | None = None, + ) -> SendMessageResponse: + """Sends a non-streaming message request to the agent. + + Args: + request: The `SendMessageRequest` object containing the message and configuration. + http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.post request. + context: The client call context. + + Returns: + A `SendMessageResponse` object containing the agent's response (Task or Message) or an error. + + Raises: + A2AClientHTTPError: If an HTTP error occurs during the request. + A2AClientJSONError: If the response body cannot be decoded as JSON or validated. + """ + if not context and http_kwargs: + context = ClientCallContext(state={'http_kwargs': http_kwargs}) + + try: + result = await self._transport.send_message( + request.params, context=context + ) + return SendMessageResponse( + root=SendMessageSuccessResponse( + id=request.id, jsonrpc='2.0', result=result + ) + ) + except A2AClientJSONRPCError as e: + return SendMessageResponse(JSONRPCErrorResponse(error=e.error)) + + async def send_message_streaming( + self, + request: SendStreamingMessageRequest, + *, + http_kwargs: dict[str, Any] | None = None, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[SendStreamingMessageResponse, None]: + """Sends a streaming message request to the agent and yields responses as they arrive. + + This method uses Server-Sent Events (SSE) to receive a stream of updates from the agent. + + Args: + request: The `SendStreamingMessageRequest` object containing the message and configuration. 
+ http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.post request. A default `timeout=None` is set but can be overridden. + context: The client call context. + + Yields: + `SendStreamingMessageResponse` objects as they are received in the SSE stream. + These can be Task, Message, TaskStatusUpdateEvent, or TaskArtifactUpdateEvent. + + Raises: + A2AClientHTTPError: If an HTTP or SSE protocol error occurs during the request. + A2AClientJSONError: If an SSE event data cannot be decoded as JSON or validated. + """ + if not context and http_kwargs: + context = ClientCallContext(state={'http_kwargs': http_kwargs}) + + async for result in self._transport.send_message_streaming( + request.params, context=context + ): + yield SendStreamingMessageResponse( + root=SendStreamingMessageSuccessResponse( + id=request.id, jsonrpc='2.0', result=result + ) + ) + + async def get_task( + self, + request: GetTaskRequest, + *, + http_kwargs: dict[str, Any] | None = None, + context: ClientCallContext | None = None, + ) -> GetTaskResponse: + """Retrieves the current state and history of a specific task. + + Args: + request: The `GetTaskRequest` object specifying the task ID and history length. + http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.post request. + context: The client call context. + + Returns: + A `GetTaskResponse` object containing the Task or an error. + + Raises: + A2AClientHTTPError: If an HTTP error occurs during the request. + A2AClientJSONError: If the response body cannot be decoded as JSON or validated. 
+ """ + if not context and http_kwargs: + context = ClientCallContext(state={'http_kwargs': http_kwargs}) + try: + result = await self._transport.get_task( + request.params, context=context + ) + return GetTaskResponse( + root=GetTaskSuccessResponse( + id=request.id, jsonrpc='2.0', result=result + ) + ) + except A2AClientJSONRPCError as e: + return GetTaskResponse(root=JSONRPCErrorResponse(error=e.error)) + + async def cancel_task( + self, + request: CancelTaskRequest, + *, + http_kwargs: dict[str, Any] | None = None, + context: ClientCallContext | None = None, + ) -> CancelTaskResponse: + """Requests the agent to cancel a specific task. + + Args: + request: The `CancelTaskRequest` object specifying the task ID. + http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.post request. + context: The client call context. + + Returns: + A `CancelTaskResponse` object containing the updated Task with canceled status or an error. + + Raises: + A2AClientHTTPError: If an HTTP error occurs during the request. + A2AClientJSONError: If the response body cannot be decoded as JSON or validated. + """ + if not context and http_kwargs: + context = ClientCallContext(state={'http_kwargs': http_kwargs}) + try: + result = await self._transport.cancel_task( + request.params, context=context + ) + return CancelTaskResponse( + root=CancelTaskSuccessResponse( + id=request.id, jsonrpc='2.0', result=result + ) + ) + except A2AClientJSONRPCError as e: + return CancelTaskResponse(JSONRPCErrorResponse(error=e.error)) + + async def set_task_callback( + self, + request: SetTaskPushNotificationConfigRequest, + *, + http_kwargs: dict[str, Any] | None = None, + context: ClientCallContext | None = None, + ) -> SetTaskPushNotificationConfigResponse: + """Sets or updates the push notification configuration for a specific task. + + Args: + request: The `SetTaskPushNotificationConfigRequest` object specifying the task ID and configuration. 
+ http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.post request. + context: The client call context. + + Returns: + A `SetTaskPushNotificationConfigResponse` object containing the confirmation or an error. + + Raises: + A2AClientHTTPError: If an HTTP error occurs during the request. + A2AClientJSONError: If the response body cannot be decoded as JSON or validated. + """ + if not context and http_kwargs: + context = ClientCallContext(state={'http_kwargs': http_kwargs}) + try: + result = await self._transport.set_task_callback( + request.params, context=context + ) + return SetTaskPushNotificationConfigResponse( + root=SetTaskPushNotificationConfigSuccessResponse( + id=request.id, jsonrpc='2.0', result=result + ) + ) + except A2AClientJSONRPCError as e: + return SetTaskPushNotificationConfigResponse( + JSONRPCErrorResponse(error=e.error) + ) + + async def get_task_callback( + self, + request: GetTaskPushNotificationConfigRequest, + *, + http_kwargs: dict[str, Any] | None = None, + context: ClientCallContext | None = None, + ) -> GetTaskPushNotificationConfigResponse: + """Retrieves the push notification configuration for a specific task. + + Args: + request: The `GetTaskPushNotificationConfigRequest` object specifying the task ID. + http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.post request. + context: The client call context. + + Returns: + A `GetTaskPushNotificationConfigResponse` object containing the configuration or an error. + + Raises: + A2AClientHTTPError: If an HTTP error occurs during the request. + A2AClientJSONError: If the response body cannot be decoded as JSON or validated. 
+ """ + if not context and http_kwargs: + context = ClientCallContext(state={'http_kwargs': http_kwargs}) + params = request.params + if isinstance(params, TaskIdParams): + params = GetTaskPushNotificationConfigParams(id=request.params.id) + try: + result = await self._transport.get_task_callback( + params, context=context + ) + return GetTaskPushNotificationConfigResponse( + root=GetTaskPushNotificationConfigSuccessResponse( + id=request.id, jsonrpc='2.0', result=result + ) + ) + except A2AClientJSONRPCError as e: + return GetTaskPushNotificationConfigResponse( + JSONRPCErrorResponse(error=e.error) + ) + + async def resubscribe( + self, + request: TaskResubscriptionRequest, + *, + http_kwargs: dict[str, Any] | None = None, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[SendStreamingMessageResponse, None]: + """Reconnects to get task updates. + + This method uses Server-Sent Events (SSE) to receive a stream of updates from the agent. + + Args: + request: The `TaskResubscriptionRequest` object containing the task information to reconnect to. + http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.post request. A default `timeout=None` is set but can be overridden. + context: The client call context. + + Yields: + `SendStreamingMessageResponse` objects as they are received in the SSE stream. + These can be Task, Message, TaskStatusUpdateEvent, or TaskArtifactUpdateEvent. + + Raises: + A2AClientHTTPError: If an HTTP or SSE protocol error occurs during the request. + A2AClientJSONError: If an SSE event data cannot be decoded as JSON or validated. 
+ """ + if not context and http_kwargs: + context = ClientCallContext(state={'http_kwargs': http_kwargs}) + + async for result in self._transport.resubscribe( + request.params, context=context + ): + yield SendStreamingMessageResponse( + root=SendStreamingMessageSuccessResponse( + id=request.id, jsonrpc='2.0', result=result + ) + ) + + async def get_card( + self, + *, + http_kwargs: dict[str, Any] | None = None, + context: ClientCallContext | None = None, + ) -> AgentCard: + """Retrieves the authenticated card (if necessary) or the public one. + + Args: + http_kwargs: Optional dictionary of keyword arguments to pass to the + underlying httpx.post request. + context: The client call context. + + Returns: + A `AgentCard` object containing the card or an error. + + Raises: + A2AClientHTTPError: If an HTTP error occurs during the request. + A2AClientJSONError: If the response body cannot be decoded as JSON or validated. + """ + if not context and http_kwargs: + context = ClientCallContext(state={'http_kwargs': http_kwargs}) + return await self._transport.get_card(context=context) diff --git a/src/a2a/client/legacy_grpc.py b/src/a2a/client/legacy_grpc.py new file mode 100644 index 000000000..0b62b0096 --- /dev/null +++ b/src/a2a/client/legacy_grpc.py @@ -0,0 +1,44 @@ +"""Backwards compatibility layer for the legacy A2A gRPC client.""" + +import warnings + +from typing import TYPE_CHECKING + +from a2a.client.transports.grpc import GrpcTransport +from a2a.types import AgentCard + + +if TYPE_CHECKING: + from a2a.grpc.a2a_pb2_grpc import A2AServiceStub + + +class A2AGrpcClient(GrpcTransport): + """[DEPRECATED] Backwards compatibility wrapper for the gRPC client.""" + + def __init__( # pylint: disable=super-init-not-called + self, + grpc_stub: 'A2AServiceStub', + agent_card: AgentCard, + ): + warnings.warn( + 'A2AGrpcClient is deprecated and will be removed in a future version. 
' + 'Use ClientFactory to create a client with a gRPC transport.', + DeprecationWarning, + stacklevel=2, + ) + # The old gRPC client accepted a stub directly. The new one accepts a + # channel and builds the stub itself. We just have a stub here, so we + # need to handle initialization ourselves. + self.stub = grpc_stub + self.agent_card = agent_card + self._needs_extended_card = ( + agent_card.supports_authenticated_extended_card + if agent_card + else True + ) + + class _NopChannel: + async def close(self) -> None: + pass + + self.channel = _NopChannel() diff --git a/src/a2a/client/middleware.py b/src/a2a/client/middleware.py new file mode 100644 index 000000000..73ada982f --- /dev/null +++ b/src/a2a/client/middleware.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections.abc import MutableMapping # noqa: TC003 +from typing import TYPE_CHECKING, Any + +from pydantic import BaseModel, Field + + +if TYPE_CHECKING: + from a2a.types import AgentCard + + +class ClientCallContext(BaseModel): + """A context passed with each client call, allowing for call-specific. + + configuration and data passing. Such as authentication details or + request deadlines. + """ + + state: MutableMapping[str, Any] = Field(default_factory=dict) + + +class ClientCallInterceptor(ABC): + """An abstract base class for client-side call interceptors. + + Interceptors can inspect and modify requests before they are sent, + which is ideal for concerns like authentication, logging, or tracing. + """ + + @abstractmethod + async def intercept( + self, + method_name: str, + request_payload: dict[str, Any], + http_kwargs: dict[str, Any], + agent_card: AgentCard | None, + context: ClientCallContext | None, + ) -> tuple[dict[str, Any], dict[str, Any]]: + """ + Intercepts a client call before the request is sent. + + Args: + method_name: The name of the RPC method (e.g., 'message/send'). + request_payload: The JSON RPC request payload dictionary. 
+ http_kwargs: The keyword arguments for the httpx request. + agent_card: The AgentCard associated with the client. + context: The ClientCallContext for this specific call. + + Returns: + A tuple containing the (potentially modified) request_payload + and http_kwargs. + """ diff --git a/src/a2a/client/optionals.py b/src/a2a/client/optionals.py new file mode 100644 index 000000000..f55f01862 --- /dev/null +++ b/src/a2a/client/optionals.py @@ -0,0 +1,16 @@ +from typing import TYPE_CHECKING + + +# Attempt to import the optional module +try: + from grpc.aio import Channel # pyright: ignore[reportAssignmentType] +except ImportError: + # If grpc.aio is not available, define a dummy type for type checking. + # This dummy type will only be used by type checkers. + if TYPE_CHECKING: + + class Channel: # type: ignore[no-redef] + """Dummy class for type hinting when grpc.aio is not available.""" + + else: + Channel = None # At runtime, pd will be None if the import failed. diff --git a/src/a2a/client/transports/__init__.py b/src/a2a/client/transports/__init__.py new file mode 100644 index 000000000..af7c60f62 --- /dev/null +++ b/src/a2a/client/transports/__init__.py @@ -0,0 +1,19 @@ +"""A2A Client Transports.""" + +from a2a.client.transports.base import ClientTransport +from a2a.client.transports.jsonrpc import JsonRpcTransport +from a2a.client.transports.rest import RestTransport + + +try: + from a2a.client.transports.grpc import GrpcTransport +except ImportError: + GrpcTransport = None # type: ignore + + +__all__ = [ + 'ClientTransport', + 'GrpcTransport', + 'JsonRpcTransport', + 'RestTransport', +] diff --git a/src/a2a/client/transports/base.py b/src/a2a/client/transports/base.py new file mode 100644 index 000000000..0c54a28dc --- /dev/null +++ b/src/a2a/client/transports/base.py @@ -0,0 +1,112 @@ +from abc import ABC, abstractmethod +from collections.abc import AsyncGenerator, Callable + +from a2a.client.middleware import ClientCallContext +from a2a.types import ( + 
AgentCard, + GetTaskPushNotificationConfigParams, + Message, + MessageSendParams, + Task, + TaskArtifactUpdateEvent, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, + TaskStatusUpdateEvent, +) + + +class ClientTransport(ABC): + """Abstract base class for a client transport.""" + + @abstractmethod + async def send_message( + self, + request: MessageSendParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task | Message: + """Sends a non-streaming message request to the agent.""" + + @abstractmethod + async def send_message_streaming( + self, + request: MessageSendParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncGenerator[ + Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ]: + """Sends a streaming message request to the agent and yields responses as they arrive.""" + return + yield + + @abstractmethod + async def get_task( + self, + request: TaskQueryParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task.""" + + @abstractmethod + async def cancel_task( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Requests the agent to cancel a specific task.""" + + @abstractmethod + async def set_task_callback( + self, + request: TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task.""" + + @abstractmethod + async def get_task_callback( + self, + request: GetTaskPushNotificationConfigParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration 
for a specific task.""" + + @abstractmethod + async def resubscribe( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncGenerator[ + Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ]: + """Reconnects to get task updates.""" + return + yield + + @abstractmethod + async def get_card( + self, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, + ) -> AgentCard: + """Retrieves the AgentCard.""" + + @abstractmethod + async def close(self) -> None: + """Closes the transport.""" diff --git a/src/a2a/client/transports/grpc.py b/src/a2a/client/transports/grpc.py new file mode 100644 index 000000000..6a8b16f92 --- /dev/null +++ b/src/a2a/client/transports/grpc.py @@ -0,0 +1,249 @@ +import logging + +from collections.abc import AsyncGenerator, Callable + + +try: + import grpc +except ImportError as e: + raise ImportError( + 'A2AGrpcClient requires grpcio and grpcio-tools to be installed. 
' + 'Install with: ' + "'pip install a2a-sdk[grpc]'" + ) from e + + +from a2a.client.client import ClientConfig +from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from a2a.client.optionals import Channel +from a2a.client.transports.base import ClientTransport +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.grpc import a2a_pb2, a2a_pb2_grpc +from a2a.types import ( + AgentCard, + GetTaskPushNotificationConfigParams, + Message, + MessageSendParams, + Task, + TaskArtifactUpdateEvent, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, + TaskStatusUpdateEvent, +) +from a2a.utils import proto_utils +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + + +@trace_class(kind=SpanKind.CLIENT) +class GrpcTransport(ClientTransport): + """A gRPC transport for the A2A client.""" + + def __init__( + self, + channel: Channel, + agent_card: AgentCard | None, + extensions: list[str] | None = None, + ): + """Initializes the GrpcTransport.""" + self.agent_card = agent_card + self.channel = channel + self.stub = a2a_pb2_grpc.A2AServiceStub(channel) + self._needs_extended_card = ( + agent_card.supports_authenticated_extended_card + if agent_card + else True + ) + self.extensions = extensions + + def _get_grpc_metadata( + self, + extensions: list[str] | None = None, + ) -> list[tuple[str, str]] | None: + """Creates gRPC metadata for extensions.""" + if extensions is not None: + return [(HTTP_EXTENSION_HEADER, ','.join(extensions))] + if self.extensions is not None: + return [(HTTP_EXTENSION_HEADER, ','.join(self.extensions))] + return None + + @classmethod + def create( + cls, + card: AgentCard, + url: str, + config: ClientConfig, + interceptors: list[ClientCallInterceptor], + ) -> 'GrpcTransport': + """Creates a gRPC transport for the A2A client.""" + if config.grpc_channel_factory is None: + raise ValueError('grpc_channel_factory is required when using gRPC') + return 
cls(config.grpc_channel_factory(url), card, config.extensions) + + async def send_message( + self, + request: MessageSendParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task | Message: + """Sends a non-streaming message request to the agent.""" + response = await self.stub.SendMessage( + a2a_pb2.SendMessageRequest( + request=proto_utils.ToProto.message(request.message), + configuration=proto_utils.ToProto.message_send_configuration( + request.configuration + ), + metadata=proto_utils.ToProto.metadata(request.metadata), + ), + metadata=self._get_grpc_metadata(extensions), + ) + if response.HasField('task'): + return proto_utils.FromProto.task(response.task) + return proto_utils.FromProto.message(response.msg) + + async def send_message_streaming( + self, + request: MessageSendParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncGenerator[ + Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ]: + """Sends a streaming message request to the agent and yields responses as they arrive.""" + stream = self.stub.SendStreamingMessage( + a2a_pb2.SendMessageRequest( + request=proto_utils.ToProto.message(request.message), + configuration=proto_utils.ToProto.message_send_configuration( + request.configuration + ), + metadata=proto_utils.ToProto.metadata(request.metadata), + ), + metadata=self._get_grpc_metadata(extensions), + ) + while True: + response = await stream.read() + if response == grpc.aio.EOF: # pyright: ignore[reportAttributeAccessIssue] + break + yield proto_utils.FromProto.stream_response(response) + + async def resubscribe( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncGenerator[ + Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ]: + """Reconnects to get task updates.""" + stream = self.stub.TaskSubscription( + 
a2a_pb2.TaskSubscriptionRequest(name=f'tasks/{request.id}'), + metadata=self._get_grpc_metadata(extensions), + ) + while True: + response = await stream.read() + if response == grpc.aio.EOF: # pyright: ignore[reportAttributeAccessIssue] + break + yield proto_utils.FromProto.stream_response(response) + + async def get_task( + self, + request: TaskQueryParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task.""" + task = await self.stub.GetTask( + a2a_pb2.GetTaskRequest( + name=f'tasks/{request.id}', + history_length=request.history_length, + ), + metadata=self._get_grpc_metadata(extensions), + ) + return proto_utils.FromProto.task(task) + + async def cancel_task( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Requests the agent to cancel a specific task.""" + task = await self.stub.CancelTask( + a2a_pb2.CancelTaskRequest(name=f'tasks/{request.id}'), + metadata=self._get_grpc_metadata(extensions), + ) + return proto_utils.FromProto.task(task) + + async def set_task_callback( + self, + request: TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task.""" + config = await self.stub.CreateTaskPushNotificationConfig( + a2a_pb2.CreateTaskPushNotificationConfigRequest( + parent=f'tasks/{request.task_id}', + config_id=request.push_notification_config.id, + config=proto_utils.ToProto.task_push_notification_config( + request + ), + ), + metadata=self._get_grpc_metadata(extensions), + ) + return proto_utils.FromProto.task_push_notification_config(config) + + async def get_task_callback( + self, + request: GetTaskPushNotificationConfigParams, + *, + context: ClientCallContext | None = None, + extensions: 
list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration for a specific task.""" + config = await self.stub.GetTaskPushNotificationConfig( + a2a_pb2.GetTaskPushNotificationConfigRequest( + name=f'tasks/{request.id}/pushNotificationConfigs/{request.push_notification_config_id}', + ), + metadata=self._get_grpc_metadata(extensions), + ) + return proto_utils.FromProto.task_push_notification_config(config) + + async def get_card( + self, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, + ) -> AgentCard: + """Retrieves the agent's card.""" + card = self.agent_card + if card and not self._needs_extended_card: + return card + if card is None and not self._needs_extended_card: + raise ValueError('Agent card is not available.') + + card_pb = await self.stub.GetAgentCard( + a2a_pb2.GetAgentCardRequest(), + metadata=self._get_grpc_metadata(extensions), + ) + card = proto_utils.FromProto.agent_card(card_pb) + if signature_verifier: + signature_verifier(card) + + self.agent_card = card + self._needs_extended_card = False + return card + + async def close(self) -> None: + """Closes the gRPC channel.""" + await self.channel.close() diff --git a/src/a2a/client/transports/jsonrpc.py b/src/a2a/client/transports/jsonrpc.py new file mode 100644 index 000000000..a58a7cab7 --- /dev/null +++ b/src/a2a/client/transports/jsonrpc.py @@ -0,0 +1,433 @@ +import json +import logging + +from collections.abc import AsyncGenerator, Callable +from typing import Any +from uuid import uuid4 + +import httpx + +from httpx_sse import SSEError, aconnect_sse + +from a2a.client.card_resolver import A2ACardResolver +from a2a.client.errors import ( + A2AClientHTTPError, + A2AClientJSONError, + A2AClientJSONRPCError, + A2AClientTimeoutError, +) +from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from a2a.client.transports.base 
import ClientTransport +from a2a.extensions.common import update_extension_header +from a2a.types import ( + AgentCard, + CancelTaskRequest, + CancelTaskResponse, + GetAuthenticatedExtendedCardRequest, + GetAuthenticatedExtendedCardResponse, + GetTaskPushNotificationConfigParams, + GetTaskPushNotificationConfigRequest, + GetTaskPushNotificationConfigResponse, + GetTaskRequest, + GetTaskResponse, + JSONRPCErrorResponse, + Message, + MessageSendParams, + SendMessageRequest, + SendMessageResponse, + SendStreamingMessageRequest, + SendStreamingMessageResponse, + SetTaskPushNotificationConfigRequest, + SetTaskPushNotificationConfigResponse, + Task, + TaskArtifactUpdateEvent, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, + TaskResubscriptionRequest, + TaskStatusUpdateEvent, +) +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + + +@trace_class(kind=SpanKind.CLIENT) +class JsonRpcTransport(ClientTransport): + """A JSON-RPC transport for the A2A client.""" + + def __init__( + self, + httpx_client: httpx.AsyncClient, + agent_card: AgentCard | None = None, + url: str | None = None, + interceptors: list[ClientCallInterceptor] | None = None, + extensions: list[str] | None = None, + ): + """Initializes the JsonRpcTransport.""" + if url: + self.url = url + elif agent_card: + self.url = agent_card.url + else: + raise ValueError('Must provide either agent_card or url') + + self.httpx_client = httpx_client + self.agent_card = agent_card + self.interceptors = interceptors or [] + self._needs_extended_card = ( + agent_card.supports_authenticated_extended_card + if agent_card + else True + ) + self.extensions = extensions + + async def _apply_interceptors( + self, + method_name: str, + request_payload: dict[str, Any], + http_kwargs: dict[str, Any] | None, + context: ClientCallContext | None, + ) -> tuple[dict[str, Any], dict[str, Any]]: + final_http_kwargs = http_kwargs or {} + final_request_payload = request_payload + 
+ for interceptor in self.interceptors: + ( + final_request_payload, + final_http_kwargs, + ) = await interceptor.intercept( + method_name, + final_request_payload, + final_http_kwargs, + self.agent_card, + context, + ) + return final_request_payload, final_http_kwargs + + def _get_http_args( + self, context: ClientCallContext | None + ) -> dict[str, Any] | None: + return context.state.get('http_kwargs') if context else None + + async def send_message( + self, + request: MessageSendParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task | Message: + """Sends a non-streaming message request to the agent.""" + rpc_request = SendMessageRequest(params=request, id=str(uuid4())) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + 'message/send', + rpc_request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + response_data = await self._send_request(payload, modified_kwargs) + response = SendMessageResponse.model_validate(response_data) + if isinstance(response.root, JSONRPCErrorResponse): + raise A2AClientJSONRPCError(response.root) + return response.root.result + + async def send_message_streaming( + self, + request: MessageSendParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncGenerator[ + Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ]: + """Sends a streaming message request to the agent and yields responses as they arrive.""" + rpc_request = SendStreamingMessageRequest( + params=request, id=str(uuid4()) + ) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + 'message/stream', + 
rpc_request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + modified_kwargs.setdefault( + 'timeout', self.httpx_client.timeout.as_dict().get('read', None) + ) + headers = dict(self.httpx_client.headers.items()) + headers.update(modified_kwargs.get('headers', {})) + modified_kwargs['headers'] = headers + + async with aconnect_sse( + self.httpx_client, + 'POST', + self.url, + json=payload, + **modified_kwargs, + ) as event_source: + try: + event_source.response.raise_for_status() + async for sse in event_source.aiter_sse(): + if not sse.data: + continue + response = SendStreamingMessageResponse.model_validate( + json.loads(sse.data) + ) + if isinstance(response.root, JSONRPCErrorResponse): + raise A2AClientJSONRPCError(response.root) + yield response.root.result + except httpx.HTTPStatusError as e: + raise A2AClientHTTPError(e.response.status_code, str(e)) from e + except SSEError as e: + raise A2AClientHTTPError( + 400, f'Invalid SSE response or protocol error: {e}' + ) from e + except json.JSONDecodeError as e: + raise A2AClientJSONError(str(e)) from e + except httpx.RequestError as e: + raise A2AClientHTTPError( + 503, f'Network communication error: {e}' + ) from e + + async def _send_request( + self, + rpc_request_payload: dict[str, Any], + http_kwargs: dict[str, Any] | None = None, + ) -> dict[str, Any]: + try: + response = await self.httpx_client.post( + self.url, json=rpc_request_payload, **(http_kwargs or {}) + ) + response.raise_for_status() + return response.json() + except httpx.ReadTimeout as e: + raise A2AClientTimeoutError('Client Request timed out') from e + except httpx.HTTPStatusError as e: + raise A2AClientHTTPError(e.response.status_code, str(e)) from e + except json.JSONDecodeError as e: + raise A2AClientJSONError(str(e)) from e + except httpx.RequestError as e: + raise A2AClientHTTPError( + 503, f'Network communication error: {e}' + ) from e + + async def get_task( + self, + request: TaskQueryParams, + *, + 
context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task.""" + rpc_request = GetTaskRequest(params=request, id=str(uuid4())) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + 'tasks/get', + rpc_request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + response_data = await self._send_request(payload, modified_kwargs) + response = GetTaskResponse.model_validate(response_data) + if isinstance(response.root, JSONRPCErrorResponse): + raise A2AClientJSONRPCError(response.root) + return response.root.result + + async def cancel_task( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Requests the agent to cancel a specific task.""" + rpc_request = CancelTaskRequest(params=request, id=str(uuid4())) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + 'tasks/cancel', + rpc_request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + response_data = await self._send_request(payload, modified_kwargs) + response = CancelTaskResponse.model_validate(response_data) + if isinstance(response.root, JSONRPCErrorResponse): + raise A2AClientJSONRPCError(response.root) + return response.root.result + + async def set_task_callback( + self, + request: TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task.""" + rpc_request = SetTaskPushNotificationConfigRequest( + 
params=request, id=str(uuid4()) + ) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + 'tasks/pushNotificationConfig/set', + rpc_request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + response_data = await self._send_request(payload, modified_kwargs) + response = SetTaskPushNotificationConfigResponse.model_validate( + response_data + ) + if isinstance(response.root, JSONRPCErrorResponse): + raise A2AClientJSONRPCError(response.root) + return response.root.result + + async def get_task_callback( + self, + request: GetTaskPushNotificationConfigParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration for a specific task.""" + rpc_request = GetTaskPushNotificationConfigRequest( + params=request, id=str(uuid4()) + ) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + 'tasks/pushNotificationConfig/get', + rpc_request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + response_data = await self._send_request(payload, modified_kwargs) + response = GetTaskPushNotificationConfigResponse.model_validate( + response_data + ) + if isinstance(response.root, JSONRPCErrorResponse): + raise A2AClientJSONRPCError(response.root) + return response.root.result + + async def resubscribe( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncGenerator[ + Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ]: + """Reconnects to get task updates.""" + rpc_request = TaskResubscriptionRequest(params=request, 
id=str(uuid4())) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + 'tasks/resubscribe', + rpc_request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + modified_kwargs.setdefault('timeout', None) + + async with aconnect_sse( + self.httpx_client, + 'POST', + self.url, + json=payload, + **modified_kwargs, + ) as event_source: + try: + async for sse in event_source.aiter_sse(): + response = SendStreamingMessageResponse.model_validate_json( + sse.data + ) + if isinstance(response.root, JSONRPCErrorResponse): + raise A2AClientJSONRPCError(response.root) + yield response.root.result + except SSEError as e: + raise A2AClientHTTPError( + 400, f'Invalid SSE response or protocol error: {e}' + ) from e + except json.JSONDecodeError as e: + raise A2AClientJSONError(str(e)) from e + except httpx.RequestError as e: + raise A2AClientHTTPError( + 503, f'Network communication error: {e}' + ) from e + + async def get_card( + self, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, + ) -> AgentCard: + """Retrieves the agent's card.""" + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + card = self.agent_card + + if not card: + resolver = A2ACardResolver(self.httpx_client, self.url) + card = await resolver.get_agent_card( + http_kwargs=modified_kwargs, + signature_verifier=signature_verifier, + ) + self._needs_extended_card = ( + card.supports_authenticated_extended_card + ) + self.agent_card = card + + if not self._needs_extended_card: + return card + + request = GetAuthenticatedExtendedCardRequest(id=str(uuid4())) + payload, modified_kwargs = await self._apply_interceptors( + request.method, + 
request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + response_data = await self._send_request( + payload, + modified_kwargs, + ) + response = GetAuthenticatedExtendedCardResponse.model_validate( + response_data + ) + if isinstance(response.root, JSONRPCErrorResponse): + raise A2AClientJSONRPCError(response.root) + card = response.root.result + if signature_verifier: + signature_verifier(card) + + self.agent_card = card + self._needs_extended_card = False + return card + + async def close(self) -> None: + """Closes the httpx client.""" + await self.httpx_client.aclose() diff --git a/src/a2a/client/transports/rest.py b/src/a2a/client/transports/rest.py new file mode 100644 index 000000000..96df1e023 --- /dev/null +++ b/src/a2a/client/transports/rest.py @@ -0,0 +1,417 @@ +import json +import logging + +from collections.abc import AsyncGenerator, Callable +from typing import Any + +import httpx + +from google.protobuf.json_format import MessageToDict, Parse, ParseDict +from httpx_sse import SSEError, aconnect_sse + +from a2a.client.card_resolver import A2ACardResolver +from a2a.client.errors import A2AClientHTTPError, A2AClientJSONError +from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from a2a.client.transports.base import ClientTransport +from a2a.extensions.common import update_extension_header +from a2a.grpc import a2a_pb2 +from a2a.types import ( + AgentCard, + GetTaskPushNotificationConfigParams, + Message, + MessageSendParams, + Task, + TaskArtifactUpdateEvent, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, + TaskStatusUpdateEvent, +) +from a2a.utils import proto_utils +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + + +@trace_class(kind=SpanKind.CLIENT) +class RestTransport(ClientTransport): + """A REST transport for the A2A client.""" + + def __init__( + self, + httpx_client: httpx.AsyncClient, + agent_card: AgentCard | None = None, 
+ url: str | None = None, + interceptors: list[ClientCallInterceptor] | None = None, + extensions: list[str] | None = None, + ): + """Initializes the RestTransport.""" + if url: + self.url = url + elif agent_card: + self.url = agent_card.url + else: + raise ValueError('Must provide either agent_card or url') + if self.url.endswith('/'): + self.url = self.url[:-1] + self.httpx_client = httpx_client + self.agent_card = agent_card + self.interceptors = interceptors or [] + self._needs_extended_card = ( + agent_card.supports_authenticated_extended_card + if agent_card + else True + ) + self.extensions = extensions + + async def _apply_interceptors( + self, + request_payload: dict[str, Any], + http_kwargs: dict[str, Any] | None, + context: ClientCallContext | None, + ) -> tuple[dict[str, Any], dict[str, Any]]: + final_http_kwargs = http_kwargs or {} + final_request_payload = request_payload + # TODO: Implement interceptors for other transports + return final_request_payload, final_http_kwargs + + def _get_http_args( + self, context: ClientCallContext | None + ) -> dict[str, Any] | None: + return context.state.get('http_kwargs') if context else None + + async def _prepare_send_message( + self, + request: MessageSendParams, + context: ClientCallContext | None, + extensions: list[str] | None = None, + ) -> tuple[dict[str, Any], dict[str, Any]]: + pb = a2a_pb2.SendMessageRequest( + request=proto_utils.ToProto.message(request.message), + configuration=proto_utils.ToProto.message_send_configuration( + request.configuration + ), + metadata=( + proto_utils.ToProto.metadata(request.metadata) + if request.metadata + else None + ), + ) + payload = MessageToDict(pb) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + payload, + modified_kwargs, + context, + ) + return payload, modified_kwargs + + async def send_message( + self, + 
request: MessageSendParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task | Message: + """Sends a non-streaming message request to the agent.""" + payload, modified_kwargs = await self._prepare_send_message( + request, context, extensions + ) + response_data = await self._send_post_request( + '/v1/message:send', payload, modified_kwargs + ) + response_pb = a2a_pb2.SendMessageResponse() + ParseDict(response_data, response_pb) + return proto_utils.FromProto.task_or_message(response_pb) + + async def send_message_streaming( + self, + request: MessageSendParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncGenerator[ + Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent | Message + ]: + """Sends a streaming message request to the agent and yields responses as they arrive.""" + payload, modified_kwargs = await self._prepare_send_message( + request, context, extensions + ) + + modified_kwargs.setdefault('timeout', None) + + async with aconnect_sse( + self.httpx_client, + 'POST', + f'{self.url}/v1/message:stream', + json=payload, + **modified_kwargs, + ) as event_source: + try: + event_source.response.raise_for_status() + async for sse in event_source.aiter_sse(): + if not sse.data: + continue + event = a2a_pb2.StreamResponse() + Parse(sse.data, event) + yield proto_utils.FromProto.stream_response(event) + except httpx.HTTPStatusError as e: + raise A2AClientHTTPError(e.response.status_code, str(e)) from e + except SSEError as e: + raise A2AClientHTTPError( + 400, f'Invalid SSE response or protocol error: {e}' + ) from e + except json.JSONDecodeError as e: + raise A2AClientJSONError(str(e)) from e + except httpx.RequestError as e: + raise A2AClientHTTPError( + 503, f'Network communication error: {e}' + ) from e + + async def _send_request(self, request: httpx.Request) -> dict[str, Any]: + try: + response = await self.httpx_client.send(request) + 
response.raise_for_status() + return response.json() + except httpx.HTTPStatusError as e: + raise A2AClientHTTPError(e.response.status_code, str(e)) from e + except json.JSONDecodeError as e: + raise A2AClientJSONError(str(e)) from e + except httpx.RequestError as e: + raise A2AClientHTTPError( + 503, f'Network communication error: {e}' + ) from e + + async def _send_post_request( + self, + target: str, + rpc_request_payload: dict[str, Any], + http_kwargs: dict[str, Any] | None = None, + ) -> dict[str, Any]: + return await self._send_request( + self.httpx_client.build_request( + 'POST', + f'{self.url}{target}', + json=rpc_request_payload, + **(http_kwargs or {}), + ) + ) + + async def _send_get_request( + self, + target: str, + query_params: dict[str, str], + http_kwargs: dict[str, Any] | None = None, + ) -> dict[str, Any]: + return await self._send_request( + self.httpx_client.build_request( + 'GET', + f'{self.url}{target}', + params=query_params, + **(http_kwargs or {}), + ) + ) + + async def get_task( + self, + request: TaskQueryParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task.""" + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + _payload, modified_kwargs = await self._apply_interceptors( + request.model_dump(mode='json', exclude_none=True), + modified_kwargs, + context, + ) + response_data = await self._send_get_request( + f'/v1/tasks/{request.id}', + {'historyLength': str(request.history_length)} + if request.history_length is not None + else {}, + modified_kwargs, + ) + task = a2a_pb2.Task() + ParseDict(response_data, task) + return proto_utils.FromProto.task(task) + + async def cancel_task( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> Task: + """Requests the 
agent to cancel a specific task.""" + pb = a2a_pb2.CancelTaskRequest(name=f'tasks/{request.id}') + payload = MessageToDict(pb) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + payload, + modified_kwargs, + context, + ) + response_data = await self._send_post_request( + f'/v1/tasks/{request.id}:cancel', payload, modified_kwargs + ) + task = a2a_pb2.Task() + ParseDict(response_data, task) + return proto_utils.FromProto.task(task) + + async def set_task_callback( + self, + request: TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task.""" + pb = a2a_pb2.CreateTaskPushNotificationConfigRequest( + parent=f'tasks/{request.task_id}', + config_id=request.push_notification_config.id, + config=proto_utils.ToProto.task_push_notification_config(request), + ) + payload = MessageToDict(pb) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + payload, modified_kwargs, context + ) + response_data = await self._send_post_request( + f'/v1/tasks/{request.task_id}/pushNotificationConfigs', + payload, + modified_kwargs, + ) + config = a2a_pb2.TaskPushNotificationConfig() + ParseDict(response_data, config) + return proto_utils.FromProto.task_push_notification_config(config) + + async def get_task_callback( + self, + request: GetTaskPushNotificationConfigParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration for a specific task.""" + pb = a2a_pb2.GetTaskPushNotificationConfigRequest( + 
name=f'tasks/{request.id}/pushNotificationConfigs/{request.push_notification_config_id}', + ) + payload = MessageToDict(pb) + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + payload, modified_kwargs = await self._apply_interceptors( + payload, + modified_kwargs, + context, + ) + response_data = await self._send_get_request( + f'/v1/tasks/{request.id}/pushNotificationConfigs/{request.push_notification_config_id}', + {}, + modified_kwargs, + ) + config = a2a_pb2.TaskPushNotificationConfig() + ParseDict(response_data, config) + return proto_utils.FromProto.task_push_notification_config(config) + + async def resubscribe( + self, + request: TaskIdParams, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + ) -> AsyncGenerator[ + Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent | Message + ]: + """Reconnects to get task updates.""" + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + modified_kwargs.setdefault('timeout', None) + + async with aconnect_sse( + self.httpx_client, + 'GET', + f'{self.url}/v1/tasks/{request.id}:subscribe', + **modified_kwargs, + ) as event_source: + try: + async for sse in event_source.aiter_sse(): + event = a2a_pb2.StreamResponse() + Parse(sse.data, event) + yield proto_utils.FromProto.stream_response(event) + except SSEError as e: + raise A2AClientHTTPError( + 400, f'Invalid SSE response or protocol error: {e}' + ) from e + except json.JSONDecodeError as e: + raise A2AClientJSONError(str(e)) from e + except httpx.RequestError as e: + raise A2AClientHTTPError( + 503, f'Network communication error: {e}' + ) from e + + async def get_card( + self, + *, + context: ClientCallContext | None = None, + extensions: list[str] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, + ) -> AgentCard: + 
"""Retrieves the agent's card.""" + modified_kwargs = update_extension_header( + self._get_http_args(context), + extensions if extensions is not None else self.extensions, + ) + card = self.agent_card + + if not card: + resolver = A2ACardResolver(self.httpx_client, self.url) + card = await resolver.get_agent_card( + http_kwargs=modified_kwargs, + signature_verifier=signature_verifier, + ) + self._needs_extended_card = ( + card.supports_authenticated_extended_card + ) + self.agent_card = card + + if not self._needs_extended_card: + return card + + _, modified_kwargs = await self._apply_interceptors( + {}, + modified_kwargs, + context, + ) + response_data = await self._send_get_request( + '/v1/card', {}, modified_kwargs + ) + card = AgentCard.model_validate(response_data) + if signature_verifier: + signature_verifier(card) + + self.agent_card = card + self._needs_extended_card = False + return card + + async def close(self) -> None: + """Closes the httpx client.""" + await self.httpx_client.aclose() diff --git a/src/a2a/extensions/__init__.py b/src/a2a/extensions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/a2a/extensions/common.py b/src/a2a/extensions/common.py new file mode 100644 index 000000000..cba3517e4 --- /dev/null +++ b/src/a2a/extensions/common.py @@ -0,0 +1,41 @@ +from typing import Any + +from a2a.types import AgentCard, AgentExtension + + +HTTP_EXTENSION_HEADER = 'X-A2A-Extensions' + + +def get_requested_extensions(values: list[str]) -> set[str]: + """Get the set of requested extensions from an input list. + + This handles the list containing potentially comma-separated values, as + occurs when using a list in an HTTP header. 
+ """ + return { + stripped + for v in values + for ext in v.split(',') + if (stripped := ext.strip()) + } + + +def find_extension_by_uri(card: AgentCard, uri: str) -> AgentExtension | None: + """Find an AgentExtension in an AgentCard given a uri.""" + for ext in card.capabilities.extensions or []: + if ext.uri == uri: + return ext + + return None + + +def update_extension_header( + http_kwargs: dict[str, Any] | None, + extensions: list[str] | None, +) -> dict[str, Any]: + """Update the X-A2A-Extensions header with active extensions.""" + http_kwargs = http_kwargs or {} + if extensions is not None: + headers = http_kwargs.setdefault('headers', {}) + headers[HTTP_EXTENSION_HEADER] = ','.join(extensions) + return http_kwargs diff --git a/src/a2a/grpc/__init__.py b/src/a2a/grpc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/a2a/grpc/a2a_pb2.py b/src/a2a/grpc/a2a_pb2.py new file mode 100644 index 000000000..9b4b73013 --- /dev/null +++ b/src/a2a/grpc/a2a_pb2.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: a2a.proto +# Protobuf Python Version: 5.29.3 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'a2a.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\ta2a.proto\x12\x06\x61\x32\x61.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xde\x01\n\x18SendMessageConfiguration\x12\x32\n\x15\x61\x63\x63\x65pted_output_modes\x18\x01 \x03(\tR\x13\x61\x63\x63\x65ptedOutputModes\x12K\n\x11push_notification\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.PushNotificationConfigR\x10pushNotification\x12%\n\x0ehistory_length\x18\x03 \x01(\x05R\rhistoryLength\x12\x1a\n\x08\x62locking\x18\x04 \x01(\x08R\x08\x62locking\"\xf1\x01\n\x04Task\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12*\n\x06status\x18\x03 
\x01(\x0b\x32\x12.a2a.v1.TaskStatusR\x06status\x12.\n\tartifacts\x18\x04 \x03(\x0b\x32\x10.a2a.v1.ArtifactR\tartifacts\x12)\n\x07history\x18\x05 \x03(\x0b\x32\x0f.a2a.v1.MessageR\x07history\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\x99\x01\n\nTaskStatus\x12\'\n\x05state\x18\x01 \x01(\x0e\x32\x11.a2a.v1.TaskStateR\x05state\x12(\n\x06update\x18\x02 \x01(\x0b\x32\x0f.a2a.v1.MessageR\x07message\x12\x38\n\ttimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ttimestamp\"\xa9\x01\n\x04Part\x12\x14\n\x04text\x18\x01 \x01(\tH\x00R\x04text\x12&\n\x04\x66ile\x18\x02 \x01(\x0b\x32\x10.a2a.v1.FilePartH\x00R\x04\x66ile\x12&\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x10.a2a.v1.DataPartH\x00R\x04\x64\x61ta\x12\x33\n\x08metadata\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadataB\x06\n\x04part\"\x93\x01\n\x08\x46ilePart\x12$\n\rfile_with_uri\x18\x01 \x01(\tH\x00R\x0b\x66ileWithUri\x12(\n\x0f\x66ile_with_bytes\x18\x02 \x01(\x0cH\x00R\rfileWithBytes\x12\x1b\n\tmime_type\x18\x03 \x01(\tR\x08mimeType\x12\x12\n\x04name\x18\x04 \x01(\tR\x04nameB\x06\n\x04\x66ile\"7\n\x08\x44\x61taPart\x12+\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x17.google.protobuf.StructR\x04\x64\x61ta\"\xff\x01\n\x07Message\x12\x1d\n\nmessage_id\x18\x01 \x01(\tR\tmessageId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12\x17\n\x07task_id\x18\x03 \x01(\tR\x06taskId\x12 \n\x04role\x18\x04 \x01(\x0e\x32\x0c.a2a.v1.RoleR\x04role\x12&\n\x07\x63ontent\x18\x05 \x03(\x0b\x32\x0c.a2a.v1.PartR\x07\x63ontent\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1e\n\nextensions\x18\x07 \x03(\tR\nextensions\"\xda\x01\n\x08\x41rtifact\x12\x1f\n\x0b\x61rtifact_id\x18\x01 \x01(\tR\nartifactId\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x04 \x01(\tR\x0b\x64\x65scription\x12\"\n\x05parts\x18\x05 \x03(\x0b\x32\x0c.a2a.v1.PartR\x05parts\x12\x33\n\x08metadata\x18\x06 
\x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1e\n\nextensions\x18\x07 \x03(\tR\nextensions\"\xc6\x01\n\x15TaskStatusUpdateEvent\x12\x17\n\x07task_id\x18\x01 \x01(\tR\x06taskId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12*\n\x06status\x18\x03 \x01(\x0b\x32\x12.a2a.v1.TaskStatusR\x06status\x12\x14\n\x05\x66inal\x18\x04 \x01(\x08R\x05\x66inal\x12\x33\n\x08metadata\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\xeb\x01\n\x17TaskArtifactUpdateEvent\x12\x17\n\x07task_id\x18\x01 \x01(\tR\x06taskId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12,\n\x08\x61rtifact\x18\x03 \x01(\x0b\x32\x10.a2a.v1.ArtifactR\x08\x61rtifact\x12\x16\n\x06\x61ppend\x18\x04 \x01(\x08R\x06\x61ppend\x12\x1d\n\nlast_chunk\x18\x05 \x01(\x08R\tlastChunk\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\x94\x01\n\x16PushNotificationConfig\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x10\n\x03url\x18\x02 \x01(\tR\x03url\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x42\n\x0e\x61uthentication\x18\x04 \x01(\x0b\x32\x1a.a2a.v1.AuthenticationInfoR\x0e\x61uthentication\"P\n\x12\x41uthenticationInfo\x12\x18\n\x07schemes\x18\x01 \x03(\tR\x07schemes\x12 \n\x0b\x63redentials\x18\x02 \x01(\tR\x0b\x63redentials\"@\n\x0e\x41gentInterface\x12\x10\n\x03url\x18\x01 \x01(\tR\x03url\x12\x1c\n\ttransport\x18\x02 \x01(\tR\ttransport\"\xc8\x07\n\tAgentCard\x12)\n\x10protocol_version\x18\x10 \x01(\tR\x0fprotocolVersion\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\x12\x10\n\x03url\x18\x03 \x01(\tR\x03url\x12/\n\x13preferred_transport\x18\x0e \x01(\tR\x12preferredTransport\x12K\n\x15\x61\x64\x64itional_interfaces\x18\x0f \x03(\x0b\x32\x16.a2a.v1.AgentInterfaceR\x14\x61\x64\x64itionalInterfaces\x12\x31\n\x08provider\x18\x04 \x01(\x0b\x32\x15.a2a.v1.AgentProviderR\x08provider\x12\x18\n\x07version\x18\x05 \x01(\tR\x07version\x12+\n\x11\x64ocumentation_url\x18\x06 
\x01(\tR\x10\x64ocumentationUrl\x12=\n\x0c\x63\x61pabilities\x18\x07 \x01(\x0b\x32\x19.a2a.v1.AgentCapabilitiesR\x0c\x63\x61pabilities\x12Q\n\x10security_schemes\x18\x08 \x03(\x0b\x32&.a2a.v1.AgentCard.SecuritySchemesEntryR\x0fsecuritySchemes\x12,\n\x08security\x18\t \x03(\x0b\x32\x10.a2a.v1.SecurityR\x08security\x12.\n\x13\x64\x65\x66\x61ult_input_modes\x18\n \x03(\tR\x11\x64\x65\x66\x61ultInputModes\x12\x30\n\x14\x64\x65\x66\x61ult_output_modes\x18\x0b \x03(\tR\x12\x64\x65\x66\x61ultOutputModes\x12*\n\x06skills\x18\x0c \x03(\x0b\x32\x12.a2a.v1.AgentSkillR\x06skills\x12O\n$supports_authenticated_extended_card\x18\r \x01(\x08R!supportsAuthenticatedExtendedCard\x12:\n\nsignatures\x18\x11 \x03(\x0b\x32\x1a.a2a.v1.AgentCardSignatureR\nsignatures\x12\x19\n\x08icon_url\x18\x12 \x01(\tR\x07iconUrl\x1aZ\n\x14SecuritySchemesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.a2a.v1.SecuritySchemeR\x05value:\x02\x38\x01\"E\n\rAgentProvider\x12\x10\n\x03url\x18\x01 \x01(\tR\x03url\x12\"\n\x0corganization\x18\x02 \x01(\tR\x0corganization\"\x98\x01\n\x11\x41gentCapabilities\x12\x1c\n\tstreaming\x18\x01 \x01(\x08R\tstreaming\x12-\n\x12push_notifications\x18\x02 \x01(\x08R\x11pushNotifications\x12\x36\n\nextensions\x18\x03 \x03(\x0b\x32\x16.a2a.v1.AgentExtensionR\nextensions\"\x91\x01\n\x0e\x41gentExtension\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\x12\x1a\n\x08required\x18\x03 \x01(\x08R\x08required\x12/\n\x06params\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x06params\"\xf4\x01\n\nAgentSkill\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x03 \x01(\tR\x0b\x64\x65scription\x12\x12\n\x04tags\x18\x04 \x03(\tR\x04tags\x12\x1a\n\x08\x65xamples\x18\x05 \x03(\tR\x08\x65xamples\x12\x1f\n\x0binput_modes\x18\x06 \x03(\tR\ninputModes\x12!\n\x0coutput_modes\x18\x07 \x03(\tR\x0boutputModes\x12,\n\x08security\x18\x08 
\x03(\x0b\x32\x10.a2a.v1.SecurityR\x08security\"\x8b\x01\n\x12\x41gentCardSignature\x12!\n\tprotected\x18\x01 \x01(\tB\x03\xe0\x41\x02R\tprotected\x12!\n\tsignature\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tsignature\x12/\n\x06header\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x06header\"\x8a\x01\n\x1aTaskPushNotificationConfig\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12X\n\x18push_notification_config\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.PushNotificationConfigR\x16pushNotificationConfig\" \n\nStringList\x12\x12\n\x04list\x18\x01 \x03(\tR\x04list\"\x93\x01\n\x08Security\x12\x37\n\x07schemes\x18\x01 \x03(\x0b\x32\x1d.a2a.v1.Security.SchemesEntryR\x07schemes\x1aN\n\x0cSchemesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12(\n\x05value\x18\x02 \x01(\x0b\x32\x12.a2a.v1.StringListR\x05value:\x02\x38\x01\"\xe6\x03\n\x0eSecurityScheme\x12U\n\x17\x61pi_key_security_scheme\x18\x01 \x01(\x0b\x32\x1c.a2a.v1.APIKeySecuritySchemeH\x00R\x14\x61piKeySecurityScheme\x12[\n\x19http_auth_security_scheme\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.HTTPAuthSecuritySchemeH\x00R\x16httpAuthSecurityScheme\x12T\n\x16oauth2_security_scheme\x18\x03 \x01(\x0b\x32\x1c.a2a.v1.OAuth2SecuritySchemeH\x00R\x14oauth2SecurityScheme\x12k\n\x1fopen_id_connect_security_scheme\x18\x04 \x01(\x0b\x32#.a2a.v1.OpenIdConnectSecuritySchemeH\x00R\x1bopenIdConnectSecurityScheme\x12S\n\x14mtls_security_scheme\x18\x05 \x01(\x0b\x32\x1f.a2a.v1.MutualTlsSecuritySchemeH\x00R\x12mtlsSecuritySchemeB\x08\n\x06scheme\"h\n\x14\x41PIKeySecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x1a\n\x08location\x18\x02 \x01(\tR\x08location\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\"w\n\x16HTTPAuthSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x16\n\x06scheme\x18\x02 \x01(\tR\x06scheme\x12#\n\rbearer_format\x18\x03 \x01(\tR\x0c\x62\x65\x61rerFormat\"\x92\x01\n\x14OAuth2SecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 
\x01(\tR\x0b\x64\x65scription\x12(\n\x05\x66lows\x18\x02 \x01(\x0b\x32\x12.a2a.v1.OAuthFlowsR\x05\x66lows\x12.\n\x13oauth2_metadata_url\x18\x03 \x01(\tR\x11oauth2MetadataUrl\"n\n\x1bOpenIdConnectSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12-\n\x13open_id_connect_url\x18\x02 \x01(\tR\x10openIdConnectUrl\";\n\x17MutualTlsSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\"\xb0\x02\n\nOAuthFlows\x12S\n\x12\x61uthorization_code\x18\x01 \x01(\x0b\x32\".a2a.v1.AuthorizationCodeOAuthFlowH\x00R\x11\x61uthorizationCode\x12S\n\x12\x63lient_credentials\x18\x02 \x01(\x0b\x32\".a2a.v1.ClientCredentialsOAuthFlowH\x00R\x11\x63lientCredentials\x12\x37\n\x08implicit\x18\x03 \x01(\x0b\x32\x19.a2a.v1.ImplicitOAuthFlowH\x00R\x08implicit\x12\x37\n\x08password\x18\x04 \x01(\x0b\x32\x19.a2a.v1.PasswordOAuthFlowH\x00R\x08passwordB\x06\n\x04\x66low\"\x8a\x02\n\x1a\x41uthorizationCodeOAuthFlow\x12+\n\x11\x61uthorization_url\x18\x01 \x01(\tR\x10\x61uthorizationUrl\x12\x1b\n\ttoken_url\x18\x02 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x03 \x01(\tR\nrefreshUrl\x12\x46\n\x06scopes\x18\x04 \x03(\x0b\x32..a2a.v1.AuthorizationCodeOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xdd\x01\n\x1a\x43lientCredentialsOAuthFlow\x12\x1b\n\ttoken_url\x18\x01 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12\x46\n\x06scopes\x18\x03 \x03(\x0b\x32..a2a.v1.ClientCredentialsOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xdb\x01\n\x11ImplicitOAuthFlow\x12+\n\x11\x61uthorization_url\x18\x01 \x01(\tR\x10\x61uthorizationUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12=\n\x06scopes\x18\x03 
\x03(\x0b\x32%.a2a.v1.ImplicitOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xcb\x01\n\x11PasswordOAuthFlow\x12\x1b\n\ttoken_url\x18\x01 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12=\n\x06scopes\x18\x03 \x03(\x0b\x32%.a2a.v1.PasswordOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xc1\x01\n\x12SendMessageRequest\x12.\n\x07request\x18\x01 \x01(\x0b\x32\x0f.a2a.v1.MessageB\x03\xe0\x41\x02R\x07message\x12\x46\n\rconfiguration\x18\x02 \x01(\x0b\x32 .a2a.v1.SendMessageConfigurationR\rconfiguration\x12\x33\n\x08metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"P\n\x0eGetTaskRequest\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0ehistory_length\x18\x02 \x01(\x05R\rhistoryLength\"\'\n\x11\x43\x61ncelTaskRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\":\n$GetTaskPushNotificationConfigRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"=\n\'DeleteTaskPushNotificationConfigRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xa9\x01\n\'CreateTaskPushNotificationConfigRequest\x12\x1b\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06parent\x12 \n\tconfig_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x08\x63onfigId\x12?\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\".a2a.v1.TaskPushNotificationConfigB\x03\xe0\x41\x02R\x06\x63onfig\"-\n\x17TaskSubscriptionRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"{\n%ListTaskPushNotificationConfigRequest\x12\x16\n\x06parent\x18\x01 \x01(\tR\x06parent\x12\x1b\n\tpage_size\x18\x02 \x01(\x05R\x08pageSize\x12\x1d\n\npage_token\x18\x03 \x01(\tR\tpageToken\"\x15\n\x13GetAgentCardRequest\"m\n\x13SendMessageResponse\x12\"\n\x04task\x18\x01 \x01(\x0b\x32\x0c.a2a.v1.TaskH\x00R\x04task\x12\'\n\x03msg\x18\x02 
\x01(\x0b\x32\x0f.a2a.v1.MessageH\x00R\x07messageB\t\n\x07payload\"\xfa\x01\n\x0eStreamResponse\x12\"\n\x04task\x18\x01 \x01(\x0b\x32\x0c.a2a.v1.TaskH\x00R\x04task\x12\'\n\x03msg\x18\x02 \x01(\x0b\x32\x0f.a2a.v1.MessageH\x00R\x07message\x12\x44\n\rstatus_update\x18\x03 \x01(\x0b\x32\x1d.a2a.v1.TaskStatusUpdateEventH\x00R\x0cstatusUpdate\x12J\n\x0f\x61rtifact_update\x18\x04 \x01(\x0b\x32\x1f.a2a.v1.TaskArtifactUpdateEventH\x00R\x0e\x61rtifactUpdateB\t\n\x07payload\"\x8e\x01\n&ListTaskPushNotificationConfigResponse\x12<\n\x07\x63onfigs\x18\x01 \x03(\x0b\x32\".a2a.v1.TaskPushNotificationConfigR\x07\x63onfigs\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken*\xfa\x01\n\tTaskState\x12\x1a\n\x16TASK_STATE_UNSPECIFIED\x10\x00\x12\x18\n\x14TASK_STATE_SUBMITTED\x10\x01\x12\x16\n\x12TASK_STATE_WORKING\x10\x02\x12\x18\n\x14TASK_STATE_COMPLETED\x10\x03\x12\x15\n\x11TASK_STATE_FAILED\x10\x04\x12\x18\n\x14TASK_STATE_CANCELLED\x10\x05\x12\x1d\n\x19TASK_STATE_INPUT_REQUIRED\x10\x06\x12\x17\n\x13TASK_STATE_REJECTED\x10\x07\x12\x1c\n\x18TASK_STATE_AUTH_REQUIRED\x10\x08*;\n\x04Role\x12\x14\n\x10ROLE_UNSPECIFIED\x10\x00\x12\r\n\tROLE_USER\x10\x01\x12\x0e\n\nROLE_AGENT\x10\x02\x32\xbb\n\n\nA2AService\x12\x63\n\x0bSendMessage\x12\x1a.a2a.v1.SendMessageRequest\x1a\x1b.a2a.v1.SendMessageResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v1/message:send:\x01*\x12k\n\x14SendStreamingMessage\x12\x1a.a2a.v1.SendMessageRequest\x1a\x16.a2a.v1.StreamResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\"\x12/v1/message:stream:\x01*0\x01\x12R\n\x07GetTask\x12\x16.a2a.v1.GetTaskRequest\x1a\x0c.a2a.v1.Task\"!\xda\x41\x04name\x82\xd3\xe4\x93\x02\x14\x12\x12/v1/{name=tasks/*}\x12[\n\nCancelTask\x12\x19.a2a.v1.CancelTaskRequest\x1a\x0c.a2a.v1.Task\"$\x82\xd3\xe4\x93\x02\x1e\"\x19/v1/{name=tasks/*}:cancel:\x01*\x12s\n\x10TaskSubscription\x12\x1f.a2a.v1.TaskSubscriptionRequest\x1a\x16.a2a.v1.StreamResponse\"$\x82\xd3\xe4\x93\x02\x1e\x12\x1c/v1/{name=tasks/*}:subscribe0\x01\x12\xc5\x01\n 
CreateTaskPushNotificationConfig\x12/.a2a.v1.CreateTaskPushNotificationConfigRequest\x1a\".a2a.v1.TaskPushNotificationConfig\"L\xda\x41\rparent,config\x82\xd3\xe4\x93\x02\x36\",/v1/{parent=tasks/*/pushNotificationConfigs}:\x06\x63onfig\x12\xae\x01\n\x1dGetTaskPushNotificationConfig\x12,.a2a.v1.GetTaskPushNotificationConfigRequest\x1a\".a2a.v1.TaskPushNotificationConfig\";\xda\x41\x04name\x82\xd3\xe4\x93\x02.\x12,/v1/{name=tasks/*/pushNotificationConfigs/*}\x12\xbe\x01\n\x1eListTaskPushNotificationConfig\x12-.a2a.v1.ListTaskPushNotificationConfigRequest\x1a..a2a.v1.ListTaskPushNotificationConfigResponse\"=\xda\x41\x06parent\x82\xd3\xe4\x93\x02.\x12,/v1/{parent=tasks/*}/pushNotificationConfigs\x12P\n\x0cGetAgentCard\x12\x1b.a2a.v1.GetAgentCardRequest\x1a\x11.a2a.v1.AgentCard\"\x10\x82\xd3\xe4\x93\x02\n\x12\x08/v1/card\x12\xa8\x01\n DeleteTaskPushNotificationConfig\x12/.a2a.v1.DeleteTaskPushNotificationConfigRequest\x1a\x16.google.protobuf.Empty\";\xda\x41\x04name\x82\xd3\xe4\x93\x02.*,/v1/{name=tasks/*/pushNotificationConfigs/*}Bi\n\ncom.a2a.v1B\x08\x41\x32\x61ProtoP\x01Z\x18google.golang.org/a2a/v1\xa2\x02\x03\x41XX\xaa\x02\x06\x41\x32\x61.V1\xca\x02\x06\x41\x32\x61\\V1\xe2\x02\x12\x41\x32\x61\\V1\\GPBMetadata\xea\x02\x07\x41\x32\x61::V1b\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'a2a_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\ncom.a2a.v1B\010A2aProtoP\001Z\030google.golang.org/a2a/v1\242\002\003AXX\252\002\006A2a.V1\312\002\006A2a\\V1\342\002\022A2a\\V1\\GPBMetadata\352\002\007A2a::V1' + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._loaded_options = None + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_options = b'8\001' + _globals['_AGENTCARDSIGNATURE'].fields_by_name['protected']._loaded_options = None + 
_globals['_AGENTCARDSIGNATURE'].fields_by_name['protected']._serialized_options = b'\340A\002' + _globals['_AGENTCARDSIGNATURE'].fields_by_name['signature']._loaded_options = None + _globals['_AGENTCARDSIGNATURE'].fields_by_name['signature']._serialized_options = b'\340A\002' + _globals['_SECURITY_SCHEMESENTRY']._loaded_options = None + _globals['_SECURITY_SCHEMESENTRY']._serialized_options = b'8\001' + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_SENDMESSAGEREQUEST'].fields_by_name['request']._loaded_options = None + _globals['_SENDMESSAGEREQUEST'].fields_by_name['request']._serialized_options = b'\340A\002' + _globals['_GETTASKREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_GETTASKREQUEST'].fields_by_name['name']._serialized_options = b'\340A\002' + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['parent']._loaded_options = None + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['parent']._serialized_options = b'\340A\002' + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config_id']._loaded_options = None + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config_id']._serialized_options = b'\340A\002' + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config']._loaded_options = None + 
_globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config']._serialized_options = b'\340A\002' + _globals['_A2ASERVICE'].methods_by_name['SendMessage']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['SendMessage']._serialized_options = b'\202\323\344\223\002\025\"\020/v1/message:send:\001*' + _globals['_A2ASERVICE'].methods_by_name['SendStreamingMessage']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['SendStreamingMessage']._serialized_options = b'\202\323\344\223\002\027\"\022/v1/message:stream:\001*' + _globals['_A2ASERVICE'].methods_by_name['GetTask']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetTask']._serialized_options = b'\332A\004name\202\323\344\223\002\024\022\022/v1/{name=tasks/*}' + _globals['_A2ASERVICE'].methods_by_name['CancelTask']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['CancelTask']._serialized_options = b'\202\323\344\223\002\036\"\031/v1/{name=tasks/*}:cancel:\001*' + _globals['_A2ASERVICE'].methods_by_name['TaskSubscription']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['TaskSubscription']._serialized_options = b'\202\323\344\223\002\036\022\034/v1/{name=tasks/*}:subscribe' + _globals['_A2ASERVICE'].methods_by_name['CreateTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['CreateTaskPushNotificationConfig']._serialized_options = b'\332A\rparent,config\202\323\344\223\0026\",/v1/{parent=tasks/*/pushNotificationConfigs}:\006config' + _globals['_A2ASERVICE'].methods_by_name['GetTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetTaskPushNotificationConfig']._serialized_options = b'\332A\004name\202\323\344\223\002.\022,/v1/{name=tasks/*/pushNotificationConfigs/*}' + _globals['_A2ASERVICE'].methods_by_name['ListTaskPushNotificationConfig']._loaded_options = None + 
_globals['_A2ASERVICE'].methods_by_name['ListTaskPushNotificationConfig']._serialized_options = b'\332A\006parent\202\323\344\223\002.\022,/v1/{parent=tasks/*}/pushNotificationConfigs' + _globals['_A2ASERVICE'].methods_by_name['GetAgentCard']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetAgentCard']._serialized_options = b'\202\323\344\223\002\n\022\010/v1/card' + _globals['_A2ASERVICE'].methods_by_name['DeleteTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['DeleteTaskPushNotificationConfig']._serialized_options = b'\332A\004name\202\323\344\223\002.*,/v1/{name=tasks/*/pushNotificationConfigs/*}' + _globals['_TASKSTATE']._serialized_start=8066 + _globals['_TASKSTATE']._serialized_end=8316 + _globals['_ROLE']._serialized_start=8318 + _globals['_ROLE']._serialized_end=8377 + _globals['_SENDMESSAGECONFIGURATION']._serialized_start=202 + _globals['_SENDMESSAGECONFIGURATION']._serialized_end=424 + _globals['_TASK']._serialized_start=427 + _globals['_TASK']._serialized_end=668 + _globals['_TASKSTATUS']._serialized_start=671 + _globals['_TASKSTATUS']._serialized_end=824 + _globals['_PART']._serialized_start=827 + _globals['_PART']._serialized_end=996 + _globals['_FILEPART']._serialized_start=999 + _globals['_FILEPART']._serialized_end=1146 + _globals['_DATAPART']._serialized_start=1148 + _globals['_DATAPART']._serialized_end=1203 + _globals['_MESSAGE']._serialized_start=1206 + _globals['_MESSAGE']._serialized_end=1461 + _globals['_ARTIFACT']._serialized_start=1464 + _globals['_ARTIFACT']._serialized_end=1682 + _globals['_TASKSTATUSUPDATEEVENT']._serialized_start=1685 + _globals['_TASKSTATUSUPDATEEVENT']._serialized_end=1883 + _globals['_TASKARTIFACTUPDATEEVENT']._serialized_start=1886 + _globals['_TASKARTIFACTUPDATEEVENT']._serialized_end=2121 + _globals['_PUSHNOTIFICATIONCONFIG']._serialized_start=2124 + _globals['_PUSHNOTIFICATIONCONFIG']._serialized_end=2272 + 
_globals['_AUTHENTICATIONINFO']._serialized_start=2274 + _globals['_AUTHENTICATIONINFO']._serialized_end=2354 + _globals['_AGENTINTERFACE']._serialized_start=2356 + _globals['_AGENTINTERFACE']._serialized_end=2420 + _globals['_AGENTCARD']._serialized_start=2423 + _globals['_AGENTCARD']._serialized_end=3391 + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_start=3301 + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_end=3391 + _globals['_AGENTPROVIDER']._serialized_start=3393 + _globals['_AGENTPROVIDER']._serialized_end=3462 + _globals['_AGENTCAPABILITIES']._serialized_start=3465 + _globals['_AGENTCAPABILITIES']._serialized_end=3617 + _globals['_AGENTEXTENSION']._serialized_start=3620 + _globals['_AGENTEXTENSION']._serialized_end=3765 + _globals['_AGENTSKILL']._serialized_start=3768 + _globals['_AGENTSKILL']._serialized_end=4012 + _globals['_AGENTCARDSIGNATURE']._serialized_start=4015 + _globals['_AGENTCARDSIGNATURE']._serialized_end=4154 + _globals['_TASKPUSHNOTIFICATIONCONFIG']._serialized_start=4157 + _globals['_TASKPUSHNOTIFICATIONCONFIG']._serialized_end=4295 + _globals['_STRINGLIST']._serialized_start=4297 + _globals['_STRINGLIST']._serialized_end=4329 + _globals['_SECURITY']._serialized_start=4332 + _globals['_SECURITY']._serialized_end=4479 + _globals['_SECURITY_SCHEMESENTRY']._serialized_start=4401 + _globals['_SECURITY_SCHEMESENTRY']._serialized_end=4479 + _globals['_SECURITYSCHEME']._serialized_start=4482 + _globals['_SECURITYSCHEME']._serialized_end=4968 + _globals['_APIKEYSECURITYSCHEME']._serialized_start=4970 + _globals['_APIKEYSECURITYSCHEME']._serialized_end=5074 + _globals['_HTTPAUTHSECURITYSCHEME']._serialized_start=5076 + _globals['_HTTPAUTHSECURITYSCHEME']._serialized_end=5195 + _globals['_OAUTH2SECURITYSCHEME']._serialized_start=5198 + _globals['_OAUTH2SECURITYSCHEME']._serialized_end=5344 + _globals['_OPENIDCONNECTSECURITYSCHEME']._serialized_start=5346 + _globals['_OPENIDCONNECTSECURITYSCHEME']._serialized_end=5456 + 
_globals['_MUTUALTLSSECURITYSCHEME']._serialized_start=5458 + _globals['_MUTUALTLSSECURITYSCHEME']._serialized_end=5517 + _globals['_OAUTHFLOWS']._serialized_start=5520 + _globals['_OAUTHFLOWS']._serialized_end=5824 + _globals['_AUTHORIZATIONCODEOAUTHFLOW']._serialized_start=5827 + _globals['_AUTHORIZATIONCODEOAUTHFLOW']._serialized_end=6093 + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_start=6036 + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_end=6093 + _globals['_CLIENTCREDENTIALSOAUTHFLOW']._serialized_start=6096 + _globals['_CLIENTCREDENTIALSOAUTHFLOW']._serialized_end=6317 + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_start=6036 + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_end=6093 + _globals['_IMPLICITOAUTHFLOW']._serialized_start=6320 + _globals['_IMPLICITOAUTHFLOW']._serialized_end=6539 + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_start=6036 + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_end=6093 + _globals['_PASSWORDOAUTHFLOW']._serialized_start=6542 + _globals['_PASSWORDOAUTHFLOW']._serialized_end=6745 + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_start=6036 + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_end=6093 + _globals['_SENDMESSAGEREQUEST']._serialized_start=6748 + _globals['_SENDMESSAGEREQUEST']._serialized_end=6941 + _globals['_GETTASKREQUEST']._serialized_start=6943 + _globals['_GETTASKREQUEST']._serialized_end=7023 + _globals['_CANCELTASKREQUEST']._serialized_start=7025 + _globals['_CANCELTASKREQUEST']._serialized_end=7064 + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7066 + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7124 + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7126 + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7187 + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7190 + 
_globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7359 + _globals['_TASKSUBSCRIPTIONREQUEST']._serialized_start=7361 + _globals['_TASKSUBSCRIPTIONREQUEST']._serialized_end=7406 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7408 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7531 + _globals['_GETAGENTCARDREQUEST']._serialized_start=7533 + _globals['_GETAGENTCARDREQUEST']._serialized_end=7554 + _globals['_SENDMESSAGERESPONSE']._serialized_start=7556 + _globals['_SENDMESSAGERESPONSE']._serialized_end=7665 + _globals['_STREAMRESPONSE']._serialized_start=7668 + _globals['_STREAMRESPONSE']._serialized_end=7918 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGRESPONSE']._serialized_start=7921 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGRESPONSE']._serialized_end=8063 + _globals['_A2ASERVICE']._serialized_start=8380 + _globals['_A2ASERVICE']._serialized_end=9719 +# @@protoc_insertion_point(module_scope) diff --git a/src/a2a/grpc/a2a_pb2.pyi b/src/a2a/grpc/a2a_pb2.pyi new file mode 100644 index 000000000..06005e850 --- /dev/null +++ b/src/a2a/grpc/a2a_pb2.pyi @@ -0,0 +1,574 @@ +import datetime + +from google.api import annotations_pb2 as _annotations_pb2 +from google.api import client_pb2 as _client_pb2 +from google.api import field_behavior_pb2 as _field_behavior_pb2 +from google.protobuf import empty_pb2 as _empty_pb2 +from google.protobuf import struct_pb2 as _struct_pb2 +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class TaskState(int, 
metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + TASK_STATE_UNSPECIFIED: _ClassVar[TaskState] + TASK_STATE_SUBMITTED: _ClassVar[TaskState] + TASK_STATE_WORKING: _ClassVar[TaskState] + TASK_STATE_COMPLETED: _ClassVar[TaskState] + TASK_STATE_FAILED: _ClassVar[TaskState] + TASK_STATE_CANCELLED: _ClassVar[TaskState] + TASK_STATE_INPUT_REQUIRED: _ClassVar[TaskState] + TASK_STATE_REJECTED: _ClassVar[TaskState] + TASK_STATE_AUTH_REQUIRED: _ClassVar[TaskState] + +class Role(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + ROLE_UNSPECIFIED: _ClassVar[Role] + ROLE_USER: _ClassVar[Role] + ROLE_AGENT: _ClassVar[Role] +TASK_STATE_UNSPECIFIED: TaskState +TASK_STATE_SUBMITTED: TaskState +TASK_STATE_WORKING: TaskState +TASK_STATE_COMPLETED: TaskState +TASK_STATE_FAILED: TaskState +TASK_STATE_CANCELLED: TaskState +TASK_STATE_INPUT_REQUIRED: TaskState +TASK_STATE_REJECTED: TaskState +TASK_STATE_AUTH_REQUIRED: TaskState +ROLE_UNSPECIFIED: Role +ROLE_USER: Role +ROLE_AGENT: Role + +class SendMessageConfiguration(_message.Message): + __slots__ = ("accepted_output_modes", "push_notification", "history_length", "blocking") + ACCEPTED_OUTPUT_MODES_FIELD_NUMBER: _ClassVar[int] + PUSH_NOTIFICATION_FIELD_NUMBER: _ClassVar[int] + HISTORY_LENGTH_FIELD_NUMBER: _ClassVar[int] + BLOCKING_FIELD_NUMBER: _ClassVar[int] + accepted_output_modes: _containers.RepeatedScalarFieldContainer[str] + push_notification: PushNotificationConfig + history_length: int + blocking: bool + def __init__(self, accepted_output_modes: _Optional[_Iterable[str]] = ..., push_notification: _Optional[_Union[PushNotificationConfig, _Mapping]] = ..., history_length: _Optional[int] = ..., blocking: _Optional[bool] = ...) -> None: ... 
+ +class Task(_message.Message): + __slots__ = ("id", "context_id", "status", "artifacts", "history", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + ARTIFACTS_FIELD_NUMBER: _ClassVar[int] + HISTORY_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + context_id: str + status: TaskStatus + artifacts: _containers.RepeatedCompositeFieldContainer[Artifact] + history: _containers.RepeatedCompositeFieldContainer[Message] + metadata: _struct_pb2.Struct + def __init__(self, id: _Optional[str] = ..., context_id: _Optional[str] = ..., status: _Optional[_Union[TaskStatus, _Mapping]] = ..., artifacts: _Optional[_Iterable[_Union[Artifact, _Mapping]]] = ..., history: _Optional[_Iterable[_Union[Message, _Mapping]]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class TaskStatus(_message.Message): + __slots__ = ("state", "update", "timestamp") + STATE_FIELD_NUMBER: _ClassVar[int] + UPDATE_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + state: TaskState + update: Message + timestamp: _timestamp_pb2.Timestamp + def __init__(self, state: _Optional[_Union[TaskState, str]] = ..., update: _Optional[_Union[Message, _Mapping]] = ..., timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + +class Part(_message.Message): + __slots__ = ("text", "file", "data", "metadata") + TEXT_FIELD_NUMBER: _ClassVar[int] + FILE_FIELD_NUMBER: _ClassVar[int] + DATA_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + text: str + file: FilePart + data: DataPart + metadata: _struct_pb2.Struct + def __init__(self, text: _Optional[str] = ..., file: _Optional[_Union[FilePart, _Mapping]] = ..., data: _Optional[_Union[DataPart, _Mapping]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... 
+ +class FilePart(_message.Message): + __slots__ = ("file_with_uri", "file_with_bytes", "mime_type", "name") + FILE_WITH_URI_FIELD_NUMBER: _ClassVar[int] + FILE_WITH_BYTES_FIELD_NUMBER: _ClassVar[int] + MIME_TYPE_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + file_with_uri: str + file_with_bytes: bytes + mime_type: str + name: str + def __init__(self, file_with_uri: _Optional[str] = ..., file_with_bytes: _Optional[bytes] = ..., mime_type: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... + +class DataPart(_message.Message): + __slots__ = ("data",) + DATA_FIELD_NUMBER: _ClassVar[int] + data: _struct_pb2.Struct + def __init__(self, data: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class Message(_message.Message): + __slots__ = ("message_id", "context_id", "task_id", "role", "content", "metadata", "extensions") + MESSAGE_ID_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + TASK_ID_FIELD_NUMBER: _ClassVar[int] + ROLE_FIELD_NUMBER: _ClassVar[int] + CONTENT_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + EXTENSIONS_FIELD_NUMBER: _ClassVar[int] + message_id: str + context_id: str + task_id: str + role: Role + content: _containers.RepeatedCompositeFieldContainer[Part] + metadata: _struct_pb2.Struct + extensions: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, message_id: _Optional[str] = ..., context_id: _Optional[str] = ..., task_id: _Optional[str] = ..., role: _Optional[_Union[Role, str]] = ..., content: _Optional[_Iterable[_Union[Part, _Mapping]]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., extensions: _Optional[_Iterable[str]] = ...) -> None: ... 
+ +class Artifact(_message.Message): + __slots__ = ("artifact_id", "name", "description", "parts", "metadata", "extensions") + ARTIFACT_ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + PARTS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + EXTENSIONS_FIELD_NUMBER: _ClassVar[int] + artifact_id: str + name: str + description: str + parts: _containers.RepeatedCompositeFieldContainer[Part] + metadata: _struct_pb2.Struct + extensions: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, artifact_id: _Optional[str] = ..., name: _Optional[str] = ..., description: _Optional[str] = ..., parts: _Optional[_Iterable[_Union[Part, _Mapping]]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., extensions: _Optional[_Iterable[str]] = ...) -> None: ... + +class TaskStatusUpdateEvent(_message.Message): + __slots__ = ("task_id", "context_id", "status", "final", "metadata") + TASK_ID_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + FINAL_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + task_id: str + context_id: str + status: TaskStatus + final: bool + metadata: _struct_pb2.Struct + def __init__(self, task_id: _Optional[str] = ..., context_id: _Optional[str] = ..., status: _Optional[_Union[TaskStatus, _Mapping]] = ..., final: _Optional[bool] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... 
+ +class TaskArtifactUpdateEvent(_message.Message): + __slots__ = ("task_id", "context_id", "artifact", "append", "last_chunk", "metadata") + TASK_ID_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + ARTIFACT_FIELD_NUMBER: _ClassVar[int] + APPEND_FIELD_NUMBER: _ClassVar[int] + LAST_CHUNK_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + task_id: str + context_id: str + artifact: Artifact + append: bool + last_chunk: bool + metadata: _struct_pb2.Struct + def __init__(self, task_id: _Optional[str] = ..., context_id: _Optional[str] = ..., artifact: _Optional[_Union[Artifact, _Mapping]] = ..., append: _Optional[bool] = ..., last_chunk: _Optional[bool] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class PushNotificationConfig(_message.Message): + __slots__ = ("id", "url", "token", "authentication") + ID_FIELD_NUMBER: _ClassVar[int] + URL_FIELD_NUMBER: _ClassVar[int] + TOKEN_FIELD_NUMBER: _ClassVar[int] + AUTHENTICATION_FIELD_NUMBER: _ClassVar[int] + id: str + url: str + token: str + authentication: AuthenticationInfo + def __init__(self, id: _Optional[str] = ..., url: _Optional[str] = ..., token: _Optional[str] = ..., authentication: _Optional[_Union[AuthenticationInfo, _Mapping]] = ...) -> None: ... + +class AuthenticationInfo(_message.Message): + __slots__ = ("schemes", "credentials") + SCHEMES_FIELD_NUMBER: _ClassVar[int] + CREDENTIALS_FIELD_NUMBER: _ClassVar[int] + schemes: _containers.RepeatedScalarFieldContainer[str] + credentials: str + def __init__(self, schemes: _Optional[_Iterable[str]] = ..., credentials: _Optional[str] = ...) -> None: ... + +class AgentInterface(_message.Message): + __slots__ = ("url", "transport") + URL_FIELD_NUMBER: _ClassVar[int] + TRANSPORT_FIELD_NUMBER: _ClassVar[int] + url: str + transport: str + def __init__(self, url: _Optional[str] = ..., transport: _Optional[str] = ...) -> None: ... 
+ +class AgentCard(_message.Message): + __slots__ = ("protocol_version", "name", "description", "url", "preferred_transport", "additional_interfaces", "provider", "version", "documentation_url", "capabilities", "security_schemes", "security", "default_input_modes", "default_output_modes", "skills", "supports_authenticated_extended_card", "signatures", "icon_url") + class SecuritySchemesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SecurityScheme + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SecurityScheme, _Mapping]] = ...) -> None: ... + PROTOCOL_VERSION_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + URL_FIELD_NUMBER: _ClassVar[int] + PREFERRED_TRANSPORT_FIELD_NUMBER: _ClassVar[int] + ADDITIONAL_INTERFACES_FIELD_NUMBER: _ClassVar[int] + PROVIDER_FIELD_NUMBER: _ClassVar[int] + VERSION_FIELD_NUMBER: _ClassVar[int] + DOCUMENTATION_URL_FIELD_NUMBER: _ClassVar[int] + CAPABILITIES_FIELD_NUMBER: _ClassVar[int] + SECURITY_SCHEMES_FIELD_NUMBER: _ClassVar[int] + SECURITY_FIELD_NUMBER: _ClassVar[int] + DEFAULT_INPUT_MODES_FIELD_NUMBER: _ClassVar[int] + DEFAULT_OUTPUT_MODES_FIELD_NUMBER: _ClassVar[int] + SKILLS_FIELD_NUMBER: _ClassVar[int] + SUPPORTS_AUTHENTICATED_EXTENDED_CARD_FIELD_NUMBER: _ClassVar[int] + SIGNATURES_FIELD_NUMBER: _ClassVar[int] + ICON_URL_FIELD_NUMBER: _ClassVar[int] + protocol_version: str + name: str + description: str + url: str + preferred_transport: str + additional_interfaces: _containers.RepeatedCompositeFieldContainer[AgentInterface] + provider: AgentProvider + version: str + documentation_url: str + capabilities: AgentCapabilities + security_schemes: _containers.MessageMap[str, SecurityScheme] + security: _containers.RepeatedCompositeFieldContainer[Security] + default_input_modes: _containers.RepeatedScalarFieldContainer[str] + default_output_modes: 
_containers.RepeatedScalarFieldContainer[str] + skills: _containers.RepeatedCompositeFieldContainer[AgentSkill] + supports_authenticated_extended_card: bool + signatures: _containers.RepeatedCompositeFieldContainer[AgentCardSignature] + icon_url: str + def __init__(self, protocol_version: _Optional[str] = ..., name: _Optional[str] = ..., description: _Optional[str] = ..., url: _Optional[str] = ..., preferred_transport: _Optional[str] = ..., additional_interfaces: _Optional[_Iterable[_Union[AgentInterface, _Mapping]]] = ..., provider: _Optional[_Union[AgentProvider, _Mapping]] = ..., version: _Optional[str] = ..., documentation_url: _Optional[str] = ..., capabilities: _Optional[_Union[AgentCapabilities, _Mapping]] = ..., security_schemes: _Optional[_Mapping[str, SecurityScheme]] = ..., security: _Optional[_Iterable[_Union[Security, _Mapping]]] = ..., default_input_modes: _Optional[_Iterable[str]] = ..., default_output_modes: _Optional[_Iterable[str]] = ..., skills: _Optional[_Iterable[_Union[AgentSkill, _Mapping]]] = ..., supports_authenticated_extended_card: _Optional[bool] = ..., signatures: _Optional[_Iterable[_Union[AgentCardSignature, _Mapping]]] = ..., icon_url: _Optional[str] = ...) -> None: ... + +class AgentProvider(_message.Message): + __slots__ = ("url", "organization") + URL_FIELD_NUMBER: _ClassVar[int] + ORGANIZATION_FIELD_NUMBER: _ClassVar[int] + url: str + organization: str + def __init__(self, url: _Optional[str] = ..., organization: _Optional[str] = ...) -> None: ... 
+ +class AgentCapabilities(_message.Message): + __slots__ = ("streaming", "push_notifications", "extensions") + STREAMING_FIELD_NUMBER: _ClassVar[int] + PUSH_NOTIFICATIONS_FIELD_NUMBER: _ClassVar[int] + EXTENSIONS_FIELD_NUMBER: _ClassVar[int] + streaming: bool + push_notifications: bool + extensions: _containers.RepeatedCompositeFieldContainer[AgentExtension] + def __init__(self, streaming: _Optional[bool] = ..., push_notifications: _Optional[bool] = ..., extensions: _Optional[_Iterable[_Union[AgentExtension, _Mapping]]] = ...) -> None: ... + +class AgentExtension(_message.Message): + __slots__ = ("uri", "description", "required", "params") + URI_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + REQUIRED_FIELD_NUMBER: _ClassVar[int] + PARAMS_FIELD_NUMBER: _ClassVar[int] + uri: str + description: str + required: bool + params: _struct_pb2.Struct + def __init__(self, uri: _Optional[str] = ..., description: _Optional[str] = ..., required: _Optional[bool] = ..., params: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... 
+ +class AgentSkill(_message.Message): + __slots__ = ("id", "name", "description", "tags", "examples", "input_modes", "output_modes", "security") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + TAGS_FIELD_NUMBER: _ClassVar[int] + EXAMPLES_FIELD_NUMBER: _ClassVar[int] + INPUT_MODES_FIELD_NUMBER: _ClassVar[int] + OUTPUT_MODES_FIELD_NUMBER: _ClassVar[int] + SECURITY_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + description: str + tags: _containers.RepeatedScalarFieldContainer[str] + examples: _containers.RepeatedScalarFieldContainer[str] + input_modes: _containers.RepeatedScalarFieldContainer[str] + output_modes: _containers.RepeatedScalarFieldContainer[str] + security: _containers.RepeatedCompositeFieldContainer[Security] + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[str]] = ..., examples: _Optional[_Iterable[str]] = ..., input_modes: _Optional[_Iterable[str]] = ..., output_modes: _Optional[_Iterable[str]] = ..., security: _Optional[_Iterable[_Union[Security, _Mapping]]] = ...) -> None: ... + +class AgentCardSignature(_message.Message): + __slots__ = ("protected", "signature", "header") + PROTECTED_FIELD_NUMBER: _ClassVar[int] + SIGNATURE_FIELD_NUMBER: _ClassVar[int] + HEADER_FIELD_NUMBER: _ClassVar[int] + protected: str + signature: str + header: _struct_pb2.Struct + def __init__(self, protected: _Optional[str] = ..., signature: _Optional[str] = ..., header: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... 
+ +class TaskPushNotificationConfig(_message.Message): + __slots__ = ("name", "push_notification_config") + NAME_FIELD_NUMBER: _ClassVar[int] + PUSH_NOTIFICATION_CONFIG_FIELD_NUMBER: _ClassVar[int] + name: str + push_notification_config: PushNotificationConfig + def __init__(self, name: _Optional[str] = ..., push_notification_config: _Optional[_Union[PushNotificationConfig, _Mapping]] = ...) -> None: ... + +class StringList(_message.Message): + __slots__ = ("list",) + LIST_FIELD_NUMBER: _ClassVar[int] + list: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, list: _Optional[_Iterable[str]] = ...) -> None: ... + +class Security(_message.Message): + __slots__ = ("schemes",) + class SchemesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: StringList + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[StringList, _Mapping]] = ...) -> None: ... + SCHEMES_FIELD_NUMBER: _ClassVar[int] + schemes: _containers.MessageMap[str, StringList] + def __init__(self, schemes: _Optional[_Mapping[str, StringList]] = ...) -> None: ... 
+ +class SecurityScheme(_message.Message): + __slots__ = ("api_key_security_scheme", "http_auth_security_scheme", "oauth2_security_scheme", "open_id_connect_security_scheme", "mtls_security_scheme") + API_KEY_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + HTTP_AUTH_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + OAUTH2_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + OPEN_ID_CONNECT_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + MTLS_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + api_key_security_scheme: APIKeySecurityScheme + http_auth_security_scheme: HTTPAuthSecurityScheme + oauth2_security_scheme: OAuth2SecurityScheme + open_id_connect_security_scheme: OpenIdConnectSecurityScheme + mtls_security_scheme: MutualTlsSecurityScheme + def __init__(self, api_key_security_scheme: _Optional[_Union[APIKeySecurityScheme, _Mapping]] = ..., http_auth_security_scheme: _Optional[_Union[HTTPAuthSecurityScheme, _Mapping]] = ..., oauth2_security_scheme: _Optional[_Union[OAuth2SecurityScheme, _Mapping]] = ..., open_id_connect_security_scheme: _Optional[_Union[OpenIdConnectSecurityScheme, _Mapping]] = ..., mtls_security_scheme: _Optional[_Union[MutualTlsSecurityScheme, _Mapping]] = ...) -> None: ... + +class APIKeySecurityScheme(_message.Message): + __slots__ = ("description", "location", "name") + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + LOCATION_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + description: str + location: str + name: str + def __init__(self, description: _Optional[str] = ..., location: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... 
+ +class HTTPAuthSecurityScheme(_message.Message): + __slots__ = ("description", "scheme", "bearer_format") + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + SCHEME_FIELD_NUMBER: _ClassVar[int] + BEARER_FORMAT_FIELD_NUMBER: _ClassVar[int] + description: str + scheme: str + bearer_format: str + def __init__(self, description: _Optional[str] = ..., scheme: _Optional[str] = ..., bearer_format: _Optional[str] = ...) -> None: ... + +class OAuth2SecurityScheme(_message.Message): + __slots__ = ("description", "flows", "oauth2_metadata_url") + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + FLOWS_FIELD_NUMBER: _ClassVar[int] + OAUTH2_METADATA_URL_FIELD_NUMBER: _ClassVar[int] + description: str + flows: OAuthFlows + oauth2_metadata_url: str + def __init__(self, description: _Optional[str] = ..., flows: _Optional[_Union[OAuthFlows, _Mapping]] = ..., oauth2_metadata_url: _Optional[str] = ...) -> None: ... + +class OpenIdConnectSecurityScheme(_message.Message): + __slots__ = ("description", "open_id_connect_url") + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + OPEN_ID_CONNECT_URL_FIELD_NUMBER: _ClassVar[int] + description: str + open_id_connect_url: str + def __init__(self, description: _Optional[str] = ..., open_id_connect_url: _Optional[str] = ...) -> None: ... + +class MutualTlsSecurityScheme(_message.Message): + __slots__ = ("description",) + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + description: str + def __init__(self, description: _Optional[str] = ...) -> None: ... 
+ +class OAuthFlows(_message.Message): + __slots__ = ("authorization_code", "client_credentials", "implicit", "password") + AUTHORIZATION_CODE_FIELD_NUMBER: _ClassVar[int] + CLIENT_CREDENTIALS_FIELD_NUMBER: _ClassVar[int] + IMPLICIT_FIELD_NUMBER: _ClassVar[int] + PASSWORD_FIELD_NUMBER: _ClassVar[int] + authorization_code: AuthorizationCodeOAuthFlow + client_credentials: ClientCredentialsOAuthFlow + implicit: ImplicitOAuthFlow + password: PasswordOAuthFlow + def __init__(self, authorization_code: _Optional[_Union[AuthorizationCodeOAuthFlow, _Mapping]] = ..., client_credentials: _Optional[_Union[ClientCredentialsOAuthFlow, _Mapping]] = ..., implicit: _Optional[_Union[ImplicitOAuthFlow, _Mapping]] = ..., password: _Optional[_Union[PasswordOAuthFlow, _Mapping]] = ...) -> None: ... + +class AuthorizationCodeOAuthFlow(_message.Message): + __slots__ = ("authorization_url", "token_url", "refresh_url", "scopes") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + AUTHORIZATION_URL_FIELD_NUMBER: _ClassVar[int] + TOKEN_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + authorization_url: str + token_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + def __init__(self, authorization_url: _Optional[str] = ..., token_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class ClientCredentialsOAuthFlow(_message.Message): + __slots__ = ("token_url", "refresh_url", "scopes") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... 
+ TOKEN_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + token_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + def __init__(self, token_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class ImplicitOAuthFlow(_message.Message): + __slots__ = ("authorization_url", "refresh_url", "scopes") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + AUTHORIZATION_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + authorization_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + def __init__(self, authorization_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class PasswordOAuthFlow(_message.Message): + __slots__ = ("token_url", "refresh_url", "scopes") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + TOKEN_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + token_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + def __init__(self, token_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ...) -> None: ... 
+ +class SendMessageRequest(_message.Message): + __slots__ = ("request", "configuration", "metadata") + REQUEST_FIELD_NUMBER: _ClassVar[int] + CONFIGURATION_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + request: Message + configuration: SendMessageConfiguration + metadata: _struct_pb2.Struct + def __init__(self, request: _Optional[_Union[Message, _Mapping]] = ..., configuration: _Optional[_Union[SendMessageConfiguration, _Mapping]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class GetTaskRequest(_message.Message): + __slots__ = ("name", "history_length") + NAME_FIELD_NUMBER: _ClassVar[int] + HISTORY_LENGTH_FIELD_NUMBER: _ClassVar[int] + name: str + history_length: int + def __init__(self, name: _Optional[str] = ..., history_length: _Optional[int] = ...) -> None: ... + +class CancelTaskRequest(_message.Message): + __slots__ = ("name",) + NAME_FIELD_NUMBER: _ClassVar[int] + name: str + def __init__(self, name: _Optional[str] = ...) -> None: ... + +class GetTaskPushNotificationConfigRequest(_message.Message): + __slots__ = ("name",) + NAME_FIELD_NUMBER: _ClassVar[int] + name: str + def __init__(self, name: _Optional[str] = ...) -> None: ... + +class DeleteTaskPushNotificationConfigRequest(_message.Message): + __slots__ = ("name",) + NAME_FIELD_NUMBER: _ClassVar[int] + name: str + def __init__(self, name: _Optional[str] = ...) -> None: ... + +class CreateTaskPushNotificationConfigRequest(_message.Message): + __slots__ = ("parent", "config_id", "config") + PARENT_FIELD_NUMBER: _ClassVar[int] + CONFIG_ID_FIELD_NUMBER: _ClassVar[int] + CONFIG_FIELD_NUMBER: _ClassVar[int] + parent: str + config_id: str + config: TaskPushNotificationConfig + def __init__(self, parent: _Optional[str] = ..., config_id: _Optional[str] = ..., config: _Optional[_Union[TaskPushNotificationConfig, _Mapping]] = ...) -> None: ... 
+ +class TaskSubscriptionRequest(_message.Message): + __slots__ = ("name",) + NAME_FIELD_NUMBER: _ClassVar[int] + name: str + def __init__(self, name: _Optional[str] = ...) -> None: ... + +class ListTaskPushNotificationConfigRequest(_message.Message): + __slots__ = ("parent", "page_size", "page_token") + PARENT_FIELD_NUMBER: _ClassVar[int] + PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] + PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] + parent: str + page_size: int + page_token: str + def __init__(self, parent: _Optional[str] = ..., page_size: _Optional[int] = ..., page_token: _Optional[str] = ...) -> None: ... + +class GetAgentCardRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class SendMessageResponse(_message.Message): + __slots__ = ("task", "msg") + TASK_FIELD_NUMBER: _ClassVar[int] + MSG_FIELD_NUMBER: _ClassVar[int] + task: Task + msg: Message + def __init__(self, task: _Optional[_Union[Task, _Mapping]] = ..., msg: _Optional[_Union[Message, _Mapping]] = ...) -> None: ... + +class StreamResponse(_message.Message): + __slots__ = ("task", "msg", "status_update", "artifact_update") + TASK_FIELD_NUMBER: _ClassVar[int] + MSG_FIELD_NUMBER: _ClassVar[int] + STATUS_UPDATE_FIELD_NUMBER: _ClassVar[int] + ARTIFACT_UPDATE_FIELD_NUMBER: _ClassVar[int] + task: Task + msg: Message + status_update: TaskStatusUpdateEvent + artifact_update: TaskArtifactUpdateEvent + def __init__(self, task: _Optional[_Union[Task, _Mapping]] = ..., msg: _Optional[_Union[Message, _Mapping]] = ..., status_update: _Optional[_Union[TaskStatusUpdateEvent, _Mapping]] = ..., artifact_update: _Optional[_Union[TaskArtifactUpdateEvent, _Mapping]] = ...) -> None: ... 
+ +class ListTaskPushNotificationConfigResponse(_message.Message): + __slots__ = ("configs", "next_page_token") + CONFIGS_FIELD_NUMBER: _ClassVar[int] + NEXT_PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] + configs: _containers.RepeatedCompositeFieldContainer[TaskPushNotificationConfig] + next_page_token: str + def __init__(self, configs: _Optional[_Iterable[_Union[TaskPushNotificationConfig, _Mapping]]] = ..., next_page_token: _Optional[str] = ...) -> None: ... diff --git a/src/a2a/grpc/a2a_pb2_grpc.py b/src/a2a/grpc/a2a_pb2_grpc.py new file mode 100644 index 000000000..9b0ad41bc --- /dev/null +++ b/src/a2a/grpc/a2a_pb2_grpc.py @@ -0,0 +1,511 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from . import a2a_pb2 as a2a__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class A2AServiceStub(object): + """A2AService defines the gRPC version of the A2A protocol. This has a slightly + different shape than the JSONRPC version to better conform to AIP-127, + where appropriate. The nouns are AgentCard, Message, Task and + TaskPushNotificationConfig. + - Messages are not a standard resource so there is no get/delete/update/list + interface, only a send and stream custom methods. + - Tasks have a get interface and custom cancel and subscribe methods. + - TaskPushNotificationConfig are a resource whose parent is a task. + They have get, list and create methods. + - AgentCard is a static resource with only a get method. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.SendMessage = channel.unary_unary( + '/a2a.v1.A2AService/SendMessage', + request_serializer=a2a__pb2.SendMessageRequest.SerializeToString, + response_deserializer=a2a__pb2.SendMessageResponse.FromString, + _registered_method=True) + self.SendStreamingMessage = channel.unary_stream( + '/a2a.v1.A2AService/SendStreamingMessage', + request_serializer=a2a__pb2.SendMessageRequest.SerializeToString, + response_deserializer=a2a__pb2.StreamResponse.FromString, + _registered_method=True) + self.GetTask = channel.unary_unary( + '/a2a.v1.A2AService/GetTask', + request_serializer=a2a__pb2.GetTaskRequest.SerializeToString, + response_deserializer=a2a__pb2.Task.FromString, + _registered_method=True) + self.CancelTask = channel.unary_unary( + '/a2a.v1.A2AService/CancelTask', + request_serializer=a2a__pb2.CancelTaskRequest.SerializeToString, + response_deserializer=a2a__pb2.Task.FromString, + _registered_method=True) + self.TaskSubscription = channel.unary_stream( + '/a2a.v1.A2AService/TaskSubscription', + request_serializer=a2a__pb2.TaskSubscriptionRequest.SerializeToString, + response_deserializer=a2a__pb2.StreamResponse.FromString, + _registered_method=True) + self.CreateTaskPushNotificationConfig = channel.unary_unary( + '/a2a.v1.A2AService/CreateTaskPushNotificationConfig', + request_serializer=a2a__pb2.CreateTaskPushNotificationConfigRequest.SerializeToString, + response_deserializer=a2a__pb2.TaskPushNotificationConfig.FromString, + _registered_method=True) + self.GetTaskPushNotificationConfig = channel.unary_unary( + '/a2a.v1.A2AService/GetTaskPushNotificationConfig', + request_serializer=a2a__pb2.GetTaskPushNotificationConfigRequest.SerializeToString, + response_deserializer=a2a__pb2.TaskPushNotificationConfig.FromString, + _registered_method=True) + self.ListTaskPushNotificationConfig = channel.unary_unary( + '/a2a.v1.A2AService/ListTaskPushNotificationConfig', + request_serializer=a2a__pb2.ListTaskPushNotificationConfigRequest.SerializeToString, + 
response_deserializer=a2a__pb2.ListTaskPushNotificationConfigResponse.FromString, + _registered_method=True) + self.GetAgentCard = channel.unary_unary( + '/a2a.v1.A2AService/GetAgentCard', + request_serializer=a2a__pb2.GetAgentCardRequest.SerializeToString, + response_deserializer=a2a__pb2.AgentCard.FromString, + _registered_method=True) + self.DeleteTaskPushNotificationConfig = channel.unary_unary( + '/a2a.v1.A2AService/DeleteTaskPushNotificationConfig', + request_serializer=a2a__pb2.DeleteTaskPushNotificationConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + _registered_method=True) + + +class A2AServiceServicer(object): + """A2AService defines the gRPC version of the A2A protocol. This has a slightly + different shape than the JSONRPC version to better conform to AIP-127, + where appropriate. The nouns are AgentCard, Message, Task and + TaskPushNotificationConfig. + - Messages are not a standard resource so there is no get/delete/update/list + interface, only a send and stream custom methods. + - Tasks have a get interface and custom cancel and subscribe methods. + - TaskPushNotificationConfig are a resource whose parent is a task. + They have get, list and create methods. + - AgentCard is a static resource with only a get method. + """ + + def SendMessage(self, request, context): + """Send a message to the agent. This is a blocking call that will return the + task once it is completed, or a LRO if requested. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SendStreamingMessage(self, request, context): + """SendStreamingMessage is a streaming call that will return a stream of + task update events until the Task is in an interrupted or terminal state. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTask(self, request, context): + """Get the current state of a task from the agent. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CancelTask(self, request, context): + """Cancel a task from the agent. If supported one should expect no + more task updates for the task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def TaskSubscription(self, request, context): + """TaskSubscription is a streaming call that will return a stream of task + update events. This attaches the stream to an existing in process task. + If the task is complete the stream will return the completed task (like + GetTask) and close the stream. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateTaskPushNotificationConfig(self, request, context): + """Set a push notification config for a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTaskPushNotificationConfig(self, request, context): + """Get a push notification config for a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTaskPushNotificationConfig(self, request, context): + """Get a list of push notifications configured for a task. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetAgentCard(self, request, context): + """GetAgentCard returns the agent card for the agent. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTaskPushNotificationConfig(self, request, context): + """Delete a push notification config for a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_A2AServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'SendMessage': grpc.unary_unary_rpc_method_handler( + servicer.SendMessage, + request_deserializer=a2a__pb2.SendMessageRequest.FromString, + response_serializer=a2a__pb2.SendMessageResponse.SerializeToString, + ), + 'SendStreamingMessage': grpc.unary_stream_rpc_method_handler( + servicer.SendStreamingMessage, + request_deserializer=a2a__pb2.SendMessageRequest.FromString, + response_serializer=a2a__pb2.StreamResponse.SerializeToString, + ), + 'GetTask': grpc.unary_unary_rpc_method_handler( + servicer.GetTask, + request_deserializer=a2a__pb2.GetTaskRequest.FromString, + response_serializer=a2a__pb2.Task.SerializeToString, + ), + 'CancelTask': grpc.unary_unary_rpc_method_handler( + servicer.CancelTask, + request_deserializer=a2a__pb2.CancelTaskRequest.FromString, + response_serializer=a2a__pb2.Task.SerializeToString, + ), + 'TaskSubscription': grpc.unary_stream_rpc_method_handler( + servicer.TaskSubscription, + request_deserializer=a2a__pb2.TaskSubscriptionRequest.FromString, + response_serializer=a2a__pb2.StreamResponse.SerializeToString, + ), + 'CreateTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( + servicer.CreateTaskPushNotificationConfig, + 
request_deserializer=a2a__pb2.CreateTaskPushNotificationConfigRequest.FromString, + response_serializer=a2a__pb2.TaskPushNotificationConfig.SerializeToString, + ), + 'GetTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( + servicer.GetTaskPushNotificationConfig, + request_deserializer=a2a__pb2.GetTaskPushNotificationConfigRequest.FromString, + response_serializer=a2a__pb2.TaskPushNotificationConfig.SerializeToString, + ), + 'ListTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( + servicer.ListTaskPushNotificationConfig, + request_deserializer=a2a__pb2.ListTaskPushNotificationConfigRequest.FromString, + response_serializer=a2a__pb2.ListTaskPushNotificationConfigResponse.SerializeToString, + ), + 'GetAgentCard': grpc.unary_unary_rpc_method_handler( + servicer.GetAgentCard, + request_deserializer=a2a__pb2.GetAgentCardRequest.FromString, + response_serializer=a2a__pb2.AgentCard.SerializeToString, + ), + 'DeleteTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTaskPushNotificationConfig, + request_deserializer=a2a__pb2.DeleteTaskPushNotificationConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'a2a.v1.A2AService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('a2a.v1.A2AService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class A2AService(object): + """A2AService defines the gRPC version of the A2A protocol. This has a slightly + different shape than the JSONRPC version to better conform to AIP-127, + where appropriate. The nouns are AgentCard, Message, Task and + TaskPushNotificationConfig. + - Messages are not a standard resource so there is no get/delete/update/list + interface, only a send and stream custom methods. 
+ - Tasks have a get interface and custom cancel and subscribe methods. + - TaskPushNotificationConfig are a resource whose parent is a task. + They have get, list and create methods. + - AgentCard is a static resource with only a get method. + """ + + @staticmethod + def SendMessage(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/a2a.v1.A2AService/SendMessage', + a2a__pb2.SendMessageRequest.SerializeToString, + a2a__pb2.SendMessageResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def SendStreamingMessage(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/a2a.v1.A2AService/SendStreamingMessage', + a2a__pb2.SendMessageRequest.SerializeToString, + a2a__pb2.StreamResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetTask(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/a2a.v1.A2AService/GetTask', + a2a__pb2.GetTaskRequest.SerializeToString, + a2a__pb2.Task.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CancelTask(request, + target, + options=(), + channel_credentials=None, + 
call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/a2a.v1.A2AService/CancelTask', + a2a__pb2.CancelTaskRequest.SerializeToString, + a2a__pb2.Task.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def TaskSubscription(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/a2a.v1.A2AService/TaskSubscription', + a2a__pb2.TaskSubscriptionRequest.SerializeToString, + a2a__pb2.StreamResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateTaskPushNotificationConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/a2a.v1.A2AService/CreateTaskPushNotificationConfig', + a2a__pb2.CreateTaskPushNotificationConfigRequest.SerializeToString, + a2a__pb2.TaskPushNotificationConfig.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetTaskPushNotificationConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/a2a.v1.A2AService/GetTaskPushNotificationConfig', + 
a2a__pb2.GetTaskPushNotificationConfigRequest.SerializeToString, + a2a__pb2.TaskPushNotificationConfig.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListTaskPushNotificationConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/a2a.v1.A2AService/ListTaskPushNotificationConfig', + a2a__pb2.ListTaskPushNotificationConfigRequest.SerializeToString, + a2a__pb2.ListTaskPushNotificationConfigResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetAgentCard(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/a2a.v1.A2AService/GetAgentCard', + a2a__pb2.GetAgentCardRequest.SerializeToString, + a2a__pb2.AgentCard.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteTaskPushNotificationConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/a2a.v1.A2AService/DeleteTaskPushNotificationConfig', + a2a__pb2.DeleteTaskPushNotificationConfigRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + insecure, + call_credentials, + 
compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/src/a2a/server/agent_execution/agent_executor.py b/src/a2a/server/agent_execution/agent_executor.py index 489f752ba..38be9c11c 100644 --- a/src/a2a/server/agent_execution/agent_executor.py +++ b/src/a2a/server/agent_execution/agent_executor.py @@ -12,7 +12,9 @@ class AgentExecutor(ABC): """ @abstractmethod - async def execute(self, context: RequestContext, event_queue: EventQueue): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ) -> None: """Execute the agent's logic for a given request context. The agent should read necessary information from the `context` and @@ -27,7 +29,9 @@ async def execute(self, context: RequestContext, event_queue: EventQueue): """ @abstractmethod - async def cancel(self, context: RequestContext, event_queue: EventQueue): + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: """Request the agent to cancel an ongoing task. The agent should attempt to stop the task identified by the task_id diff --git a/src/a2a/server/agent_execution/context.py b/src/a2a/server/agent_execution/context.py index 274644145..cd9f8f973 100644 --- a/src/a2a/server/agent_execution/context.py +++ b/src/a2a/server/agent_execution/context.py @@ -1,6 +1,11 @@ -import uuid +from typing import Any from a2a.server.context import ServerCallContext +from a2a.server.id_generator import ( + IDGenerator, + IDGeneratorContext, + UUIDGenerator, +) from a2a.types import ( InvalidParamsError, Message, @@ -20,7 +25,7 @@ class RequestContext: tasks. 
""" - def __init__( + def __init__( # noqa: PLR0913 self, request: MessageSendParams | None = None, task_id: str | None = None, @@ -28,6 +33,8 @@ def __init__( task: Task | None = None, related_tasks: list[Task] | None = None, call_context: ServerCallContext | None = None, + task_id_generator: IDGenerator | None = None, + context_id_generator: IDGenerator | None = None, ): """Initializes the RequestContext. @@ -37,6 +44,9 @@ def __init__( context_id: The ID of the context explicitly provided in the request or path. task: The existing `Task` object retrieved from the store, if any. related_tasks: A list of other tasks related to the current request (e.g., for tool use). + call_context: The server call context associated with this request. + task_id_generator: ID generator for new task IDs. Defaults to UUID generator. + context_id_generator: ID generator for new context IDs. Defaults to UUID generator. """ if related_tasks is None: related_tasks = [] @@ -46,25 +56,31 @@ def __init__( self._current_task = task self._related_tasks = related_tasks self._call_context = call_context + self._task_id_generator = ( + task_id_generator if task_id_generator else UUIDGenerator() + ) + self._context_id_generator = ( + context_id_generator if context_id_generator else UUIDGenerator() + ) # If the task id and context id were provided, make sure they # match the request. 
Otherwise, create them if self._params: if task_id: - self._params.message.taskId = task_id + self._params.message.task_id = task_id if task and task.id != task_id: raise ServerError(InvalidParamsError(message='bad task id')) else: self._check_or_generate_task_id() if context_id: - self._params.message.contextId = context_id - if task and task.contextId != context_id: + self._params.message.context_id = context_id + if task and task.context_id != context_id: raise ServerError( InvalidParamsError(message='bad context id') ) else: self._check_or_generate_context_id() - def get_user_input(self, delimiter='\n') -> str: + def get_user_input(self, delimiter: str = '\n') -> str: """Extracts text content from the user's message parts. Args: @@ -80,7 +96,7 @@ def get_user_input(self, delimiter='\n') -> str: return get_message_text(self._params.message, delimiter) - def attach_related_task(self, task: Task): + def attach_related_task(self, task: Task) -> None: """Attaches a related task to the context. This is useful for scenarios like tool execution where a new task @@ -124,31 +140,58 @@ def context_id(self) -> str | None: @property def configuration(self) -> MessageSendConfiguration | None: """The `MessageSendConfiguration` from the request, if available.""" - if not self._params: - return None - return self._params.configuration + return self._params.configuration if self._params else None @property def call_context(self) -> ServerCallContext | None: """The server call context associated with this request.""" return self._call_context + @property + def metadata(self) -> dict[str, Any]: + """Metadata associated with the request, if available.""" + return self._params.metadata or {} if self._params else {} + + def add_activated_extension(self, uri: str) -> None: + """Add an extension to the set of activated extensions for this request. + + This causes the extension to be indicated back to the client in the + response. 
+ """ + if self._call_context: + self._call_context.activated_extensions.add(uri) + + @property + def requested_extensions(self) -> set[str]: + """Extensions that the client requested to activate.""" + return ( + self._call_context.requested_extensions + if self._call_context + else set() + ) + def _check_or_generate_task_id(self) -> None: """Ensures a task ID is present, generating one if necessary.""" if not self._params: return - if not self._task_id and not self._params.message.taskId: - self._params.message.taskId = str(uuid.uuid4()) - if self._params.message.taskId: - self._task_id = self._params.message.taskId + if not self._task_id and not self._params.message.task_id: + self._params.message.task_id = self._task_id_generator.generate( + IDGeneratorContext(context_id=self._context_id) + ) + if self._params.message.task_id: + self._task_id = self._params.message.task_id def _check_or_generate_context_id(self) -> None: """Ensures a context ID is present, generating one if necessary.""" if not self._params: return - if not self._context_id and not self._params.message.contextId: - self._params.message.contextId = str(uuid.uuid4()) - if self._params.message.contextId: - self._context_id = self._params.message.contextId + if not self._context_id and not self._params.message.context_id: + self._params.message.context_id = ( + self._context_id_generator.generate( + IDGeneratorContext(task_id=self._task_id) + ) + ) + if self._params.message.context_id: + self._context_id = self._params.message.context_id diff --git a/src/a2a/server/agent_execution/request_context_builder.py b/src/a2a/server/agent_execution/request_context_builder.py index 0e36254b8..2a3ad4db5 100644 --- a/src/a2a/server/agent_execution/request_context_builder.py +++ b/src/a2a/server/agent_execution/request_context_builder.py @@ -6,7 +6,7 @@ class RequestContextBuilder(ABC): - """Builds request context to be supplied to agent executor""" + """Builds request context to be supplied to agent 
executor.""" @abstractmethod async def build( diff --git a/src/a2a/server/agent_execution/simple_request_context_builder.py b/src/a2a/server/agent_execution/simple_request_context_builder.py index 16a84d7bc..876b6561e 100644 --- a/src/a2a/server/agent_execution/simple_request_context_builder.py +++ b/src/a2a/server/agent_execution/simple_request_context_builder.py @@ -2,20 +2,36 @@ from a2a.server.agent_execution import RequestContext, RequestContextBuilder from a2a.server.context import ServerCallContext +from a2a.server.id_generator import IDGenerator from a2a.server.tasks import TaskStore from a2a.types import MessageSendParams, Task class SimpleRequestContextBuilder(RequestContextBuilder): - """Builds request context and populates referred tasks""" + """Builds request context and populates referred tasks.""" def __init__( self, should_populate_referred_tasks: bool = False, task_store: TaskStore | None = None, + task_id_generator: IDGenerator | None = None, + context_id_generator: IDGenerator | None = None, ) -> None: + """Initializes the SimpleRequestContextBuilder. + + Args: + should_populate_referred_tasks: If True, the builder will fetch tasks + referenced in `params.message.reference_task_ids` and populate the + `related_tasks` field in the RequestContext. Defaults to False. + task_store: The TaskStore instance to use for fetching referred tasks. + Required if `should_populate_referred_tasks` is True. + task_id_generator: ID generator for new task IDs. Defaults to None. + context_id_generator: ID generator for new context IDs. Defaults to None. + """ self._task_store = task_store self._should_populate_referred_tasks = should_populate_referred_tasks + self._task_id_generator = task_id_generator + self._context_id_generator = context_id_generator async def build( self, @@ -25,18 +41,35 @@ async def build( task: Task | None = None, context: ServerCallContext | None = None, ) -> RequestContext: + """Builds the request context for an agent execution. 
+ + This method assembles the RequestContext object. If the builder was + initialized with `should_populate_referred_tasks=True`, it fetches all tasks + referenced in `params.message.reference_task_ids` from the `task_store`. + + Args: + params: The parameters of the incoming message send request. + task_id: The ID of the task being executed. + context_id: The ID of the current execution context. + task: The primary task object associated with the request. + context: The server call context, containing metadata about the call. + + Returns: + An instance of RequestContext populated with the provided information + and potentially a list of related tasks. + """ related_tasks: list[Task] | None = None if ( self._task_store and self._should_populate_referred_tasks and params - and params.message.referenceTaskIds + and params.message.reference_task_ids ): tasks = await asyncio.gather( *[ self._task_store.get(task_id) - for task_id in params.message.referenceTaskIds + for task_id in params.message.reference_task_ids ] ) related_tasks = [x for x in tasks if x is not None] @@ -48,4 +81,6 @@ async def build( task=task, related_tasks=related_tasks, call_context=context, + task_id_generator=self._task_id_generator, + context_id_generator=self._context_id_generator, ) diff --git a/src/a2a/server/apps/__init__.py b/src/a2a/server/apps/__init__.py index e0e1b4824..579deaa54 100644 --- a/src/a2a/server/apps/__init__.py +++ b/src/a2a/server/apps/__init__.py @@ -1,6 +1,18 @@ """HTTP application components for the A2A server.""" -from a2a.server.apps.starlette_app import A2AStarletteApplication +from a2a.server.apps.jsonrpc import ( + A2AFastAPIApplication, + A2AStarletteApplication, + CallContextBuilder, + JSONRPCApplication, +) +from a2a.server.apps.rest import A2ARESTFastAPIApplication -__all__ = ['A2AStarletteApplication'] +__all__ = [ + 'A2AFastAPIApplication', + 'A2ARESTFastAPIApplication', + 'A2AStarletteApplication', + 'CallContextBuilder', + 'JSONRPCApplication', +] diff 
--git a/src/a2a/server/apps/jsonrpc/__init__.py b/src/a2a/server/apps/jsonrpc/__init__.py new file mode 100644 index 000000000..1121fdbc3 --- /dev/null +++ b/src/a2a/server/apps/jsonrpc/__init__.py @@ -0,0 +1,20 @@ +"""A2A JSON-RPC Applications.""" + +from a2a.server.apps.jsonrpc.fastapi_app import A2AFastAPIApplication +from a2a.server.apps.jsonrpc.jsonrpc_app import ( + CallContextBuilder, + DefaultCallContextBuilder, + JSONRPCApplication, + StarletteUserProxy, +) +from a2a.server.apps.jsonrpc.starlette_app import A2AStarletteApplication + + +__all__ = [ + 'A2AFastAPIApplication', + 'A2AStarletteApplication', + 'CallContextBuilder', + 'DefaultCallContextBuilder', + 'JSONRPCApplication', + 'StarletteUserProxy', +] diff --git a/src/a2a/server/apps/jsonrpc/fastapi_app.py b/src/a2a/server/apps/jsonrpc/fastapi_app.py new file mode 100644 index 000000000..dfd92d87c --- /dev/null +++ b/src/a2a/server/apps/jsonrpc/fastapi_app.py @@ -0,0 +1,187 @@ +import logging + +from collections.abc import Awaitable, Callable +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from fastapi import FastAPI + + _package_fastapi_installed = True +else: + try: + from fastapi import FastAPI + + _package_fastapi_installed = True + except ImportError: + FastAPI = Any + + _package_fastapi_installed = False + +from a2a.server.apps.jsonrpc.jsonrpc_app import ( + CallContextBuilder, + JSONRPCApplication, +) +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.jsonrpc_handler import RequestHandler +from a2a.types import A2ARequest, AgentCard +from a2a.utils.constants import ( + AGENT_CARD_WELL_KNOWN_PATH, + DEFAULT_RPC_URL, + EXTENDED_AGENT_CARD_PATH, + PREV_AGENT_CARD_WELL_KNOWN_PATH, +) + + +logger = logging.getLogger(__name__) + + +class A2AFastAPI(FastAPI): + """A FastAPI application that adds A2A-specific OpenAPI components.""" + + _a2a_components_added: bool = False + + def openapi(self) -> dict[str, Any]: + """Generates the OpenAPI schema for the 
application.""" + openapi_schema = super().openapi() + if not self._a2a_components_added: + a2a_request_schema = A2ARequest.model_json_schema( + ref_template='#/components/schemas/{model}' + ) + defs = a2a_request_schema.pop('$defs', {}) + component_schemas = openapi_schema.setdefault( + 'components', {} + ).setdefault('schemas', {}) + component_schemas.update(defs) + component_schemas['A2ARequest'] = a2a_request_schema + self._a2a_components_added = True + return openapi_schema + + +class A2AFastAPIApplication(JSONRPCApplication): + """A FastAPI application implementing the A2A protocol server endpoints. + + Handles incoming JSON-RPC requests, routes them to the appropriate + handler methods, and manages response generation including Server-Sent Events + (SSE). + """ + + def __init__( # noqa: PLR0913 + self, + agent_card: AgentCard, + http_handler: RequestHandler, + extended_agent_card: AgentCard | None = None, + context_builder: CallContextBuilder | None = None, + card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] + | None = None, + extended_card_modifier: Callable[ + [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard + ] + | None = None, + max_content_length: int | None = 10 * 1024 * 1024, # 10MB + ) -> None: + """Initializes the A2AFastAPIApplication. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + http_handler: The handler instance responsible for processing A2A + requests via http. + extended_agent_card: An optional, distinct AgentCard to be served + at the authenticated extended card endpoint. + context_builder: The CallContextBuilder used to construct the + ServerCallContext passed to the http_handler. If None, no + ServerCallContext is passed. + card_modifier: An optional callback to dynamically modify the public + agent card before it is served. + extended_card_modifier: An optional callback to dynamically modify + the extended agent card before it is served. It receives the + call context. 
+ max_content_length: The maximum allowed content length for incoming + requests. Defaults to 10MB. Set to None for unbounded maximum. + """ + if not _package_fastapi_installed: + raise ImportError( + 'The `fastapi` package is required to use the `A2AFastAPIApplication`.' + ' It can be added as a part of `a2a-sdk` optional dependencies,' + ' `a2a-sdk[http-server]`.' + ) + super().__init__( + agent_card=agent_card, + http_handler=http_handler, + extended_agent_card=extended_agent_card, + context_builder=context_builder, + card_modifier=card_modifier, + extended_card_modifier=extended_card_modifier, + max_content_length=max_content_length, + ) + + def add_routes_to_app( + self, + app: FastAPI, + agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, + rpc_url: str = DEFAULT_RPC_URL, + extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, + ) -> None: + """Adds the routes to the FastAPI application. + + Args: + app: The FastAPI application to add the routes to. + agent_card_url: The URL for the agent card endpoint. + rpc_url: The URL for the A2A JSON-RPC endpoint. + extended_agent_card_url: The URL for the authenticated extended agent card endpoint. + """ + app.post( + rpc_url, + openapi_extra={ + 'requestBody': { + 'content': { + 'application/json': { + 'schema': { + '$ref': '#/components/schemas/A2ARequest' + } + } + }, + 'required': True, + 'description': 'A2ARequest', + } + }, + )(self._handle_requests) + app.get(agent_card_url)(self._handle_get_agent_card) + + if agent_card_url == AGENT_CARD_WELL_KNOWN_PATH: + # For backward compatibility, serve the agent card at the deprecated path as well. 
+ # TODO: remove in a future release + app.get(PREV_AGENT_CARD_WELL_KNOWN_PATH)( + self._handle_get_agent_card + ) + + if self.agent_card.supports_authenticated_extended_card: + app.get(extended_agent_card_url)( + self._handle_get_authenticated_extended_agent_card + ) + + def build( + self, + agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, + rpc_url: str = DEFAULT_RPC_URL, + extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, + **kwargs: Any, + ) -> FastAPI: + """Builds and returns the FastAPI application instance. + + Args: + agent_card_url: The URL for the agent card endpoint. + rpc_url: The URL for the A2A JSON-RPC endpoint. + extended_agent_card_url: The URL for the authenticated extended agent card endpoint. + **kwargs: Additional keyword arguments to pass to the FastAPI constructor. + + Returns: + A configured FastAPI application instance. + """ + app = A2AFastAPI(**kwargs) + + self.add_routes_to_app( + app, agent_card_url, rpc_url, extended_agent_card_url + ) + + return app diff --git a/src/a2a/server/apps/jsonrpc/jsonrpc_app.py b/src/a2a/server/apps/jsonrpc/jsonrpc_app.py new file mode 100644 index 000000000..27839cd35 --- /dev/null +++ b/src/a2a/server/apps/jsonrpc/jsonrpc_app.py @@ -0,0 +1,653 @@ +import contextlib +import json +import logging +import traceback + +from abc import ABC, abstractmethod +from collections.abc import AsyncGenerator, Awaitable, Callable +from typing import TYPE_CHECKING, Any + +from pydantic import ValidationError + +from a2a.auth.user import UnauthenticatedUser +from a2a.auth.user import User as A2AUser +from a2a.extensions.common import ( + HTTP_EXTENSION_HEADER, + get_requested_extensions, +) +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.jsonrpc_handler import JSONRPCHandler +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types import ( + A2AError, + A2ARequest, + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + 
GetAuthenticatedExtendedCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + InternalError, + InvalidParamsError, + InvalidRequestError, + JSONParseError, + JSONRPCError, + JSONRPCErrorResponse, + JSONRPCRequest, + JSONRPCResponse, + ListTaskPushNotificationConfigRequest, + MethodNotFoundError, + SendMessageRequest, + SendStreamingMessageRequest, + SendStreamingMessageResponse, + SetTaskPushNotificationConfigRequest, + TaskResubscriptionRequest, + UnsupportedOperationError, +) +from a2a.utils.constants import ( + AGENT_CARD_WELL_KNOWN_PATH, + DEFAULT_RPC_URL, + EXTENDED_AGENT_CARD_PATH, + PREV_AGENT_CARD_WELL_KNOWN_PATH, +) +from a2a.utils.errors import MethodNotImplementedError +from a2a.utils.helpers import maybe_await + + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from fastapi import FastAPI + from sse_starlette.sse import EventSourceResponse + from starlette.applications import Starlette + from starlette.authentication import BaseUser + from starlette.exceptions import HTTPException + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + from starlette.status import HTTP_413_REQUEST_ENTITY_TOO_LARGE + + _package_starlette_installed = True +else: + FastAPI = Any + try: + from sse_starlette.sse import EventSourceResponse + from starlette.applications import Starlette + from starlette.authentication import BaseUser + from starlette.exceptions import HTTPException + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + from starlette.status import HTTP_413_REQUEST_ENTITY_TOO_LARGE + + _package_starlette_installed = True + except ImportError: + _package_starlette_installed = False + # Provide placeholder types for runtime type hinting when dependencies are not installed. + # These will not be used if the code path that needs them is guarded by _http_server_installed. 
+ EventSourceResponse = Any + Starlette = Any + BaseUser = Any + HTTPException = Any + Request = Any + JSONResponse = Any + Response = Any + HTTP_413_REQUEST_ENTITY_TOO_LARGE = Any + + +class StarletteUserProxy(A2AUser): + """Adapts the Starlette User class to the A2A user representation.""" + + def __init__(self, user: BaseUser): + self._user = user + + @property + def is_authenticated(self) -> bool: + """Returns whether the current user is authenticated.""" + return self._user.is_authenticated + + @property + def user_name(self) -> str: + """Returns the user name of the current user.""" + return self._user.display_name + + +class CallContextBuilder(ABC): + """A class for building ServerCallContexts using the Starlette Request.""" + + @abstractmethod + def build(self, request: Request) -> ServerCallContext: + """Builds a ServerCallContext from a Starlette Request.""" + + +class DefaultCallContextBuilder(CallContextBuilder): + """A default implementation of CallContextBuilder.""" + + def build(self, request: Request) -> ServerCallContext: + """Builds a ServerCallContext from a Starlette Request. + + Args: + request: The incoming Starlette Request object. + + Returns: + A ServerCallContext instance populated with user and state + information from the request. + """ + user: A2AUser = UnauthenticatedUser() + state = {} + with contextlib.suppress(Exception): + user = StarletteUserProxy(request.user) + state['auth'] = request.auth + state['headers'] = dict(request.headers) + return ServerCallContext( + user=user, + state=state, + requested_extensions=get_requested_extensions( + request.headers.getlist(HTTP_EXTENSION_HEADER) + ), + ) + + +class JSONRPCApplication(ABC): + """Base class for A2A JSONRPC applications. + + Handles incoming JSON-RPC requests, routes them to the appropriate + handler methods, and manages response generation including Server-Sent Events + (SSE). 
+ """ + + # Method-to-model mapping for centralized routing + A2ARequestModel = ( + SendMessageRequest + | SendStreamingMessageRequest + | GetTaskRequest + | CancelTaskRequest + | SetTaskPushNotificationConfigRequest + | GetTaskPushNotificationConfigRequest + | ListTaskPushNotificationConfigRequest + | DeleteTaskPushNotificationConfigRequest + | TaskResubscriptionRequest + | GetAuthenticatedExtendedCardRequest + ) + + METHOD_TO_MODEL: dict[str, type[A2ARequestModel]] = { + model.model_fields['method'].default: model + for model in A2ARequestModel.__args__ + } + + def __init__( # noqa: PLR0913 + self, + agent_card: AgentCard, + http_handler: RequestHandler, + extended_agent_card: AgentCard | None = None, + context_builder: CallContextBuilder | None = None, + card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] + | None = None, + extended_card_modifier: Callable[ + [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard + ] + | None = None, + max_content_length: int | None = 10 * 1024 * 1024, # 10MB + ) -> None: + """Initializes the JSONRPCApplication. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + http_handler: The handler instance responsible for processing A2A + requests via http. + extended_agent_card: An optional, distinct AgentCard to be served + at the authenticated extended card endpoint. + context_builder: The CallContextBuilder used to construct the + ServerCallContext passed to the http_handler. If None, no + ServerCallContext is passed. + card_modifier: An optional callback to dynamically modify the public + agent card before it is served. + extended_card_modifier: An optional callback to dynamically modify + the extended agent card before it is served. It receives the + call context. + max_content_length: The maximum allowed content length for incoming + requests. Defaults to 10MB. Set to None for unbounded maximum. 
+ """ + if not _package_starlette_installed: + raise ImportError( + 'Packages `starlette` and `sse-starlette` are required to use the' + ' `JSONRPCApplication`. They can be added as a part of `a2a-sdk`' + ' optional dependencies, `a2a-sdk[http-server]`.' + ) + self.agent_card = agent_card + self.extended_agent_card = extended_agent_card + self.card_modifier = card_modifier + self.extended_card_modifier = extended_card_modifier + self.handler = JSONRPCHandler( + agent_card=agent_card, + request_handler=http_handler, + extended_agent_card=extended_agent_card, + extended_card_modifier=extended_card_modifier, + ) + self._context_builder = context_builder or DefaultCallContextBuilder() + self._max_content_length = max_content_length + + def _generate_error_response( + self, request_id: str | int | None, error: JSONRPCError | A2AError + ) -> JSONResponse: + """Creates a Starlette JSONResponse for a JSON-RPC error. + + Logs the error based on its type. + + Args: + request_id: The ID of the request that caused the error. + error: The `JSONRPCError` or `A2AError` object. + + Returns: + A `JSONResponse` object formatted as a JSON-RPC error response. + """ + error_resp = JSONRPCErrorResponse( + id=request_id, + error=error if isinstance(error, JSONRPCError) else error.root, + ) + + log_level = ( + logging.ERROR + if not isinstance(error, A2AError) + or isinstance(error.root, InternalError) + else logging.WARNING + ) + logger.log( + log_level, + "Request Error (ID: %s): Code=%s, Message='%s'%s", + request_id, + error_resp.error.code, + error_resp.error.message, + ', Data=' + str(error_resp.error.data) + if error_resp.error.data + else '', + ) + return JSONResponse( + error_resp.model_dump(mode='json', exclude_none=True), + status_code=200, + ) + + def _allowed_content_length(self, request: Request) -> bool: + """Checks if the request content length is within the allowed maximum. + + Args: + request: The incoming Starlette Request object. 
+ + Returns: + False if the content length is larger than the allowed maximum, True otherwise. + """ + if self._max_content_length is not None: + with contextlib.suppress(ValueError): + content_length = int(request.headers.get('content-length', '0')) + if content_length and content_length > self._max_content_length: + return False + return True + + async def _handle_requests(self, request: Request) -> Response: # noqa: PLR0911 + """Handles incoming POST requests to the main A2A endpoint. + + Parses the request body as JSON, validates it against A2A request types, + dispatches it to the appropriate handler method, and returns the response. + Handles JSON parsing errors, validation errors, and other exceptions, + returning appropriate JSON-RPC error responses. + + Args: + request: The incoming Starlette Request object. + + Returns: + A Starlette Response object (JSONResponse or EventSourceResponse). + + Raises: + (Implicitly handled): Various exceptions are caught and converted + into JSON-RPC error responses by this method. 
+ """ + request_id = None + body = None + + try: + body = await request.json() + if isinstance(body, dict): + request_id = body.get('id') + # Ensure request_id is valid for JSON-RPC response (str/int/None only) + if request_id is not None and not isinstance( + request_id, str | int + ): + request_id = None + # Treat payloads lager than allowed as invalid request (-32600) before routing + if not self._allowed_content_length(request): + return self._generate_error_response( + request_id, + A2AError( + root=InvalidRequestError(message='Payload too large') + ), + ) + logger.debug('Request body: %s', body) + # 1) Validate base JSON-RPC structure only (-32600 on failure) + try: + base_request = JSONRPCRequest.model_validate(body) + except ValidationError as e: + logger.exception('Failed to validate base JSON-RPC request') + return self._generate_error_response( + request_id, + A2AError( + root=InvalidRequestError(data=json.loads(e.json())) + ), + ) + + # 2) Route by method name; unknown -> -32601, known -> validate params (-32602 on failure) + method = base_request.method + + model_class = self.METHOD_TO_MODEL.get(method) + if not model_class: + return self._generate_error_response( + request_id, A2AError(root=MethodNotFoundError()) + ) + try: + specific_request = model_class.model_validate(body) + except ValidationError as e: + logger.exception('Failed to validate base JSON-RPC request') + return self._generate_error_response( + request_id, + A2AError( + root=InvalidParamsError(data=json.loads(e.json())) + ), + ) + + # 3) Build call context and wrap the request for downstream handling + call_context = self._context_builder.build(request) + call_context.state['method'] = method + + request_id = specific_request.id + a2a_request = A2ARequest(root=specific_request) + request_obj = a2a_request.root + + if isinstance( + request_obj, + TaskResubscriptionRequest | SendStreamingMessageRequest, + ): + return await self._process_streaming_request( + request_id, a2a_request, 
call_context + ) + + return await self._process_non_streaming_request( + request_id, a2a_request, call_context + ) + except MethodNotImplementedError: + traceback.print_exc() + return self._generate_error_response( + request_id, A2AError(root=UnsupportedOperationError()) + ) + except json.decoder.JSONDecodeError as e: + traceback.print_exc() + return self._generate_error_response( + None, A2AError(root=JSONParseError(message=str(e))) + ) + except HTTPException as e: + if e.status_code == HTTP_413_REQUEST_ENTITY_TOO_LARGE: + return self._generate_error_response( + request_id, + A2AError( + root=InvalidRequestError(message='Payload too large') + ), + ) + raise e + except Exception as e: + logger.exception('Unhandled exception') + return self._generate_error_response( + request_id, A2AError(root=InternalError(message=str(e))) + ) + + async def _process_streaming_request( + self, + request_id: str | int | None, + a2a_request: A2ARequest, + context: ServerCallContext, + ) -> Response: + """Processes streaming requests (message/stream or tasks/resubscribe). + + Args: + request_id: The ID of the request. + a2a_request: The validated A2ARequest object. + context: The ServerCallContext for the request. + + Returns: + An `EventSourceResponse` object to stream results to the client. + """ + request_obj = a2a_request.root + handler_result: Any = None + if isinstance( + request_obj, + SendStreamingMessageRequest, + ): + handler_result = self.handler.on_message_send_stream( + request_obj, context + ) + elif isinstance(request_obj, TaskResubscriptionRequest): + handler_result = self.handler.on_resubscribe_to_task( + request_obj, context + ) + + return self._create_response(context, handler_result) + + async def _process_non_streaming_request( + self, + request_id: str | int | None, + a2a_request: A2ARequest, + context: ServerCallContext, + ) -> Response: + """Processes non-streaming requests (message/send, tasks/get, tasks/cancel, tasks/pushNotificationConfig/*). 
+ + Args: + request_id: The ID of the request. + a2a_request: The validated A2ARequest object. + context: The ServerCallContext for the request. + + Returns: + A `JSONResponse` object containing the result or error. + """ + request_obj = a2a_request.root + handler_result: Any = None + match request_obj: + case SendMessageRequest(): + handler_result = await self.handler.on_message_send( + request_obj, context + ) + case CancelTaskRequest(): + handler_result = await self.handler.on_cancel_task( + request_obj, context + ) + case GetTaskRequest(): + handler_result = await self.handler.on_get_task( + request_obj, context + ) + case SetTaskPushNotificationConfigRequest(): + handler_result = ( + await self.handler.set_push_notification_config( + request_obj, + context, + ) + ) + case GetTaskPushNotificationConfigRequest(): + handler_result = ( + await self.handler.get_push_notification_config( + request_obj, + context, + ) + ) + case ListTaskPushNotificationConfigRequest(): + handler_result = ( + await self.handler.list_push_notification_config( + request_obj, + context, + ) + ) + case DeleteTaskPushNotificationConfigRequest(): + handler_result = ( + await self.handler.delete_push_notification_config( + request_obj, + context, + ) + ) + case GetAuthenticatedExtendedCardRequest(): + handler_result = ( + await self.handler.get_authenticated_extended_card( + request_obj, + context, + ) + ) + case _: + logger.error( + 'Unhandled validated request type: %s', type(request_obj) + ) + error = UnsupportedOperationError( + message=f'Request type {type(request_obj).__name__} is unknown.' 
+ ) + handler_result = JSONRPCErrorResponse( + id=request_id, error=error + ) + + return self._create_response(context, handler_result) + + def _create_response( + self, + context: ServerCallContext, + handler_result: ( + AsyncGenerator[SendStreamingMessageResponse] + | JSONRPCErrorResponse + | JSONRPCResponse + ), + ) -> Response: + """Creates a Starlette Response based on the result from the request handler. + + Handles: + - AsyncGenerator for Server-Sent Events (SSE). + - JSONRPCErrorResponse for explicit errors returned by handlers. + - Pydantic RootModels (like GetTaskResponse) containing success or error + payloads. + + Args: + context: The ServerCallContext provided to the request handler. + handler_result: The result from a request handler method. Can be an + async generator for streaming or a Pydantic model for non-streaming. + + Returns: + A Starlette JSONResponse or EventSourceResponse. + """ + headers = {} + if exts := context.activated_extensions: + headers[HTTP_EXTENSION_HEADER] = ', '.join(sorted(exts)) + if isinstance(handler_result, AsyncGenerator): + # Result is a stream of SendStreamingMessageResponse objects + async def event_generator( + stream: AsyncGenerator[SendStreamingMessageResponse], + ) -> AsyncGenerator[dict[str, str]]: + async for item in stream: + yield {'data': item.root.model_dump_json(exclude_none=True)} + + return EventSourceResponse( + event_generator(handler_result), headers=headers + ) + if isinstance(handler_result, JSONRPCErrorResponse): + return JSONResponse( + handler_result.model_dump( + mode='json', + exclude_none=True, + ), + headers=headers, + ) + + return JSONResponse( + handler_result.root.model_dump(mode='json', exclude_none=True), + headers=headers, + ) + + async def _handle_get_agent_card(self, request: Request) -> JSONResponse: + """Handles GET requests for the agent card endpoint. + + Args: + request: The incoming Starlette Request object. + + Returns: + A JSONResponse containing the agent card data. 
+ """ + if request.url.path == PREV_AGENT_CARD_WELL_KNOWN_PATH: + logger.warning( + "Deprecated agent card endpoint '%s' accessed. " + "Please use '%s' instead. This endpoint will be removed in a future version.", + PREV_AGENT_CARD_WELL_KNOWN_PATH, + AGENT_CARD_WELL_KNOWN_PATH, + ) + + card_to_serve = self.agent_card + if self.card_modifier: + card_to_serve = await maybe_await(self.card_modifier(card_to_serve)) + + return JSONResponse( + card_to_serve.model_dump( + exclude_none=True, + by_alias=True, + ) + ) + + async def _handle_get_authenticated_extended_agent_card( + self, request: Request + ) -> JSONResponse: + """Handles GET requests for the authenticated extended agent card.""" + logger.warning( + 'HTTP GET for authenticated extended card has been called by a client. ' + 'This endpoint is deprecated in favor of agent/authenticatedExtendedCard JSON-RPC method and will be removed in a future release.' + ) + if not self.agent_card.supports_authenticated_extended_card: + return JSONResponse( + {'error': 'Extended agent card not supported or not enabled.'}, + status_code=404, + ) + + card_to_serve = self.extended_agent_card + + if self.extended_card_modifier: + context = self._context_builder.build(request) + # If no base extended card is provided, pass the public card to the modifier + base_card = card_to_serve if card_to_serve else self.agent_card + card_to_serve = await maybe_await( + self.extended_card_modifier(base_card, context) + ) + + if card_to_serve: + return JSONResponse( + card_to_serve.model_dump( + exclude_none=True, + by_alias=True, + ) + ) + # If supports_authenticated_extended_card is true, but no + # extended_agent_card was provided, and no modifier produced a card, + # return a 404. + return JSONResponse( + { + 'error': 'Authenticated extended agent card is supported but not configured on the server.' 
+ }, + status_code=404, + ) + + @abstractmethod + def build( + self, + agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, + rpc_url: str = DEFAULT_RPC_URL, + extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, + **kwargs: Any, + ) -> FastAPI | Starlette: + """Builds and returns the JSONRPC application instance. + + Args: + agent_card_url: The URL for the agent card endpoint. + rpc_url: The URL for the A2A JSON-RPC endpoint. + extended_agent_card_url: The URL for the authenticated extended + agent card endpoint. + **kwargs: Additional keyword arguments to pass to the FastAPI constructor. + + Returns: + A configured JSONRPC application instance. + """ + raise NotImplementedError( + 'Subclasses must implement the build method to create the application instance.' + ) diff --git a/src/a2a/server/apps/jsonrpc/starlette_app.py b/src/a2a/server/apps/jsonrpc/starlette_app.py new file mode 100644 index 000000000..ceaf5ced1 --- /dev/null +++ b/src/a2a/server/apps/jsonrpc/starlette_app.py @@ -0,0 +1,201 @@ +import logging + +from collections.abc import Awaitable, Callable +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from starlette.applications import Starlette + from starlette.routing import Route + + _package_starlette_installed = True + +else: + try: + from starlette.applications import Starlette + from starlette.routing import Route + + _package_starlette_installed = True + except ImportError: + Starlette = Any + Route = Any + + _package_starlette_installed = False + +from a2a.server.apps.jsonrpc.jsonrpc_app import ( + CallContextBuilder, + JSONRPCApplication, +) +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.jsonrpc_handler import RequestHandler +from a2a.types import AgentCard +from a2a.utils.constants import ( + AGENT_CARD_WELL_KNOWN_PATH, + DEFAULT_RPC_URL, + EXTENDED_AGENT_CARD_PATH, + PREV_AGENT_CARD_WELL_KNOWN_PATH, +) + + +logger = logging.getLogger(__name__) + + +class 
A2AStarletteApplication(JSONRPCApplication): + """A Starlette application implementing the A2A protocol server endpoints. + + Handles incoming JSON-RPC requests, routes them to the appropriate + handler methods, and manages response generation including Server-Sent Events + (SSE). + """ + + def __init__( # noqa: PLR0913 + self, + agent_card: AgentCard, + http_handler: RequestHandler, + extended_agent_card: AgentCard | None = None, + context_builder: CallContextBuilder | None = None, + card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] + | None = None, + extended_card_modifier: Callable[ + [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard + ] + | None = None, + max_content_length: int | None = 10 * 1024 * 1024, # 10MB + ) -> None: + """Initializes the A2AStarletteApplication. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + http_handler: The handler instance responsible for processing A2A + requests via http. + extended_agent_card: An optional, distinct AgentCard to be served + at the authenticated extended card endpoint. + context_builder: The CallContextBuilder used to construct the + ServerCallContext passed to the http_handler. If None, no + ServerCallContext is passed. + card_modifier: An optional callback to dynamically modify the public + agent card before it is served. + extended_card_modifier: An optional callback to dynamically modify + the extended agent card before it is served. It receives the + call context. + max_content_length: The maximum allowed content length for incoming + requests. Defaults to 10MB. Set to None for unbounded maximum. + """ + if not _package_starlette_installed: + raise ImportError( + 'Packages `starlette` and `sse-starlette` are required to use the' + ' `A2AStarletteApplication`. It can be added as a part of `a2a-sdk`' + ' optional dependencies, `a2a-sdk[http-server]`.' 
+ ) + super().__init__( + agent_card=agent_card, + http_handler=http_handler, + extended_agent_card=extended_agent_card, + context_builder=context_builder, + card_modifier=card_modifier, + extended_card_modifier=extended_card_modifier, + max_content_length=max_content_length, + ) + + def routes( + self, + agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, + rpc_url: str = DEFAULT_RPC_URL, + extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, + ) -> list[Route]: + """Returns the Starlette Routes for handling A2A requests. + + Args: + agent_card_url: The URL path for the agent card endpoint. + rpc_url: The URL path for the A2A JSON-RPC endpoint (POST requests). + extended_agent_card_url: The URL for the authenticated extended agent card endpoint. + + Returns: + A list of Starlette Route objects. + """ + app_routes = [ + Route( + rpc_url, + self._handle_requests, + methods=['POST'], + name='a2a_handler', + ), + Route( + agent_card_url, + self._handle_get_agent_card, + methods=['GET'], + name='agent_card', + ), + ] + + if agent_card_url == AGENT_CARD_WELL_KNOWN_PATH: + # For backward compatibility, serve the agent card at the deprecated path as well. + # TODO: remove in a future release + app_routes.append( + Route( + PREV_AGENT_CARD_WELL_KNOWN_PATH, + self._handle_get_agent_card, + methods=['GET'], + name='deprecated_agent_card', + ) + ) + + # TODO: deprecated endpoint to be removed in a future release + if self.agent_card.supports_authenticated_extended_card: + app_routes.append( + Route( + extended_agent_card_url, + self._handle_get_authenticated_extended_agent_card, + methods=['GET'], + name='authenticated_extended_agent_card', + ) + ) + return app_routes + + def add_routes_to_app( + self, + app: Starlette, + agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, + rpc_url: str = DEFAULT_RPC_URL, + extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, + ) -> None: + """Adds the routes to the Starlette application. 
+ + Args: + app: The Starlette application to add the routes to. + agent_card_url: The URL path for the agent card endpoint. + rpc_url: The URL path for the A2A JSON-RPC endpoint (POST requests). + extended_agent_card_url: The URL for the authenticated extended agent card endpoint. + """ + routes = self.routes( + agent_card_url=agent_card_url, + rpc_url=rpc_url, + extended_agent_card_url=extended_agent_card_url, + ) + app.routes.extend(routes) + + def build( + self, + agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, + rpc_url: str = DEFAULT_RPC_URL, + extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, + **kwargs: Any, + ) -> Starlette: + """Builds and returns the Starlette application instance. + + Args: + agent_card_url: The URL path for the agent card endpoint. + rpc_url: The URL path for the A2A JSON-RPC endpoint (POST requests). + extended_agent_card_url: The URL for the authenticated extended agent card endpoint. + **kwargs: Additional keyword arguments to pass to the Starlette constructor. + + Returns: + A configured Starlette application instance. 
+ """ + app = Starlette(**kwargs) + + self.add_routes_to_app( + app, agent_card_url, rpc_url, extended_agent_card_url + ) + + return app diff --git a/src/a2a/server/apps/rest/__init__.py b/src/a2a/server/apps/rest/__init__.py new file mode 100644 index 000000000..bafe4cb60 --- /dev/null +++ b/src/a2a/server/apps/rest/__init__.py @@ -0,0 +1,8 @@ +"""A2A REST Applications.""" + +from a2a.server.apps.rest.fastapi_app import A2ARESTFastAPIApplication + + +__all__ = [ + 'A2ARESTFastAPIApplication', +] diff --git a/src/a2a/server/apps/rest/fastapi_app.py b/src/a2a/server/apps/rest/fastapi_app.py new file mode 100644 index 000000000..12a03de84 --- /dev/null +++ b/src/a2a/server/apps/rest/fastapi_app.py @@ -0,0 +1,121 @@ +import logging + +from collections.abc import Awaitable, Callable +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from fastapi import APIRouter, FastAPI, Request, Response + from fastapi.responses import JSONResponse + + _package_fastapi_installed = True +else: + try: + from fastapi import APIRouter, FastAPI, Request, Response + from fastapi.responses import JSONResponse + + _package_fastapi_installed = True + except ImportError: + APIRouter = Any + FastAPI = Any + Request = Any + Response = Any + + _package_fastapi_installed = False + + +from a2a.server.apps.jsonrpc.jsonrpc_app import CallContextBuilder +from a2a.server.apps.rest.rest_adapter import RESTAdapter +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types import AgentCard +from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH + + +logger = logging.getLogger(__name__) + + +class A2ARESTFastAPIApplication: + """A FastAPI application implementing the A2A protocol server REST endpoints. + + Handles incoming REST requests, routes them to the appropriate + handler methods, and manages response generation including Server-Sent Events + (SSE). 
+ """ + + def __init__( # noqa: PLR0913 + self, + agent_card: AgentCard, + http_handler: RequestHandler, + extended_agent_card: AgentCard | None = None, + context_builder: CallContextBuilder | None = None, + card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] + | None = None, + extended_card_modifier: Callable[ + [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard + ] + | None = None, + ): + """Initializes the A2ARESTFastAPIApplication. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + http_handler: The handler instance responsible for processing A2A + requests via http. + extended_agent_card: An optional, distinct AgentCard to be served + at the authenticated extended card endpoint. + context_builder: The CallContextBuilder used to construct the + ServerCallContext passed to the http_handler. If None, no + ServerCallContext is passed. + card_modifier: An optional callback to dynamically modify the public + agent card before it is served. + extended_card_modifier: An optional callback to dynamically modify + the extended agent card before it is served. It receives the + call context. + """ + if not _package_fastapi_installed: + raise ImportError( + 'The `fastapi` package is required to use the' + ' `A2ARESTFastAPIApplication`. It can be added as a part of' + ' `a2a-sdk` optional dependencies, `a2a-sdk[http-server]`.' + ) + self._adapter = RESTAdapter( + agent_card=agent_card, + http_handler=http_handler, + extended_agent_card=extended_agent_card, + context_builder=context_builder, + card_modifier=card_modifier, + extended_card_modifier=extended_card_modifier, + ) + + def build( + self, + agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, + rpc_url: str = '', + **kwargs: Any, + ) -> FastAPI: + """Builds and returns the FastAPI application instance. + + Args: + agent_card_url: The URL for the agent card endpoint. + rpc_url: The URL for the A2A JSON-RPC endpoint. 
+ extended_agent_card_url: The URL for the authenticated extended agent card endpoint. + **kwargs: Additional keyword arguments to pass to the FastAPI constructor. + + Returns: + A configured FastAPI application instance. + """ + app = FastAPI(**kwargs) + router = APIRouter() + for route, callback in self._adapter.routes().items(): + router.add_api_route( + f'{rpc_url}{route[0]}', callback, methods=[route[1]] + ) + + @router.get(f'{rpc_url}{agent_card_url}') + async def get_agent_card(request: Request) -> Response: + card = await self._adapter.handle_get_agent_card(request) + return JSONResponse(card) + + app.include_router(router) + return app diff --git a/src/a2a/server/apps/rest/rest_adapter.py b/src/a2a/server/apps/rest/rest_adapter.py new file mode 100644 index 000000000..719085604 --- /dev/null +++ b/src/a2a/server/apps/rest/rest_adapter.py @@ -0,0 +1,252 @@ +import functools +import logging + +from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable +from typing import TYPE_CHECKING, Any + +from a2a.utils.helpers import maybe_await + + +if TYPE_CHECKING: + from sse_starlette.sse import EventSourceResponse + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + + _package_starlette_installed = True + +else: + try: + from sse_starlette.sse import EventSourceResponse + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + + _package_starlette_installed = True + except ImportError: + EventSourceResponse = Any + Request = Any + JSONResponse = Any + Response = Any + + _package_starlette_installed = False + +from a2a.server.apps.jsonrpc import ( + CallContextBuilder, + DefaultCallContextBuilder, +) +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.request_handlers.rest_handler import RESTHandler +from a2a.types import AgentCard, 
AuthenticatedExtendedCardNotConfiguredError
+from a2a.utils.error_handlers import (
+    rest_error_handler,
+    rest_stream_error_handler,
+)
+from a2a.utils.errors import InvalidRequestError, ServerError
+
+
+logger = logging.getLogger(__name__)
+
+
+class RESTAdapter:
+    """Adapter to make RequestHandler work with RESTful API.
+
+    Defines REST request processors and the routes to attach them to, as well as
+    manages response generation including Server-Sent Events (SSE).
+    """
+
+    def __init__(  # noqa: PLR0913
+        self,
+        agent_card: AgentCard,
+        http_handler: RequestHandler,
+        extended_agent_card: AgentCard | None = None,
+        context_builder: CallContextBuilder | None = None,
+        card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard]
+        | None = None,
+        extended_card_modifier: Callable[
+            [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard
+        ]
+        | None = None,
+    ):
+        """Initializes the RESTAdapter.
+
+        Args:
+            agent_card: The AgentCard describing the agent's capabilities.
+            http_handler: The handler instance responsible for processing A2A
+                requests via http.
+            extended_agent_card: An optional, distinct AgentCard to be served
+                at the authenticated extended card endpoint.
+            context_builder: The CallContextBuilder used to construct the
+                ServerCallContext passed to the http_handler. If None, no
+                ServerCallContext is passed.
+            card_modifier: An optional callback to dynamically modify the public
+                agent card before it is served.
+            extended_card_modifier: An optional callback to dynamically modify
+                the extended agent card before it is served. It receives the
+                call context.
+        """
+        if not _package_starlette_installed:
+            raise ImportError(
+                'Packages `starlette` and `sse-starlette` are required to use'
+                ' the `RESTAdapter`. They can be added as a part of `a2a-sdk`'
+                ' optional dependencies, `a2a-sdk[http-server]`.'
+ ) + self.agent_card = agent_card + self.extended_agent_card = extended_agent_card + self.card_modifier = card_modifier + self.extended_card_modifier = extended_card_modifier + self.handler = RESTHandler( + agent_card=agent_card, request_handler=http_handler + ) + self._context_builder = context_builder or DefaultCallContextBuilder() + + @rest_error_handler + async def _handle_request( + self, + method: Callable[[Request, ServerCallContext], Awaitable[Any]], + request: Request, + ) -> Response: + call_context = self._context_builder.build(request) + response = await method(request, call_context) + return JSONResponse(content=response) + + @rest_stream_error_handler + async def _handle_streaming_request( + self, + method: Callable[[Request, ServerCallContext], AsyncIterable[Any]], + request: Request, + ) -> EventSourceResponse: + # Pre-consume and cache the request body to prevent deadlock in streaming context + # This is required because Starlette's request.body() can only be consumed once, + # and attempting to consume it after EventSourceResponse starts causes deadlock + try: + await request.body() + except (ValueError, RuntimeError, OSError) as e: + raise ServerError( + error=InvalidRequestError( + message=f'Failed to pre-consume request body: {e}' + ) + ) from e + + call_context = self._context_builder.build(request) + + async def event_generator( + stream: AsyncIterable[Any], + ) -> AsyncIterator[dict[str, dict[str, Any]]]: + async for item in stream: + yield {'data': item} + + return EventSourceResponse( + event_generator(method(request, call_context)) + ) + + async def handle_get_agent_card( + self, request: Request, call_context: ServerCallContext | None = None + ) -> dict[str, Any]: + """Handles GET requests for the agent card endpoint. + + Args: + request: The incoming Starlette Request object. + call_context: ServerCallContext + + Returns: + A JSONResponse containing the agent card data. 
+ """ + card_to_serve = self.agent_card + if self.card_modifier: + card_to_serve = await maybe_await(self.card_modifier(card_to_serve)) + + return card_to_serve.model_dump(mode='json', exclude_none=True) + + async def handle_authenticated_agent_card( + self, request: Request, call_context: ServerCallContext | None = None + ) -> dict[str, Any]: + """Hook for per credential agent card response. + + If a dynamic card is needed based on the credentials provided in the request + override this method and return the customized content. + + Args: + request: The incoming Starlette Request object. + call_context: ServerCallContext + + Returns: + A JSONResponse containing the authenticated card. + """ + if not self.agent_card.supports_authenticated_extended_card: + raise ServerError( + error=AuthenticatedExtendedCardNotConfiguredError( + message='Authenticated card not supported' + ) + ) + card_to_serve = self.extended_agent_card + + if not card_to_serve: + card_to_serve = self.agent_card + + if self.extended_card_modifier: + context = self._context_builder.build(request) + card_to_serve = await maybe_await( + self.extended_card_modifier(card_to_serve, context) + ) + elif self.card_modifier: + card_to_serve = await maybe_await(self.card_modifier(card_to_serve)) + + return card_to_serve.model_dump(mode='json', exclude_none=True) + + def routes(self) -> dict[tuple[str, str], Callable[[Request], Any]]: + """Constructs a dictionary of API routes and their corresponding handlers. + + This method maps URL paths and HTTP methods to the appropriate handler + functions from the RESTHandler. It can be used by a web framework + (like Starlette or FastAPI) to set up the application's endpoints. + + Returns: + A dictionary where each key is a tuple of (path, http_method) and + the value is the callable handler for that route. 
+ """ + routes: dict[tuple[str, str], Callable[[Request], Any]] = { + ('/v1/message:send', 'POST'): functools.partial( + self._handle_request, self.handler.on_message_send + ), + ('/v1/message:stream', 'POST'): functools.partial( + self._handle_streaming_request, + self.handler.on_message_send_stream, + ), + ('/v1/tasks/{id}:cancel', 'POST'): functools.partial( + self._handle_request, self.handler.on_cancel_task + ), + ('/v1/tasks/{id}:subscribe', 'GET'): functools.partial( + self._handle_streaming_request, + self.handler.on_resubscribe_to_task, + ), + ('/v1/tasks/{id}', 'GET'): functools.partial( + self._handle_request, self.handler.on_get_task + ), + ( + '/v1/tasks/{id}/pushNotificationConfigs/{push_id}', + 'GET', + ): functools.partial( + self._handle_request, self.handler.get_push_notification + ), + ( + '/v1/tasks/{id}/pushNotificationConfigs', + 'POST', + ): functools.partial( + self._handle_request, self.handler.set_push_notification + ), + ( + '/v1/tasks/{id}/pushNotificationConfigs', + 'GET', + ): functools.partial( + self._handle_request, self.handler.list_push_notifications + ), + ('/v1/tasks', 'GET'): functools.partial( + self._handle_request, self.handler.list_tasks + ), + } + if self.agent_card.supports_authenticated_extended_card: + routes[('/v1/card', 'GET')] = functools.partial( + self._handle_request, self.handle_authenticated_agent_card + ) + + return routes diff --git a/src/a2a/server/apps/starlette_app.py b/src/a2a/server/apps/starlette_app.py deleted file mode 100644 index 84ef75774..000000000 --- a/src/a2a/server/apps/starlette_app.py +++ /dev/null @@ -1,451 +0,0 @@ -import contextlib -import json -import logging -import traceback - -from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator -from typing import Any - -from pydantic import ValidationError -from sse_starlette.sse import EventSourceResponse -from starlette.applications import Starlette -from starlette.authentication import BaseUser -from starlette.requests 
import Request -from starlette.responses import JSONResponse, Response -from starlette.routing import Route - -from a2a.auth.user import UnauthenticatedUser -from a2a.auth.user import User as A2AUser -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.jsonrpc_handler import JSONRPCHandler -from a2a.server.request_handlers.request_handler import RequestHandler -from a2a.types import ( - A2AError, - A2ARequest, - AgentCard, - CancelTaskRequest, - GetTaskPushNotificationConfigRequest, - GetTaskRequest, - InternalError, - InvalidRequestError, - JSONParseError, - JSONRPCError, - JSONRPCErrorResponse, - JSONRPCResponse, - SendMessageRequest, - SendStreamingMessageRequest, - SendStreamingMessageResponse, - SetTaskPushNotificationConfigRequest, - TaskResubscriptionRequest, - UnsupportedOperationError, -) -from a2a.utils.errors import MethodNotImplementedError - - -logger = logging.getLogger(__name__) - -# Register Starlette User as an implementation of a2a.auth.user.User -A2AUser.register(BaseUser) - - -class CallContextBuilder(ABC): - """A class for building ServerCallContexts using the Starlette Request.""" - - @abstractmethod - def build(self, request: Request) -> ServerCallContext: - """Builds a ServerCallContext from a Starlette Request.""" - - -class DefaultCallContextBuilder(CallContextBuilder): - """A default implementation of CallContextBuilder.""" - - def build(self, request: Request) -> ServerCallContext: - user = UnauthenticatedUser() - state = {} - with contextlib.suppress(Exception): - user = request.user - state['auth'] = request.auth - return ServerCallContext(user=user, state=state) - - -class A2AStarletteApplication: - """A Starlette application implementing the A2A protocol server endpoints. - - Handles incoming JSON-RPC requests, routes them to the appropriate - handler methods, and manages response generation including Server-Sent Events - (SSE). 
- """ - - def __init__( - self, - agent_card: AgentCard, - http_handler: RequestHandler, - extended_agent_card: AgentCard | None = None, - context_builder: CallContextBuilder | None = None, - ): - """Initializes the A2AStarletteApplication. - - Args: - agent_card: The AgentCard describing the agent's capabilities. - http_handler: The handler instance responsible for processing A2A - requests via http. - extended_agent_card: An optional, distinct AgentCard to be served - at the authenticated extended card endpoint. - context_builder: The CallContextBuilder used to construct the - ServerCallContext passed to the http_handler. If None, no - ServerCallContext is passed. - """ - self.agent_card = agent_card - self.extended_agent_card = extended_agent_card - self.handler = JSONRPCHandler( - agent_card=agent_card, request_handler=http_handler - ) - if ( - self.agent_card.supportsAuthenticatedExtendedCard - and self.extended_agent_card is None - ): - logger.error( - 'AgentCard.supportsAuthenticatedExtendedCard is True, but no extended_agent_card was provided. The /agent/authenticatedExtendedCard endpoint will return 404.' - ) - self._context_builder = context_builder or DefaultCallContextBuilder() - - def _generate_error_response( - self, request_id: str | int | None, error: JSONRPCError | A2AError - ) -> JSONResponse: - """Creates a Starlette JSONResponse for a JSON-RPC error. - - Logs the error based on its type. - - Args: - request_id: The ID of the request that caused the error. - error: The `JSONRPCError` or `A2AError` object. - - Returns: - A `JSONResponse` object formatted as a JSON-RPC error response. 
- """ - error_resp = JSONRPCErrorResponse( - id=request_id, - error=error if isinstance(error, JSONRPCError) else error.root, - ) - - log_level = ( - logging.ERROR - if not isinstance(error, A2AError) - or isinstance(error.root, InternalError) - else logging.WARNING - ) - logger.log( - log_level, - f'Request Error (ID: {request_id}): ' - f"Code={error_resp.error.code}, Message='{error_resp.error.message}'" - f'{", Data=" + str(error_resp.error.data) if hasattr(error, "data") and error_resp.error.data else ""}', - ) - return JSONResponse( - error_resp.model_dump(mode='json', exclude_none=True), - status_code=200, - ) - - async def _handle_requests(self, request: Request) -> Response: - """Handles incoming POST requests to the main A2A endpoint. - - Parses the request body as JSON, validates it against A2A request types, - dispatches it to the appropriate handler method, and returns the response. - Handles JSON parsing errors, validation errors, and other exceptions, - returning appropriate JSON-RPC error responses. - - Args: - request: The incoming Starlette Request object. - - Returns: - A Starlette Response object (JSONResponse or EventSourceResponse). - - Raises: - (Implicitly handled): Various exceptions are caught and converted - into JSON-RPC error responses by this method. 
- """ - request_id = None - body = None - - try: - body = await request.json() - a2a_request = A2ARequest.model_validate(body) - call_context = self._context_builder.build(request) - - request_id = a2a_request.root.id - request_obj = a2a_request.root - - if isinstance( - request_obj, - TaskResubscriptionRequest | SendStreamingMessageRequest, - ): - return await self._process_streaming_request( - request_id, a2a_request, call_context - ) - - return await self._process_non_streaming_request( - request_id, a2a_request, call_context - ) - except MethodNotImplementedError: - traceback.print_exc() - return self._generate_error_response( - request_id, A2AError(root=UnsupportedOperationError()) - ) - except json.decoder.JSONDecodeError as e: - traceback.print_exc() - return self._generate_error_response( - None, A2AError(root=JSONParseError(message=str(e))) - ) - except ValidationError as e: - traceback.print_exc() - return self._generate_error_response( - request_id, - A2AError(root=InvalidRequestError(data=json.loads(e.json()))), - ) - except Exception as e: - logger.error(f'Unhandled exception: {e}') - traceback.print_exc() - return self._generate_error_response( - request_id, A2AError(root=InternalError(message=str(e))) - ) - - async def _process_streaming_request( - self, - request_id: str | int | None, - a2a_request: A2ARequest, - context: ServerCallContext, - ) -> Response: - """Processes streaming requests (message/stream or tasks/resubscribe). - - Args: - request_id: The ID of the request. - a2a_request: The validated A2ARequest object. - - Returns: - An `EventSourceResponse` object to stream results to the client. 
- """ - request_obj = a2a_request.root - handler_result: Any = None - if isinstance( - request_obj, - SendStreamingMessageRequest, - ): - handler_result = self.handler.on_message_send_stream( - request_obj, context - ) - elif isinstance(request_obj, TaskResubscriptionRequest): - handler_result = self.handler.on_resubscribe_to_task( - request_obj, context - ) - - return self._create_response(handler_result) - - async def _process_non_streaming_request( - self, - request_id: str | int | None, - a2a_request: A2ARequest, - context: ServerCallContext, - ) -> Response: - """Processes non-streaming requests (message/send, tasks/get, tasks/cancel, tasks/pushNotificationConfig/*). - - Args: - request_id: The ID of the request. - a2a_request: The validated A2ARequest object. - - Returns: - A `JSONResponse` object containing the result or error. - """ - request_obj = a2a_request.root - handler_result: Any = None - match request_obj: - case SendMessageRequest(): - handler_result = await self.handler.on_message_send( - request_obj, context - ) - case CancelTaskRequest(): - handler_result = await self.handler.on_cancel_task( - request_obj, context - ) - case GetTaskRequest(): - handler_result = await self.handler.on_get_task( - request_obj, context - ) - case SetTaskPushNotificationConfigRequest(): - handler_result = await self.handler.set_push_notification( - request_obj, - context, - ) - case GetTaskPushNotificationConfigRequest(): - handler_result = await self.handler.get_push_notification( - request_obj, - context, - ) - case _: - logger.error( - f'Unhandled validated request type: {type(request_obj)}' - ) - error = UnsupportedOperationError( - message=f'Request type {type(request_obj).__name__} is unknown.' 
- ) - handler_result = JSONRPCErrorResponse( - id=request_id, error=error - ) - - return self._create_response(handler_result) - - def _create_response( - self, - handler_result: ( - AsyncGenerator[SendStreamingMessageResponse] - | JSONRPCErrorResponse - | JSONRPCResponse - ), - ) -> Response: - """Creates a Starlette Response based on the result from the request handler. - - Handles: - - AsyncGenerator for Server-Sent Events (SSE). - - JSONRPCErrorResponse for explicit errors returned by handlers. - - Pydantic RootModels (like GetTaskResponse) containing success or error - payloads. - - Args: - handler_result: The result from a request handler method. Can be an - async generator for streaming or a Pydantic model for non-streaming. - - Returns: - A Starlette JSONResponse or EventSourceResponse. - """ - if isinstance(handler_result, AsyncGenerator): - # Result is a stream of SendStreamingMessageResponse objects - async def event_generator( - stream: AsyncGenerator[SendStreamingMessageResponse], - ) -> AsyncGenerator[dict[str, str]]: - async for item in stream: - yield {'data': item.root.model_dump_json(exclude_none=True)} - - return EventSourceResponse(event_generator(handler_result)) - if isinstance(handler_result, JSONRPCErrorResponse): - return JSONResponse( - handler_result.model_dump( - mode='json', - exclude_none=True, - ) - ) - - return JSONResponse( - handler_result.root.model_dump(mode='json', exclude_none=True) - ) - - async def _handle_get_agent_card(self, request: Request) -> JSONResponse: - """Handles GET requests for the agent card endpoint. - - Args: - request: The incoming Starlette Request object. - - Returns: - A JSONResponse containing the agent card data. - """ - # The public agent card is a direct serialization of the agent_card - # provided at initialization. 
- return JSONResponse( - self.agent_card.model_dump(mode='json', exclude_none=True) - ) - - async def _handle_get_authenticated_extended_agent_card( - self, request: Request - ) -> JSONResponse: - """Handles GET requests for the authenticated extended agent card.""" - if not self.agent_card.supportsAuthenticatedExtendedCard: - return JSONResponse( - {'error': 'Extended agent card not supported or not enabled.'}, - status_code=404, - ) - - # If an explicit extended_agent_card is provided, serve that. - if self.extended_agent_card: - return JSONResponse( - self.extended_agent_card.model_dump( - mode='json', exclude_none=True - ) - ) - # If supportsAuthenticatedExtendedCard is true, but no specific - # extended_agent_card was provided during server initialization, - # return a 404 - return JSONResponse( - { - 'error': 'Authenticated extended agent card is supported but not configured on the server.' - }, - status_code=404, - ) - - def routes( - self, - agent_card_url: str = '/.well-known/agent.json', - extended_agent_card_url: str = '/agent/authenticatedExtendedCard', - rpc_url: str = '/', - ) -> list[Route]: - """Returns the Starlette Routes for handling A2A requests. - - Args: - agent_card_url: The URL path for the agent card endpoint. - rpc_url: The URL path for the A2A JSON-RPC endpoint (POST requests). - extended_agent_card_url: The URL for the authenticated extended agent card endpoint. - - Returns: - A list of Starlette Route objects. 
- """ - app_routes = [ - Route( - rpc_url, - self._handle_requests, - methods=['POST'], - name='a2a_handler', - ), - Route( - agent_card_url, - self._handle_get_agent_card, - methods=['GET'], - name='agent_card', - ), - ] - - if self.agent_card.supportsAuthenticatedExtendedCard: - app_routes.append( - Route( - extended_agent_card_url, - self._handle_get_authenticated_extended_agent_card, - methods=['GET'], - name='authenticated_extended_agent_card', - ) - ) - return app_routes - - def build( - self, - agent_card_url: str = '/.well-known/agent.json', - extended_agent_card_url: str = '/agent/authenticatedExtendedCard', - rpc_url: str = '/', - **kwargs: Any, - ) -> Starlette: - """Builds and returns the Starlette application instance. - - Args: - agent_card_url: The URL path for the agent card endpoint. - rpc_url: The URL path for the A2A JSON-RPC endpoint (POST requests). - extended_agent_card_url: The URL for the authenticated extended agent card endpoint. - **kwargs: Additional keyword arguments to pass to the Starlette - constructor. - - Returns: - A configured Starlette application instance. 
- """ - app_routes = self.routes( - agent_card_url, extended_agent_card_url, rpc_url - ) - if 'routes' in kwargs: - kwargs['routes'].extend(app_routes) - else: - kwargs['routes'] = app_routes - - return Starlette(**kwargs) diff --git a/src/a2a/server/context.py b/src/a2a/server/context.py index ce7f56bd3..2b34cefee 100644 --- a/src/a2a/server/context.py +++ b/src/a2a/server/context.py @@ -21,3 +21,5 @@ class ServerCallContext(BaseModel): state: State = Field(default={}) user: User = Field(default=UnauthenticatedUser()) + requested_extensions: set[str] = Field(default_factory=set) + activated_extensions: set[str] = Field(default_factory=set) diff --git a/src/a2a/server/events/event_consumer.py b/src/a2a/server/events/event_consumer.py index 518680695..de0f6bd9d 100644 --- a/src/a2a/server/events/event_consumer.py +++ b/src/a2a/server/events/event_consumer.py @@ -4,6 +4,8 @@ from collections.abc import AsyncGenerator +from pydantic import ValidationError + from a2a.server.events.event_queue import Event, EventQueue from a2a.types import ( InternalError, @@ -17,7 +19,7 @@ # This is an alias to the exception for closed queue -QueueClosed = asyncio.QueueEmpty +QueueClosed: type[Exception] = asyncio.QueueEmpty # When using python 3.13 or higher, the closed queue signal is QueueShutdown if sys.version_info >= (3, 13): @@ -60,7 +62,7 @@ async def consume_one(self) -> Event: InternalError(message='Agent did not return any response') ) from e - logger.debug(f'Dequeued event of type: {type(event)} in consume_one.') + logger.debug('Dequeued event of type: %s in consume_one.', type(event)) self.queue.task_done() @@ -93,7 +95,7 @@ async def consume_all(self) -> AsyncGenerator[Event]: self.queue.dequeue_event(), timeout=self._timeout ) logger.debug( - f'Dequeued event of type: {type(event)} in consume_all.' 
+ 'Dequeued event of type: %s in consume_all.', type(event) ) self.queue.task_done() logger.debug( @@ -112,6 +114,7 @@ async def consume_all(self) -> AsyncGenerator[Event]: TaskState.failed, TaskState.rejected, TaskState.unknown, + TaskState.input_required, ) ) ) @@ -122,20 +125,30 @@ async def consume_all(self) -> AsyncGenerator[Event]: # other part is waiting for an event or a closed queue. if is_final_event: logger.debug('Stopping event consumption in consume_all.') - await self.queue.close() + await self.queue.close(True) yield event break yield event except TimeoutError: # continue polling until there is a final event continue - except QueueClosed: + except asyncio.TimeoutError: # pyright: ignore [reportUnusedExcept] + # This class was made an alias of built-in TimeoutError after 3.11 + continue + except (QueueClosed, asyncio.QueueEmpty): # Confirm that the queue is closed, e.g. we aren't on # python 3.12 and get a queue empty error on an open queue if self.queue.is_closed(): break + except ValidationError: + logger.exception('Invalid event format received') + continue + except Exception as e: + logger.exception('Stopping event consumption due to exception') + self._exception = e + continue - def agent_task_callback(self, agent_task: asyncio.Task[None]): + def agent_task_callback(self, agent_task: asyncio.Task[None]) -> None: """Callback to handle exceptions from the agent's execution task. If the agent's asyncio task raises an exception, this callback is @@ -145,5 +158,5 @@ def agent_task_callback(self, agent_task: asyncio.Task[None]): agent_task: The asyncio.Task that completed. 
""" logger.debug('Agent task callback triggered.') - if agent_task.exception() is not None: + if not agent_task.cancelled() and agent_task.done(): self._exception = agent_task.exception() diff --git a/src/a2a/server/events/event_queue.py b/src/a2a/server/events/event_queue.py index ba8d96471..357fcb02e 100644 --- a/src/a2a/server/events/event_queue.py +++ b/src/a2a/server/events/event_queue.py @@ -14,14 +14,11 @@ logger = logging.getLogger(__name__) -Event = ( - Message - | Task - | TaskStatusUpdateEvent - | TaskArtifactUpdateEvent -) +Event = Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent """Type alias for events that can be enqueued.""" +DEFAULT_MAX_QUEUE_SIZE = 1024 + @trace_class(kind=SpanKind.SERVER) class EventQueue: @@ -32,27 +29,37 @@ class EventQueue: to create child queues that receive the same events. """ - def __init__(self) -> None: + def __init__(self, max_queue_size: int = DEFAULT_MAX_QUEUE_SIZE) -> None: """Initializes the EventQueue.""" - self.queue: asyncio.Queue[Event] = asyncio.Queue() + # Make sure the `asyncio.Queue` is bounded. + # If it's unbounded (maxsize=0), then `queue.put()` never needs to wait, + # and so the streaming won't work correctly. + if max_queue_size <= 0: + raise ValueError('max_queue_size must be greater than 0') + + self.queue: asyncio.Queue[Event] = asyncio.Queue(maxsize=max_queue_size) self._children: list[EventQueue] = [] self._is_closed = False self._lock = asyncio.Lock() logger.debug('EventQueue initialized.') - def enqueue_event(self, event: Event): + async def enqueue_event(self, event: Event) -> None: """Enqueues an event to this queue and all its children. Args: event: The event object to enqueue. """ - if self._is_closed: - logger.warning('Queue is closed. Event will not be enqueued.') - return - logger.debug(f'Enqueuing event of type: {type(event)}') - self.queue.put_nowait(event) + async with self._lock: + if self._is_closed: + logger.warning('Queue is closed. 
Event will not be enqueued.') + return + + logger.debug('Enqueuing event of type: %s', type(event)) + + # Make sure to use put instead of put_nowait to avoid blocking the event loop. + await self.queue.put(event) for child in self._children: - child.enqueue_event(event) + await child.enqueue_event(event) async def dequeue_event(self, no_wait: bool = False) -> Event: """Dequeues an event from the queue. @@ -66,7 +73,7 @@ async def dequeue_event(self, no_wait: bool = False) -> Event: closed but when there are no events on the queue. Two ways to avoid this are to call this with no_wait = True which won't block, but is the callers responsibility to retry as appropriate. Alternatively, one can - use a async Task management solution to cancel the get task if the queue + use an async Task management solution to cancel the get task if the queue has closed or some other condition is met. The implementation of the EventConsumer uses an async.wait with a timeout to abort the dequeue_event call and retry, when it will return with a closed error. @@ -83,7 +90,12 @@ async def dequeue_event(self, no_wait: bool = False) -> Event: asyncio.QueueShutDown: If the queue has been closed and is empty. """ async with self._lock: - if self._is_closed and self.queue.empty(): + if ( + sys.version_info < (3, 13) + and self._is_closed + and self.queue.empty() + ): + # On 3.13+, skip early raise; await self.queue.get() will raise QueueShutDown after shutdown() logger.warning('Queue is closed. 
Event will not be dequeued.') raise asyncio.QueueEmpty('Queue is closed.') @@ -91,13 +103,13 @@ async def dequeue_event(self, no_wait: bool = False) -> Event: logger.debug('Attempting to dequeue event (no_wait=True).') event = self.queue.get_nowait() logger.debug( - f'Dequeued event (no_wait=True) of type: {type(event)}' + 'Dequeued event (no_wait=True) of type: %s', type(event) ) return event logger.debug('Attempting to dequeue event (waiting).') event = await self.queue.get() - logger.debug(f'Dequeued event (waited) of type: {type(event)}') + logger.debug('Dequeued event (waited) of type: %s', type(event)) return event def task_done(self) -> None: @@ -120,30 +132,113 @@ def tap(self) -> 'EventQueue': self._children.append(queue) return queue - async def close(self): - """Closes the queue for future push events. + async def close(self, immediate: bool = False) -> None: + """Closes the queue for future push events and also closes all child queues. + + Once closed, no new events can be enqueued. Behavior is consistent across + Python versions: + - Python >= 3.13: Uses `asyncio.Queue.shutdown` to stop the queue. With + `immediate=True` the queue is shut down and pending events are cleared; with + `immediate=False` the queue is shut down and we wait for it to drain via + `queue.join()`. + - Python < 3.13: Emulates the same semantics by clearing on `immediate=True` + or awaiting `queue.join()` on `immediate=False`. + + Consumers attempting to dequeue after close on an empty queue will observe + `asyncio.QueueShutDown` on Python >= 3.13 and `asyncio.QueueEmpty` on + Python < 3.13. + + Args: + immediate (bool): + - True: Immediately closes the queue and clears all unprocessed events without waiting for them to be consumed. This is suitable for scenarios where you need to forcefully interrupt and quickly release resources. + - False (default): Gracefully closes the queue, waiting for all queued events to be processed (i.e., the queue is drained) before closing. 
This is suitable when you want to ensure all events are handled. - Once closed, `dequeue_event` will eventually raise `asyncio.QueueShutDown` - when the queue is empty. Also closes all child queues. """ logger.debug('Closing EventQueue.') async with self._lock: # If already closed, just return. - if self._is_closed: + if self._is_closed and not immediate: return - self._is_closed = True - # If using python 3.13 or higher, use the shutdown method + if not self._is_closed: + self._is_closed = True + # If using python 3.13 or higher, use shutdown but match <3.13 semantics if sys.version_info >= (3, 13): - self.queue.shutdown() - for child in self._children: - child.close() + if immediate: + # Immediate: stop queue and clear any pending events, then close children + self.queue.shutdown(True) + await self.clear_events(True) + for child in self._children: + await child.close(True) + return + # Graceful: prevent further gets/puts via shutdown, then wait for drain and children + self.queue.shutdown(False) + await asyncio.gather( + self.queue.join(), *(child.close() for child in self._children) + ) # Otherwise, join the queue else: - tasks = [asyncio.create_task(self.queue.join())] - for child in self._children: - tasks.append(asyncio.create_task(child.close())) - await asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED) + if immediate: + await self.clear_events(True) + for child in self._children: + await child.close(immediate) + return + await asyncio.gather( + self.queue.join(), *(child.close() for child in self._children) + ) def is_closed(self) -> bool: """Checks if the queue is closed.""" return self._is_closed + + async def clear_events(self, clear_child_queues: bool = True) -> None: + """Clears all events from the current queue and optionally all child queues. + + This method removes all pending events from the queue without processing them. + Child queues can be optionally cleared based on the clear_child_queues parameter. 
+ + Args: + clear_child_queues: If True (default), clear all child queues as well. + If False, only clear the current queue, leaving child queues untouched. + """ + logger.debug('Clearing all events from EventQueue and child queues.') + + # Clear all events from the queue, even if closed + cleared_count = 0 + async with self._lock: + try: + while True: + event = self.queue.get_nowait() + logger.debug( + 'Discarding unprocessed event of type: %s, content: %s', + type(event), + event, + ) + self.queue.task_done() + cleared_count += 1 + except asyncio.QueueEmpty: + pass + except Exception as e: + # Handle Python 3.13+ QueueShutDown + if ( + sys.version_info >= (3, 13) + and type(e).__name__ == 'QueueShutDown' + ): + pass + else: + raise + + if cleared_count > 0: + logger.debug( + 'Cleared %d unprocessed events from EventQueue.', + cleared_count, + ) + + # Clear all child queues (lock released before awaiting child tasks) + if clear_child_queues and self._children: + child_tasks = [ + asyncio.create_task(child.clear_events()) + for child in self._children + ] + + if child_tasks: + await asyncio.gather(*child_tasks, return_exceptions=True) diff --git a/src/a2a/server/events/in_memory_queue_manager.py b/src/a2a/server/events/in_memory_queue_manager.py index 7d7dc861b..53a3b7dd2 100644 --- a/src/a2a/server/events/in_memory_queue_manager.py +++ b/src/a2a/server/events/in_memory_queue_manager.py @@ -26,7 +26,7 @@ def __init__(self) -> None: self._task_queue: dict[str, EventQueue] = {} self._lock = asyncio.Lock() - async def add(self, task_id: str, queue: EventQueue): + async def add(self, task_id: str, queue: EventQueue) -> None: """Adds a new event queue for a task ID. 
Raises: @@ -34,7 +34,7 @@ async def add(self, task_id: str, queue: EventQueue): """ async with self._lock: if task_id in self._task_queue: - raise TaskQueueExists() + raise TaskQueueExists self._task_queue[task_id] = queue async def get(self, task_id: str) -> EventQueue | None: @@ -59,7 +59,7 @@ async def tap(self, task_id: str) -> EventQueue | None: return None return self._task_queue[task_id].tap() - async def close(self, task_id: str): + async def close(self, task_id: str) -> None: """Closes and removes the event queue for a task ID. Raises: @@ -67,7 +67,7 @@ async def close(self, task_id: str): """ async with self._lock: if task_id not in self._task_queue: - raise NoTaskQueue() + raise NoTaskQueue queue = self._task_queue.pop(task_id) await queue.close() diff --git a/src/a2a/server/events/queue_manager.py b/src/a2a/server/events/queue_manager.py index 7330a0978..ed69aae68 100644 --- a/src/a2a/server/events/queue_manager.py +++ b/src/a2a/server/events/queue_manager.py @@ -7,7 +7,7 @@ class QueueManager(ABC): """Interface for managing the event queue lifecycles per task.""" @abstractmethod - async def add(self, task_id: str, queue: EventQueue): + async def add(self, task_id: str, queue: EventQueue) -> None: """Adds a new event queue associated with a task ID.""" @abstractmethod @@ -19,7 +19,7 @@ async def tap(self, task_id: str) -> EventQueue | None: """Creates a child event queue (tap) for an existing task ID.""" @abstractmethod - async def close(self, task_id: str): + async def close(self, task_id: str) -> None: """Closes and removes the event queue for a task ID.""" @abstractmethod @@ -27,9 +27,9 @@ async def create_or_tap(self, task_id: str) -> EventQueue: """Creates a queue if one doesn't exist, otherwise taps the existing one.""" -class TaskQueueExists(Exception): +class TaskQueueExists(Exception): # noqa: N818 """Exception raised when attempting to add a queue for a task ID that already exists.""" -class NoTaskQueue(Exception): +class 
NoTaskQueue(Exception): # noqa: N818 """Exception raised when attempting to access or close a queue for a task ID that does not exist.""" diff --git a/src/a2a/server/id_generator.py b/src/a2a/server/id_generator.py new file mode 100644 index 000000000..c523adc97 --- /dev/null +++ b/src/a2a/server/id_generator.py @@ -0,0 +1,28 @@ +import uuid + +from abc import ABC, abstractmethod + +from pydantic import BaseModel + + +class IDGeneratorContext(BaseModel): + """Context for providing additional information to ID generators.""" + + task_id: str | None = None + context_id: str | None = None + + +class IDGenerator(ABC): + """Interface for generating unique identifiers.""" + + @abstractmethod + def generate(self, context: IDGeneratorContext) -> str: + pass + + +class UUIDGenerator(IDGenerator): + """UUID implementation of the IDGenerator interface.""" + + def generate(self, context: IDGeneratorContext) -> str: + """Generates a random UUID, ignoring the context.""" + return str(uuid.uuid4()) diff --git a/src/a2a/server/models.py b/src/a2a/server/models.py new file mode 100644 index 000000000..4b0f7504c --- /dev/null +++ b/src/a2a/server/models.py @@ -0,0 +1,256 @@ +from typing import TYPE_CHECKING, Any, Generic, TypeVar + + +if TYPE_CHECKING: + from typing_extensions import override +else: + + def override(func): # noqa: ANN001, ANN201 + """Override decorator.""" + return func + + +from pydantic import BaseModel + +from a2a.types import Artifact, Message, TaskStatus + + +try: + from sqlalchemy import JSON, Dialect, LargeBinary, String + from sqlalchemy.orm import ( + DeclarativeBase, + Mapped, + declared_attr, + mapped_column, + ) + from sqlalchemy.types import TypeDecorator +except ImportError as e: + raise ImportError( + 'Database models require SQLAlchemy. 
' + 'Install with one of: ' + "'pip install a2a-sdk[postgresql]', " + "'pip install a2a-sdk[mysql]', " + "'pip install a2a-sdk[sqlite]', " + "or 'pip install a2a-sdk[sql]'" + ) from e + + +T = TypeVar('T', bound=BaseModel) + + +class PydanticType(TypeDecorator[T], Generic[T]): + """SQLAlchemy type that handles Pydantic model serialization.""" + + impl = JSON + cache_ok = True + + def __init__(self, pydantic_type: type[T], **kwargs: dict[str, Any]): + """Initialize the PydanticType. + + Args: + pydantic_type: The Pydantic model type to handle. + **kwargs: Additional arguments for TypeDecorator. + """ + self.pydantic_type = pydantic_type + super().__init__(**kwargs) + + def process_bind_param( + self, value: T | None, dialect: Dialect + ) -> dict[str, Any] | None: + """Convert Pydantic model to a JSON-serializable dictionary for the database.""" + if value is None: + return None + return ( + value.model_dump(mode='json') + if isinstance(value, BaseModel) + else value + ) + + def process_result_value( + self, value: dict[str, Any] | None, dialect: Dialect + ) -> T | None: + """Convert a JSON-like dictionary from the database back to a Pydantic model.""" + if value is None: + return None + return self.pydantic_type.model_validate(value) + + +class PydanticListType(TypeDecorator, Generic[T]): + """SQLAlchemy type that handles lists of Pydantic models.""" + + impl = JSON + cache_ok = True + + def __init__(self, pydantic_type: type[T], **kwargs: dict[str, Any]): + """Initialize the PydanticListType. + + Args: + pydantic_type: The Pydantic model type for items in the list. + **kwargs: Additional arguments for TypeDecorator. 
+ """ + self.pydantic_type = pydantic_type + super().__init__(**kwargs) + + def process_bind_param( + self, value: list[T] | None, dialect: Dialect + ) -> list[dict[str, Any]] | None: + """Convert a list of Pydantic models to a JSON-serializable list for the DB.""" + if value is None: + return None + return [ + item.model_dump(mode='json') + if isinstance(item, BaseModel) + else item + for item in value + ] + + def process_result_value( + self, value: list[dict[str, Any]] | None, dialect: Dialect + ) -> list[T] | None: + """Convert a JSON-like list from the DB back to a list of Pydantic models.""" + if value is None: + return None + return [self.pydantic_type.model_validate(item) for item in value] + + +# Base class for all database models +class Base(DeclarativeBase): + """Base class for declarative models in A2A SDK.""" + + +# TaskMixin that can be used with any table name +class TaskMixin: + """Mixin providing standard task columns with proper type handling.""" + + id: Mapped[str] = mapped_column(String(36), primary_key=True, index=True) + context_id: Mapped[str] = mapped_column(String(36), nullable=False) + kind: Mapped[str] = mapped_column( + String(16), nullable=False, default='task' + ) + + # Properly typed Pydantic fields with automatic serialization + status: Mapped[TaskStatus] = mapped_column(PydanticType(TaskStatus)) + artifacts: Mapped[list[Artifact] | None] = mapped_column( + PydanticListType(Artifact), nullable=True + ) + history: Mapped[list[Message] | None] = mapped_column( + PydanticListType(Message), nullable=True + ) + + # Using declared_attr to avoid conflict with Pydantic's metadata + @declared_attr + @classmethod + def task_metadata(cls) -> Mapped[dict[str, Any] | None]: + """Define the 'metadata' column, avoiding name conflicts with Pydantic.""" + return mapped_column(JSON, nullable=True, name='metadata') + + @override + def __repr__(self) -> str: + """Return a string representation of the task.""" + return ( + 
f'<{self.__class__.__name__}(id="{self.id}", ' + f'context_id="{self.context_id}", status="{self.status}")>' + ) + + +def create_task_model( + table_name: str = 'tasks', base: type[DeclarativeBase] = Base +) -> type: + """Create a TaskModel class with a configurable table name. + + Args: + table_name: Name of the database table. Defaults to 'tasks'. + base: Base declarative class to use. Defaults to the SDK's Base class. + + Returns: + TaskModel class with the specified table name. + + Example: + .. code-block:: python + + # Create a task model with default table name + TaskModel = create_task_model() + + # Create a task model with custom table name + CustomTaskModel = create_task_model('my_tasks') + + # Use with a custom base + from myapp.database import Base as MyBase + + TaskModel = create_task_model('tasks', MyBase) + """ + + class TaskModel(TaskMixin, base): # type: ignore + __tablename__ = table_name + + @override + def __repr__(self) -> str: + """Return a string representation of the task.""" + return ( + f'' + ) + + # Set a dynamic name for better debugging + TaskModel.__name__ = f'TaskModel_{table_name}' + TaskModel.__qualname__ = f'TaskModel_{table_name}' + + return TaskModel + + +# Default TaskModel for backward compatibility +class TaskModel(TaskMixin, Base): + """Default task model with standard table name.""" + + __tablename__ = 'tasks' + + +# PushNotificationConfigMixin that can be used with any table name +class PushNotificationConfigMixin: + """Mixin providing standard push notification config columns.""" + + task_id: Mapped[str] = mapped_column(String(36), primary_key=True) + config_id: Mapped[str] = mapped_column(String(255), primary_key=True) + config_data: Mapped[bytes] = mapped_column(LargeBinary, nullable=False) + + @override + def __repr__(self) -> str: + """Return a string representation of the push notification config.""" + return ( + f'<{self.__class__.__name__}(task_id="{self.task_id}", ' + f'config_id="{self.config_id}")>' + ) + + +def 
create_push_notification_config_model( + table_name: str = 'push_notification_configs', + base: type[DeclarativeBase] = Base, +) -> type: + """Create a PushNotificationConfigModel class with a configurable table name.""" + + class PushNotificationConfigModel(PushNotificationConfigMixin, base): # type: ignore + __tablename__ = table_name + + @override + def __repr__(self) -> str: + """Return a string representation of the push notification config.""" + return ( + f'' + ) + + PushNotificationConfigModel.__name__ = ( + f'PushNotificationConfigModel_{table_name}' + ) + PushNotificationConfigModel.__qualname__ = ( + f'PushNotificationConfigModel_{table_name}' + ) + + return PushNotificationConfigModel + + +# Default PushNotificationConfigModel for backward compatibility +class PushNotificationConfigModel(PushNotificationConfigMixin, Base): + """Default push notification config model with standard table name.""" + + __tablename__ = 'push_notification_configs' diff --git a/src/a2a/server/request_handlers/__init__.py b/src/a2a/server/request_handlers/__init__.py index f0d2667d8..43ebc8e25 100644 --- a/src/a2a/server/request_handlers/__init__.py +++ b/src/a2a/server/request_handlers/__init__.py @@ -1,5 +1,7 @@ """Request handler components for the A2A server.""" +import logging + from a2a.server.request_handlers.default_request_handler import ( DefaultRequestHandler, ) @@ -9,11 +11,37 @@ build_error_response, prepare_response_object, ) +from a2a.server.request_handlers.rest_handler import RESTHandler + + +logger = logging.getLogger(__name__) + +try: + from a2a.server.request_handlers.grpc_handler import ( + GrpcHandler, # type: ignore + ) +except ImportError as e: + _original_error = e + logger.debug( + 'GrpcHandler not loaded. This is expected if gRPC dependencies are not installed. 
Error: %s', + _original_error, + ) + + class GrpcHandler: # type: ignore + """Placeholder for GrpcHandler when dependencies are not installed.""" + + def __init__(self, *args, **kwargs): + raise ImportError( + 'To use GrpcHandler, its dependencies must be installed. ' + 'You can install them with \'pip install "a2a-sdk[grpc]"\'' + ) from _original_error __all__ = [ 'DefaultRequestHandler', + 'GrpcHandler', 'JSONRPCHandler', + 'RESTHandler', 'RequestHandler', 'build_error_response', 'prepare_response_object', diff --git a/src/a2a/server/request_handlers/default_request_handler.py b/src/a2a/server/request_handlers/default_request_handler.py index 09b1d3049..30d1ee891 100644 --- a/src/a2a/server/request_handlers/default_request_handler.py +++ b/src/a2a/server/request_handlers/default_request_handler.py @@ -20,30 +20,43 @@ ) from a2a.server.request_handlers.request_handler import RequestHandler from a2a.server.tasks import ( - PushNotifier, + PushNotificationConfigStore, + PushNotificationSender, ResultAggregator, TaskManager, TaskStore, ) from a2a.types import ( + DeleteTaskPushNotificationConfigParams, + GetTaskPushNotificationConfigParams, InternalError, + InvalidParamsError, + ListTaskPushNotificationConfigParams, Message, - MessageSendConfiguration, MessageSendParams, - PushNotificationConfig, Task, TaskIdParams, + TaskNotCancelableError, TaskNotFoundError, TaskPushNotificationConfig, TaskQueryParams, + TaskState, UnsupportedOperationError, ) from a2a.utils.errors import ServerError +from a2a.utils.task import apply_history_length from a2a.utils.telemetry import SpanKind, trace_class logger = logging.getLogger(__name__) +TERMINAL_TASK_STATES = { + TaskState.completed, + TaskState.canceled, + TaskState.failed, + TaskState.rejected, +} + @trace_class(kind=SpanKind.SERVER) class DefaultRequestHandler(RequestHandler): @@ -55,13 +68,15 @@ class DefaultRequestHandler(RequestHandler): """ _running_agents: dict[str, asyncio.Task] + _background_tasks: set[asyncio.Task] - 
def __init__( + def __init__( # noqa: PLR0913 self, agent_executor: AgentExecutor, task_store: TaskStore, queue_manager: QueueManager | None = None, - push_notifier: PushNotifier | None = None, + push_config_store: PushNotificationConfigStore | None = None, + push_sender: PushNotificationSender | None = None, request_context_builder: RequestContextBuilder | None = None, ) -> None: """Initializes the DefaultRequestHandler. @@ -70,14 +85,16 @@ def __init__( agent_executor: The `AgentExecutor` instance to run agent logic. task_store: The `TaskStore` instance to manage task persistence. queue_manager: The `QueueManager` instance to manage event queues. Defaults to `InMemoryQueueManager`. - push_notifier: The `PushNotifier` instance for sending push notifications. Defaults to None. + push_config_store: The `PushNotificationConfigStore` instance for managing push notification configurations. Defaults to None. + push_sender: The `PushNotificationSender` instance for sending push notifications. Defaults to None. request_context_builder: The `RequestContextBuilder` instance used to build request contexts. Defaults to `SimpleRequestContextBuilder`. """ self.agent_executor = agent_executor self.task_store = task_store self._queue_manager = queue_manager or InMemoryQueueManager() - self._push_notifier = push_notifier + self._push_config_store = push_config_store + self._push_sender = push_sender self._request_context_builder = ( request_context_builder or SimpleRequestContextBuilder( @@ -87,6 +104,9 @@ def __init__( # TODO: Likely want an interface for managing this, like AgentExecutionManager. self._running_agents = {} self._running_agents_lock = asyncio.Lock() + # Tracks background tasks (e.g., deferred cleanups) to avoid orphaning + # asyncio tasks and to surface unexpected exceptions. 
+ self._background_tasks = set() async def on_get_task( self, @@ -94,10 +114,12 @@ async def on_get_task( context: ServerCallContext | None = None, ) -> Task | None: """Default handler for 'tasks/get'.""" - task: Task | None = await self.task_store.get(params.id) + task: Task | None = await self.task_store.get(params.id, context) if not task: raise ServerError(error=TaskNotFoundError()) - return task + + # Apply historyLength parameter if specified + return apply_history_length(task, params.history_length) async def on_cancel_task( self, params: TaskIdParams, context: ServerCallContext | None = None @@ -106,15 +128,24 @@ async def on_cancel_task( Attempts to cancel the task managed by the `AgentExecutor`. """ - task: Task | None = await self.task_store.get(params.id) + task: Task | None = await self.task_store.get(params.id, context) if not task: raise ServerError(error=TaskNotFoundError()) + # Check if task is in a non-cancelable state (completed, canceled, failed, rejected) + if task.status.state in TERMINAL_TASK_STATES: + raise ServerError( + error=TaskNotCancelableError( + message=f'Task cannot be canceled - current state: {task.status.state}' + ) + ) + task_manager = TaskManager( task_id=task.id, - context_id=task.contextId, + context_id=task.context_id, task_store=self.task_store, initial_message=None, + context=context, ) result_aggregator = ResultAggregator(task_manager) @@ -126,7 +157,7 @@ async def on_cancel_task( RequestContext( None, task_id=task.id, - context_id=task.contextId, + context_id=task.context_id, task=task, ), queue, @@ -137,14 +168,21 @@ async def on_cancel_task( consumer = EventConsumer(queue) result = await result_aggregator.consume_all(consumer) - if isinstance(result, Task): - return result + if not isinstance(result, Task): + raise ServerError( + error=InternalError( + message='Agent did not return valid response for cancel' + ) + ) - raise ServerError( - error=InternalError( - message='Agent did not return valid response for cancel' + 
if result.status.state != TaskState.canceled: + raise ServerError( + error=TaskNotCancelableError( + message=f'Task cannot be canceled - current state: {result.status.state}' + ) ) - ) + + return result async def _run_event_stream( self, request: RequestContext, queue: EventQueue @@ -158,89 +196,163 @@ async def _run_event_stream( await self.agent_executor.execute(request, queue) await queue.close() - async def on_message_send( + async def _setup_message_execution( self, params: MessageSendParams, context: ServerCallContext | None = None, - ) -> Message | Task: - """Default handler for 'message/send' interface (non-streaming). + ) -> tuple[TaskManager, str, EventQueue, ResultAggregator, asyncio.Task]: + """Common setup logic for both streaming and non-streaming message handling. - Starts the agent execution for the message and waits for the final - result (Task or Message). + Returns: + A tuple of (task_manager, task_id, queue, result_aggregator, producer_task) """ + # Create task manager and validate existing task task_manager = TaskManager( - task_id=params.message.taskId, - context_id=params.message.contextId, + task_id=params.message.task_id, + context_id=params.message.context_id, task_store=self.task_store, initial_message=params.message, + context=context, ) task: Task | None = await task_manager.get_task() + if task: - task = task_manager.update_with_message(params.message, task) - if self.should_add_push_info(params): - assert isinstance(self._push_notifier, PushNotifier) - assert isinstance( - params.configuration, MessageSendConfiguration - ) - assert isinstance( - params.configuration.pushNotificationConfig, - PushNotificationConfig, + if task.status.state in TERMINAL_TASK_STATES: + raise ServerError( + error=InvalidParamsError( + message=f'Task {task.id} is in terminal state: {task.status.state.value}' + ) ) - await self._push_notifier.set_info( - task.id, params.configuration.pushNotificationConfig + + task = 
task_manager.update_with_message(params.message, task) + elif params.message.task_id: + raise ServerError( + error=TaskNotFoundError( + message=f'Task {params.message.task_id} was specified but does not exist' ) + ) + + # Build request context request_context = await self._request_context_builder.build( params=params, task_id=task.id if task else None, - context_id=params.message.contextId, + context_id=params.message.context_id, task=task, context=context, ) - task_id = cast(str, request_context.task_id) + task_id = cast('str', request_context.task_id) # Always assign a task ID. We may not actually upgrade to a task, but # dictating the task ID at this layer is useful for tracking running # agents. + + if ( + self._push_config_store + and params.configuration + and params.configuration.push_notification_config + ): + await self._push_config_store.set_info( + task_id, params.configuration.push_notification_config + ) + queue = await self._queue_manager.create_or_tap(task_id) result_aggregator = ResultAggregator(task_manager) # TODO: to manage the non-blocking flows. 
producer_task = asyncio.create_task( - self._run_event_stream( - request_context, - queue, - ) + self._run_event_stream(request_context, queue) ) await self._register_producer(task_id, producer_task) + return task_manager, task_id, queue, result_aggregator, producer_task + + def _validate_task_id_match(self, task_id: str, event_task_id: str) -> None: + """Validates that agent-generated task ID matches the expected task ID.""" + if task_id != event_task_id: + logger.error( + 'Agent generated task_id=%s does not match the RequestContext task_id=%s.', + event_task_id, + task_id, + ) + raise ServerError( + InternalError(message='Task ID mismatch in agent response') + ) + + async def _send_push_notification_if_needed( + self, task_id: str, result_aggregator: ResultAggregator + ) -> None: + """Sends push notification if configured and task is available.""" + if self._push_sender and task_id: + latest_task = await result_aggregator.current_result + if isinstance(latest_task, Task): + await self._push_sender.send_notification(latest_task) + + async def on_message_send( + self, + params: MessageSendParams, + context: ServerCallContext | None = None, + ) -> Message | Task: + """Default handler for 'message/send' interface (non-streaming). + + Starts the agent execution for the message and waits for the final + result (Task or Message). 
+ """ + ( + _task_manager, + task_id, + queue, + result_aggregator, + producer_task, + ) = await self._setup_message_execution(params, context) + consumer = EventConsumer(queue) producer_task.add_done_callback(consumer.agent_task_callback) - interrupted = False + blocking = True # Default to blocking behavior + if params.configuration and params.configuration.blocking is False: + blocking = False + + interrupted_or_non_blocking = False try: + # Create async callback for push notifications + async def push_notification_callback() -> None: + await self._send_push_notification_if_needed( + task_id, result_aggregator + ) + ( result, - interrupted, - ) = await result_aggregator.consume_and_break_on_interrupt(consumer) - if not result: - raise ServerError(error=InternalError()) - - if isinstance(result, Task) and task_id != result.id: - logger.error( - f'Agent generated task_id={result.id} does not match the RequestContext task_id={task_id}.' - ) - raise ServerError( - InternalError(message='Task ID mismatch in agent response') - ) + interrupted_or_non_blocking, + ) = await result_aggregator.consume_and_break_on_interrupt( + consumer, + blocking=blocking, + event_callback=push_notification_callback, + ) + except Exception: + logger.exception('Agent execution failed') + raise finally: - if interrupted: - # TODO: Track this disconnected cleanup task. 
- asyncio.create_task( + if interrupted_or_non_blocking: + cleanup_task = asyncio.create_task( self._cleanup_producer(producer_task, task_id) ) + cleanup_task.set_name(f'cleanup_producer:{task_id}') + self._track_background_task(cleanup_task) else: await self._cleanup_producer(producer_task, task_id) + if not result: + raise ServerError(error=InternalError()) + + if isinstance(result, Task): + self._validate_task_id_match(task_id, result.id) + if params.configuration: + result = apply_history_length( + result, params.configuration.history_length + ) + + await self._send_push_notification_if_needed(task_id, result_aggregator) + return result async def on_message_send_stream( @@ -253,82 +365,39 @@ async def on_message_send_stream( Starts the agent execution and yields events as they are produced by the agent. """ - task_manager = TaskManager( - task_id=params.message.taskId, - context_id=params.message.contextId, - task_store=self.task_store, - initial_message=params.message, - ) - task: Task | None = await task_manager.get_task() - - if task: - task = task_manager.update_with_message(params.message, task) - - if self.should_add_push_info(params): - assert isinstance(self._push_notifier, PushNotifier) - assert isinstance( - params.configuration, MessageSendConfiguration - ) - assert isinstance( - params.configuration.pushNotificationConfig, - PushNotificationConfig, - ) - await self._push_notifier.set_info( - task.id, params.configuration.pushNotificationConfig - ) - else: - queue = EventQueue() - result_aggregator = ResultAggregator(task_manager) - request_context = await self._request_context_builder.build( - params=params, - task_id=task.id if task else None, - context_id=params.message.contextId, - task=task, - context=context, - ) - - task_id = cast(str, request_context.task_id) - queue = await self._queue_manager.create_or_tap(task_id) - producer_task = asyncio.create_task( - self._run_event_stream( - request_context, - queue, - ) - ) - await 
self._register_producer(task_id, producer_task) + ( + _task_manager, + task_id, + queue, + result_aggregator, + producer_task, + ) = await self._setup_message_execution(params, context) + consumer = EventConsumer(queue) + producer_task.add_done_callback(consumer.agent_task_callback) try: - consumer = EventConsumer(queue) - producer_task.add_done_callback(consumer.agent_task_callback) async for event in result_aggregator.consume_and_emit(consumer): if isinstance(event, Task): - if task_id != event.id: - logger.error( - f'Agent generated task_id={event.id} does not match the RequestContext task_id={task_id}.' - ) - raise ServerError( - InternalError( - message='Task ID mismatch in agent response' - ) - ) - - if ( - self._push_notifier - and params.configuration - and params.configuration.pushNotificationConfig - ): - await self._push_notifier.set_info( - task_id, - params.configuration.pushNotificationConfig, - ) - - if self._push_notifier and task_id: - latest_task = await result_aggregator.current_result - if isinstance(latest_task, Task): - await self._push_notifier.send_notification(latest_task) + self._validate_task_id_match(task_id, event.id) + + await self._send_push_notification_if_needed( + task_id, result_aggregator + ) yield event + except (asyncio.CancelledError, GeneratorExit): + # Client disconnected: continue consuming and persisting events in the background + bg_task = asyncio.create_task( + result_aggregator.consume_all(consumer) + ) + bg_task.set_name(f'background_consume:{task_id}') + self._track_background_task(bg_task) + raise finally: - await self._cleanup_producer(producer_task, task_id) + cleanup_task = asyncio.create_task( + self._cleanup_producer(producer_task, task_id) + ) + cleanup_task.set_name(f'cleanup_producer:{task_id}') + self._track_background_task(cleanup_task) async def _register_producer( self, task_id: str, producer_task: asyncio.Task @@ -337,6 +406,29 @@ async def _register_producer( async with self._running_agents_lock: 
self._running_agents[task_id] = producer_task + def _track_background_task(self, task: asyncio.Task) -> None: + """Tracks a background task and logs exceptions on completion. + + This avoids unreferenced tasks (and associated lint warnings) while + ensuring any exceptions are surfaced in logs. + """ + self._background_tasks.add(task) + + def _on_done(completed: asyncio.Task) -> None: + try: + # Retrieve result to raise exceptions, if any + completed.result() + except asyncio.CancelledError: + name = completed.get_name() + logger.debug('Background task %s cancelled', name) + except Exception: + name = completed.get_name() + logger.exception('Background task %s failed', name) + finally: + self._background_tasks.discard(completed) + + task.add_done_callback(_on_done) + async def _cleanup_producer( self, producer_task: asyncio.Task, @@ -357,42 +449,49 @@ async def on_set_task_push_notification_config( Requires a `PushNotifier` to be configured. """ - if not self._push_notifier: + if not self._push_config_store: raise ServerError(error=UnsupportedOperationError()) - task: Task | None = await self.task_store.get(params.taskId) + task: Task | None = await self.task_store.get(params.task_id, context) if not task: raise ServerError(error=TaskNotFoundError()) - await self._push_notifier.set_info( - params.taskId, - params.pushNotificationConfig, + await self._push_config_store.set_info( + params.task_id, + params.push_notification_config, ) return params async def on_get_task_push_notification_config( self, - params: TaskIdParams, + params: TaskIdParams | GetTaskPushNotificationConfigParams, context: ServerCallContext | None = None, ) -> TaskPushNotificationConfig: """Default handler for 'tasks/pushNotificationConfig/get'. - Requires a `PushNotifier` to be configured. + Requires a `PushConfigStore` to be configured. 
""" - if not self._push_notifier: + if not self._push_config_store: raise ServerError(error=UnsupportedOperationError()) - task: Task | None = await self.task_store.get(params.id) + task: Task | None = await self.task_store.get(params.id, context) if not task: raise ServerError(error=TaskNotFoundError()) - push_notification_config = await self._push_notifier.get_info(params.id) - if not push_notification_config: - raise ServerError(error=InternalError()) + push_notification_config = await self._push_config_store.get_info( + params.id + ) + if not push_notification_config or not push_notification_config[0]: + raise ServerError( + error=InternalError( + message='Push notification config not found' + ) + ) return TaskPushNotificationConfig( - taskId=params.id, pushNotificationConfig=push_notification_config + task_id=params.id, + push_notification_config=push_notification_config[0], ) async def on_resubscribe_to_task( @@ -405,15 +504,23 @@ async def on_resubscribe_to_task( Allows a client to re-attach to a running streaming task's event stream. Requires the task and its queue to still be active. 
""" - task: Task | None = await self.task_store.get(params.id) + task: Task | None = await self.task_store.get(params.id, context) if not task: raise ServerError(error=TaskNotFoundError()) + if task.status.state in TERMINAL_TASK_STATES: + raise ServerError( + error=InvalidParamsError( + message=f'Task {task.id} is in terminal state: {task.status.state.value}' + ) + ) + task_manager = TaskManager( task_id=task.id, - context_id=task.contextId, + context_id=task.context_id, task_store=self.task_store, initial_message=None, + context=context, ) result_aggregator = ResultAggregator(task_manager) @@ -426,9 +533,49 @@ async def on_resubscribe_to_task( async for event in result_aggregator.consume_and_emit(consumer): yield event - def should_add_push_info(self, params: MessageSendParams) -> bool: - return bool( - self._push_notifier - and params.configuration - and params.configuration.pushNotificationConfig + async def on_list_task_push_notification_config( + self, + params: ListTaskPushNotificationConfigParams, + context: ServerCallContext | None = None, + ) -> list[TaskPushNotificationConfig]: + """Default handler for 'tasks/pushNotificationConfig/list'. + + Requires a `PushConfigStore` to be configured. + """ + if not self._push_config_store: + raise ServerError(error=UnsupportedOperationError()) + + task: Task | None = await self.task_store.get(params.id, context) + if not task: + raise ServerError(error=TaskNotFoundError()) + + push_notification_config_list = await self._push_config_store.get_info( + params.id + ) + + return [ + TaskPushNotificationConfig( + task_id=params.id, push_notification_config=config + ) + for config in push_notification_config_list + ] + + async def on_delete_task_push_notification_config( + self, + params: DeleteTaskPushNotificationConfigParams, + context: ServerCallContext | None = None, + ) -> None: + """Default handler for 'tasks/pushNotificationConfig/delete'. + + Requires a `PushConfigStore` to be configured. 
+ """ + if not self._push_config_store: + raise ServerError(error=UnsupportedOperationError()) + + task: Task | None = await self.task_store.get(params.id, context) + if not task: + raise ServerError(error=TaskNotFoundError()) + + await self._push_config_store.delete_info( + params.id, params.push_notification_config_id ) diff --git a/src/a2a/server/request_handlers/grpc_handler.py b/src/a2a/server/request_handlers/grpc_handler.py new file mode 100644 index 000000000..105b99471 --- /dev/null +++ b/src/a2a/server/request_handlers/grpc_handler.py @@ -0,0 +1,423 @@ +# ruff: noqa: N802 +import contextlib +import logging + +from abc import ABC, abstractmethod +from collections.abc import AsyncIterable, Awaitable, Sequence + + +try: + import grpc + import grpc.aio + + from grpc.aio import Metadata +except ImportError as e: + raise ImportError( + 'GrpcHandler requires grpcio and grpcio-tools to be installed. ' + 'Install with: ' + "'pip install a2a-sdk[grpc]'" + ) from e + +from collections.abc import Callable + +import a2a.grpc.a2a_pb2_grpc as a2a_grpc + +from a2a import types +from a2a.auth.user import UnauthenticatedUser +from a2a.extensions.common import ( + HTTP_EXTENSION_HEADER, + get_requested_extensions, +) +from a2a.grpc import a2a_pb2 +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types import AgentCard, TaskNotFoundError +from a2a.utils import proto_utils +from a2a.utils.errors import ServerError +from a2a.utils.helpers import maybe_await, validate, validate_async_generator + + +logger = logging.getLogger(__name__) + +# For now we use a trivial wrapper on the grpc context object + + +class CallContextBuilder(ABC): + """A class for building ServerCallContexts using the Starlette Request.""" + + @abstractmethod + def build(self, context: grpc.aio.ServicerContext) -> ServerCallContext: + """Builds a ServerCallContext from a gRPC Request.""" + + +def _get_metadata_value( + context: 
grpc.aio.ServicerContext, key: str +) -> list[str]: + md = context.invocation_metadata + raw_values: list[str | bytes] = [] + if isinstance(md, Metadata): + raw_values = md.get_all(key) + elif isinstance(md, Sequence): + lower_key = key.lower() + raw_values = [e for (k, e) in md if k.lower() == lower_key] + return [e if isinstance(e, str) else e.decode('utf-8') for e in raw_values] + + +class DefaultCallContextBuilder(CallContextBuilder): + """A default implementation of CallContextBuilder.""" + + def build(self, context: grpc.aio.ServicerContext) -> ServerCallContext: + """Builds the ServerCallContext.""" + user = UnauthenticatedUser() + state = {} + with contextlib.suppress(Exception): + state['grpc_context'] = context + return ServerCallContext( + user=user, + state=state, + requested_extensions=get_requested_extensions( + _get_metadata_value(context, HTTP_EXTENSION_HEADER) + ), + ) + + +class GrpcHandler(a2a_grpc.A2AServiceServicer): + """Maps incoming gRPC requests to the appropriate request handler method.""" + + def __init__( + self, + agent_card: AgentCard, + request_handler: RequestHandler, + context_builder: CallContextBuilder | None = None, + card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] + | None = None, + ): + """Initializes the GrpcHandler. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + request_handler: The underlying `RequestHandler` instance to + delegate requests to. + context_builder: The CallContextBuilder object. If none the + DefaultCallContextBuilder is used. + card_modifier: An optional callback to dynamically modify the public + agent card before it is served. 
+ """ + self.agent_card = agent_card + self.request_handler = request_handler + self.context_builder = context_builder or DefaultCallContextBuilder() + self.card_modifier = card_modifier + + async def SendMessage( + self, + request: a2a_pb2.SendMessageRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_pb2.SendMessageResponse: + """Handles the 'SendMessage' gRPC method. + + Args: + request: The incoming `SendMessageRequest` object. + context: Context provided by the server. + + Returns: + A `SendMessageResponse` object containing the result (Task or + Message) or throws an error response if a `ServerError` is raised + by the handler. + """ + try: + # Construct the server context object + server_context = self.context_builder.build(context) + # Transform the proto object to the python internal objects + a2a_request = proto_utils.FromProto.message_send_params( + request, + ) + task_or_message = await self.request_handler.on_message_send( + a2a_request, server_context + ) + self._set_extension_metadata(context, server_context) + return proto_utils.ToProto.task_or_message(task_or_message) + except ServerError as e: + await self.abort_context(e, context) + return a2a_pb2.SendMessageResponse() + + @validate_async_generator( + lambda self: self.agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def SendStreamingMessage( + self, + request: a2a_pb2.SendMessageRequest, + context: grpc.aio.ServicerContext, + ) -> AsyncIterable[a2a_pb2.StreamResponse]: + """Handles the 'StreamMessage' gRPC method. + + Yields response objects as they are produced by the underlying handler's + stream. + + Args: + request: The incoming `SendMessageRequest` object. + context: Context provided by the server. + + Yields: + `StreamResponse` objects containing streaming events + (Task, Message, TaskStatusUpdateEvent, TaskArtifactUpdateEvent) + or gRPC error responses if a `ServerError` is raised. 
+ """ + server_context = self.context_builder.build(context) + # Transform the proto object to the python internal objects + a2a_request = proto_utils.FromProto.message_send_params( + request, + ) + try: + async for event in self.request_handler.on_message_send_stream( + a2a_request, server_context + ): + yield proto_utils.ToProto.stream_response(event) + self._set_extension_metadata(context, server_context) + except ServerError as e: + await self.abort_context(e, context) + return + + async def CancelTask( + self, + request: a2a_pb2.CancelTaskRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_pb2.Task: + """Handles the 'CancelTask' gRPC method. + + Args: + request: The incoming `CancelTaskRequest` object. + context: Context provided by the server. + + Returns: + A `Task` object containing the updated Task or a gRPC error. + """ + try: + server_context = self.context_builder.build(context) + task_id_params = proto_utils.FromProto.task_id_params(request) + task = await self.request_handler.on_cancel_task( + task_id_params, server_context + ) + if task: + return proto_utils.ToProto.task(task) + await self.abort_context( + ServerError(error=TaskNotFoundError()), context + ) + except ServerError as e: + await self.abort_context(e, context) + return a2a_pb2.Task() + + @validate_async_generator( + lambda self: self.agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def TaskSubscription( + self, + request: a2a_pb2.TaskSubscriptionRequest, + context: grpc.aio.ServicerContext, + ) -> AsyncIterable[a2a_pb2.StreamResponse]: + """Handles the 'TaskSubscription' gRPC method. + + Yields response objects as they are produced by the underlying handler's + stream. + + Args: + request: The incoming `TaskSubscriptionRequest` object. + context: Context provided by the server. 
+ + Yields: + `StreamResponse` objects containing streaming events + """ + try: + server_context = self.context_builder.build(context) + async for event in self.request_handler.on_resubscribe_to_task( + proto_utils.FromProto.task_id_params(request), + server_context, + ): + yield proto_utils.ToProto.stream_response(event) + except ServerError as e: + await self.abort_context(e, context) + + async def GetTaskPushNotificationConfig( + self, + request: a2a_pb2.GetTaskPushNotificationConfigRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_pb2.TaskPushNotificationConfig: + """Handles the 'GetTaskPushNotificationConfig' gRPC method. + + Args: + request: The incoming `GetTaskPushNotificationConfigRequest` object. + context: Context provided by the server. + + Returns: + A `TaskPushNotificationConfig` object containing the config. + """ + try: + server_context = self.context_builder.build(context) + config = ( + await self.request_handler.on_get_task_push_notification_config( + proto_utils.FromProto.task_id_params(request), + server_context, + ) + ) + return proto_utils.ToProto.task_push_notification_config(config) + except ServerError as e: + await self.abort_context(e, context) + return a2a_pb2.TaskPushNotificationConfig() + + @validate( + lambda self: self.agent_card.capabilities.push_notifications, + 'Push notifications are not supported by the agent', + ) + async def CreateTaskPushNotificationConfig( + self, + request: a2a_pb2.CreateTaskPushNotificationConfigRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_pb2.TaskPushNotificationConfig: + """Handles the 'CreateTaskPushNotificationConfig' gRPC method. + + Requires the agent to support push notifications. + + Args: + request: The incoming `CreateTaskPushNotificationConfigRequest` object. + context: Context provided by the server. + + Returns: + A `TaskPushNotificationConfig` object + + Raises: + ServerError: If push notifications are not supported by the agent + (due to the `@validate` decorator). 
+ """ + try: + server_context = self.context_builder.build(context) + config = ( + await self.request_handler.on_set_task_push_notification_config( + proto_utils.FromProto.task_push_notification_config_request( + request, + ), + server_context, + ) + ) + return proto_utils.ToProto.task_push_notification_config(config) + except ServerError as e: + await self.abort_context(e, context) + return a2a_pb2.TaskPushNotificationConfig() + + async def GetTask( + self, + request: a2a_pb2.GetTaskRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_pb2.Task: + """Handles the 'GetTask' gRPC method. + + Args: + request: The incoming `GetTaskRequest` object. + context: Context provided by the server. + + Returns: + A `Task` object. + """ + try: + server_context = self.context_builder.build(context) + task = await self.request_handler.on_get_task( + proto_utils.FromProto.task_query_params(request), server_context + ) + if task: + return proto_utils.ToProto.task(task) + await self.abort_context( + ServerError(error=TaskNotFoundError()), context + ) + except ServerError as e: + await self.abort_context(e, context) + return a2a_pb2.Task() + + async def GetAgentCard( + self, + request: a2a_pb2.GetAgentCardRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_pb2.AgentCard: + """Get the agent card for the agent served.""" + card_to_serve = self.agent_card + if self.card_modifier: + card_to_serve = await maybe_await(self.card_modifier(card_to_serve)) + return proto_utils.ToProto.agent_card(card_to_serve) + + async def abort_context( + self, error: ServerError, context: grpc.aio.ServicerContext + ) -> None: + """Sets the grpc errors appropriately in the context.""" + match error.error: + case types.JSONParseError(): + await context.abort( + grpc.StatusCode.INTERNAL, + f'JSONParseError: {error.error.message}', + ) + case types.InvalidRequestError(): + await context.abort( + grpc.StatusCode.INVALID_ARGUMENT, + f'InvalidRequestError: {error.error.message}', + ) + case 
types.MethodNotFoundError(): + await context.abort( + grpc.StatusCode.NOT_FOUND, + f'MethodNotFoundError: {error.error.message}', + ) + case types.InvalidParamsError(): + await context.abort( + grpc.StatusCode.INVALID_ARGUMENT, + f'InvalidParamsError: {error.error.message}', + ) + case types.InternalError(): + await context.abort( + grpc.StatusCode.INTERNAL, + f'InternalError: {error.error.message}', + ) + case types.TaskNotFoundError(): + await context.abort( + grpc.StatusCode.NOT_FOUND, + f'TaskNotFoundError: {error.error.message}', + ) + case types.TaskNotCancelableError(): + await context.abort( + grpc.StatusCode.UNIMPLEMENTED, + f'TaskNotCancelableError: {error.error.message}', + ) + case types.PushNotificationNotSupportedError(): + await context.abort( + grpc.StatusCode.UNIMPLEMENTED, + f'PushNotificationNotSupportedError: {error.error.message}', + ) + case types.UnsupportedOperationError(): + await context.abort( + grpc.StatusCode.UNIMPLEMENTED, + f'UnsupportedOperationError: {error.error.message}', + ) + case types.ContentTypeNotSupportedError(): + await context.abort( + grpc.StatusCode.UNIMPLEMENTED, + f'ContentTypeNotSupportedError: {error.error.message}', + ) + case types.InvalidAgentResponseError(): + await context.abort( + grpc.StatusCode.INTERNAL, + f'InvalidAgentResponseError: {error.error.message}', + ) + case _: + await context.abort( + grpc.StatusCode.UNKNOWN, + f'Unknown error type: {error.error}', + ) + + def _set_extension_metadata( + self, + context: grpc.aio.ServicerContext, + server_context: ServerCallContext, + ) -> None: + if server_context.activated_extensions: + context.set_trailing_metadata( + [ + (HTTP_EXTENSION_HEADER, e) + for e in sorted(server_context.activated_extensions) + ] + ) diff --git a/src/a2a/server/request_handlers/jsonrpc_handler.py b/src/a2a/server/request_handlers/jsonrpc_handler.py index 13d2854b8..6df872fca 100644 --- a/src/a2a/server/request_handlers/jsonrpc_handler.py +++ 
b/src/a2a/server/request_handlers/jsonrpc_handler.py @@ -1,15 +1,22 @@ import logging -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Awaitable, Callable from a2a.server.context import ServerCallContext from a2a.server.request_handlers.request_handler import RequestHandler from a2a.server.request_handlers.response_helpers import prepare_response_object from a2a.types import ( AgentCard, + AuthenticatedExtendedCardNotConfiguredError, CancelTaskRequest, CancelTaskResponse, CancelTaskSuccessResponse, + DeleteTaskPushNotificationConfigRequest, + DeleteTaskPushNotificationConfigResponse, + DeleteTaskPushNotificationConfigSuccessResponse, + GetAuthenticatedExtendedCardRequest, + GetAuthenticatedExtendedCardResponse, + GetAuthenticatedExtendedCardSuccessResponse, GetTaskPushNotificationConfigRequest, GetTaskPushNotificationConfigResponse, GetTaskPushNotificationConfigSuccessResponse, @@ -18,6 +25,9 @@ GetTaskSuccessResponse, InternalError, JSONRPCErrorResponse, + ListTaskPushNotificationConfigRequest, + ListTaskPushNotificationConfigResponse, + ListTaskPushNotificationConfigSuccessResponse, Message, SendMessageRequest, SendMessageResponse, @@ -36,7 +46,7 @@ TaskStatusUpdateEvent, ) from a2a.utils.errors import ServerError -from a2a.utils.helpers import validate +from a2a.utils.helpers import maybe_await, validate from a2a.utils.telemetry import SpanKind, trace_class @@ -51,15 +61,31 @@ def __init__( self, agent_card: AgentCard, request_handler: RequestHandler, + extended_agent_card: AgentCard | None = None, + extended_card_modifier: Callable[ + [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard + ] + | None = None, + card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] + | None = None, ): """Initializes the JSONRPCHandler. Args: agent_card: The AgentCard describing the agent's capabilities. request_handler: The underlying `RequestHandler` instance to delegate requests to. 
+ extended_agent_card: An optional, distinct Extended AgentCard to be served + extended_card_modifier: An optional callback to dynamically modify + the extended agent card before it is served. It receives the + call context. + card_modifier: An optional callback to dynamically modify the public + agent card before it is served. """ self.agent_card = agent_card self.request_handler = request_handler + self.extended_agent_card = extended_agent_card + self.extended_card_modifier = extended_card_modifier + self.card_modifier = card_modifier async def on_message_send( self, @@ -158,15 +184,6 @@ async def on_cancel_task( task = await self.request_handler.on_cancel_task( request.params, context ) - if task: - return prepare_response_object( - request.id, - task, - (Task,), - CancelTaskSuccessResponse, - CancelTaskResponse, - ) - raise ServerError(error=TaskNotFoundError()) except ServerError as e: return CancelTaskResponse( root=JSONRPCErrorResponse( @@ -174,6 +191,19 @@ async def on_cancel_task( ) ) + if task: + return prepare_response_object( + request.id, + task, + (Task,), + CancelTaskSuccessResponse, + CancelTaskResponse, + ) + + return CancelTaskResponse( + root=JSONRPCErrorResponse(id=request.id, error=TaskNotFoundError()) + ) + async def on_resubscribe_to_task( self, request: TaskResubscriptionRequest, @@ -214,7 +244,7 @@ async def on_resubscribe_to_task( ) ) - async def get_push_notification( + async def get_push_notification_config( self, request: GetTaskPushNotificationConfigRequest, context: ServerCallContext | None = None, @@ -249,10 +279,10 @@ async def get_push_notification( ) @validate( - lambda self: self.agent_card.capabilities.pushNotifications, + lambda self: self.agent_card.capabilities.push_notifications, 'Push notifications are not supported by the agent', ) - async def set_push_notification( + async def set_push_notification_config( self, request: SetTaskPushNotificationConfigRequest, context: ServerCallContext | None = None, @@ -310,18 +340,125 @@ 
async def on_get_task( task = await self.request_handler.on_get_task( request.params, context ) - if task: - return prepare_response_object( - request.id, - task, - (Task,), - GetTaskSuccessResponse, - GetTaskResponse, - ) - raise ServerError(error=TaskNotFoundError()) except ServerError as e: return GetTaskResponse( root=JSONRPCErrorResponse( id=request.id, error=e.error if e.error else InternalError() ) ) + + if task: + return prepare_response_object( + request.id, + task, + (Task,), + GetTaskSuccessResponse, + GetTaskResponse, + ) + + return GetTaskResponse( + root=JSONRPCErrorResponse(id=request.id, error=TaskNotFoundError()) + ) + + async def list_push_notification_config( + self, + request: ListTaskPushNotificationConfigRequest, + context: ServerCallContext | None = None, + ) -> ListTaskPushNotificationConfigResponse: + """Handles the 'tasks/pushNotificationConfig/list' JSON-RPC method. + + Args: + request: The incoming `ListTaskPushNotificationConfigRequest` object. + context: Context provided by the server. + + Returns: + A `ListTaskPushNotificationConfigResponse` object containing the config or a JSON-RPC error. + """ + try: + config = await self.request_handler.on_list_task_push_notification_config( + request.params, context + ) + return prepare_response_object( + request.id, + config, + (list,), + ListTaskPushNotificationConfigSuccessResponse, + ListTaskPushNotificationConfigResponse, + ) + except ServerError as e: + return ListTaskPushNotificationConfigResponse( + root=JSONRPCErrorResponse( + id=request.id, error=e.error if e.error else InternalError() + ) + ) + + async def delete_push_notification_config( + self, + request: DeleteTaskPushNotificationConfigRequest, + context: ServerCallContext | None = None, + ) -> DeleteTaskPushNotificationConfigResponse: + """Handles the 'tasks/pushNotificationConfig/list' JSON-RPC method. + + Args: + request: The incoming `DeleteTaskPushNotificationConfigRequest` object. + context: Context provided by the server. 
+ + Returns: + A `DeleteTaskPushNotificationConfigResponse` object containing the config or a JSON-RPC error. + """ + try: + ( + await self.request_handler.on_delete_task_push_notification_config( + request.params, context + ) + ) + return DeleteTaskPushNotificationConfigResponse( + root=DeleteTaskPushNotificationConfigSuccessResponse( + id=request.id, result=None + ) + ) + except ServerError as e: + return DeleteTaskPushNotificationConfigResponse( + root=JSONRPCErrorResponse( + id=request.id, error=e.error if e.error else InternalError() + ) + ) + + async def get_authenticated_extended_card( + self, + request: GetAuthenticatedExtendedCardRequest, + context: ServerCallContext | None = None, + ) -> GetAuthenticatedExtendedCardResponse: + """Handles the 'agent/authenticatedExtendedCard' JSON-RPC method. + + Args: + request: The incoming `GetAuthenticatedExtendedCardRequest` object. + context: Context provided by the server. + + Returns: + A `GetAuthenticatedExtendedCardResponse` object containing the config or a JSON-RPC error. 
+ """ + if not self.agent_card.supports_authenticated_extended_card: + raise ServerError( + error=AuthenticatedExtendedCardNotConfiguredError( + message='Authenticated card not supported' + ) + ) + + base_card = self.extended_agent_card + if base_card is None: + base_card = self.agent_card + + card_to_serve = base_card + if self.extended_card_modifier and context: + card_to_serve = await maybe_await( + self.extended_card_modifier(base_card, context) + ) + elif self.card_modifier: + card_to_serve = await maybe_await(self.card_modifier(base_card)) + + return GetAuthenticatedExtendedCardResponse( + root=GetAuthenticatedExtendedCardSuccessResponse( + id=request.id, result=card_to_serve + ) + ) diff --git a/src/a2a/server/request_handlers/request_handler.py b/src/a2a/server/request_handlers/request_handler.py index 811c8da25..7ce76cc90 100644 --- a/src/a2a/server/request_handlers/request_handler.py +++ b/src/a2a/server/request_handlers/request_handler.py @@ -4,6 +4,9 @@ from a2a.server.context import ServerCallContext from a2a.server.events.event_queue import Event from a2a.types import ( + DeleteTaskPushNotificationConfigParams, + GetTaskPushNotificationConfigParams, + ListTaskPushNotificationConfigParams, Message, MessageSendParams, Task, @@ -122,7 +125,7 @@ async def on_set_task_push_notification_config( @abstractmethod async def on_get_task_push_notification_config( self, - params: TaskIdParams, + params: TaskIdParams | GetTaskPushNotificationConfigParams, context: ServerCallContext | None = None, ) -> TaskPushNotificationConfig: """Handles the 'tasks/pushNotificationConfig/get' method. 
@@ -159,3 +162,39 @@ async def on_resubscribe_to_task( """ raise ServerError(error=UnsupportedOperationError()) yield + + @abstractmethod + async def on_list_task_push_notification_config( + self, + params: ListTaskPushNotificationConfigParams, + context: ServerCallContext | None = None, + ) -> list[TaskPushNotificationConfig]: + """Handles the 'tasks/pushNotificationConfig/list' method. + + Retrieves the current push notification configurations for a task. + + Args: + params: Parameters including the task ID. + context: Context provided by the server. + + Returns: + The `list[TaskPushNotificationConfig]` for the task. + """ + + @abstractmethod + async def on_delete_task_push_notification_config( + self, + params: DeleteTaskPushNotificationConfigParams, + context: ServerCallContext | None = None, + ) -> None: + """Handles the 'tasks/pushNotificationConfig/delete' method. + + Deletes a push notification configuration associated with a task. + + Args: + params: Parameters including the task ID. + context: Context provided by the server. 
+ + Returns: + None + """ diff --git a/src/a2a/server/request_handlers/response_helpers.py b/src/a2a/server/request_handlers/response_helpers.py index b4e48ad9a..4c55c4197 100644 --- a/src/a2a/server/request_handlers/response_helpers.py +++ b/src/a2a/server/request_handlers/response_helpers.py @@ -7,6 +7,8 @@ A2AError, CancelTaskResponse, CancelTaskSuccessResponse, + DeleteTaskPushNotificationConfigResponse, + DeleteTaskPushNotificationConfigSuccessResponse, GetTaskPushNotificationConfigResponse, GetTaskPushNotificationConfigSuccessResponse, GetTaskResponse, @@ -14,6 +16,8 @@ InvalidAgentResponseError, JSONRPCError, JSONRPCErrorResponse, + ListTaskPushNotificationConfigResponse, + ListTaskPushNotificationConfigSuccessResponse, Message, SendMessageResponse, SendMessageSuccessResponse, @@ -36,6 +40,8 @@ SetTaskPushNotificationConfigResponse, GetTaskPushNotificationConfigResponse, SendStreamingMessageResponse, + ListTaskPushNotificationConfigResponse, + DeleteTaskPushNotificationConfigResponse, ) """Type variable for RootModel response types.""" @@ -48,6 +54,8 @@ SetTaskPushNotificationConfigSuccessResponse, GetTaskPushNotificationConfigSuccessResponse, SendStreamingMessageSuccessResponse, + ListTaskPushNotificationConfigSuccessResponse, + DeleteTaskPushNotificationConfigSuccessResponse, ) """Type variable for SuccessResponse types.""" @@ -60,6 +68,7 @@ | TaskPushNotificationConfig | A2AError | JSONRPCError + | list[TaskPushNotificationConfig] ) """Type alias for possible event types produced by handlers.""" diff --git a/src/a2a/server/request_handlers/rest_handler.py b/src/a2a/server/request_handlers/rest_handler.py new file mode 100644 index 000000000..59057487c --- /dev/null +++ b/src/a2a/server/request_handlers/rest_handler.py @@ -0,0 +1,307 @@ +import logging + +from collections.abc import AsyncIterable, AsyncIterator +from typing import TYPE_CHECKING, Any + +from google.protobuf.json_format import MessageToDict, MessageToJson, Parse + + +if TYPE_CHECKING: + from 
starlette.requests import Request +else: + try: + from starlette.requests import Request + except ImportError: + Request = Any + + +from a2a.grpc import a2a_pb2 +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types import ( + AgentCard, + GetTaskPushNotificationConfigParams, + TaskIdParams, + TaskNotFoundError, + TaskQueryParams, +) +from a2a.utils import proto_utils +from a2a.utils.errors import ServerError +from a2a.utils.helpers import validate +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + + +@trace_class(kind=SpanKind.SERVER) +class RESTHandler: + """Maps incoming REST-like (JSON+HTTP) requests to the appropriate request handler method and formats responses. + + This uses the protobuf definitions of the gRPC service as the source of truth. By + doing this, it ensures that this implementation and the gRPC transcoding + (via Envoy) are equivalent. This handler should be used if using the gRPC handler + with Envoy is not feasible for a given deployment solution. Use this handler + and a related application if you desire to ONLY server the RESTful API. + """ + + def __init__( + self, + agent_card: AgentCard, + request_handler: RequestHandler, + ): + """Initializes the RESTHandler. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + request_handler: The underlying `RequestHandler` instance to delegate requests to. + """ + self.agent_card = agent_card + self.request_handler = request_handler + + async def on_message_send( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'message/send' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. 
+ + Returns: + A `dict` containing the result (Task or Message) + """ + body = await request.body() + params = a2a_pb2.SendMessageRequest() + Parse(body, params) + # Transform the proto object to the python internal objects + a2a_request = proto_utils.FromProto.message_send_params( + params, + ) + task_or_message = await self.request_handler.on_message_send( + a2a_request, context + ) + return MessageToDict( + proto_utils.ToProto.task_or_message(task_or_message) + ) + + @validate( + lambda self: self.agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def on_message_send_stream( + self, + request: Request, + context: ServerCallContext, + ) -> AsyncIterator[str]: + """Handles the 'message/stream' REST method. + + Yields response objects as they are produced by the underlying handler's stream. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Yields: + JSON serialized objects containing streaming events + (Task, Message, TaskStatusUpdateEvent, TaskArtifactUpdateEvent) as JSON + """ + body = await request.body() + params = a2a_pb2.SendMessageRequest() + Parse(body, params) + # Transform the proto object to the python internal objects + a2a_request = proto_utils.FromProto.message_send_params( + params, + ) + async for event in self.request_handler.on_message_send_stream( + a2a_request, context + ): + response = proto_utils.ToProto.stream_response(event) + yield MessageToJson(response) + + async def on_cancel_task( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/cancel' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. 
+ + Returns: + A `dict` containing the updated Task + """ + task_id = request.path_params['id'] + task = await self.request_handler.on_cancel_task( + TaskIdParams(id=task_id), context + ) + if task: + return MessageToDict(proto_utils.ToProto.task(task)) + raise ServerError(error=TaskNotFoundError()) + + @validate( + lambda self: self.agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def on_resubscribe_to_task( + self, + request: Request, + context: ServerCallContext, + ) -> AsyncIterable[str]: + """Handles the 'tasks/resubscribe' REST method. + + Yields response objects as they are produced by the underlying handler's stream. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Yields: + JSON serialized objects containing streaming events + """ + task_id = request.path_params['id'] + async for event in self.request_handler.on_resubscribe_to_task( + TaskIdParams(id=task_id), context + ): + yield MessageToJson(proto_utils.ToProto.stream_response(event)) + + async def get_push_notification( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/pushNotificationConfig/get' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. 
+ + Returns: + A `dict` containing the config + """ + task_id = request.path_params['id'] + push_id = request.path_params['push_id'] + params = GetTaskPushNotificationConfigParams( + id=task_id, push_notification_config_id=push_id + ) + config = ( + await self.request_handler.on_get_task_push_notification_config( + params, context + ) + ) + return MessageToDict( + proto_utils.ToProto.task_push_notification_config(config) + ) + + @validate( + lambda self: self.agent_card.capabilities.push_notifications, + 'Push notifications are not supported by the agent', + ) + async def set_push_notification( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/pushNotificationConfig/set' REST method. + + Requires the agent to support push notifications. + + Args: + request: The incoming `TaskPushNotificationConfig` object. + context: Context provided by the server. + + Returns: + A `dict` containing the config object. + + Raises: + ServerError: If push notifications are not supported by the agent + (due to the `@validate` decorator), A2AError if processing error is + found. + """ + task_id = request.path_params['id'] + body = await request.body() + params = a2a_pb2.CreateTaskPushNotificationConfigRequest() + Parse(body, params) + a2a_request = ( + proto_utils.FromProto.task_push_notification_config_request( + params, + ) + ) + a2a_request.task_id = task_id + config = ( + await self.request_handler.on_set_task_push_notification_config( + a2a_request, context + ) + ) + return MessageToDict( + proto_utils.ToProto.task_push_notification_config(config) + ) + + async def on_get_task( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'v1/tasks/{id}' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `Task` object containing the Task. 
+ """ + task_id = request.path_params['id'] + history_length_str = request.query_params.get('historyLength') + history_length = int(history_length_str) if history_length_str else None + params = TaskQueryParams(id=task_id, history_length=history_length) + task = await self.request_handler.on_get_task(params, context) + if task: + return MessageToDict(proto_utils.ToProto.task(task)) + raise ServerError(error=TaskNotFoundError()) + + async def list_push_notifications( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/pushNotificationConfig/list' REST method. + + This method is currently not implemented. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A list of `dict` representing the `TaskPushNotificationConfig` objects. + + Raises: + NotImplementedError: This method is not yet implemented. + """ + raise NotImplementedError('list notifications not implemented') + + async def list_tasks( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/list' REST method. + + This method is currently not implemented. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A list of dict representing the`Task` objects. + + Raises: + NotImplementedError: This method is not yet implemented. 
+ """ + raise NotImplementedError('list tasks not implemented') diff --git a/src/a2a/server/tasks/__init__.py b/src/a2a/server/tasks/__init__.py index ab8f52f0f..641195ead 100644 --- a/src/a2a/server/tasks/__init__.py +++ b/src/a2a/server/tasks/__init__.py @@ -1,18 +1,78 @@ """Components for managing tasks within the A2A server.""" -from a2a.server.tasks.inmemory_push_notifier import InMemoryPushNotifier +import logging + +from a2a.server.tasks.base_push_notification_sender import ( + BasePushNotificationSender, +) +from a2a.server.tasks.inmemory_push_notification_config_store import ( + InMemoryPushNotificationConfigStore, +) from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore -from a2a.server.tasks.push_notifier import PushNotifier +from a2a.server.tasks.push_notification_config_store import ( + PushNotificationConfigStore, +) +from a2a.server.tasks.push_notification_sender import PushNotificationSender from a2a.server.tasks.result_aggregator import ResultAggregator from a2a.server.tasks.task_manager import TaskManager from a2a.server.tasks.task_store import TaskStore from a2a.server.tasks.task_updater import TaskUpdater +logger = logging.getLogger(__name__) + +try: + from a2a.server.tasks.database_task_store import ( + DatabaseTaskStore, # type: ignore + ) +except ImportError as e: + _original_error = e + # If the database task store is not available, we can still use in-memory stores. + logger.debug( + 'DatabaseTaskStore not loaded. This is expected if database dependencies are not installed. Error: %s', + e, + ) + + class DatabaseTaskStore: # type: ignore + """Placeholder for DatabaseTaskStore when dependencies are not installed.""" + + def __init__(self, *args, **kwargs): + raise ImportError( + 'To use DatabaseTaskStore, its dependencies must be installed. 
' + 'You can install them with \'pip install "a2a-sdk[sql]"\'' + ) from _original_error + + +try: + from a2a.server.tasks.database_push_notification_config_store import ( + DatabasePushNotificationConfigStore, # type: ignore + ) +except ImportError as e: + _original_error = e + # If the database push notification config store is not available, we can still use in-memory stores. + logger.debug( + 'DatabasePushNotificationConfigStore not loaded. This is expected if database dependencies are not installed. Error: %s', + e, + ) + + class DatabasePushNotificationConfigStore: # type: ignore + """Placeholder for DatabasePushNotificationConfigStore when dependencies are not installed.""" + + def __init__(self, *args, **kwargs): + raise ImportError( + 'To use DatabasePushNotificationConfigStore, its dependencies must be installed. ' + 'You can install them with \'pip install "a2a-sdk[sql]"\'' + ) from _original_error + + __all__ = [ - 'InMemoryPushNotifier', + 'BasePushNotificationSender', + 'DatabasePushNotificationConfigStore', + 'DatabaseTaskStore', + 'InMemoryPushNotificationConfigStore', 'InMemoryTaskStore', - 'PushNotifier', + 'PushNotificationConfigStore', + 'PushNotificationSender', 'ResultAggregator', 'TaskManager', 'TaskStore', diff --git a/src/a2a/server/tasks/base_push_notification_sender.py b/src/a2a/server/tasks/base_push_notification_sender.py new file mode 100644 index 000000000..087d2973d --- /dev/null +++ b/src/a2a/server/tasks/base_push_notification_sender.py @@ -0,0 +1,74 @@ +import asyncio +import logging + +import httpx + +from a2a.server.tasks.push_notification_config_store import ( + PushNotificationConfigStore, +) +from a2a.server.tasks.push_notification_sender import PushNotificationSender +from a2a.types import PushNotificationConfig, Task + + +logger = logging.getLogger(__name__) + + +class BasePushNotificationSender(PushNotificationSender): + """Base implementation of PushNotificationSender interface.""" + + def __init__( + self, + httpx_client: 
httpx.AsyncClient, + config_store: PushNotificationConfigStore, + ) -> None: + """Initializes the BasePushNotificationSender. + + Args: + httpx_client: An async HTTP client instance to send notifications. + config_store: A PushNotificationConfigStore instance to retrieve configurations. + """ + self._client = httpx_client + self._config_store = config_store + + async def send_notification(self, task: Task) -> None: + """Sends a push notification for a task if configuration exists.""" + push_configs = await self._config_store.get_info(task.id) + if not push_configs: + return + + awaitables = [ + self._dispatch_notification(task, push_info) + for push_info in push_configs + ] + results = await asyncio.gather(*awaitables) + + if not all(results): + logger.warning( + 'Some push notifications failed to send for task_id=%s', task.id + ) + + async def _dispatch_notification( + self, task: Task, push_info: PushNotificationConfig + ) -> bool: + url = push_info.url + try: + headers = None + if push_info.token: + headers = {'X-A2A-Notification-Token': push_info.token} + response = await self._client.post( + url, + json=task.model_dump(mode='json', exclude_none=True), + headers=headers, + ) + response.raise_for_status() + logger.info( + 'Push-notification sent for task_id=%s to URL: %s', task.id, url + ) + except Exception: + logger.exception( + 'Error sending push-notification for task_id=%s to URL: %s.', + task.id, + url, + ) + return False + return True diff --git a/src/a2a/server/tasks/database_push_notification_config_store.py b/src/a2a/server/tasks/database_push_notification_config_store.py new file mode 100644 index 000000000..e125f22a1 --- /dev/null +++ b/src/a2a/server/tasks/database_push_notification_config_store.py @@ -0,0 +1,295 @@ +# ruff: noqa: PLC0415 +import json +import logging + +from typing import TYPE_CHECKING + +from pydantic import ValidationError + + +try: + from sqlalchemy import ( + Table, + delete, + select, + ) + from sqlalchemy.ext.asyncio import ( 
+ AsyncEngine, + AsyncSession, + async_sessionmaker, + ) + from sqlalchemy.orm import class_mapper +except ImportError as e: + raise ImportError( + 'DatabasePushNotificationConfigStore requires SQLAlchemy and a database driver. ' + 'Install with one of: ' + "'pip install a2a-sdk[postgresql]', " + "'pip install a2a-sdk[mysql]', " + "'pip install a2a-sdk[sqlite]', " + "or 'pip install a2a-sdk[sql]'" + ) from e + +from a2a.server.models import ( + Base, + PushNotificationConfigModel, + create_push_notification_config_model, +) +from a2a.server.tasks.push_notification_config_store import ( + PushNotificationConfigStore, +) +from a2a.types import PushNotificationConfig + + +if TYPE_CHECKING: + from cryptography.fernet import Fernet + + +logger = logging.getLogger(__name__) + + +class DatabasePushNotificationConfigStore(PushNotificationConfigStore): + """SQLAlchemy-based implementation of PushNotificationConfigStore. + + Stores push notification configurations in a database supported by SQLAlchemy. + """ + + engine: AsyncEngine + async_session_maker: async_sessionmaker[AsyncSession] + create_table: bool + _initialized: bool + config_model: type[PushNotificationConfigModel] + _fernet: 'Fernet | None' + + def __init__( + self, + engine: AsyncEngine, + create_table: bool = True, + table_name: str = 'push_notification_configs', + encryption_key: str | bytes | None = None, + ) -> None: + """Initializes the DatabasePushNotificationConfigStore. + + Args: + engine: An existing SQLAlchemy AsyncEngine to be used by the store. + create_table: If true, create the table on initialization. + table_name: Name of the database table. Defaults to 'push_notification_configs'. + encryption_key: A key for encrypting sensitive configuration data. + If provided, `config_data` will be encrypted in the database. + The key must be a URL-safe base64-encoded 32-byte key. 
+ """ + logger.debug( + 'Initializing DatabasePushNotificationConfigStore with existing engine, table: %s', + table_name, + ) + self.engine = engine + self.async_session_maker = async_sessionmaker( + self.engine, expire_on_commit=False + ) + self.create_table = create_table + self._initialized = False + self.config_model = ( + PushNotificationConfigModel + if table_name == 'push_notification_configs' + else create_push_notification_config_model(table_name) + ) + self._fernet = None + + if encryption_key: + try: + from cryptography.fernet import Fernet + except ImportError as e: + raise ImportError( + "DatabasePushNotificationConfigStore with encryption requires the 'cryptography' " + 'library. Install with: ' + "'pip install a2a-sdk[encryption]'" + ) from e + + if isinstance(encryption_key, str): + encryption_key = encryption_key.encode('utf-8') + self._fernet = Fernet(encryption_key) + logger.debug( + 'Encryption enabled for push notification config store.' + ) + + async def initialize(self) -> None: + """Initialize the database and create the table if needed.""" + if self._initialized: + return + + logger.debug( + 'Initializing database schema for push notification configs...' + ) + if self.create_table: + async with self.engine.begin() as conn: + mapper = class_mapper(self.config_model) + tables_to_create = [ + table for table in mapper.tables if isinstance(table, Table) + ] + await conn.run_sync( + Base.metadata.create_all, tables=tables_to_create + ) + self._initialized = True + logger.debug( + 'Database schema for push notification configs initialized.' + ) + + async def _ensure_initialized(self) -> None: + """Ensure the database connection is initialized.""" + if not self._initialized: + await self.initialize() + + def _to_orm( + self, task_id: str, config: PushNotificationConfig + ) -> PushNotificationConfigModel: + """Maps a Pydantic PushNotificationConfig to a SQLAlchemy model instance. 
+ + The config data is serialized to JSON bytes, and encrypted if a key is configured. + """ + json_payload = config.model_dump_json().encode('utf-8') + + if self._fernet: + data_to_store = self._fernet.encrypt(json_payload) + else: + data_to_store = json_payload + + return self.config_model( + task_id=task_id, + config_id=config.id, + config_data=data_to_store, + ) + + def _from_orm( + self, model_instance: PushNotificationConfigModel + ) -> PushNotificationConfig: + """Maps a SQLAlchemy model instance to a Pydantic PushNotificationConfig. + + Handles decryption if a key is configured, with a fallback to plain JSON. + """ + payload = model_instance.config_data + + if self._fernet: + from cryptography.fernet import InvalidToken + + try: + decrypted_payload = self._fernet.decrypt(payload) + return PushNotificationConfig.model_validate_json( + decrypted_payload + ) + except (json.JSONDecodeError, ValidationError) as e: + logger.exception( + 'Failed to parse decrypted push notification config for task %s, config %s. ' + 'Data is corrupted or not valid JSON after decryption.', + model_instance.task_id, + model_instance.config_id, + ) + raise ValueError( + 'Failed to parse decrypted push notification config data' + ) from e + except InvalidToken: + # Decryption failed. This could be because the data is not encrypted. + # We'll log a warning and try to parse it as plain JSON as a fallback. + logger.warning( + 'Failed to decrypt push notification config for task %s, config %s. ' + 'Attempting to parse as unencrypted JSON. ' + 'This may indicate an incorrect encryption key or unencrypted data in the database.', + model_instance.task_id, + model_instance.config_id, + ) + # Fall through to the unencrypted parsing logic below. + + # Try to parse as plain JSON. 
+ try: + return PushNotificationConfig.model_validate_json(payload) + except (json.JSONDecodeError, ValidationError) as e: + if self._fernet: + logger.exception( + 'Failed to parse push notification config for task %s, config %s. ' + 'Decryption failed and the data is not valid JSON. ' + 'This likely indicates the data is corrupted or encrypted with a different key.', + model_instance.task_id, + model_instance.config_id, + ) + else: + # if no key is configured and the payload is not valid JSON. + logger.exception( + 'Failed to parse push notification config for task %s, config %s. ' + 'Data is not valid JSON and no encryption key is configured.', + model_instance.task_id, + model_instance.config_id, + ) + raise ValueError( + 'Failed to parse push notification config data. ' + 'Data is not valid JSON, or it is encrypted with the wrong key.' + ) from e + + async def set_info( + self, task_id: str, notification_config: PushNotificationConfig + ) -> None: + """Sets or updates the push notification configuration for a task.""" + await self._ensure_initialized() + + config_to_save = notification_config.model_copy() + if config_to_save.id is None: + config_to_save.id = task_id + + db_config = self._to_orm(task_id, config_to_save) + async with self.async_session_maker.begin() as session: + await session.merge(db_config) + logger.debug( + 'Push notification config for task %s with config id %s saved/updated.', + task_id, + config_to_save.id, + ) + + async def get_info(self, task_id: str) -> list[PushNotificationConfig]: + """Retrieves all push notification configurations for a task.""" + await self._ensure_initialized() + async with self.async_session_maker() as session: + stmt = select(self.config_model).where( + self.config_model.task_id == task_id + ) + result = await session.execute(stmt) + models = result.scalars().all() + + configs = [] + for model in models: + try: + configs.append(self._from_orm(model)) + except ValueError: # noqa: PERF203 + logger.exception( + 
'Could not deserialize push notification config for task %s, config %s', + model.task_id, + model.config_id, + ) + return configs + + async def delete_info( + self, task_id: str, config_id: str | None = None + ) -> None: + """Deletes push notification configurations for a task. + + If config_id is provided, only that specific configuration is deleted. + If config_id is None, all configurations for the task are deleted. + """ + await self._ensure_initialized() + async with self.async_session_maker.begin() as session: + stmt = delete(self.config_model).where( + self.config_model.task_id == task_id + ) + if config_id is not None: + stmt = stmt.where(self.config_model.config_id == config_id) + + result = await session.execute(stmt) + + if result.rowcount > 0: + logger.info( + 'Deleted %s push notification config(s) for task %s.', + result.rowcount, + task_id, + ) + else: + logger.warning( + 'Attempted to delete push notification config for task %s with config_id: %s that does not exist.', + task_id, + config_id, + ) diff --git a/src/a2a/server/tasks/database_task_store.py b/src/a2a/server/tasks/database_task_store.py new file mode 100644 index 000000000..07ba7e970 --- /dev/null +++ b/src/a2a/server/tasks/database_task_store.py @@ -0,0 +1,166 @@ +import logging + + +try: + from sqlalchemy import Table, delete, select + from sqlalchemy.ext.asyncio import ( + AsyncEngine, + AsyncSession, + async_sessionmaker, + ) + from sqlalchemy.orm import class_mapper +except ImportError as e: + raise ImportError( + 'DatabaseTaskStore requires SQLAlchemy and a database driver. 
' + 'Install with one of: ' + "'pip install a2a-sdk[postgresql]', " + "'pip install a2a-sdk[mysql]', " + "'pip install a2a-sdk[sqlite]', " + "or 'pip install a2a-sdk[sql]'" + ) from e + +from a2a.server.context import ServerCallContext +from a2a.server.models import Base, TaskModel, create_task_model +from a2a.server.tasks.task_store import TaskStore +from a2a.types import Task # Task is the Pydantic model + + +logger = logging.getLogger(__name__) + + +class DatabaseTaskStore(TaskStore): + """SQLAlchemy-based implementation of TaskStore. + + Stores task objects in a database supported by SQLAlchemy. + """ + + engine: AsyncEngine + async_session_maker: async_sessionmaker[AsyncSession] + create_table: bool + _initialized: bool + task_model: type[TaskModel] + + def __init__( + self, + engine: AsyncEngine, + create_table: bool = True, + table_name: str = 'tasks', + ) -> None: + """Initializes the DatabaseTaskStore. + + Args: + engine: An existing SQLAlchemy AsyncEngine to be used by Task Store + create_table: If true, create tasks table on initialization. + table_name: Name of the database table. Defaults to 'tasks'. 
+ """ + logger.debug( + 'Initializing DatabaseTaskStore with existing engine, table: %s', + table_name, + ) + self.engine = engine + self.async_session_maker = async_sessionmaker( + self.engine, expire_on_commit=False + ) + self.create_table = create_table + self._initialized = False + + self.task_model = ( + TaskModel + if table_name == 'tasks' + else create_task_model(table_name) + ) + + async def initialize(self) -> None: + """Initialize the database and create the table if needed.""" + if self._initialized: + return + + logger.debug('Initializing database schema...') + if self.create_table: + async with self.engine.begin() as conn: + mapper = class_mapper(self.task_model) + tables_to_create = [ + table for table in mapper.tables if isinstance(table, Table) + ] + await conn.run_sync( + Base.metadata.create_all, tables=tables_to_create + ) + self._initialized = True + logger.debug('Database schema initialized.') + + async def _ensure_initialized(self) -> None: + """Ensure the database connection is initialized.""" + if not self._initialized: + await self.initialize() + + def _to_orm(self, task: Task) -> TaskModel: + """Maps a Pydantic Task to a SQLAlchemy TaskModel instance.""" + return self.task_model( + id=task.id, + context_id=task.context_id, + kind=task.kind, + status=task.status, + artifacts=task.artifacts, + history=task.history, + task_metadata=task.metadata, + ) + + def _from_orm(self, task_model: TaskModel) -> Task: + """Maps a SQLAlchemy TaskModel to a Pydantic Task instance.""" + # Map database columns to Pydantic model fields + task_data_from_db = { + 'id': task_model.id, + 'context_id': task_model.context_id, + 'kind': task_model.kind, + 'status': task_model.status, + 'artifacts': task_model.artifacts, + 'history': task_model.history, + 'metadata': task_model.task_metadata, # Map task_metadata column to metadata field + } + # Pydantic's model_validate will parse the nested dicts/lists from JSON + return Task.model_validate(task_data_from_db) + + 
async def save( + self, task: Task, context: ServerCallContext | None = None + ) -> None: + """Saves or updates a task in the database.""" + await self._ensure_initialized() + db_task = self._to_orm(task) + async with self.async_session_maker.begin() as session: + await session.merge(db_task) + logger.debug('Task %s saved/updated successfully.', task.id) + + async def get( + self, task_id: str, context: ServerCallContext | None = None + ) -> Task | None: + """Retrieves a task from the database by ID.""" + await self._ensure_initialized() + async with self.async_session_maker() as session: + stmt = select(self.task_model).where(self.task_model.id == task_id) + result = await session.execute(stmt) + task_model = result.scalar_one_or_none() + if task_model: + task = self._from_orm(task_model) + logger.debug('Task %s retrieved successfully.', task_id) + return task + + logger.debug('Task %s not found in store.', task_id) + return None + + async def delete( + self, task_id: str, context: ServerCallContext | None = None + ) -> None: + """Deletes a task from the database by ID.""" + await self._ensure_initialized() + + async with self.async_session_maker.begin() as session: + stmt = delete(self.task_model).where(self.task_model.id == task_id) + result = await session.execute(stmt) + # Commit is automatic when using session.begin() + + if result.rowcount > 0: + logger.info('Task %s deleted successfully.', task_id) + else: + logger.warning( + 'Attempted to delete nonexistent task with id: %s', task_id + ) diff --git a/src/a2a/server/tasks/inmemory_push_notification_config_store.py b/src/a2a/server/tasks/inmemory_push_notification_config_store.py new file mode 100644 index 000000000..c5bc5dbe6 --- /dev/null +++ b/src/a2a/server/tasks/inmemory_push_notification_config_store.py @@ -0,0 +1,68 @@ +import asyncio +import logging + +from a2a.server.tasks.push_notification_config_store import ( + PushNotificationConfigStore, +) +from a2a.types import PushNotificationConfig + + 
+logger = logging.getLogger(__name__) + + +class InMemoryPushNotificationConfigStore(PushNotificationConfigStore): + """In-memory implementation of PushNotificationConfigStore interface. + + Stores push notification configurations in memory + """ + + def __init__(self) -> None: + """Initializes the InMemoryPushNotificationConfigStore.""" + self.lock = asyncio.Lock() + self._push_notification_infos: dict[ + str, list[PushNotificationConfig] + ] = {} + + async def set_info( + self, task_id: str, notification_config: PushNotificationConfig + ) -> None: + """Sets or updates the push notification configuration for a task in memory.""" + async with self.lock: + if task_id not in self._push_notification_infos: + self._push_notification_infos[task_id] = [] + + if notification_config.id is None: + notification_config.id = task_id + + for config in self._push_notification_infos[task_id]: + if config.id == notification_config.id: + self._push_notification_infos[task_id].remove(config) + break + + self._push_notification_infos[task_id].append(notification_config) + + async def get_info(self, task_id: str) -> list[PushNotificationConfig]: + """Retrieves the push notification configuration for a task from memory.""" + async with self.lock: + return self._push_notification_infos.get(task_id) or [] + + async def delete_info( + self, task_id: str, config_id: str | None = None + ) -> None: + """Deletes the push notification configuration for a task from memory.""" + async with self.lock: + if config_id is None: + config_id = task_id + + if task_id in self._push_notification_infos: + configurations = self._push_notification_infos[task_id] + if not configurations: + return + + for config in configurations: + if config.id == config_id: + configurations.remove(config) + break + + if len(configurations) == 0: + del self._push_notification_infos[task_id] diff --git a/src/a2a/server/tasks/inmemory_push_notifier.py b/src/a2a/server/tasks/inmemory_push_notifier.py deleted file mode 100644 
index 7c6829011..000000000 --- a/src/a2a/server/tasks/inmemory_push_notifier.py +++ /dev/null @@ -1,62 +0,0 @@ -import asyncio -import logging - -import httpx - -from a2a.server.tasks.push_notifier import PushNotifier -from a2a.types import PushNotificationConfig, Task - - -logger = logging.getLogger(__name__) - - -class InMemoryPushNotifier(PushNotifier): - """In-memory implementation of PushNotifier interface. - - Stores push notification configurations in memory and uses an httpx client - to send notifications. - """ - - def __init__(self, httpx_client: httpx.AsyncClient) -> None: - """Initializes the InMemoryPushNotifier. - - Args: - httpx_client: An async HTTP client instance to send notifications. - """ - self._client = httpx_client - self.lock = asyncio.Lock() - self._push_notification_infos: dict[str, PushNotificationConfig] = {} - - async def set_info( - self, task_id: str, notification_config: PushNotificationConfig - ): - """Sets or updates the push notification configuration for a task in memory.""" - async with self.lock: - self._push_notification_infos[task_id] = notification_config - - async def get_info(self, task_id: str) -> PushNotificationConfig | None: - """Retrieves the push notification configuration for a task from memory.""" - async with self.lock: - return self._push_notification_infos.get(task_id) - - async def delete_info(self, task_id: str): - """Deletes the push notification configuration for a task from memory.""" - async with self.lock: - if task_id in self._push_notification_infos: - del self._push_notification_infos[task_id] - - async def send_notification(self, task: Task): - """Sends a push notification for a task if configuration exists.""" - push_info = await self.get_info(task.id) - if not push_info: - return - url = push_info.url - - try: - response = await self._client.post( - url, json=task.model_dump(mode='json', exclude_none=True) - ) - response.raise_for_status() - logger.info(f'Push-notification sent for URL: {url}') - 
except Exception as e: - logger.error(f'Error sending push-notification: {e}') diff --git a/src/a2a/server/tasks/inmemory_task_store.py b/src/a2a/server/tasks/inmemory_task_store.py index 26c098230..4e192af08 100644 --- a/src/a2a/server/tasks/inmemory_task_store.py +++ b/src/a2a/server/tasks/inmemory_task_store.py @@ -1,6 +1,7 @@ import asyncio import logging +from a2a.server.context import ServerCallContext from a2a.server.tasks.task_store import TaskStore from a2a.types import Task @@ -21,13 +22,17 @@ def __init__(self) -> None: self.tasks: dict[str, Task] = {} self.lock = asyncio.Lock() - async def save(self, task: Task) -> None: + async def save( + self, task: Task, context: ServerCallContext | None = None + ) -> None: """Saves or updates a task in the in-memory store.""" async with self.lock: self.tasks[task.id] = task logger.debug('Task %s saved successfully.', task.id) - async def get(self, task_id: str) -> Task | None: + async def get( + self, task_id: str, context: ServerCallContext | None = None + ) -> Task | None: """Retrieves a task from the in-memory store by ID.""" async with self.lock: logger.debug('Attempting to get task with id: %s', task_id) @@ -38,7 +43,9 @@ async def get(self, task_id: str) -> Task | None: logger.debug('Task %s not found in store.', task_id) return task - async def delete(self, task_id: str) -> None: + async def delete( + self, task_id: str, context: ServerCallContext | None = None + ) -> None: """Deletes a task from the in-memory store by ID.""" async with self.lock: logger.debug('Attempting to delete task with id: %s', task_id) diff --git a/src/a2a/server/tasks/push_notification_config_store.py b/src/a2a/server/tasks/push_notification_config_store.py new file mode 100644 index 000000000..efe46b40a --- /dev/null +++ b/src/a2a/server/tasks/push_notification_config_store.py @@ -0,0 +1,23 @@ +from abc import ABC, abstractmethod + +from a2a.types import PushNotificationConfig + + +class PushNotificationConfigStore(ABC): + 
"""Interface for storing and retrieving push notification configurations for tasks.""" + + @abstractmethod + async def set_info( + self, task_id: str, notification_config: PushNotificationConfig + ) -> None: + """Sets or updates the push notification configuration for a task.""" + + @abstractmethod + async def get_info(self, task_id: str) -> list[PushNotificationConfig]: + """Retrieves the push notification configuration for a task.""" + + @abstractmethod + async def delete_info( + self, task_id: str, config_id: str | None = None + ) -> None: + """Deletes the push notification configuration for a task.""" diff --git a/src/a2a/server/tasks/push_notification_sender.py b/src/a2a/server/tasks/push_notification_sender.py new file mode 100644 index 000000000..d9389d4a4 --- /dev/null +++ b/src/a2a/server/tasks/push_notification_sender.py @@ -0,0 +1,11 @@ +from abc import ABC, abstractmethod + +from a2a.types import Task + + +class PushNotificationSender(ABC): + """Interface for sending push notifications for tasks.""" + + @abstractmethod + async def send_notification(self, task: Task) -> None: + """Sends a push notification containing the latest task state.""" diff --git a/src/a2a/server/tasks/push_notifier.py b/src/a2a/server/tasks/push_notifier.py deleted file mode 100644 index ca1246b89..000000000 --- a/src/a2a/server/tasks/push_notifier.py +++ /dev/null @@ -1,25 +0,0 @@ -from abc import ABC, abstractmethod - -from a2a.types import PushNotificationConfig, Task - - -class PushNotifier(ABC): - """PushNotifier interface to store, retrieve push notification for tasks and send push notifications.""" - - @abstractmethod - async def set_info( - self, task_id: str, notification_config: PushNotificationConfig - ): - """Sets or updates the push notification configuration for a task.""" - - @abstractmethod - async def get_info(self, task_id: str) -> PushNotificationConfig | None: - """Retrieves the push notification configuration for a task.""" - - @abstractmethod - async def 
delete_info(self, task_id: str): - """Deletes the push notification configuration for a task.""" - - @abstractmethod - async def send_notification(self, task: Task): - """Sends a push notification containing the latest task state.""" diff --git a/src/a2a/server/tasks/result_aggregator.py b/src/a2a/server/tasks/result_aggregator.py index a3a3326fd..fb1ab62ef 100644 --- a/src/a2a/server/tasks/result_aggregator.py +++ b/src/a2a/server/tasks/result_aggregator.py @@ -1,7 +1,7 @@ import asyncio import logging -from collections.abc import AsyncGenerator, AsyncIterator +from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable from a2a.server.events import Event, EventConsumer from a2a.server.tasks.task_manager import TaskManager @@ -24,7 +24,10 @@ class ResultAggregator: Task object and emit that Task object. """ - def __init__(self, task_manager: TaskManager): + def __init__( + self, + task_manager: TaskManager, + ) -> None: """Initializes the ResultAggregator. Args: @@ -92,21 +95,30 @@ async def consume_all( return await self.task_manager.get_task() async def consume_and_break_on_interrupt( - self, consumer: EventConsumer + self, + consumer: EventConsumer, + blocking: bool = True, + event_callback: Callable[[], Awaitable[None]] | None = None, ) -> tuple[Task | Message | None, bool]: """Processes the event stream until completion or an interruptable state is encountered. - Interruptable states currently include `TaskState.auth_required`. + If `blocking` is False, it returns after the first event that creates a Task or Message. + If `blocking` is True, it waits for completion unless an `auth_required` + state is encountered, which is always an interruption. If interrupted, consumption continues in a background task. Args: consumer: The `EventConsumer` to read events from. + blocking: If `False`, the method returns as soon as a task/message + is available. If `True`, it waits for a terminal state. 
+ event_callback: Optional async callback function to be called after each event + is processed in the background continuation. + Mainly used for push notifications currently. Returns: A tuple containing: - The current aggregated result (`Task` or `Message`) at the point of completion or interruption. - - A boolean indicating whether the consumption was interrupted (`True`) - or completed naturally (`False`). + - A boolean indicating whether the consumption was interrupted (`True`) or completed naturally (`False`). Raises: BaseException: If the `EventConsumer` raises an exception during consumption. @@ -118,10 +130,15 @@ async def consume_and_break_on_interrupt( self._message = event return event, False await self.task_manager.process(event) - if ( + + should_interrupt = False + is_auth_required = ( isinstance(event, Task | TaskStatusUpdateEvent) and event.status.state == TaskState.auth_required - ): + ) + + # Always interrupt on auth_required, as it needs external action. + if is_auth_required: # auth-required is a special state: the message should be # escalated back to the caller, but the agent is expected to # continue producing events once the authorization is received @@ -131,14 +148,28 @@ async def consume_and_break_on_interrupt( logger.debug( 'Encountered an auth-required task: breaking synchronous message/send flow.' ) + should_interrupt = True + # For non-blocking calls, interrupt as soon as a task is available. + elif not blocking: + logger.debug( + 'Non-blocking call: returning task after first event.' + ) + should_interrupt = True + + if should_interrupt: + # Continue consuming the rest of the events in the background. # TODO: We should track all outstanding tasks to ensure they eventually complete. 
- asyncio.create_task(self._continue_consuming(event_stream)) + asyncio.create_task( # noqa: RUF006 + self._continue_consuming(event_stream, event_callback) + ) interrupted = True break return await self.task_manager.get_task(), interrupted async def _continue_consuming( - self, event_stream: AsyncIterator[Event] + self, + event_stream: AsyncIterator[Event], + event_callback: Callable[[], Awaitable[None]] | None = None, ) -> None: """Continues processing an event stream in a background task. @@ -147,6 +178,9 @@ async def _continue_consuming( Args: event_stream: The remaining `AsyncIterator` of events from the consumer. + event_callback: Optional async callback function to be called after each event is processed. """ async for event in event_stream: await self.task_manager.process(event) + if event_callback: + await event_callback() diff --git a/src/a2a/server/tasks/task_manager.py b/src/a2a/server/tasks/task_manager.py index ca42b69b9..5c363703b 100644 --- a/src/a2a/server/tasks/task_manager.py +++ b/src/a2a/server/tasks/task_manager.py @@ -1,5 +1,6 @@ import logging +from a2a.server.context import ServerCallContext from a2a.server.events.event_queue import Event from a2a.server.tasks.task_store import TaskStore from a2a.types import ( @@ -31,6 +32,7 @@ def __init__( context_id: str | None, task_store: TaskStore, initial_message: Message | None, + context: ServerCallContext | None = None, ): """Initializes the TaskManager. @@ -40,12 +42,17 @@ def __init__( task_store: The `TaskStore` instance for persistence. initial_message: The `Message` that initiated the task, if any. Used when creating a new task object. + context: The `ServerCallContext` that this task is produced under. 
""" + if task_id is not None and not (isinstance(task_id, str) and task_id): + raise ValueError('Task ID must be a non-empty string') + self.task_id = task_id self.context_id = context_id self.task_store = task_store self._initial_message = initial_message self._current_task: Task | None = None + self._call_context: ServerCallContext | None = context logger.debug( 'TaskManager initialized with task_id: %s, context_id: %s', task_id, @@ -71,7 +78,9 @@ async def get_task(self) -> Task | None: logger.debug( 'Attempting to get task from store with id: %s', self.task_id ) - self._current_task = await self.task_store.get(self.task_id) + self._current_task = await self.task_store.get( + self.task_id, self._call_context + ) if self._current_task: logger.debug('Task %s retrieved successfully.', self.task_id) else: @@ -96,7 +105,7 @@ async def save_task_event( when the TaskManager's ID is already set. """ task_id_from_event = ( - event.id if isinstance(event, Task) else event.taskId + event.id if isinstance(event, Task) else event.task_id ) # If task id is known, make sure it is matched if self.task_id and self.task_id != task_id_from_event: @@ -107,8 +116,14 @@ async def save_task_event( ) if not self.task_id: self.task_id = task_id_from_event - if not self.context_id and self.context_id != event.contextId: - self.context_id = event.contextId + if self.context_id and self.context_id != event.context_id: + raise ServerError( + error=InvalidParamsError( + message=f"Context in event doesn't match TaskManager {self.context_id} : {event.context_id}" + ) + ) + if not self.context_id: + self.context_id = event.context_id logger.debug( 'Processing save of task event of type %s for task_id: %s', @@ -130,7 +145,10 @@ async def save_task_event( task.history = [task.status.message] else: task.history.append(task.status.message) - + if event.metadata: + if not task.metadata: + task.metadata = {} + task.metadata.update(event.metadata) task.status = event.status else: 
logger.debug('Appending artifact to task %s', task.id) @@ -155,17 +173,17 @@ async def ensure_task( logger.debug( 'Attempting to retrieve existing task with id: %s', self.task_id ) - task = await self.task_store.get(self.task_id) + task = await self.task_store.get(self.task_id, self._call_context) if not task: logger.info( 'Task not found or task_id not set. Creating new task for event (task_id: %s, context_id: %s).', - event.taskId, - event.contextId, + event.task_id, + event.context_id, ) # streaming agent did not previously stream task object. # Create a task object with the available information and persist the event - task = self._init_task_obj(event.taskId, event.contextId) + task = self._init_task_obj(event.task_id, event.context_id) await self._save_task(task) return task @@ -207,7 +225,7 @@ def _init_task_obj(self, task_id: str, context_id: str) -> Task: history = [self._initial_message] if self._initial_message else [] return Task( id=task_id, - contextId=context_id, + context_id=context_id, status=TaskStatus(state=TaskState.submitted), history=history, ) @@ -219,12 +237,12 @@ async def _save_task(self, task: Task) -> None: task: The `Task` object to save. """ logger.debug('Saving task with id: %s', task.id) - await self.task_store.save(task) + await self.task_store.save(task, self._call_context) self._current_task = task if not self.task_id: logger.info('New task created with id: %s', task.id) self.task_id = task.id - self.context_id = task.contextId + self.context_id = task.context_id def update_with_message(self, message: Message, task: Task) -> Task: """Updates a task object in memory by adding a new message to its history. 
diff --git a/src/a2a/server/tasks/task_store.py b/src/a2a/server/tasks/task_store.py index 6d7ce59d1..16b36edb9 100644 --- a/src/a2a/server/tasks/task_store.py +++ b/src/a2a/server/tasks/task_store.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod +from a2a.server.context import ServerCallContext from a2a.types import Task @@ -10,13 +11,19 @@ class TaskStore(ABC): """ @abstractmethod - async def save(self, task: Task): + async def save( + self, task: Task, context: ServerCallContext | None = None + ) -> None: """Saves or updates a task in the store.""" @abstractmethod - async def get(self, task_id: str) -> Task | None: + async def get( + self, task_id: str, context: ServerCallContext | None = None + ) -> Task | None: """Retrieves a task from the store by ID.""" @abstractmethod - async def delete(self, task_id: str): + async def delete( + self, task_id: str, context: ServerCallContext | None = None + ) -> None: """Deletes a task from the store by ID.""" diff --git a/src/a2a/server/tasks/task_updater.py b/src/a2a/server/tasks/task_updater.py index c079edd44..b61ab7001 100644 --- a/src/a2a/server/tasks/task_updater.py +++ b/src/a2a/server/tasks/task_updater.py @@ -1,8 +1,14 @@ -import uuid +import asyncio +from datetime import datetime, timezone from typing import Any from a2a.server.events import EventQueue +from a2a.server.id_generator import ( + IDGenerator, + IDGeneratorContext, + UUIDGenerator, +) from a2a.types import ( Artifact, Message, @@ -21,47 +27,96 @@ class TaskUpdater: Simplifies the process of creating and enqueueing standard task events. """ - def __init__(self, event_queue: EventQueue, task_id: str, context_id: str): + def __init__( + self, + event_queue: EventQueue, + task_id: str, + context_id: str, + artifact_id_generator: IDGenerator | None = None, + message_id_generator: IDGenerator | None = None, + ): """Initializes the TaskUpdater. Args: event_queue: The `EventQueue` associated with the task. task_id: The ID of the task. 
context_id: The context ID of the task. + artifact_id_generator: ID generator for new artifact IDs. Defaults to UUID generator. + message_id_generator: ID generator for new message IDs. Defaults to UUID generator. """ self.event_queue = event_queue self.task_id = task_id self.context_id = context_id + self._lock = asyncio.Lock() + self._terminal_state_reached = False + self._terminal_states = { + TaskState.completed, + TaskState.canceled, + TaskState.failed, + TaskState.rejected, + } + self._artifact_id_generator = ( + artifact_id_generator if artifact_id_generator else UUIDGenerator() + ) + self._message_id_generator = ( + message_id_generator if message_id_generator else UUIDGenerator() + ) - def update_status( - self, state: TaskState, message: Message | None = None, final=False - ): + async def update_status( + self, + state: TaskState, + message: Message | None = None, + final: bool = False, + timestamp: str | None = None, + metadata: dict[str, Any] | None = None, + ) -> None: """Updates the status of the task and publishes a `TaskStatusUpdateEvent`. Args: state: The new state of the task. message: An optional message associated with the status update. final: If True, indicates this is the final status update for the task. + timestamp: Optional ISO 8601 datetime string. Defaults to current time. + metadata: Optional metadata for extensions. """ - self.event_queue.enqueue_event( - TaskStatusUpdateEvent( - taskId=self.task_id, - contextId=self.context_id, - final=final, - status=TaskStatus( - state=state, - message=message, - ), + async with self._lock: + if self._terminal_state_reached: + raise RuntimeError( + f'Task {self.task_id} is already in a terminal state.' 
+ ) + if state in self._terminal_states: + self._terminal_state_reached = True + final = True + + current_timestamp = ( + timestamp + if timestamp + else datetime.now(timezone.utc).isoformat() + ) + await self.event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=self.task_id, + context_id=self.context_id, + final=final, + metadata=metadata, + status=TaskStatus( + state=state, + message=message, + timestamp=current_timestamp, + ), + ) ) - ) - def add_artifact( + async def add_artifact( # noqa: PLR0913 self, parts: list[Part], - artifact_id: str = str(uuid.uuid4()), + artifact_id: str | None = None, name: str | None = None, metadata: dict[str, Any] | None = None, - ): + append: bool | None = None, + last_chunk: bool | None = None, + extensions: list[str] | None = None, + ) -> None: """Adds an artifact chunk to the task and publishes a `TaskArtifactUpdateEvent`. Args: @@ -71,46 +126,87 @@ def add_artifact( metadata: Optional metadata for the artifact. append: Optional boolean indicating if this chunk appends to a previous one. last_chunk: Optional boolean indicating if this is the last chunk. + extensions: Optional list of extensions for the artifact. 
""" - self.event_queue.enqueue_event( + if not artifact_id: + artifact_id = self._artifact_id_generator.generate( + IDGeneratorContext( + task_id=self.task_id, context_id=self.context_id + ) + ) + + await self.event_queue.enqueue_event( TaskArtifactUpdateEvent( - taskId=self.task_id, - contextId=self.context_id, + task_id=self.task_id, + context_id=self.context_id, artifact=Artifact( - artifactId=artifact_id, + artifact_id=artifact_id, name=name, parts=parts, metadata=metadata, + extensions=extensions, ), + append=append, + last_chunk=last_chunk, ) ) - def complete(self, message: Message | None = None): + async def complete(self, message: Message | None = None) -> None: """Marks the task as completed and publishes a final status update.""" - self.update_status( + await self.update_status( TaskState.completed, message=message, final=True, ) - def failed(self, message: Message | None = None): + async def failed(self, message: Message | None = None) -> None: """Marks the task as failed and publishes a final status update.""" - self.update_status(TaskState.failed, message=message, final=True) + await self.update_status(TaskState.failed, message=message, final=True) - def submit(self, message: Message | None = None): + async def reject(self, message: Message | None = None) -> None: + """Marks the task as rejected and publishes a final status update.""" + await self.update_status( + TaskState.rejected, message=message, final=True + ) + + async def submit(self, message: Message | None = None) -> None: """Marks the task as submitted and publishes a status update.""" - self.update_status( + await self.update_status( TaskState.submitted, message=message, ) - def start_work(self, message: Message | None = None): + async def start_work(self, message: Message | None = None) -> None: """Marks the task as working and publishes a status update.""" - self.update_status( + await self.update_status( TaskState.working, message=message, ) + async def cancel(self, message: Message | 
None = None) -> None: + """Marks the task as cancelled and publishes a finalstatus update.""" + await self.update_status( + TaskState.canceled, message=message, final=True + ) + + async def requires_input( + self, message: Message | None = None, final: bool = False + ) -> None: + """Marks the task as input required and publishes a status update.""" + await self.update_status( + TaskState.input_required, + message=message, + final=final, + ) + + async def requires_auth( + self, message: Message | None = None, final: bool = False + ) -> None: + """Marks the task as auth required and publishes a status update.""" + await self.update_status( + TaskState.auth_required, message=message, final=final + ) + def new_agent_message( self, parts: list[Part], @@ -123,7 +219,6 @@ def new_agent_message( Args: parts: A list of `Part` objects for the message content. - final: Optional boolean indicating if this is the final message in a stream. metadata: Optional metadata for the message. Returns: @@ -131,9 +226,13 @@ def new_agent_message( """ return Message( role=Role.agent, - taskId=self.task_id, - contextId=self.context_id, - messageId=str(uuid.uuid4()), + task_id=self.task_id, + context_id=self.context_id, + message_id=self._message_id_generator.generate( + IDGeneratorContext( + task_id=self.task_id, context_id=self.context_id + ) + ), metadata=metadata, parts=parts, ) diff --git a/src/a2a/types.py b/src/a2a/types.py index b1aed42e5..918a06b5e 100644 --- a/src/a2a/types.py +++ b/src/a2a/types.py @@ -1,12 +1,14 @@ # generated by datamodel-codegen: -# filename: https://raw.githubusercontent.com/google-a2a/A2A/refs/heads/main/specification/json/a2a.json +# filename: https://raw.githubusercontent.com/a2aproject/A2A/refs/heads/main/specification/json/a2a.json from __future__ import annotations from enum import Enum from typing import Any, Literal -from pydantic import BaseModel, Field, RootModel +from pydantic import Field, RootModel + +from a2a._base import A2ABaseModel class 
A2A(RootModel[Any]): @@ -15,7 +17,7 @@ class A2A(RootModel[Any]): class In(str, Enum): """ - The location of the API key. Valid values are "query", "header", or "cookie". + The location of the API key. """ cookie = 'cookie' @@ -23,717 +25,970 @@ class In(str, Enum): query = 'query' -class APIKeySecurityScheme(BaseModel): +class APIKeySecurityScheme(A2ABaseModel): """ - API Key security scheme. + Defines a security scheme using an API key. """ description: str | None = None """ - Description of this security scheme. + An optional description for the security scheme. """ - in_: In = Field(..., alias='in') + in_: In """ - The location of the API key. Valid values are "query", "header", or "cookie". + The location of the API key. """ name: str """ - The name of the header, query or cookie parameter to be used. + The name of the header, query, or cookie parameter to be used. """ type: Literal['apiKey'] = 'apiKey' + """ + The type of the security scheme. Must be 'apiKey'. + """ -class AgentCapabilities(BaseModel): +class AgentCardSignature(A2ABaseModel): """ - Defines optional capabilities supported by an agent. + AgentCardSignature represents a JWS signature of an AgentCard. + This follows the JSON format of an RFC 7515 JSON Web Signature (JWS). """ - pushNotifications: bool | None = None + header: dict[str, Any] | None = None """ - true if the agent can notify updates to client. + The unprotected JWS header values. """ - stateTransitionHistory: bool | None = None + protected: str """ - true if the agent exposes status change history for tasks. + The protected JWS header for the signature. This is a Base64url-encoded + JSON object, as per RFC 7515. """ - streaming: bool | None = None + signature: str """ - true if the agent supports SSE. + The computed signature, Base64url-encoded. """ -class AgentProvider(BaseModel): +class AgentExtension(A2ABaseModel): + """ + A declaration of a protocol extension supported by an Agent. 
+ """ + + description: str | None = None + """ + A human-readable description of how this agent uses the extension. + """ + params: dict[str, Any] | None = None + """ + Optional, extension-specific configuration parameters. + """ + required: bool | None = None + """ + If true, the client must understand and comply with the extension's requirements + to interact with the agent. + """ + uri: str + """ + The unique URI identifying the extension. + """ + + +class AgentInterface(A2ABaseModel): + """ + Declares a combination of a target URL and a transport protocol for interacting with the agent. + This allows agents to expose the same functionality over multiple transport mechanisms. + """ + + transport: str = Field(..., examples=['JSONRPC', 'GRPC', 'HTTP+JSON']) + """ + The transport protocol supported at this URL. + """ + url: str = Field( + ..., + examples=[ + 'https://api.example.com/a2a/v1', + 'https://grpc.example.com/a2a', + 'https://rest.example.com/v1', + ], + ) + """ + The URL where this interface is available. Must be a valid absolute HTTPS URL in production. + """ + + +class AgentProvider(A2ABaseModel): """ Represents the service provider of an agent. """ organization: str """ - Agent provider's organization name. + The name of the agent provider's organization. """ url: str """ - Agent provider's URL. + A URL for the agent provider's website or relevant documentation. """ -class AgentSkill(BaseModel): +class AgentSkill(A2ABaseModel): """ - Represents a unit of capability that an agent can perform. + Represents a distinct capability or function that an agent can perform. """ description: str """ - Description of the skill - will be used by the client or a human - as a hint to understand what the skill does. + A detailed description of the skill, intended to help clients or users + understand its purpose and functionality. 
""" - examples: list[str] | None = None + examples: list[str] | None = Field( + default=None, examples=[['I need a recipe for bread']] + ) """ - The set of example scenarios that the skill can perform. - Will be used by the client as a hint to understand how the skill can be used. + Example prompts or scenarios that this skill can handle. Provides a hint to + the client on how to use the skill. """ id: str """ - Unique identifier for the agent's skill. + A unique identifier for the agent's skill. """ - inputModes: list[str] | None = None + input_modes: list[str] | None = None """ - The set of interaction modes that the skill supports - (if different than the default). - Supported mime types for input. + The set of supported input MIME types for this skill, overriding the agent's defaults. """ name: str """ - Human readable name of the skill. + A human-readable name for the skill. + """ + output_modes: list[str] | None = None + """ + The set of supported output MIME types for this skill, overriding the agent's defaults. + """ + security: list[dict[str, list[str]]] | None = Field( + default=None, examples=[[{'google': ['oidc']}]] + ) + """ + Security schemes necessary for the agent to leverage this skill. + As in the overall AgentCard.security, this list represents a logical OR of security + requirement objects. Each object is a set of security schemes that must be used together + (a logical AND). + """ + tags: list[str] = Field( + ..., examples=[['cooking', 'customer support', 'billing']] + ) + """ + A set of keywords describing the skill's capabilities. + """ + + +class AuthenticatedExtendedCardNotConfiguredError(A2ABaseModel): + """ + An A2A-specific error indicating that the agent does not have an Authenticated Extended Card configured """ - outputModes: list[str] | None = None + + code: Literal[-32007] = -32007 + """ + The error code for when an authenticated extended card is not configured. """ - Supported mime types for output. 
+ data: Any | None = None + """ + A primitive or structured value containing additional information about the error. + This may be omitted. """ - tags: list[str] + message: str | None = 'Authenticated Extended Card is not configured' """ - Set of tagwords describing classes of capabilities for this specific skill. + The error message. """ -class AuthorizationCodeOAuthFlow(BaseModel): +class AuthorizationCodeOAuthFlow(A2ABaseModel): """ - Configuration details for a supported OAuth Flow + Defines configuration details for the OAuth 2.0 Authorization Code flow. """ - authorizationUrl: str + authorization_url: str """ - The authorization URL to be used for this flow. This MUST be in the form of a URL. The OAuth2 - standard requires the use of TLS + The authorization URL to be used for this flow. + This MUST be a URL and use TLS. """ - refreshUrl: str | None = None + refresh_url: str | None = None """ - The URL to be used for obtaining refresh tokens. This MUST be in the form of a URL. The OAuth2 - standard requires the use of TLS. + The URL to be used for obtaining refresh tokens. + This MUST be a URL and use TLS. """ scopes: dict[str, str] """ - The available scopes for the OAuth2 security scheme. A map between the scope name and a short - description for it. The map MAY be empty. + The available scopes for the OAuth2 security scheme. A map between the scope + name and a short description for it. """ - tokenUrl: str + token_url: str """ - The token URL to be used for this flow. This MUST be in the form of a URL. The OAuth2 standard - requires the use of TLS. + The token URL to be used for this flow. + This MUST be a URL and use TLS. """ -class ClientCredentialsOAuthFlow(BaseModel): +class ClientCredentialsOAuthFlow(A2ABaseModel): """ - Configuration details for a supported OAuth Flow + Defines configuration details for the OAuth 2.0 Client Credentials flow. 
""" - refreshUrl: str | None = None + refresh_url: str | None = None """ - The URL to be used for obtaining refresh tokens. This MUST be in the form of a URL. The OAuth2 - standard requires the use of TLS. + The URL to be used for obtaining refresh tokens. This MUST be a URL. """ scopes: dict[str, str] """ - The available scopes for the OAuth2 security scheme. A map between the scope name and a short - description for it. The map MAY be empty. + The available scopes for the OAuth2 security scheme. A map between the scope + name and a short description for it. """ - tokenUrl: str + token_url: str """ - The token URL to be used for this flow. This MUST be in the form of a URL. The OAuth2 standard - requires the use of TLS. + The token URL to be used for this flow. This MUST be a URL. """ -class ContentTypeNotSupportedError(BaseModel): +class ContentTypeNotSupportedError(A2ABaseModel): """ - A2A specific error indicating incompatible content types between request and agent capabilities. + An A2A-specific error indicating an incompatibility between the requested + content types and the agent's capabilities. """ code: Literal[-32005] = -32005 """ - A Number that indicates the error type that occurred. + The error code for an unsupported content type. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Incompatible content types' """ - A String providing a short description of the error. + The error message. """ -class DataPart(BaseModel): +class DataPart(A2ABaseModel): """ - Represents a structured data segment within a message part. + Represents a structured data segment (e.g., JSON) within a message or artifact. """ data: dict[str, Any] """ - Structured data content + The structured data content. 
""" kind: Literal['data'] = 'data' """ - Part type - data for DataParts + The type of this part, used as a discriminator. Always 'data'. """ metadata: dict[str, Any] | None = None """ - Optional metadata associated with the part. + Optional metadata associated with this part. + """ + + +class DeleteTaskPushNotificationConfigParams(A2ABaseModel): + """ + Defines parameters for deleting a specific push notification configuration for a task. + """ + + id: str + """ + The unique identifier (e.g. UUID) of the task. + """ + metadata: dict[str, Any] | None = None + """ + Optional metadata associated with the request. + """ + push_notification_config_id: str + """ + The ID of the push notification configuration to delete. + """ + + +class DeleteTaskPushNotificationConfigRequest(A2ABaseModel): + """ + Represents a JSON-RPC request for the `tasks/pushNotificationConfig/delete` method. + """ + + id: str | int + """ + The identifier for this request. """ + jsonrpc: Literal['2.0'] = '2.0' + """ + The version of the JSON-RPC protocol. MUST be exactly "2.0". + """ + method: Literal['tasks/pushNotificationConfig/delete'] = ( + 'tasks/pushNotificationConfig/delete' + ) + """ + The method name. Must be 'tasks/pushNotificationConfig/delete'. + """ + params: DeleteTaskPushNotificationConfigParams + """ + The parameters identifying the push notification configuration to delete. + """ + +class DeleteTaskPushNotificationConfigSuccessResponse(A2ABaseModel): + """ + Represents a successful JSON-RPC response for the `tasks/pushNotificationConfig/delete` method. + """ -class FileBase(BaseModel): + id: str | int | None = None """ - Represents the base entity for FileParts + The identifier established by the client. + """ + jsonrpc: Literal['2.0'] = '2.0' """ + The version of the JSON-RPC protocol. MUST be exactly "2.0". + """ + result: None + """ + The result is null on successful deletion. 
+ """ + - mimeType: str | None = None +class FileBase(A2ABaseModel): """ - Optional mimeType for the file + Defines base properties for a file. + """ + + mime_type: str | None = None + """ + The MIME type of the file (e.g., "application/pdf"). """ name: str | None = None """ - Optional name for the file + An optional name for the file (e.g., "document.pdf"). """ -class FileWithBytes(BaseModel): +class FileWithBytes(A2ABaseModel): """ - Define the variant where 'bytes' is present and 'uri' is absent + Represents a file with its content provided directly as a base64-encoded string. """ bytes: str """ - base64 encoded content of the file + The base64-encoded content of the file. """ - mimeType: str | None = None + mime_type: str | None = None """ - Optional mimeType for the file + The MIME type of the file (e.g., "application/pdf"). """ name: str | None = None """ - Optional name for the file + An optional name for the file (e.g., "document.pdf"). """ -class FileWithUri(BaseModel): +class FileWithUri(A2ABaseModel): """ - Define the variant where 'uri' is present and 'bytes' is absent + Represents a file with its content located at a specific URI. """ - mimeType: str | None = None + mime_type: str | None = None """ - Optional mimeType for the file + The MIME type of the file (e.g., "application/pdf"). """ name: str | None = None """ - Optional name for the file + An optional name for the file (e.g., "document.pdf"). """ uri: str """ - URL for the File content + A URL pointing to the file's content. + """ + + +class GetAuthenticatedExtendedCardRequest(A2ABaseModel): + """ + Represents a JSON-RPC request for the `agent/getAuthenticatedExtendedCard` method. + """ + + id: str | int + """ + The identifier for this request. + """ + jsonrpc: Literal['2.0'] = '2.0' + """ + The version of the JSON-RPC protocol. MUST be exactly "2.0". + """ + method: Literal['agent/getAuthenticatedExtendedCard'] = ( + 'agent/getAuthenticatedExtendedCard' + ) + """ + The method name. 
Must be 'agent/getAuthenticatedExtendedCard'. """ -class HTTPAuthSecurityScheme(BaseModel): +class GetTaskPushNotificationConfigParams(A2ABaseModel): """ - HTTP Authentication security scheme. + Defines parameters for fetching a specific push notification configuration for a task. """ - bearerFormat: str | None = None + id: str + """ + The unique identifier (e.g. UUID) of the task. + """ + metadata: dict[str, Any] | None = None + """ + Optional metadata associated with the request. + """ + push_notification_config_id: str | None = None + """ + The ID of the push notification configuration to retrieve. + """ + + +class HTTPAuthSecurityScheme(A2ABaseModel): + """ + Defines a security scheme using HTTP authentication. """ - A hint to the client to identify how the bearer token is formatted. Bearer tokens are usually - generated by an authorization server, so this information is primarily for documentation - purposes. + + bearer_format: str | None = None + """ + A hint to the client to identify how the bearer token is formatted (e.g., "JWT"). + This is primarily for documentation purposes. """ description: str | None = None """ - Description of this security scheme. + An optional description for the security scheme. """ scheme: str """ - The name of the HTTP Authentication scheme to be used in the Authorization header as defined - in RFC7235. The values used SHOULD be registered in the IANA Authentication Scheme registry. - The value is case-insensitive, as defined in RFC7235. + The name of the HTTP Authentication scheme to be used in the Authorization header, + as defined in RFC7235 (e.g., "Bearer"). + This value should be registered in the IANA Authentication Scheme registry. """ type: Literal['http'] = 'http' + """ + The type of the security scheme. Must be 'http'. + """ -class ImplicitOAuthFlow(BaseModel): +class ImplicitOAuthFlow(A2ABaseModel): """ - Configuration details for a supported OAuth Flow + Defines configuration details for the OAuth 2.0 Implicit flow. 
""" - authorizationUrl: str + authorization_url: str """ - The authorization URL to be used for this flow. This MUST be in the form of a URL. The OAuth2 - standard requires the use of TLS + The authorization URL to be used for this flow. This MUST be a URL. """ - refreshUrl: str | None = None + refresh_url: str | None = None """ - The URL to be used for obtaining refresh tokens. This MUST be in the form of a URL. The OAuth2 - standard requires the use of TLS. + The URL to be used for obtaining refresh tokens. This MUST be a URL. """ scopes: dict[str, str] """ - The available scopes for the OAuth2 security scheme. A map between the scope name and a short - description for it. The map MAY be empty. + The available scopes for the OAuth2 security scheme. A map between the scope + name and a short description for it. """ -class InternalError(BaseModel): +class InternalError(A2ABaseModel): """ - JSON-RPC error indicating an internal JSON-RPC error on the server. + An error indicating an internal error on the server. """ code: Literal[-32603] = -32603 """ - A Number that indicates the error type that occurred. + The error code for an internal server error. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Internal error' """ - A String providing a short description of the error. + The error message. """ -class InvalidAgentResponseError(BaseModel): +class InvalidAgentResponseError(A2ABaseModel): """ - A2A specific error indicating agent returned invalid response for the current method + An A2A-specific error indicating that the agent returned a response that + does not conform to the specification for the current method. """ code: Literal[-32006] = -32006 """ - A Number that indicates the error type that occurred. + The error code for an invalid agent response. 
""" data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Invalid agent response' """ - A String providing a short description of the error. + The error message. """ -class InvalidParamsError(BaseModel): +class InvalidParamsError(A2ABaseModel): """ - JSON-RPC error indicating invalid method parameter(s). + An error indicating that the method parameters are invalid. """ code: Literal[-32602] = -32602 """ - A Number that indicates the error type that occurred. + The error code for an invalid parameters error. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Invalid parameters' """ - A String providing a short description of the error. + The error message. """ -class InvalidRequestError(BaseModel): +class InvalidRequestError(A2ABaseModel): """ - JSON-RPC error indicating the JSON sent is not a valid Request object. + An error indicating that the JSON sent is not a valid Request object. """ code: Literal[-32600] = -32600 """ - A Number that indicates the error type that occurred. + The error code for an invalid request. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Request payload validation error' """ - A String providing a short description of the error. + The error message. """ -class JSONParseError(BaseModel): +class JSONParseError(A2ABaseModel): """ - JSON-RPC error indicating invalid JSON was received by the server. 
+ An error indicating that the server received invalid JSON. """ code: Literal[-32700] = -32700 """ - A Number that indicates the error type that occurred. + The error code for a JSON parse error. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Invalid JSON payload' """ - A String providing a short description of the error. + The error message. """ -class JSONRPCError(BaseModel): +class JSONRPCError(A2ABaseModel): """ - Represents a JSON-RPC 2.0 Error object. - This is typically included in a JSONRPCErrorResponse when an error occurs. + Represents a JSON-RPC 2.0 Error object, included in an error response. """ code: int """ - A Number that indicates the error type that occurred. + A number that indicates the error type that occurred. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str """ - A String providing a short description of the error. + A string providing a short description of the error. """ -class JSONRPCMessage(BaseModel): +class JSONRPCMessage(A2ABaseModel): """ - Base interface for any JSON-RPC 2.0 request or response. + Defines the base structure for any JSON-RPC 2.0 request, response, or notification. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + A unique identifier established by the client. It must be a String, a Number, or null. + The server must reply with the same value in the response. This property is omitted for notifications. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". 
+ The version of the JSON-RPC protocol. MUST be exactly "2.0". """ -class JSONRPCRequest(BaseModel): +class JSONRPCRequest(A2ABaseModel): """ Represents a JSON-RPC 2.0 Request object. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + A unique identifier established by the client. It must be a String, a Number, or null. + The server must reply with the same value in the response. This property is omitted for notifications. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ method: str """ - A String containing the name of the method to be invoked. + A string containing the name of the method to be invoked. """ params: dict[str, Any] | None = None """ - A Structured value that holds the parameter values to be used during the invocation of the method. + A structured value holding the parameter values to be used during the method invocation. """ -class JSONRPCSuccessResponse(BaseModel): +class JSONRPCSuccessResponse(A2ABaseModel): """ - Represents a JSON-RPC 2.0 Success Response object. + Represents a successful JSON-RPC 2.0 Response object. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier established by the client. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ result: Any """ - The result object on success + The value of this member is determined by the method invoked on the Server. + """ + + +class ListTaskPushNotificationConfigParams(A2ABaseModel): + """ + Defines parameters for listing all push notification configurations associated with a task. 
+ """ + + id: str + """ + The unique identifier (e.g. UUID) of the task. + """ + metadata: dict[str, Any] | None = None + """ + Optional metadata associated with the request. + """ + + +class ListTaskPushNotificationConfigRequest(A2ABaseModel): + """ + Represents a JSON-RPC request for the `tasks/pushNotificationConfig/list` method. + """ + + id: str | int + """ + The identifier for this request. + """ + jsonrpc: Literal['2.0'] = '2.0' + """ + The version of the JSON-RPC protocol. MUST be exactly "2.0". + """ + method: Literal['tasks/pushNotificationConfig/list'] = ( + 'tasks/pushNotificationConfig/list' + ) + """ + The method name. Must be 'tasks/pushNotificationConfig/list'. + """ + params: ListTaskPushNotificationConfigParams + """ + The parameters identifying the task whose configurations are to be listed. """ class Role(str, Enum): """ - Message sender's role + Identifies the sender of the message. `user` for the client, `agent` for the service. """ agent = 'agent' user = 'user' -class MethodNotFoundError(BaseModel): +class MethodNotFoundError(A2ABaseModel): """ - JSON-RPC error indicating the method does not exist or is not available. + An error indicating that the requested method does not exist or is not available. """ code: Literal[-32601] = -32601 """ - A Number that indicates the error type that occurred. + The error code for a method not found error. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Method not found' """ - A String providing a short description of the error. + The error message. + """ + + +class MutualTLSSecurityScheme(A2ABaseModel): + """ + Defines a security scheme using mTLS authentication. """ + description: str | None = None + """ + An optional description for the security scheme. 
+ """ + type: Literal['mutualTLS'] = 'mutualTLS' + """ + The type of the security scheme. Must be 'mutualTLS'. + """ -class OpenIdConnectSecurityScheme(BaseModel): + +class OpenIdConnectSecurityScheme(A2ABaseModel): """ - OpenID Connect security scheme configuration. + Defines a security scheme using OpenID Connect. """ description: str | None = None """ - Description of this security scheme. + An optional description for the security scheme. """ - openIdConnectUrl: str + open_id_connect_url: str """ - Well-known URL to discover the [[OpenID-Connect-Discovery]] provider metadata. + The OpenID Connect Discovery URL for the OIDC provider's metadata. """ type: Literal['openIdConnect'] = 'openIdConnect' + """ + The type of the security scheme. Must be 'openIdConnect'. + """ -class PartBase(BaseModel): +class PartBase(A2ABaseModel): """ - Base properties common to all message parts. + Defines base properties common to all message or artifact parts. """ metadata: dict[str, Any] | None = None """ - Optional metadata associated with the part. + Optional metadata associated with this part. """ -class PasswordOAuthFlow(BaseModel): +class PasswordOAuthFlow(A2ABaseModel): """ - Configuration details for a supported OAuth Flow + Defines configuration details for the OAuth 2.0 Resource Owner Password flow. """ - refreshUrl: str | None = None + refresh_url: str | None = None """ - The URL to be used for obtaining refresh tokens. This MUST be in the form of a URL. The OAuth2 - standard requires the use of TLS. + The URL to be used for obtaining refresh tokens. This MUST be a URL. """ scopes: dict[str, str] """ - The available scopes for the OAuth2 security scheme. A map between the scope name and a short - description for it. The map MAY be empty. + The available scopes for the OAuth2 security scheme. A map between the scope + name and a short description for it. """ - tokenUrl: str + token_url: str """ - The token URL to be used for this flow. This MUST be in the form of a URL. 
The OAuth2 standard - requires the use of TLS. + The token URL to be used for this flow. This MUST be a URL. """ -class PushNotificationAuthenticationInfo(BaseModel): +class PushNotificationAuthenticationInfo(A2ABaseModel): """ - Defines authentication details for push notifications. + Defines authentication details for a push notification endpoint. """ credentials: str | None = None """ - Optional credentials + Optional credentials required by the push notification endpoint. """ schemes: list[str] """ - Supported authentication schemes - e.g. Basic, Bearer + A list of supported authentication schemes (e.g., 'Basic', 'Bearer'). """ -class PushNotificationConfig(BaseModel): +class PushNotificationConfig(A2ABaseModel): """ - Configuration for setting up push notifications for task updates. + Defines the configuration for setting up push notifications for task updates. """ authentication: PushNotificationAuthenticationInfo | None = None + """ + Optional authentication details for the agent to use when calling the notification URL. + """ + id: str | None = None + """ + A unique identifier (e.g. UUID) for the push notification configuration, set by the client + to support multiple notification callbacks. + """ token: str | None = None """ - Token unique to this task/session. + A unique token for this task or session to validate incoming push notifications. """ url: str """ - URL for sending the push notifications. + The callback URL where the agent should send push notifications. """ -class PushNotificationNotSupportedError(BaseModel): +class PushNotificationNotSupportedError(A2ABaseModel): """ - A2A specific error indicating the agent does not support push notifications. + An A2A-specific error indicating that the agent does not support push notifications. """ code: Literal[-32003] = -32003 """ - A Number that indicates the error type that occurred. + The error code for when push notifications are not supported. 
""" data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Push Notification is not supported' """ - A String providing a short description of the error. + The error message. """ -class SecuritySchemeBase(BaseModel): +class SecuritySchemeBase(A2ABaseModel): """ - Base properties shared by all security schemes. + Defines base properties shared by all security scheme objects. """ description: str | None = None """ - Description of this security scheme. + An optional description for the security scheme. """ -class TaskIdParams(BaseModel): +class TaskIdParams(A2ABaseModel): """ - Parameters containing only a task ID, used for simple task operations. + Defines parameters containing a task ID, used for simple task operations. """ id: str """ - Task id. + The unique identifier (e.g. UUID) of the task. """ metadata: dict[str, Any] | None = None + """ + Optional metadata associated with the request. + """ -class TaskNotCancelableError(BaseModel): +class TaskNotCancelableError(A2ABaseModel): """ - A2A specific error indicating the task is in a state where it cannot be canceled. + An A2A-specific error indicating that the task is in a state where it cannot be canceled. """ code: Literal[-32002] = -32002 """ - A Number that indicates the error type that occurred. + The error code for a task that cannot be canceled. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Task cannot be canceled' """ - A String providing a short description of the error. + The error message. 
""" -class TaskNotFoundError(BaseModel): +class TaskNotFoundError(A2ABaseModel): """ - A2A specific error indicating the requested task ID was not found. + An A2A-specific error indicating that the requested task ID was not found. """ code: Literal[-32001] = -32001 """ - A Number that indicates the error type that occurred. + The error code for a task not found error. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'Task not found' """ - A String providing a short description of the error. + The error message. """ -class TaskPushNotificationConfig(BaseModel): +class TaskPushNotificationConfig(A2ABaseModel): """ - Parameters for setting or getting push notification configuration for a task + A container associating a push notification configuration with a specific task. """ - pushNotificationConfig: PushNotificationConfig + push_notification_config: PushNotificationConfig """ - Push notification configuration. + The push notification configuration for this task. """ - taskId: str + task_id: str """ - Task id. + The unique identifier (e.g. UUID) of the task. """ -class TaskQueryParams(BaseModel): +class TaskQueryParams(A2ABaseModel): """ - Parameters for querying a task, including optional history length. + Defines parameters for querying a task, with an option to limit history length. """ - historyLength: int | None = None + history_length: int | None = None """ - Number of recent messages to be retrieved. + The number of most recent messages from the task's history to retrieve. """ id: str """ - Task id. + The unique identifier (e.g. UUID) of the task. """ metadata: dict[str, Any] | None = None + """ + Optional metadata associated with the request. 
+ """ -class TaskResubscriptionRequest(BaseModel): +class TaskResubscriptionRequest(A2ABaseModel): """ - JSON-RPC request model for the 'tasks/resubscribe' method. + Represents a JSON-RPC request for the `tasks/resubscribe` method, used to resume a streaming connection. """ id: str | int """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier for this request. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ method: Literal['tasks/resubscribe'] = 'tasks/resubscribe' """ - A String containing the name of the method to be invoked. + The method name. Must be 'tasks/resubscribe'. """ params: TaskIdParams """ - A Structured value that holds the parameter values to be used during the invocation of the method. + The parameters identifying the task to resubscribe to. """ class TaskState(str, Enum): """ - Represents the possible states of a Task. + Defines the lifecycle states of a Task. """ submitted = 'submitted' @@ -747,42 +1002,52 @@ class TaskState(str, Enum): unknown = 'unknown' -class TextPart(BaseModel): +class TextPart(A2ABaseModel): """ - Represents a text segment within parts. + Represents a text segment within a message or artifact. """ kind: Literal['text'] = 'text' """ - Part type - text for TextParts + The type of this part, used as a discriminator. Always 'text'. """ metadata: dict[str, Any] | None = None """ - Optional metadata associated with the part. + Optional metadata associated with this part. """ text: str """ - Text content + The string content of the text part. """ -class UnsupportedOperationError(BaseModel): +class TransportProtocol(str, Enum): """ - A2A specific error indicating the requested operation is not supported by the agent. + Supported A2A transport protocols. 
+ """ + + jsonrpc = 'JSONRPC' + grpc = 'GRPC' + http_json = 'HTTP+JSON' + + +class UnsupportedOperationError(A2ABaseModel): + """ + An A2A-specific error indicating that the requested operation is not supported by the agent. """ code: Literal[-32004] = -32004 """ - A Number that indicates the error type that occurred. + The error code for an unsupported operation. """ data: Any | None = None """ - A Primitive or Structured value that contains additional information about the error. + A primitive or structured value containing additional information about the error. This may be omitted. """ message: str | None = 'This operation is not supported' """ - A String providing a short description of the error. + The error message. """ @@ -799,6 +1064,7 @@ class A2AError( | UnsupportedOperationError | ContentTypeNotSupportedError | InvalidAgentResponseError + | AuthenticatedExtendedCardNotConfiguredError ] ): root: ( @@ -813,123 +1079,147 @@ class A2AError( | UnsupportedOperationError | ContentTypeNotSupportedError | InvalidAgentResponseError + | AuthenticatedExtendedCardNotConfiguredError ) + """ + A discriminated union of all standard JSON-RPC and A2A-specific error types. + """ + + +class AgentCapabilities(A2ABaseModel): + """ + Defines optional capabilities supported by an agent. + """ + + extensions: list[AgentExtension] | None = None + """ + A list of protocol extensions supported by the agent. + """ + push_notifications: bool | None = None + """ + Indicates if the agent supports sending push notifications for asynchronous task updates. + """ + state_transition_history: bool | None = None + """ + Indicates if the agent provides a history of state transitions for a task. + """ + streaming: bool | None = None + """ + Indicates if the agent supports Server-Sent Events (SSE) for streaming responses. + """ -class CancelTaskRequest(BaseModel): +class CancelTaskRequest(A2ABaseModel): """ - JSON-RPC request model for the 'tasks/cancel' method. 
+ Represents a JSON-RPC request for the `tasks/cancel` method. """ id: str | int """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier for this request. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ method: Literal['tasks/cancel'] = 'tasks/cancel' """ - A String containing the name of the method to be invoked. + The method name. Must be 'tasks/cancel'. """ params: TaskIdParams """ - A Structured value that holds the parameter values to be used during the invocation of the method. + The parameters identifying the task to cancel. """ -class FilePart(BaseModel): +class FilePart(A2ABaseModel): """ - Represents a File segment within parts. + Represents a file segment within a message or artifact. The file content can be + provided either directly as bytes or as a URI. """ file: FileWithBytes | FileWithUri """ - File content either as url or bytes + The file content, represented as either a URI or as base64-encoded bytes. """ kind: Literal['file'] = 'file' """ - Part type - file for FileParts + The type of this part, used as a discriminator. Always 'file'. """ metadata: dict[str, Any] | None = None """ - Optional metadata associated with the part. + Optional metadata associated with this part. """ -class GetTaskPushNotificationConfigRequest(BaseModel): +class GetTaskPushNotificationConfigRequest(A2ABaseModel): """ - JSON-RPC request model for the 'tasks/pushNotificationConfig/get' method. + Represents a JSON-RPC request for the `tasks/pushNotificationConfig/get` method. """ id: str | int """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier for this request. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. 
MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ method: Literal['tasks/pushNotificationConfig/get'] = ( 'tasks/pushNotificationConfig/get' ) """ - A String containing the name of the method to be invoked. + The method name. Must be 'tasks/pushNotificationConfig/get'. """ - params: TaskIdParams + params: TaskIdParams | GetTaskPushNotificationConfigParams """ - A Structured value that holds the parameter values to be used during the invocation of the method. + The parameters for getting a push notification configuration. """ -class GetTaskPushNotificationConfigSuccessResponse(BaseModel): +class GetTaskPushNotificationConfigSuccessResponse(A2ABaseModel): """ - JSON-RPC success response model for the 'tasks/pushNotificationConfig/get' method. + Represents a successful JSON-RPC response for the `tasks/pushNotificationConfig/get` method. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier established by the client. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ result: TaskPushNotificationConfig """ - The result object on success. + The result, containing the requested push notification configuration. """ -class GetTaskRequest(BaseModel): +class GetTaskRequest(A2ABaseModel): """ - JSON-RPC request model for the 'tasks/get' method. + Represents a JSON-RPC request for the `tasks/get` method. """ id: str | int """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier for this request. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". 
""" method: Literal['tasks/get'] = 'tasks/get' """ - A String containing the name of the method to be invoked. + The method name. Must be 'tasks/get'. """ params: TaskQueryParams """ - A Structured value that holds the parameter values to be used during the invocation of the method. + The parameters for querying a task. """ -class JSONRPCErrorResponse(BaseModel): +class JSONRPCErrorResponse(A2ABaseModel): """ Represents a JSON-RPC 2.0 Error Response object. """ @@ -947,225 +1237,288 @@ class JSONRPCErrorResponse(BaseModel): | UnsupportedOperationError | ContentTypeNotSupportedError | InvalidAgentResponseError + | AuthenticatedExtendedCardNotConfiguredError ) + """ + An object describing the error that occurred. + """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier established by the client. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ -class MessageSendConfiguration(BaseModel): +class ListTaskPushNotificationConfigSuccessResponse(A2ABaseModel): """ - Configuration for the send message request. + Represents a successful JSON-RPC response for the `tasks/pushNotificationConfig/list` method. """ - acceptedOutputModes: list[str] + id: str | int | None = None """ - Accepted output modalities by the client. + The identifier established by the client. + """ + jsonrpc: Literal['2.0'] = '2.0' + """ + The version of the JSON-RPC protocol. MUST be exactly "2.0". + """ + result: list[TaskPushNotificationConfig] + """ + The result, containing an array of all push notification configurations for the task. + """ + + +class MessageSendConfiguration(A2ABaseModel): + """ + Defines configuration options for a `message/send` or `message/stream` request. 
+ """ + + accepted_output_modes: list[str] | None = None + """ + A list of output MIME types the client is prepared to accept in the response. """ blocking: bool | None = None """ - If the server should treat the client as a blocking request. + If true, the client will wait for the task to complete. The server may reject this if the task is long-running. """ - historyLength: int | None = None + history_length: int | None = None """ - Number of recent messages to be retrieved. + The number of most recent messages from the task's history to retrieve in the response. """ - pushNotificationConfig: PushNotificationConfig | None = None + push_notification_config: PushNotificationConfig | None = None """ - Where the server should send notifications when disconnected. + Configuration for the agent to send push notifications for updates after the initial response. """ -class OAuthFlows(BaseModel): +class OAuthFlows(A2ABaseModel): """ - Allows configuration of the supported OAuth Flows + Defines the configuration for the supported OAuth 2.0 flows. """ - authorizationCode: AuthorizationCodeOAuthFlow | None = None + authorization_code: AuthorizationCodeOAuthFlow | None = None """ Configuration for the OAuth Authorization Code flow. Previously called accessCode in OpenAPI 2.0. """ - clientCredentials: ClientCredentialsOAuthFlow | None = None + client_credentials: ClientCredentialsOAuthFlow | None = None """ - Configuration for the OAuth Client Credentials flow. Previously called application in OpenAPI 2.0 + Configuration for the OAuth Client Credentials flow. Previously called application in OpenAPI 2.0. """ implicit: ImplicitOAuthFlow | None = None """ - Configuration for the OAuth Implicit flow + Configuration for the OAuth Implicit flow. """ password: PasswordOAuthFlow | None = None """ - Configuration for the OAuth Resource Owner Password flow + Configuration for the OAuth Resource Owner Password flow. 
""" class Part(RootModel[TextPart | FilePart | DataPart]): root: TextPart | FilePart | DataPart """ - Represents a part of a message, which can be text, a file, or structured data. + A discriminated union representing a part of a message or artifact, which can + be text, a file, or structured data. """ -class SetTaskPushNotificationConfigRequest(BaseModel): +class SetTaskPushNotificationConfigRequest(A2ABaseModel): """ - JSON-RPC request model for the 'tasks/pushNotificationConfig/set' method. + Represents a JSON-RPC request for the `tasks/pushNotificationConfig/set` method. """ id: str | int """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier for this request. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ method: Literal['tasks/pushNotificationConfig/set'] = ( 'tasks/pushNotificationConfig/set' ) """ - A String containing the name of the method to be invoked. + The method name. Must be 'tasks/pushNotificationConfig/set'. """ params: TaskPushNotificationConfig """ - A Structured value that holds the parameter values to be used during the invocation of the method. + The parameters for setting the push notification configuration. """ -class SetTaskPushNotificationConfigSuccessResponse(BaseModel): +class SetTaskPushNotificationConfigSuccessResponse(A2ABaseModel): """ - JSON-RPC success response model for the 'tasks/pushNotificationConfig/set' method. + Represents a successful JSON-RPC response for the `tasks/pushNotificationConfig/set` method. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier established by the client. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. 
MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ result: TaskPushNotificationConfig """ - The result object on success. + The result, containing the configured push notification settings. """ -class Artifact(BaseModel): +class Artifact(A2ABaseModel): """ - Represents an artifact generated for a task. + Represents a file, data structure, or other resource generated by an agent during a task. """ - artifactId: str + artifact_id: str """ - Unique identifier for the artifact. + A unique identifier (e.g. UUID) for the artifact within the scope of the task. """ description: str | None = None """ - Optional description for the artifact. + An optional, human-readable description of the artifact. + """ + extensions: list[str] | None = None + """ + The URIs of extensions that are relevant to this artifact. """ metadata: dict[str, Any] | None = None """ - Extension metadata. + Optional metadata for extensions. The key is an extension-specific identifier. """ name: str | None = None """ - Optional name for the artifact. + An optional, human-readable name for the artifact. """ parts: list[Part] """ - Artifact parts. + An array of content parts that make up the artifact. + """ + + +class DeleteTaskPushNotificationConfigResponse( + RootModel[ + JSONRPCErrorResponse | DeleteTaskPushNotificationConfigSuccessResponse + ] +): + root: JSONRPCErrorResponse | DeleteTaskPushNotificationConfigSuccessResponse + """ + Represents a JSON-RPC response for the `tasks/pushNotificationConfig/delete` method. """ class GetTaskPushNotificationConfigResponse( - RootModel[JSONRPCErrorResponse | GetTaskPushNotificationConfigSuccessResponse] + RootModel[ + JSONRPCErrorResponse | GetTaskPushNotificationConfigSuccessResponse + ] ): root: JSONRPCErrorResponse | GetTaskPushNotificationConfigSuccessResponse """ - JSON-RPC response for the 'tasks/pushNotificationConfig/set' method. + Represents a JSON-RPC response for the `tasks/pushNotificationConfig/get` method. 
+ """ + + +class ListTaskPushNotificationConfigResponse( + RootModel[ + JSONRPCErrorResponse | ListTaskPushNotificationConfigSuccessResponse + ] +): + root: JSONRPCErrorResponse | ListTaskPushNotificationConfigSuccessResponse + """ + Represents a JSON-RPC response for the `tasks/pushNotificationConfig/list` method. """ -class Message(BaseModel): +class Message(A2ABaseModel): """ - Represents a single message exchanged between user and agent. + Represents a single message in the conversation between a user and an agent. """ - contextId: str | None = None + context_id: str | None = None + """ + The context ID for this message, used to group related interactions. """ - The context the message is associated with + extensions: list[str] | None = None + """ + The URIs of extensions that are relevant to this message. """ kind: Literal['message'] = 'message' """ - Event type + The type of this object, used as a discriminator. Always 'message' for a Message. """ - messageId: str + message_id: str """ - Identifier created by the message creator + A unique identifier for the message, typically a UUID, generated by the sender. """ metadata: dict[str, Any] | None = None """ - Extension metadata. + Optional metadata for extensions. The key is an extension-specific identifier. """ parts: list[Part] """ - Message content + An array of content parts that form the message body. A message can be + composed of multiple parts of different types (e.g., text and files). """ - referenceTaskIds: list[str] | None = None + reference_task_ids: list[str] | None = None """ - List of tasks referenced as context by this message. + A list of other task IDs that this message references for additional context. """ role: Role """ - Message sender's role + Identifies the sender of the message. `user` for the client, `agent` for the service. """ - taskId: str | None = None + task_id: str | None = None """ - Identifier of task the message is related to + The ID of the task this message is part of. 
Can be omitted for the first message of a new task. """ -class MessageSendParams(BaseModel): +class MessageSendParams(A2ABaseModel): """ - Sent by the client to the agent as a request. May create, continue or restart a task. + Defines the parameters for a request to send a message to an agent. This can be used + to create a new task, continue an existing one, or restart a task. """ configuration: MessageSendConfiguration | None = None """ - Send message configuration. + Optional configuration for the send request. """ message: Message """ - The message being sent to the server. + The message object being sent to the agent. """ metadata: dict[str, Any] | None = None """ - Extension metadata. + Optional metadata for extensions. """ -class OAuth2SecurityScheme(BaseModel): +class OAuth2SecurityScheme(A2ABaseModel): """ - OAuth2.0 security scheme configuration. + Defines a security scheme using OAuth 2.0. """ description: str | None = None """ - Description of this security scheme. + An optional description for the security scheme. """ flows: OAuthFlows """ - An object containing configuration information for the flow types supported. + An object containing configuration information for the supported OAuth 2.0 flows. + """ + oauth2_metadata_url: str | None = None + """ + URL to the oauth2 authorization server metadata + [RFC8414](https://datatracker.ietf.org/doc/html/rfc8414). TLS is required. """ type: Literal['oauth2'] = 'oauth2' + """ + The type of the security scheme. Must be 'oauth2'. 
+ """ class SecurityScheme( @@ -1174,6 +1527,7 @@ class SecurityScheme( | HTTPAuthSecurityScheme | OAuth2SecurityScheme | OpenIdConnectSecurityScheme + | MutualTLSSecurityScheme ] ): root: ( @@ -1181,149 +1535,157 @@ class SecurityScheme( | HTTPAuthSecurityScheme | OAuth2SecurityScheme | OpenIdConnectSecurityScheme + | MutualTLSSecurityScheme ) """ - Mirrors the OpenAPI Security Scheme Object - (https://swagger.io/specification/#security-scheme-object) + Defines a security scheme that can be used to secure an agent's endpoints. + This is a discriminated union type based on the OpenAPI 3.0 Security Scheme Object. """ -class SendMessageRequest(BaseModel): +class SendMessageRequest(A2ABaseModel): """ - JSON-RPC request model for the 'message/send' method. + Represents a JSON-RPC request for the `message/send` method. """ id: str | int """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier for this request. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ method: Literal['message/send'] = 'message/send' """ - A String containing the name of the method to be invoked. + The method name. Must be 'message/send'. """ params: MessageSendParams """ - A Structured value that holds the parameter values to be used during the invocation of the method. + The parameters for sending a message. """ -class SendStreamingMessageRequest(BaseModel): +class SendStreamingMessageRequest(A2ABaseModel): """ - JSON-RPC request model for the 'message/stream' method. + Represents a JSON-RPC request for the `message/stream` method. """ id: str | int """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier for this request. 
""" jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ method: Literal['message/stream'] = 'message/stream' """ - A String containing the name of the method to be invoked. + The method name. Must be 'message/stream'. """ params: MessageSendParams """ - A Structured value that holds the parameter values to be used during the invocation of the method. + The parameters for sending a message. """ class SetTaskPushNotificationConfigResponse( - RootModel[JSONRPCErrorResponse | SetTaskPushNotificationConfigSuccessResponse] + RootModel[ + JSONRPCErrorResponse | SetTaskPushNotificationConfigSuccessResponse + ] ): root: JSONRPCErrorResponse | SetTaskPushNotificationConfigSuccessResponse """ - JSON-RPC response for the 'tasks/pushNotificationConfig/set' method. + Represents a JSON-RPC response for the `tasks/pushNotificationConfig/set` method. """ -class TaskArtifactUpdateEvent(BaseModel): +class TaskArtifactUpdateEvent(A2ABaseModel): """ - Sent by server during sendStream or subscribe requests + An event sent by the agent to notify the client that an artifact has been + generated or updated. This is typically used in streaming models. """ append: bool | None = None """ - Indicates if this artifact appends to a previous one + If true, the content of this artifact should be appended to a previously sent artifact with the same ID. """ artifact: Artifact """ - Generated artifact + The artifact that was generated or updated. """ - contextId: str + context_id: str """ - The context the task is associated with + The context ID associated with the task. """ kind: Literal['artifact-update'] = 'artifact-update' """ - Event type + The type of this event, used as a discriminator. Always 'artifact-update'. 
""" - lastChunk: bool | None = None + last_chunk: bool | None = None """ - Indicates if this is the last chunk of the artifact + If true, this is the final chunk of the artifact. """ metadata: dict[str, Any] | None = None """ - Extension metadata. + Optional metadata for extensions. """ - taskId: str + task_id: str """ - Task id + The ID of the task this artifact belongs to. """ -class TaskStatus(BaseModel): +class TaskStatus(A2ABaseModel): """ - TaskState and accompanying message. + Represents the status of a task at a specific point in time. """ message: Message | None = None """ - Additional status updates for client + An optional, human-readable message providing more details about the current status. """ state: TaskState - timestamp: str | None = None """ - ISO 8601 datetime string when the status was recorded. + The current state of the task's lifecycle. + """ + timestamp: str | None = Field( + default=None, examples=['2023-10-27T10:00:00Z'] + ) + """ + An ISO 8601 datetime string indicating when this status was recorded. """ -class TaskStatusUpdateEvent(BaseModel): +class TaskStatusUpdateEvent(A2ABaseModel): """ - Sent by server during sendStream or subscribe requests + An event sent by the agent to notify the client of a change in a task's status. + This is typically used in streaming or subscription models. """ - contextId: str + context_id: str """ - The context the task is associated with + The context ID associated with the task. """ final: bool """ - Indicates the end of the event stream + If true, this is the final event in the stream for this interaction. """ kind: Literal['status-update'] = 'status-update' """ - Event type + The type of this event, used as a discriminator. Always 'status-update'. """ metadata: dict[str, Any] | None = None """ - Extension metadata. + Optional metadata for extensions. """ status: TaskStatus """ - Current status of the task + The new status of the task. 
""" - taskId: str + task_id: str """ - Task id + The ID of the task that was updated. """ @@ -1336,6 +1698,9 @@ class A2ARequest( | SetTaskPushNotificationConfigRequest | GetTaskPushNotificationConfigRequest | TaskResubscriptionRequest + | ListTaskPushNotificationConfigRequest + | DeleteTaskPushNotificationConfigRequest + | GetAuthenticatedExtendedCardRequest ] ): root: ( @@ -1346,197 +1711,283 @@ class A2ARequest( | SetTaskPushNotificationConfigRequest | GetTaskPushNotificationConfigRequest | TaskResubscriptionRequest + | ListTaskPushNotificationConfigRequest + | DeleteTaskPushNotificationConfigRequest + | GetAuthenticatedExtendedCardRequest ) """ - A2A supported request types + A discriminated union representing all possible JSON-RPC 2.0 requests supported by the A2A specification. """ -class AgentCard(BaseModel): +class AgentCard(A2ABaseModel): + """ + The AgentCard is a self-describing manifest for an agent. It provides essential + metadata including the agent's identity, capabilities, skills, supported + communication methods, and security requirements. """ - An AgentCard conveys key information: - - Overall details (version, name, description, uses) - - Skills: A set of capabilities the agent can perform - - Default modalities/content types supported by the agent. - - Authentication requirements + + additional_interfaces: list[AgentInterface] | None = None """ + A list of additional supported interfaces (transport and URL combinations). + This allows agents to expose multiple transports, potentially at different URLs. + + Best practices: + - SHOULD include all supported transports for completeness + - SHOULD include an entry matching the main 'url' and 'preferredTransport' + - MAY reuse URLs if multiple transports are available at the same endpoint + - MUST accurately declare the transport available at each URL + Clients can select any interface from this list based on their transport capabilities + and preferences. 
This enables transport negotiation and fallback scenarios. + """ capabilities: AgentCapabilities """ - Optional capabilities supported by the agent. + A declaration of optional capabilities supported by the agent. """ - defaultInputModes: list[str] + default_input_modes: list[str] """ - The set of interaction modes that the agent supports across all skills. This can be overridden per-skill. - Supported mime types for input. + Default set of supported input MIME types for all skills, which can be + overridden on a per-skill basis. """ - defaultOutputModes: list[str] + default_output_modes: list[str] """ - Supported mime types for output. + Default set of supported output MIME types for all skills, which can be + overridden on a per-skill basis. """ - description: str + description: str = Field( + ..., examples=['Agent that helps users with recipes and cooking.'] + ) """ - A human-readable description of the agent. Used to assist users and - other agents in understanding what the agent can do. + A human-readable description of the agent, assisting users and other agents + in understanding its purpose. """ - documentationUrl: str | None = None + documentation_url: str | None = None """ - A URL to documentation for the agent. + An optional URL to the agent's documentation. """ - name: str + icon_url: str | None = None + """ + An optional URL to an icon for the agent. + """ + name: str = Field(..., examples=['Recipe Agent']) + """ + A human-readable name for the agent. + """ + preferred_transport: str | None = Field( + default='JSONRPC', examples=['JSONRPC', 'GRPC', 'HTTP+JSON'] + ) + """ + The transport protocol for the preferred endpoint (the main 'url' field). + If not specified, defaults to 'JSONRPC'. + + IMPORTANT: The transport specified here MUST be available at the main 'url'. + This creates a binding between the main URL and its supported transport protocol. + Clients should prefer this transport and URL combination when both are supported. 
""" - Human readable name of the agent. + protocol_version: str | None = '0.3.0' + """ + The version of the A2A protocol this agent supports. """ provider: AgentProvider | None = None """ - The service provider of the agent + Information about the agent's service provider. """ - security: list[dict[str, list[str]]] | None = None + security: list[dict[str, list[str]]] | None = Field( + default=None, + examples=[[{'oauth': ['read']}, {'api-key': [], 'mtls': []}]], + ) """ - Security requirements for contacting the agent. + A list of security requirement objects that apply to all agent interactions. Each object + lists security schemes that can be used. Follows the OpenAPI 3.0 Security Requirement Object. + This list can be seen as an OR of ANDs. Each object in the list describes one possible + set of security requirements that must be present on a request. This allows specifying, + for example, "callers must either use OAuth OR an API Key AND mTLS." """ - securitySchemes: dict[str, SecurityScheme] | None = None + security_schemes: dict[str, SecurityScheme] | None = None """ - Security scheme details used for authenticating with this agent. + A declaration of the security schemes available to authorize requests. The key is the + scheme name. Follows the OpenAPI 3.0 Security Scheme Object. + """ + signatures: list[AgentCardSignature] | None = None + """ + JSON Web Signatures computed for this AgentCard. """ skills: list[AgentSkill] """ - Skills are a unit of capability that an agent can perform. + The set of skills, or distinct capabilities, that the agent can perform. """ - supportsAuthenticatedExtendedCard: bool | None = None + supports_authenticated_extended_card: bool | None = None """ - true if the agent supports providing an extended agent card when the user is authenticated. - Defaults to false if not specified. + If true, the agent can provide an extended agent card with additional details + to authenticated users. Defaults to false. 
""" - url: str + url: str = Field(..., examples=['https://api.example.com/a2a/v1']) """ - A URL to the address the agent is hosted at. + The preferred endpoint URL for interacting with the agent. + This URL MUST support the transport specified by 'preferredTransport'. """ - version: str + version: str = Field(..., examples=['1.0.0']) """ - The version of the agent - format is up to the provider. + The agent's own version number. The format is defined by the provider. """ -class Task(BaseModel): +class GetAuthenticatedExtendedCardSuccessResponse(A2ABaseModel): + """ + Represents a successful JSON-RPC response for the `agent/getAuthenticatedExtendedCard` method. + """ + + id: str | int | None = None + """ + The identifier established by the client. + """ + jsonrpc: Literal['2.0'] = '2.0' + """ + The version of the JSON-RPC protocol. MUST be exactly "2.0". + """ + result: AgentCard + """ + The result is an Agent Card object. + """ + + +class Task(A2ABaseModel): + """ + Represents a single, stateful operation or conversation between a client and an agent. + """ + artifacts: list[Artifact] | None = None """ - Collection of artifacts created by the agent. + A collection of artifacts generated by the agent during the execution of the task. """ - contextId: str + context_id: str """ - Server-generated id for contextual alignment across interactions + A server-generated unique identifier (e.g. UUID) for maintaining context across multiple related tasks or interactions. """ history: list[Message] | None = None + """ + An array of messages exchanged during the task, representing the conversation history. + """ id: str """ - Unique identifier for the task + A unique identifier (e.g. UUID) for the task, generated by the server for a new task. """ kind: Literal['task'] = 'task' """ - Event type + The type of this object, used as a discriminator. Always 'task' for a Task. """ metadata: dict[str, Any] | None = None """ - Extension metadata. + Optional metadata for extensions. 
The key is an extension-specific identifier. """ status: TaskStatus """ - Current status of the task + The current status of the task, including its state and a descriptive message. """ -class CancelTaskSuccessResponse(BaseModel): +class CancelTaskSuccessResponse(A2ABaseModel): """ - JSON-RPC success response model for the 'tasks/cancel' method. + Represents a successful JSON-RPC response for the `tasks/cancel` method. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier established by the client. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ result: Task """ - The result object on success. + The result, containing the final state of the canceled Task object. """ -class GetTaskSuccessResponse(BaseModel): +class GetAuthenticatedExtendedCardResponse( + RootModel[ + JSONRPCErrorResponse | GetAuthenticatedExtendedCardSuccessResponse + ] +): + root: JSONRPCErrorResponse | GetAuthenticatedExtendedCardSuccessResponse + """ + Represents a JSON-RPC response for the `agent/getAuthenticatedExtendedCard` method. + """ + + +class GetTaskSuccessResponse(A2ABaseModel): """ - JSON-RPC success response for the 'tasks/get' method. + Represents a successful JSON-RPC response for the `tasks/get` method. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier established by the client. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ result: Task """ - The result object on success. + The result, containing the requested Task object. 
""" -class SendMessageSuccessResponse(BaseModel): +class SendMessageSuccessResponse(A2ABaseModel): """ - JSON-RPC success response model for the 'message/send' method. + Represents a successful JSON-RPC response for the `message/send` method. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier established by the client. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ result: Task | Message """ - The result object on success + The result, which can be a direct reply Message or the initial Task object. """ -class SendStreamingMessageSuccessResponse(BaseModel): +class SendStreamingMessageSuccessResponse(A2ABaseModel): """ - JSON-RPC success response model for the 'message/stream' method. + Represents a successful JSON-RPC response for the `message/stream` method. + The server may send multiple response objects for a single request. """ id: str | int | None = None """ - An identifier established by the Client that MUST contain a String, Number. - Numbers SHOULD NOT contain fractional parts. + The identifier established by the client. """ jsonrpc: Literal['2.0'] = '2.0' """ - Specifies the version of the JSON-RPC protocol. MUST be exactly "2.0". + The version of the JSON-RPC protocol. MUST be exactly "2.0". """ result: Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent """ - The result object on success + The result, which can be a Message, Task, or a streaming update event. """ -class CancelTaskResponse(RootModel[JSONRPCErrorResponse | CancelTaskSuccessResponse]): +class CancelTaskResponse( + RootModel[JSONRPCErrorResponse | CancelTaskSuccessResponse] +): root: JSONRPCErrorResponse | CancelTaskSuccessResponse """ - JSON-RPC response for the 'tasks/cancel' method. 
+ Represents a JSON-RPC response for the `tasks/cancel` method. """ class GetTaskResponse(RootModel[JSONRPCErrorResponse | GetTaskSuccessResponse]): root: JSONRPCErrorResponse | GetTaskSuccessResponse """ - JSON-RPC response for the 'tasks/get' method. + Represents a JSON-RPC response for the `tasks/get` method. """ @@ -1549,6 +2000,9 @@ class JSONRPCResponse( | CancelTaskSuccessResponse | SetTaskPushNotificationConfigSuccessResponse | GetTaskPushNotificationConfigSuccessResponse + | ListTaskPushNotificationConfigSuccessResponse + | DeleteTaskPushNotificationConfigSuccessResponse + | GetAuthenticatedExtendedCardSuccessResponse ] ): root: ( @@ -1559,16 +2013,22 @@ class JSONRPCResponse( | CancelTaskSuccessResponse | SetTaskPushNotificationConfigSuccessResponse | GetTaskPushNotificationConfigSuccessResponse + | ListTaskPushNotificationConfigSuccessResponse + | DeleteTaskPushNotificationConfigSuccessResponse + | GetAuthenticatedExtendedCardSuccessResponse ) """ - Represents a JSON-RPC 2.0 Response object. + A discriminated union representing all possible JSON-RPC 2.0 responses + for the A2A specification methods. """ -class SendMessageResponse(RootModel[JSONRPCErrorResponse | SendMessageSuccessResponse]): +class SendMessageResponse( + RootModel[JSONRPCErrorResponse | SendMessageSuccessResponse] +): root: JSONRPCErrorResponse | SendMessageSuccessResponse """ - JSON-RPC response model for the 'message/send' method. + Represents a JSON-RPC response for the `message/send` method. """ @@ -1577,5 +2037,5 @@ class SendStreamingMessageResponse( ): root: JSONRPCErrorResponse | SendStreamingMessageSuccessResponse """ - JSON-RPC response model for the 'message/stream' method. + Represents a JSON-RPC response for the `message/stream` method. 
""" diff --git a/src/a2a/utils/__init__.py b/src/a2a/utils/__init__.py index eac4ee17e..e5b5663dd 100644 --- a/src/a2a/utils/__init__.py +++ b/src/a2a/utils/__init__.py @@ -1,10 +1,17 @@ """Utility functions for the A2A Python SDK.""" from a2a.utils.artifact import ( + get_artifact_text, new_artifact, new_data_artifact, new_text_artifact, ) +from a2a.utils.constants import ( + AGENT_CARD_WELL_KNOWN_PATH, + DEFAULT_RPC_URL, + EXTENDED_AGENT_CARD_PATH, + PREV_AGENT_CARD_WELL_KNOWN_PATH, +) from a2a.utils.helpers import ( append_artifact_to_task, are_modalities_compatible, @@ -13,10 +20,14 @@ ) from a2a.utils.message import ( get_message_text, - get_text_parts, new_agent_parts_message, new_agent_text_message, ) +from a2a.utils.parts import ( + get_data_parts, + get_file_parts, + get_text_parts, +) from a2a.utils.task import ( completed_task, new_task, @@ -24,11 +35,18 @@ __all__ = [ + 'AGENT_CARD_WELL_KNOWN_PATH', + 'DEFAULT_RPC_URL', + 'EXTENDED_AGENT_CARD_PATH', + 'PREV_AGENT_CARD_WELL_KNOWN_PATH', 'append_artifact_to_task', 'are_modalities_compatible', 'build_text_artifact', 'completed_task', 'create_task_obj', + 'get_artifact_text', + 'get_data_parts', + 'get_file_parts', 'get_message_text', 'get_text_parts', 'new_agent_parts_message', diff --git a/src/a2a/utils/artifact.py b/src/a2a/utils/artifact.py index ee91a8915..5053ca421 100644 --- a/src/a2a/utils/artifact.py +++ b/src/a2a/utils/artifact.py @@ -5,10 +5,13 @@ from typing import Any from a2a.types import Artifact, DataPart, Part, TextPart +from a2a.utils.parts import get_text_parts def new_artifact( - parts: list[Part], name: str, description: str = '' + parts: list[Part], + name: str, + description: str | None = None, ) -> Artifact: """Creates a new Artifact object. @@ -18,10 +21,10 @@ def new_artifact( description: An optional description of the artifact. Returns: - A new `Artifact` object with a generated artifactId. + A new `Artifact` object with a generated artifact_id. 
""" return Artifact( - artifactId=str(uuid.uuid4()), + artifact_id=str(uuid.uuid4()), parts=parts, name=name, description=description, @@ -31,7 +34,7 @@ def new_artifact( def new_text_artifact( name: str, text: str, - description: str = '', + description: str | None = None, ) -> Artifact: """Creates a new Artifact object containing only a single TextPart. @@ -41,7 +44,7 @@ def new_text_artifact( description: An optional description of the artifact. Returns: - A new `Artifact` object with a generated artifactId. + A new `Artifact` object with a generated artifact_id. """ return new_artifact( [Part(root=TextPart(text=text))], @@ -53,7 +56,7 @@ def new_text_artifact( def new_data_artifact( name: str, data: dict[str, Any], - description: str = '', + description: str | None = None, ) -> Artifact: """Creates a new Artifact object containing only a single DataPart. @@ -63,10 +66,23 @@ def new_data_artifact( description: An optional description of the artifact. Returns: - A new `Artifact` object with a generated artifactId. + A new `Artifact` object with a generated artifact_id. """ return new_artifact( [Part(root=DataPart(data=data))], name, description, ) + + +def get_artifact_text(artifact: Artifact, delimiter: str = '\n') -> str: + """Extracts and joins all text content from an Artifact's parts. + + Args: + artifact: The `Artifact` object. + delimiter: The string to use when joining text from multiple TextParts. + + Returns: + A single string containing all text content, or an empty string if no text parts are found. 
+ """ + return delimiter.join(get_text_parts(artifact.parts)) diff --git a/src/a2a/utils/constants.py b/src/a2a/utils/constants.py new file mode 100644 index 000000000..2935251a5 --- /dev/null +++ b/src/a2a/utils/constants.py @@ -0,0 +1,6 @@ +"""Constants for well-known URIs used throughout the A2A Python SDK.""" + +AGENT_CARD_WELL_KNOWN_PATH = '/.well-known/agent-card.json' +PREV_AGENT_CARD_WELL_KNOWN_PATH = '/.well-known/agent.json' +EXTENDED_AGENT_CARD_PATH = '/agent/authenticatedExtendedCard' +DEFAULT_RPC_URL = '/' diff --git a/src/a2a/utils/error_handlers.py b/src/a2a/utils/error_handlers.py new file mode 100644 index 000000000..53cdb9f56 --- /dev/null +++ b/src/a2a/utils/error_handlers.py @@ -0,0 +1,129 @@ +import functools +import logging + +from collections.abc import Awaitable, Callable, Coroutine +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from starlette.responses import JSONResponse, Response +else: + try: + from starlette.responses import JSONResponse, Response + except ImportError: + JSONResponse = Any + Response = Any + + +from a2a._base import A2ABaseModel +from a2a.types import ( + AuthenticatedExtendedCardNotConfiguredError, + ContentTypeNotSupportedError, + InternalError, + InvalidAgentResponseError, + InvalidParamsError, + InvalidRequestError, + JSONParseError, + MethodNotFoundError, + PushNotificationNotSupportedError, + TaskNotCancelableError, + TaskNotFoundError, + UnsupportedOperationError, +) +from a2a.utils.errors import ServerError + + +logger = logging.getLogger(__name__) + +A2AErrorToHttpStatus: dict[type[A2ABaseModel], int] = { + JSONParseError: 400, + InvalidRequestError: 400, + MethodNotFoundError: 404, + InvalidParamsError: 422, + InternalError: 500, + TaskNotFoundError: 404, + TaskNotCancelableError: 409, + PushNotificationNotSupportedError: 501, + UnsupportedOperationError: 501, + ContentTypeNotSupportedError: 415, + InvalidAgentResponseError: 502, + AuthenticatedExtendedCardNotConfiguredError: 404, +} + + +def 
rest_error_handler( + func: Callable[..., Awaitable[Response]], +) -> Callable[..., Awaitable[Response]]: + """Decorator to catch ServerError and map it to an appropriate JSONResponse.""" + + @functools.wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Response: + try: + return await func(*args, **kwargs) + except ServerError as e: + error = e.error or InternalError( + message='Internal error due to unknown reason' + ) + http_code = A2AErrorToHttpStatus.get(type(error), 500) + + log_level = ( + logging.ERROR + if isinstance(error, InternalError) + else logging.WARNING + ) + logger.log( + log_level, + "Request error: Code=%s, Message='%s'%s", + error.code, + error.message, + ', Data=' + str(error.data) if error.data else '', + ) + return JSONResponse( + content={'message': error.message}, status_code=http_code + ) + except Exception: + logger.exception('Unknown error occurred') + return JSONResponse( + content={'message': 'unknown exception'}, status_code=500 + ) + + return wrapper + + +def rest_stream_error_handler( + func: Callable[..., Coroutine[Any, Any, Any]], +) -> Callable[..., Coroutine[Any, Any, Any]]: + """Decorator to catch ServerError for a streaming method,log it and then rethrow it to be handled by framework.""" + + @functools.wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + return await func(*args, **kwargs) + except ServerError as e: + error = e.error or InternalError( + message='Internal error due to unknown reason' + ) + + log_level = ( + logging.ERROR + if isinstance(error, InternalError) + else logging.WARNING + ) + logger.log( + log_level, + "Request error: Code=%s, Message='%s'%s", + error.code, + error.message, + ', Data=' + str(error.data) if error.data else '', + ) + # Since the stream has started, we can't return a JSONResponse. 
+ # Instead, we run the error handling logic (provides logging) + # and reraise the error and let server framework manage + raise e + except Exception as e: + # Since the stream has started, we can't return a JSONResponse. + # Instead, we run the error handling logic (provides logging) + # and reraise the error and let server framework manage + raise e + + return wrapper diff --git a/src/a2a/utils/errors.py b/src/a2a/utils/errors.py index 2964172d6..f2b6cc2b4 100644 --- a/src/a2a/utils/errors.py +++ b/src/a2a/utils/errors.py @@ -1,6 +1,7 @@ """Custom exceptions for A2A server-side errors.""" from a2a.types import ( + AuthenticatedExtendedCardNotConfiguredError, ContentTypeNotSupportedError, InternalError, InvalidAgentResponseError, @@ -57,6 +58,7 @@ def __init__( | UnsupportedOperationError | ContentTypeNotSupportedError | InvalidAgentResponseError + | AuthenticatedExtendedCardNotConfiguredError | None ), ): @@ -64,6 +66,17 @@ def __init__( Args: error: The specific A2A or JSON-RPC error model instance. - If None, an `InternalError` will be used when formatting the response. 
""" self.error = error + + def __str__(self) -> str: + """Returns a readable representation of the internal Pydantic error.""" + if self.error is None: + return 'None' + if self.error.message is None: + return self.error.__class__.__name__ + return self.error.message + + def __repr__(self) -> str: + """Returns an unambiguous representation for developers showing how the ServerError was constructed with the internal Pydantic error.""" + return f'{self.__class__.__name__}({self.error!r})' diff --git a/src/a2a/utils/helpers.py b/src/a2a/utils/helpers.py index 4e3228b26..8164674e5 100644 --- a/src/a2a/utils/helpers.py +++ b/src/a2a/utils/helpers.py @@ -1,12 +1,16 @@ """General utility functions for the A2A Python SDK.""" +import functools +import inspect +import json import logging -from collections.abc import Callable -from typing import Any +from collections.abc import Awaitable, Callable +from typing import Any, TypeVar from uuid import uuid4 from a2a.types import ( + AgentCard, Artifact, MessageSendParams, Part, @@ -20,6 +24,9 @@ from a2a.utils.telemetry import trace_function +T = TypeVar('T') + + logger = logging.getLogger(__name__) @@ -35,12 +42,12 @@ def create_task_obj(message_send_params: MessageSendParams) -> Task: Returns: A new `Task` object initialized with 'submitted' status and the input message in history. 
""" - if not message_send_params.message.contextId: - message_send_params.message.contextId = str(uuid4()) + if not message_send_params.message.context_id: + message_send_params.message.context_id = str(uuid4()) return Task( id=str(uuid4()), - contextId=message_send_params.message.contextId, + context_id=message_send_params.message.context_id, status=TaskStatus(state=TaskState.submitted), history=[message_send_params.message], ) @@ -61,7 +68,7 @@ def append_artifact_to_task(task: Task, event: TaskArtifactUpdateEvent) -> None: task.artifacts = [] new_artifact_data: Artifact = event.artifact - artifact_id: str = new_artifact_data.artifactId + artifact_id: str = new_artifact_data.artifact_id append_parts: bool = event.append or False existing_artifact: Artifact | None = None @@ -69,7 +76,7 @@ def append_artifact_to_task(task: Task, event: TaskArtifactUpdateEvent) -> None: # Find existing artifact by its id for i, art in enumerate(task.artifacts): - if hasattr(art, 'artifactId') and art.artifactId == artifact_id: + if art.artifact_id == artifact_id: existing_artifact = art existing_artifact_list_index = i break @@ -79,26 +86,32 @@ def append_artifact_to_task(task: Task, event: TaskArtifactUpdateEvent) -> None: if existing_artifact_list_index is not None: # Replace the existing artifact entirely with the new data logger.debug( - f'Replacing artifact at id {artifact_id} for task {task.id}' + 'Replacing artifact at id %s for task %s', artifact_id, task.id ) task.artifacts[existing_artifact_list_index] = new_artifact_data else: # Append the new artifact since no artifact with this index exists yet logger.debug( - f'Adding new artifact with id {artifact_id} for task {task.id}' + 'Adding new artifact with id %s for task %s', + artifact_id, + task.id, ) task.artifacts.append(new_artifact_data) elif existing_artifact: # Append new parts to the existing artifact's part list logger.debug( - f'Appending parts to artifact id {artifact_id} for task {task.id}' + 'Appending parts to 
artifact id %s for task %s', + artifact_id, + task.id, ) existing_artifact.parts.extend(new_artifact_data.parts) else: # We received a chunk to append, but we don't have an existing artifact. # we will ignore this chunk logger.warning( - f'Received append=True for nonexistent artifact index {artifact_id} in task {task.id}. Ignoring chunk.' + 'Received append=True for nonexistent artifact index %s in task %s. Ignoring chunk.', + artifact_id, + task.id, ) @@ -114,12 +127,12 @@ def build_text_artifact(text: str, artifact_id: str) -> Artifact: """ text_part = TextPart(text=text) part = Part(root=text_part) - return Artifact(parts=[part], artifactId=artifact_id) + return Artifact(parts=[part], artifact_id=artifact_id) def validate( expression: Callable[[Any], bool], error_message: str | None = None -): +) -> Callable: """Decorator that validates if a given expression evaluates to True. Typically used on class methods to check capabilities or configuration @@ -131,18 +144,176 @@ def validate( and returns a boolean. error_message: An optional custom error message for the `UnsupportedOperationError`. If None, the string representation of the expression will be used. + + Examples: + Demonstrating with an async method: + >>> import asyncio + >>> from a2a.utils.errors import ServerError + >>> + >>> class MyAgent: + ... def __init__(self, streaming_enabled: bool): + ... self.streaming_enabled = streaming_enabled + ... + ... @validate( + ... lambda self: self.streaming_enabled, + ... 'Streaming is not enabled for this agent', + ... ) + ... async def stream_response(self, message: str): + ... return f'Streaming: {message}' + >>> + >>> async def run_async_test(): + ... # Successful call + ... agent_ok = MyAgent(streaming_enabled=True) + ... result = await agent_ok.stream_response('hello') + ... print(result) + ... + ... # Call that fails validation + ... agent_fail = MyAgent(streaming_enabled=False) + ... try: + ... await agent_fail.stream_response('world') + ... 
except ServerError as e: + ... print(e.error.message) + >>> + >>> asyncio.run(run_async_test()) + Streaming: hello + Streaming is not enabled for this agent + + Demonstrating with a sync method: + >>> class SecureAgent: + ... def __init__(self): + ... self.auth_enabled = False + ... + ... @validate( + ... lambda self: self.auth_enabled, + ... 'Authentication must be enabled for this operation', + ... ) + ... def secure_operation(self, data: str): + ... return f'Processing secure data: {data}' + >>> + >>> # Error case example + >>> agent = SecureAgent() + >>> try: + ... agent.secure_operation('secret') + ... except ServerError as e: + ... print(e.error.message) + Authentication must be enabled for this operation + + Note: + This decorator works with both sync and async methods automatically. """ - def decorator(function): - def wrapper(self, *args, **kwargs): + def decorator(function: Callable) -> Callable: + if inspect.iscoroutinefunction(function): + + @functools.wraps(function) + async def async_wrapper(self: Any, *args, **kwargs) -> Any: + if not expression(self): + final_message = error_message or str(expression) + logger.error('Unsupported Operation: %s', final_message) + raise ServerError( + UnsupportedOperationError(message=final_message) + ) + return await function(self, *args, **kwargs) + + return async_wrapper + + @functools.wraps(function) + def sync_wrapper(self: Any, *args, **kwargs) -> Any: if not expression(self): final_message = error_message or str(expression) - logger.error(f'Unsupported Operation: {final_message}') + logger.error('Unsupported Operation: %s', final_message) raise ServerError( UnsupportedOperationError(message=final_message) ) return function(self, *args, **kwargs) + return sync_wrapper + + return decorator + + +def validate_async_generator( + expression: Callable[[Any], bool], error_message: str | None = None +): + """Decorator that validates if a given expression evaluates to True for async generators. 
+ + Typically used on class methods to check capabilities or configuration + before executing the method's logic. If the expression is False, + a `ServerError` with an `UnsupportedOperationError` is raised. + + Args: + expression: A callable that takes the instance (`self`) as its argument + and returns a boolean. + error_message: An optional custom error message for the `UnsupportedOperationError`. + If None, the string representation of the expression will be used. + + Examples: + Streaming capability validation with success case: + >>> import asyncio + >>> from a2a.utils.errors import ServerError + >>> + >>> class StreamingAgent: + ... def __init__(self, streaming_enabled: bool): + ... self.streaming_enabled = streaming_enabled + ... + ... @validate_async_generator( + ... lambda self: self.streaming_enabled, + ... 'Streaming is not supported by this agent', + ... ) + ... async def stream_messages(self, count: int): + ... for i in range(count): + ... yield f'Message {i}' + >>> + >>> async def run_streaming_test(): + ... # Successful streaming + ... agent = StreamingAgent(streaming_enabled=True) + ... async for msg in agent.stream_messages(2): + ... print(msg) + >>> + >>> asyncio.run(run_streaming_test()) + Message 0 + Message 1 + + Error case - validation fails: + >>> class FeatureAgent: + ... def __init__(self): + ... self.features = {'real_time': False} + ... + ... @validate_async_generator( + ... lambda self: self.features.get('real_time', False), + ... 'Real-time feature must be enabled to stream updates', + ... ) + ... async def real_time_updates(self): + ... yield 'This should not be yielded' + >>> + >>> async def run_error_test(): + ... agent = FeatureAgent() + ... try: + ... async for _ in agent.real_time_updates(): + ... pass + ... except ServerError as e: + ... 
print(e.error.message) + >>> + >>> asyncio.run(run_error_test()) + Real-time feature must be enabled to stream updates + + Note: + This decorator is specifically for async generator methods (async def with yield). + The validation happens before the generator starts yielding values. + """ + + def decorator(function): + @functools.wraps(function) + async def wrapper(self, *args, **kwargs): + if not expression(self): + final_message = error_message or str(expression) + logger.error('Unsupported Operation: %s', final_message) + raise ServerError( + UnsupportedOperationError(message=final_message) + ) + async for i in function(self, *args, **kwargs): + yield i + return wrapper return decorator @@ -174,3 +345,36 @@ def are_modalities_compatible( return True return any(x in server_output_modes for x in client_output_modes) + + +def _clean_empty(d: Any) -> Any: + """Recursively remove empty strings, lists and dicts from a dictionary.""" + if isinstance(d, dict): + cleaned_dict: dict[Any, Any] = { + k: _clean_empty(v) for k, v in d.items() + } + return {k: v for k, v in cleaned_dict.items() if v} + if isinstance(d, list): + cleaned_list: list[Any] = [_clean_empty(v) for v in d] + return [v for v in cleaned_list if v] + return d if d not in ['', [], {}] else None + + +def canonicalize_agent_card(agent_card: AgentCard) -> str: + """Canonicalizes the Agent Card JSON according to RFC 8785 (JCS).""" + card_dict = agent_card.model_dump( + exclude={'signatures'}, + exclude_defaults=True, + exclude_none=True, + by_alias=True, + ) + # Recursively remove empty values + cleaned_dict = _clean_empty(card_dict) + return json.dumps(cleaned_dict, separators=(',', ':'), sort_keys=True) + + +async def maybe_await(value: T | Awaitable[T]) -> T: + """Awaits a value if it's awaitable, otherwise simply provides it back.""" + if inspect.isawaitable(value): + return await value + return value diff --git a/src/a2a/utils/message.py b/src/a2a/utils/message.py index fd58a2fa0..bfd675fdf 100644 --- 
a/src/a2a/utils/message.py +++ b/src/a2a/utils/message.py @@ -8,6 +8,7 @@ Role, TextPart, ) +from a2a.utils.parts import get_text_parts def new_agent_text_message( @@ -28,9 +29,9 @@ def new_agent_text_message( return Message( role=Role.agent, parts=[Part(root=TextPart(text=text))], - messageId=str(uuid.uuid4()), - taskId=task_id, - contextId=context_id, + message_id=str(uuid.uuid4()), + task_id=task_id, + context_id=context_id, ) @@ -38,7 +39,7 @@ def new_agent_parts_message( parts: list[Part], context_id: str | None = None, task_id: str | None = None, -): +) -> Message: """Creates a new agent message containing a list of Parts. Args: @@ -52,25 +53,13 @@ def new_agent_parts_message( return Message( role=Role.agent, parts=parts, - messageId=str(uuid.uuid4()), - taskId=task_id, - contextId=context_id, + message_id=str(uuid.uuid4()), + task_id=task_id, + context_id=context_id, ) -def get_text_parts(parts: list[Part]) -> list[str]: - """Extracts text content from all TextPart objects in a list of Parts. - - Args: - parts: A list of `Part` objects. - - Returns: - A list of strings containing the text content from any `TextPart` objects found. - """ - return [part.root.text for part in parts if isinstance(part.root, TextPart)] - - -def get_message_text(message: Message, delimiter='\n') -> str: +def get_message_text(message: Message, delimiter: str = '\n') -> str: """Extracts and joins all text content from a Message's parts. Args: diff --git a/src/a2a/utils/parts.py b/src/a2a/utils/parts.py new file mode 100644 index 000000000..f32076c8c --- /dev/null +++ b/src/a2a/utils/parts.py @@ -0,0 +1,48 @@ +"""Utility functions for creating and handling A2A Parts objects.""" + +from typing import Any + +from a2a.types import ( + DataPart, + FilePart, + FileWithBytes, + FileWithUri, + Part, + TextPart, +) + + +def get_text_parts(parts: list[Part]) -> list[str]: + """Extracts text content from all TextPart objects in a list of Parts. + + Args: + parts: A list of `Part` objects. 
+ + Returns: + A list of strings containing the text content from any `TextPart` objects found. + """ + return [part.root.text for part in parts if isinstance(part.root, TextPart)] + + +def get_data_parts(parts: list[Part]) -> list[dict[str, Any]]: + """Extracts dictionary data from all DataPart objects in a list of Parts. + + Args: + parts: A list of `Part` objects. + + Returns: + A list of dictionaries containing the data from any `DataPart` objects found. + """ + return [part.root.data for part in parts if isinstance(part.root, DataPart)] + + +def get_file_parts(parts: list[Part]) -> list[FileWithBytes | FileWithUri]: + """Extracts file data from all FilePart objects in a list of Parts. + + Args: + parts: A list of `Part` objects. + + Returns: + A list of `FileWithBytes` or `FileWithUri` objects containing the file data from any `FilePart` objects found. + """ + return [part.root.file for part in parts if isinstance(part.root, FilePart)] diff --git a/src/a2a/utils/proto_utils.py b/src/a2a/utils/proto_utils.py new file mode 100644 index 000000000..57272c89e --- /dev/null +++ b/src/a2a/utils/proto_utils.py @@ -0,0 +1,1100 @@ +# mypy: disable-error-code="arg-type" +"""Utils for converting between proto and Python types.""" + +import json +import logging +import re + +from typing import Any + +from google.protobuf import json_format, struct_pb2 + +from a2a import types +from a2a.grpc import a2a_pb2 +from a2a.utils.errors import ServerError + + +logger = logging.getLogger(__name__) + + +# Regexp patterns for matching +_TASK_NAME_MATCH = re.compile(r'tasks/([^/]+)') +_TASK_PUSH_CONFIG_NAME_MATCH = re.compile( + r'tasks/([^/]+)/pushNotificationConfigs/([^/]+)' +) + + +def dict_to_struct(dictionary: dict[str, Any]) -> struct_pb2.Struct: + """Converts a Python dict to a Struct proto. 
+ + Unfortunately, using `json_format.ParseDict` does not work because this + wants the dictionary to be an exact match of the Struct proto with fields + and keys and values, not the traditional Python dict structure. + + Args: + dictionary: The Python dict to convert. + + Returns: + The Struct proto. + """ + struct = struct_pb2.Struct() + for key, val in dictionary.items(): + if isinstance(val, dict): + struct[key] = dict_to_struct(val) + else: + struct[key] = val + return struct + + +def make_dict_serializable(value: Any) -> Any: + """Dict pre-processing utility: converts non-serializable values to serializable form. + + Use this when you want to normalize a dictionary before dict->Struct conversion. + + Args: + value: The value to convert. + + Returns: + A serializable value. + """ + if isinstance(value, str | int | float | bool) or value is None: + return value + if isinstance(value, dict): + return {k: make_dict_serializable(v) for k, v in value.items()} + if isinstance(value, list | tuple): + return [make_dict_serializable(item) for item in value] + return str(value) + + +def normalize_large_integers_to_strings( + value: Any, max_safe_digits: int = 15 +) -> Any: + """Integer preprocessing utility: converts large integers to strings. + + Use this when you want to convert large integers to strings considering + JavaScript's MAX_SAFE_INTEGER (2^53 - 1) limitation. + + Args: + value: The value to convert. + max_safe_digits: Maximum safe integer digits (default: 15). + + Returns: + A normalized value. 
+ """ + max_safe_int = 10**max_safe_digits - 1 + + def _normalize(item: Any) -> Any: + if isinstance(item, int) and abs(item) > max_safe_int: + return str(item) + if isinstance(item, dict): + return {k: _normalize(v) for k, v in item.items()} + if isinstance(item, list | tuple): + return [_normalize(i) for i in item] + return item + + return _normalize(value) + + +def parse_string_integers_in_dict(value: Any, max_safe_digits: int = 15) -> Any: + """String post-processing utility: converts large integer strings back to integers. + + Use this when you want to restore large integer strings to integers + after Struct->dict conversion. + + Args: + value: The value to convert. + max_safe_digits: Maximum safe integer digits (default: 15). + + Returns: + A parsed value. + """ + if isinstance(value, dict): + return { + k: parse_string_integers_in_dict(v, max_safe_digits) + for k, v in value.items() + } + if isinstance(value, list | tuple): + return [ + parse_string_integers_in_dict(item, max_safe_digits) + for item in value + ] + if isinstance(value, str): + # Handle potential negative numbers. 
+ stripped_value = value.lstrip('-') + if stripped_value.isdigit() and len(stripped_value) > max_safe_digits: + return int(value) + return value + + +class ToProto: + """Converts Python types to proto types.""" + + @classmethod + def message(cls, message: types.Message | None) -> a2a_pb2.Message | None: + if message is None: + return None + return a2a_pb2.Message( + message_id=message.message_id, + content=[cls.part(p) for p in message.parts], + context_id=message.context_id or '', + task_id=message.task_id or '', + role=cls.role(message.role), + metadata=cls.metadata(message.metadata), + extensions=message.extensions or [], + ) + + @classmethod + def metadata( + cls, metadata: dict[str, Any] | None + ) -> struct_pb2.Struct | None: + if metadata is None: + return None + return dict_to_struct(metadata) + + @classmethod + def part(cls, part: types.Part) -> a2a_pb2.Part: + if isinstance(part.root, types.TextPart): + return a2a_pb2.Part( + text=part.root.text, metadata=cls.metadata(part.root.metadata) + ) + if isinstance(part.root, types.FilePart): + return a2a_pb2.Part( + file=cls.file(part.root.file), + metadata=cls.metadata(part.root.metadata), + ) + if isinstance(part.root, types.DataPart): + return a2a_pb2.Part( + data=cls.data(part.root.data), + metadata=cls.metadata(part.root.metadata), + ) + raise ValueError(f'Unsupported part type: {part.root}') + + @classmethod + def data(cls, data: dict[str, Any]) -> a2a_pb2.DataPart: + return a2a_pb2.DataPart(data=dict_to_struct(data)) + + @classmethod + def file( + cls, file: types.FileWithUri | types.FileWithBytes + ) -> a2a_pb2.FilePart: + if isinstance(file, types.FileWithUri): + return a2a_pb2.FilePart( + file_with_uri=file.uri, mime_type=file.mime_type, name=file.name + ) + return a2a_pb2.FilePart( + file_with_bytes=file.bytes.encode('utf-8'), + mime_type=file.mime_type, + name=file.name, + ) + + @classmethod + def task(cls, task: types.Task) -> a2a_pb2.Task: + return a2a_pb2.Task( + id=task.id, + 
context_id=task.context_id, + status=cls.task_status(task.status), + artifacts=( + [cls.artifact(a) for a in task.artifacts] + if task.artifacts + else None + ), + history=( + [cls.message(h) for h in task.history] # type: ignore[misc] + if task.history + else None + ), + metadata=cls.metadata(task.metadata), + ) + + @classmethod + def task_status(cls, status: types.TaskStatus) -> a2a_pb2.TaskStatus: + return a2a_pb2.TaskStatus( + state=cls.task_state(status.state), + update=cls.message(status.message), + ) + + @classmethod + def task_state(cls, state: types.TaskState) -> a2a_pb2.TaskState: + match state: + case types.TaskState.submitted: + return a2a_pb2.TaskState.TASK_STATE_SUBMITTED + case types.TaskState.working: + return a2a_pb2.TaskState.TASK_STATE_WORKING + case types.TaskState.completed: + return a2a_pb2.TaskState.TASK_STATE_COMPLETED + case types.TaskState.canceled: + return a2a_pb2.TaskState.TASK_STATE_CANCELLED + case types.TaskState.failed: + return a2a_pb2.TaskState.TASK_STATE_FAILED + case types.TaskState.input_required: + return a2a_pb2.TaskState.TASK_STATE_INPUT_REQUIRED + case types.TaskState.auth_required: + return a2a_pb2.TaskState.TASK_STATE_AUTH_REQUIRED + case types.TaskState.rejected: + return a2a_pb2.TaskState.TASK_STATE_REJECTED + case _: + return a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED + + @classmethod + def artifact(cls, artifact: types.Artifact) -> a2a_pb2.Artifact: + return a2a_pb2.Artifact( + artifact_id=artifact.artifact_id, + description=artifact.description, + metadata=cls.metadata(artifact.metadata), + name=artifact.name, + parts=[cls.part(p) for p in artifact.parts], + extensions=artifact.extensions or [], + ) + + @classmethod + def authentication_info( + cls, info: types.PushNotificationAuthenticationInfo + ) -> a2a_pb2.AuthenticationInfo: + return a2a_pb2.AuthenticationInfo( + schemes=info.schemes, + credentials=info.credentials, + ) + + @classmethod + def push_notification_config( + cls, config: types.PushNotificationConfig + 
) -> a2a_pb2.PushNotificationConfig: + auth_info = ( + cls.authentication_info(config.authentication) + if config.authentication + else None + ) + return a2a_pb2.PushNotificationConfig( + id=config.id or '', + url=config.url, + token=config.token, + authentication=auth_info, + ) + + @classmethod + def task_artifact_update_event( + cls, event: types.TaskArtifactUpdateEvent + ) -> a2a_pb2.TaskArtifactUpdateEvent: + return a2a_pb2.TaskArtifactUpdateEvent( + task_id=event.task_id, + context_id=event.context_id, + artifact=cls.artifact(event.artifact), + metadata=cls.metadata(event.metadata), + append=event.append or False, + last_chunk=event.last_chunk or False, + ) + + @classmethod + def task_status_update_event( + cls, event: types.TaskStatusUpdateEvent + ) -> a2a_pb2.TaskStatusUpdateEvent: + return a2a_pb2.TaskStatusUpdateEvent( + task_id=event.task_id, + context_id=event.context_id, + status=cls.task_status(event.status), + metadata=cls.metadata(event.metadata), + final=event.final, + ) + + @classmethod + def message_send_configuration( + cls, config: types.MessageSendConfiguration | None + ) -> a2a_pb2.SendMessageConfiguration: + if not config: + return a2a_pb2.SendMessageConfiguration() + return a2a_pb2.SendMessageConfiguration( + accepted_output_modes=config.accepted_output_modes, + push_notification=cls.push_notification_config( + config.push_notification_config + ) + if config.push_notification_config + else None, + history_length=config.history_length, + blocking=config.blocking or False, + ) + + @classmethod + def update_event( + cls, + event: types.Task + | types.Message + | types.TaskStatusUpdateEvent + | types.TaskArtifactUpdateEvent, + ) -> a2a_pb2.StreamResponse: + """Converts a task, message, or task update event to a StreamResponse.""" + return cls.stream_response(event) + + @classmethod + def task_or_message( + cls, event: types.Task | types.Message + ) -> a2a_pb2.SendMessageResponse: + if isinstance(event, types.Message): + return 
a2a_pb2.SendMessageResponse( + msg=cls.message(event), + ) + return a2a_pb2.SendMessageResponse( + task=cls.task(event), + ) + + @classmethod + def stream_response( + cls, + event: ( + types.Message + | types.Task + | types.TaskStatusUpdateEvent + | types.TaskArtifactUpdateEvent + ), + ) -> a2a_pb2.StreamResponse: + if isinstance(event, types.Message): + return a2a_pb2.StreamResponse(msg=cls.message(event)) + if isinstance(event, types.Task): + return a2a_pb2.StreamResponse(task=cls.task(event)) + if isinstance(event, types.TaskStatusUpdateEvent): + return a2a_pb2.StreamResponse( + status_update=cls.task_status_update_event(event), + ) + if isinstance(event, types.TaskArtifactUpdateEvent): + return a2a_pb2.StreamResponse( + artifact_update=cls.task_artifact_update_event(event), + ) + raise ValueError(f'Unsupported event type: {type(event)}') + + @classmethod + def task_push_notification_config( + cls, config: types.TaskPushNotificationConfig + ) -> a2a_pb2.TaskPushNotificationConfig: + return a2a_pb2.TaskPushNotificationConfig( + name=f'tasks/{config.task_id}/pushNotificationConfigs/{config.push_notification_config.id}', + push_notification_config=cls.push_notification_config( + config.push_notification_config, + ), + ) + + @classmethod + def agent_card( + cls, + card: types.AgentCard, + ) -> a2a_pb2.AgentCard: + return a2a_pb2.AgentCard( + capabilities=cls.capabilities(card.capabilities), + default_input_modes=list(card.default_input_modes), + default_output_modes=list(card.default_output_modes), + description=card.description, + documentation_url=card.documentation_url, + name=card.name, + provider=cls.provider(card.provider), + security=cls.security(card.security), + security_schemes=cls.security_schemes(card.security_schemes), + skills=[cls.skill(x) for x in card.skills] if card.skills else [], + url=card.url, + version=card.version, + supports_authenticated_extended_card=bool( + card.supports_authenticated_extended_card + ), + 
preferred_transport=card.preferred_transport, + protocol_version=card.protocol_version, + additional_interfaces=[ + cls.agent_interface(x) for x in card.additional_interfaces + ] + if card.additional_interfaces + else None, + signatures=[cls.agent_card_signature(x) for x in card.signatures] + if card.signatures + else None, + ) + + @classmethod + def agent_card_signature( + cls, signature: types.AgentCardSignature + ) -> a2a_pb2.AgentCardSignature: + return a2a_pb2.AgentCardSignature( + protected=signature.protected, + signature=signature.signature, + header=dict_to_struct(signature.header) + if signature.header is not None + else None, + ) + + @classmethod + def agent_interface( + cls, + interface: types.AgentInterface, + ) -> a2a_pb2.AgentInterface: + return a2a_pb2.AgentInterface( + transport=interface.transport, + url=interface.url, + ) + + @classmethod + def capabilities( + cls, capabilities: types.AgentCapabilities + ) -> a2a_pb2.AgentCapabilities: + return a2a_pb2.AgentCapabilities( + streaming=bool(capabilities.streaming), + push_notifications=bool(capabilities.push_notifications), + extensions=[ + cls.extension(x) for x in capabilities.extensions or [] + ], + ) + + @classmethod + def extension( + cls, + extension: types.AgentExtension, + ) -> a2a_pb2.AgentExtension: + return a2a_pb2.AgentExtension( + uri=extension.uri, + description=extension.description, + params=dict_to_struct(extension.params) + if extension.params + else None, + required=extension.required, + ) + + @classmethod + def provider( + cls, provider: types.AgentProvider | None + ) -> a2a_pb2.AgentProvider | None: + if not provider: + return None + return a2a_pb2.AgentProvider( + organization=provider.organization, + url=provider.url, + ) + + @classmethod + def security( + cls, + security: list[dict[str, list[str]]] | None, + ) -> list[a2a_pb2.Security] | None: + if not security: + return None + return [ + a2a_pb2.Security( + schemes={k: a2a_pb2.StringList(list=v) for (k, v) in s.items()} + ) 
+ for s in security + ] + + @classmethod + def security_schemes( + cls, + schemes: dict[str, types.SecurityScheme] | None, + ) -> dict[str, a2a_pb2.SecurityScheme] | None: + if not schemes: + return None + return {k: cls.security_scheme(v) for (k, v) in schemes.items()} + + @classmethod + def security_scheme( + cls, + scheme: types.SecurityScheme, + ) -> a2a_pb2.SecurityScheme: + if isinstance(scheme.root, types.APIKeySecurityScheme): + return a2a_pb2.SecurityScheme( + api_key_security_scheme=a2a_pb2.APIKeySecurityScheme( + description=scheme.root.description, + location=scheme.root.in_.value, + name=scheme.root.name, + ) + ) + if isinstance(scheme.root, types.HTTPAuthSecurityScheme): + return a2a_pb2.SecurityScheme( + http_auth_security_scheme=a2a_pb2.HTTPAuthSecurityScheme( + description=scheme.root.description, + scheme=scheme.root.scheme, + bearer_format=scheme.root.bearer_format, + ) + ) + if isinstance(scheme.root, types.OAuth2SecurityScheme): + return a2a_pb2.SecurityScheme( + oauth2_security_scheme=a2a_pb2.OAuth2SecurityScheme( + description=scheme.root.description, + flows=cls.oauth2_flows(scheme.root.flows), + ) + ) + if isinstance(scheme.root, types.MutualTLSSecurityScheme): + return a2a_pb2.SecurityScheme( + mtls_security_scheme=a2a_pb2.MutualTlsSecurityScheme( + description=scheme.root.description, + ) + ) + return a2a_pb2.SecurityScheme( + open_id_connect_security_scheme=a2a_pb2.OpenIdConnectSecurityScheme( + description=scheme.root.description, + open_id_connect_url=scheme.root.open_id_connect_url, + ) + ) + + @classmethod + def oauth2_flows(cls, flows: types.OAuthFlows) -> a2a_pb2.OAuthFlows: + if flows.authorization_code: + return a2a_pb2.OAuthFlows( + authorization_code=a2a_pb2.AuthorizationCodeOAuthFlow( + authorization_url=flows.authorization_code.authorization_url, + refresh_url=flows.authorization_code.refresh_url, + scopes=dict(flows.authorization_code.scopes.items()), + token_url=flows.authorization_code.token_url, + ), + ) + if 
flows.client_credentials: + return a2a_pb2.OAuthFlows( + client_credentials=a2a_pb2.ClientCredentialsOAuthFlow( + refresh_url=flows.client_credentials.refresh_url, + scopes=dict(flows.client_credentials.scopes.items()), + token_url=flows.client_credentials.token_url, + ), + ) + if flows.implicit: + return a2a_pb2.OAuthFlows( + implicit=a2a_pb2.ImplicitOAuthFlow( + authorization_url=flows.implicit.authorization_url, + refresh_url=flows.implicit.refresh_url, + scopes=dict(flows.implicit.scopes.items()), + ), + ) + if flows.password: + return a2a_pb2.OAuthFlows( + password=a2a_pb2.PasswordOAuthFlow( + refresh_url=flows.password.refresh_url, + scopes=dict(flows.password.scopes.items()), + token_url=flows.password.token_url, + ), + ) + raise ValueError('Unknown oauth flow definition') + + @classmethod + def skill(cls, skill: types.AgentSkill) -> a2a_pb2.AgentSkill: + return a2a_pb2.AgentSkill( + id=skill.id, + name=skill.name, + description=skill.description, + tags=skill.tags, + examples=skill.examples, + input_modes=skill.input_modes, + output_modes=skill.output_modes, + ) + + @classmethod + def role(cls, role: types.Role) -> a2a_pb2.Role: + match role: + case types.Role.user: + return a2a_pb2.Role.ROLE_USER + case types.Role.agent: + return a2a_pb2.Role.ROLE_AGENT + case _: + return a2a_pb2.Role.ROLE_UNSPECIFIED + + +class FromProto: + """Converts proto types to Python types.""" + + @classmethod + def message(cls, message: a2a_pb2.Message) -> types.Message: + return types.Message( + message_id=message.message_id, + parts=[cls.part(p) for p in message.content], + context_id=message.context_id or None, + task_id=message.task_id or None, + role=cls.role(message.role), + metadata=cls.metadata(message.metadata), + extensions=list(message.extensions) or None, + ) + + @classmethod + def metadata(cls, metadata: struct_pb2.Struct) -> dict[str, Any]: + if not metadata.fields: + return {} + return json_format.MessageToDict(metadata) + + @classmethod + def part(cls, part: 
a2a_pb2.Part) -> types.Part: + if part.HasField('text'): + return types.Part( + root=types.TextPart( + text=part.text, + metadata=cls.metadata(part.metadata) + if part.metadata + else None, + ), + ) + if part.HasField('file'): + return types.Part( + root=types.FilePart( + file=cls.file(part.file), + metadata=cls.metadata(part.metadata) + if part.metadata + else None, + ), + ) + if part.HasField('data'): + return types.Part( + root=types.DataPart( + data=cls.data(part.data), + metadata=cls.metadata(part.metadata) + if part.metadata + else None, + ), + ) + raise ValueError(f'Unsupported part type: {part}') + + @classmethod + def data(cls, data: a2a_pb2.DataPart) -> dict[str, Any]: + json_data = json_format.MessageToJson(data.data) + return json.loads(json_data) + + @classmethod + def file( + cls, file: a2a_pb2.FilePart + ) -> types.FileWithUri | types.FileWithBytes: + common_args = { + 'mime_type': file.mime_type or None, + 'name': file.name or None, + } + if file.HasField('file_with_uri'): + return types.FileWithUri( + uri=file.file_with_uri, + **common_args, + ) + return types.FileWithBytes( + bytes=file.file_with_bytes.decode('utf-8'), + **common_args, + ) + + @classmethod + def task_or_message( + cls, event: a2a_pb2.SendMessageResponse + ) -> types.Task | types.Message: + if event.HasField('msg'): + return cls.message(event.msg) + return cls.task(event.task) + + @classmethod + def task(cls, task: a2a_pb2.Task) -> types.Task: + return types.Task( + id=task.id, + context_id=task.context_id, + status=cls.task_status(task.status), + artifacts=[cls.artifact(a) for a in task.artifacts], + history=[cls.message(h) for h in task.history], + metadata=cls.metadata(task.metadata), + ) + + @classmethod + def task_status(cls, status: a2a_pb2.TaskStatus) -> types.TaskStatus: + return types.TaskStatus( + state=cls.task_state(status.state), + message=cls.message(status.update), + ) + + @classmethod + def task_state(cls, state: a2a_pb2.TaskState) -> types.TaskState: + match state: 
+ case a2a_pb2.TaskState.TASK_STATE_SUBMITTED: + return types.TaskState.submitted + case a2a_pb2.TaskState.TASK_STATE_WORKING: + return types.TaskState.working + case a2a_pb2.TaskState.TASK_STATE_COMPLETED: + return types.TaskState.completed + case a2a_pb2.TaskState.TASK_STATE_CANCELLED: + return types.TaskState.canceled + case a2a_pb2.TaskState.TASK_STATE_FAILED: + return types.TaskState.failed + case a2a_pb2.TaskState.TASK_STATE_INPUT_REQUIRED: + return types.TaskState.input_required + case a2a_pb2.TaskState.TASK_STATE_AUTH_REQUIRED: + return types.TaskState.auth_required + case a2a_pb2.TaskState.TASK_STATE_REJECTED: + return types.TaskState.rejected + case _: + return types.TaskState.unknown + + @classmethod + def artifact(cls, artifact: a2a_pb2.Artifact) -> types.Artifact: + return types.Artifact( + artifact_id=artifact.artifact_id, + description=artifact.description, + metadata=cls.metadata(artifact.metadata), + name=artifact.name, + parts=[cls.part(p) for p in artifact.parts], + extensions=artifact.extensions or None, + ) + + @classmethod + def task_artifact_update_event( + cls, event: a2a_pb2.TaskArtifactUpdateEvent + ) -> types.TaskArtifactUpdateEvent: + return types.TaskArtifactUpdateEvent( + task_id=event.task_id, + context_id=event.context_id, + artifact=cls.artifact(event.artifact), + metadata=cls.metadata(event.metadata), + append=event.append, + last_chunk=event.last_chunk, + ) + + @classmethod + def task_status_update_event( + cls, event: a2a_pb2.TaskStatusUpdateEvent + ) -> types.TaskStatusUpdateEvent: + return types.TaskStatusUpdateEvent( + task_id=event.task_id, + context_id=event.context_id, + status=cls.task_status(event.status), + metadata=cls.metadata(event.metadata), + final=event.final, + ) + + @classmethod + def push_notification_config( + cls, config: a2a_pb2.PushNotificationConfig + ) -> types.PushNotificationConfig: + return types.PushNotificationConfig( + id=config.id, + url=config.url, + token=config.token, + 
authentication=cls.authentication_info(config.authentication) + if config.HasField('authentication') + else None, + ) + + @classmethod + def authentication_info( + cls, info: a2a_pb2.AuthenticationInfo + ) -> types.PushNotificationAuthenticationInfo: + return types.PushNotificationAuthenticationInfo( + schemes=list(info.schemes), + credentials=info.credentials, + ) + + @classmethod + def message_send_configuration( + cls, config: a2a_pb2.SendMessageConfiguration + ) -> types.MessageSendConfiguration: + return types.MessageSendConfiguration( + accepted_output_modes=list(config.accepted_output_modes), + push_notification_config=cls.push_notification_config( + config.push_notification + ) + if config.HasField('push_notification') + else None, + history_length=config.history_length, + blocking=config.blocking, + ) + + @classmethod + def message_send_params( + cls, request: a2a_pb2.SendMessageRequest + ) -> types.MessageSendParams: + return types.MessageSendParams( + configuration=cls.message_send_configuration(request.configuration), + message=cls.message(request.request), + metadata=cls.metadata(request.metadata), + ) + + @classmethod + def task_id_params( + cls, + request: ( + a2a_pb2.CancelTaskRequest + | a2a_pb2.TaskSubscriptionRequest + | a2a_pb2.GetTaskPushNotificationConfigRequest + ), + ) -> types.TaskIdParams: + if isinstance(request, a2a_pb2.GetTaskPushNotificationConfigRequest): + m = _TASK_PUSH_CONFIG_NAME_MATCH.match(request.name) + if not m: + raise ServerError( + error=types.InvalidParamsError( + message=f'No task for {request.name}' + ) + ) + return types.TaskIdParams(id=m.group(1)) + m = _TASK_NAME_MATCH.match(request.name) + if not m: + raise ServerError( + error=types.InvalidParamsError( + message=f'No task for {request.name}' + ) + ) + return types.TaskIdParams(id=m.group(1)) + + @classmethod + def task_push_notification_config_request( + cls, + request: a2a_pb2.CreateTaskPushNotificationConfigRequest, + ) -> types.TaskPushNotificationConfig: + m = 
_TASK_NAME_MATCH.match(request.parent) + if not m: + raise ServerError( + error=types.InvalidParamsError( + message=f'No task for {request.parent}' + ) + ) + return types.TaskPushNotificationConfig( + push_notification_config=cls.push_notification_config( + request.config.push_notification_config, + ), + task_id=m.group(1), + ) + + @classmethod + def task_push_notification_config( + cls, + config: a2a_pb2.TaskPushNotificationConfig, + ) -> types.TaskPushNotificationConfig: + m = _TASK_PUSH_CONFIG_NAME_MATCH.match(config.name) + if not m: + raise ServerError( + error=types.InvalidParamsError( + message=f'Bad TaskPushNotificationConfig resource name {config.name}' + ) + ) + return types.TaskPushNotificationConfig( + push_notification_config=cls.push_notification_config( + config.push_notification_config, + ), + task_id=m.group(1), + ) + + @classmethod + def agent_card( + cls, + card: a2a_pb2.AgentCard, + ) -> types.AgentCard: + return types.AgentCard( + capabilities=cls.capabilities(card.capabilities), + default_input_modes=list(card.default_input_modes), + default_output_modes=list(card.default_output_modes), + description=card.description, + documentation_url=card.documentation_url, + name=card.name, + provider=cls.provider(card.provider), + security=cls.security(list(card.security)), + security_schemes=cls.security_schemes(dict(card.security_schemes)), + skills=[cls.skill(x) for x in card.skills] if card.skills else [], + url=card.url, + version=card.version, + supports_authenticated_extended_card=card.supports_authenticated_extended_card, + preferred_transport=card.preferred_transport, + protocol_version=card.protocol_version, + additional_interfaces=[ + cls.agent_interface(x) for x in card.additional_interfaces + ] + if card.additional_interfaces + else None, + signatures=[cls.agent_card_signature(x) for x in card.signatures] + if card.signatures + else None, + ) + + @classmethod + def agent_card_signature( + cls, signature: a2a_pb2.AgentCardSignature + ) -> 
types.AgentCardSignature: + return types.AgentCardSignature( + protected=signature.protected, + signature=signature.signature, + header=json_format.MessageToDict(signature.header), + ) + + @classmethod + def agent_interface( + cls, + interface: a2a_pb2.AgentInterface, + ) -> types.AgentInterface: + return types.AgentInterface( + transport=interface.transport, + url=interface.url, + ) + + @classmethod + def task_query_params( + cls, + request: a2a_pb2.GetTaskRequest, + ) -> types.TaskQueryParams: + m = _TASK_NAME_MATCH.match(request.name) + if not m: + raise ServerError( + error=types.InvalidParamsError( + message=f'No task for {request.name}' + ) + ) + return types.TaskQueryParams( + history_length=request.history_length + if request.history_length + else None, + id=m.group(1), + metadata=None, + ) + + @classmethod + def capabilities( + cls, capabilities: a2a_pb2.AgentCapabilities + ) -> types.AgentCapabilities: + return types.AgentCapabilities( + streaming=capabilities.streaming, + push_notifications=capabilities.push_notifications, + extensions=[ + cls.agent_extension(x) for x in capabilities.extensions + ], + ) + + @classmethod + def agent_extension( + cls, + extension: a2a_pb2.AgentExtension, + ) -> types.AgentExtension: + return types.AgentExtension( + uri=extension.uri, + description=extension.description, + params=json_format.MessageToDict(extension.params), + required=extension.required, + ) + + @classmethod + def security( + cls, + security: list[a2a_pb2.Security] | None, + ) -> list[dict[str, list[str]]] | None: + if not security: + return None + return [ + {k: list(v.list) for (k, v) in s.schemes.items()} for s in security + ] + + @classmethod + def provider( + cls, provider: a2a_pb2.AgentProvider | None + ) -> types.AgentProvider | None: + if not provider: + return None + return types.AgentProvider( + organization=provider.organization, + url=provider.url, + ) + + @classmethod + def security_schemes( + cls, schemes: dict[str, a2a_pb2.SecurityScheme] + ) 
-> dict[str, types.SecurityScheme]: + return {k: cls.security_scheme(v) for (k, v) in schemes.items()} + + @classmethod + def security_scheme( + cls, + scheme: a2a_pb2.SecurityScheme, + ) -> types.SecurityScheme: + if scheme.HasField('api_key_security_scheme'): + return types.SecurityScheme( + root=types.APIKeySecurityScheme( + description=scheme.api_key_security_scheme.description, + name=scheme.api_key_security_scheme.name, + in_=types.In(scheme.api_key_security_scheme.location), # type: ignore[call-arg] + ) + ) + if scheme.HasField('http_auth_security_scheme'): + return types.SecurityScheme( + root=types.HTTPAuthSecurityScheme( + description=scheme.http_auth_security_scheme.description, + scheme=scheme.http_auth_security_scheme.scheme, + bearer_format=scheme.http_auth_security_scheme.bearer_format, + ) + ) + if scheme.HasField('oauth2_security_scheme'): + return types.SecurityScheme( + root=types.OAuth2SecurityScheme( + description=scheme.oauth2_security_scheme.description, + flows=cls.oauth2_flows(scheme.oauth2_security_scheme.flows), + ) + ) + if scheme.HasField('mtls_security_scheme'): + return types.SecurityScheme( + root=types.MutualTLSSecurityScheme( + description=scheme.mtls_security_scheme.description, + ) + ) + return types.SecurityScheme( + root=types.OpenIdConnectSecurityScheme( + description=scheme.open_id_connect_security_scheme.description, + open_id_connect_url=scheme.open_id_connect_security_scheme.open_id_connect_url, + ) + ) + + @classmethod + def oauth2_flows(cls, flows: a2a_pb2.OAuthFlows) -> types.OAuthFlows: + if flows.HasField('authorization_code'): + return types.OAuthFlows( + authorization_code=types.AuthorizationCodeOAuthFlow( + authorization_url=flows.authorization_code.authorization_url, + refresh_url=flows.authorization_code.refresh_url, + scopes=dict(flows.authorization_code.scopes.items()), + token_url=flows.authorization_code.token_url, + ), + ) + if flows.HasField('client_credentials'): + return types.OAuthFlows( + 
client_credentials=types.ClientCredentialsOAuthFlow( + refresh_url=flows.client_credentials.refresh_url, + scopes=dict(flows.client_credentials.scopes.items()), + token_url=flows.client_credentials.token_url, + ), + ) + if flows.HasField('implicit'): + return types.OAuthFlows( + implicit=types.ImplicitOAuthFlow( + authorization_url=flows.implicit.authorization_url, + refresh_url=flows.implicit.refresh_url, + scopes=dict(flows.implicit.scopes.items()), + ), + ) + return types.OAuthFlows( + password=types.PasswordOAuthFlow( + refresh_url=flows.password.refresh_url, + scopes=dict(flows.password.scopes.items()), + token_url=flows.password.token_url, + ), + ) + + @classmethod + def stream_response( + cls, + response: a2a_pb2.StreamResponse, + ) -> ( + types.Message + | types.Task + | types.TaskStatusUpdateEvent + | types.TaskArtifactUpdateEvent + ): + if response.HasField('msg'): + return cls.message(response.msg) + if response.HasField('task'): + return cls.task(response.task) + if response.HasField('status_update'): + return cls.task_status_update_event(response.status_update) + if response.HasField('artifact_update'): + return cls.task_artifact_update_event(response.artifact_update) + raise ValueError('Unsupported StreamResponse type') + + @classmethod + def skill(cls, skill: a2a_pb2.AgentSkill) -> types.AgentSkill: + return types.AgentSkill( + id=skill.id, + name=skill.name, + description=skill.description, + tags=list(skill.tags), + examples=list(skill.examples), + input_modes=list(skill.input_modes), + output_modes=list(skill.output_modes), + ) + + @classmethod + def role(cls, role: a2a_pb2.Role) -> types.Role: + match role: + case a2a_pb2.Role.ROLE_USER: + return types.Role.user + case a2a_pb2.Role.ROLE_AGENT: + return types.Role.agent + case _: + return types.Role.agent diff --git a/src/a2a/utils/signing.py b/src/a2a/utils/signing.py new file mode 100644 index 000000000..6ea8c21b8 --- /dev/null +++ b/src/a2a/utils/signing.py @@ -0,0 +1,152 @@ +import json + 
+from collections.abc import Callable +from typing import Any, TypedDict + +from a2a.utils.helpers import canonicalize_agent_card + + +try: + import jwt + + from jwt.api_jwk import PyJWK + from jwt.exceptions import PyJWTError + from jwt.utils import base64url_decode, base64url_encode +except ImportError as e: + raise ImportError( + 'A2A Signing requires PyJWT to be installed. ' + 'Install with: ' + "'pip install a2a-sdk[signing]'" + ) from e + +from a2a.types import AgentCard, AgentCardSignature + + +class SignatureVerificationError(Exception): + """Base exception for signature verification errors.""" + + +class NoSignatureError(SignatureVerificationError): + """Exception raised when no signature is found on an AgentCard.""" + + +class InvalidSignaturesError(SignatureVerificationError): + """Exception raised when all signatures are invalid.""" + + +class ProtectedHeader(TypedDict): + """Protected header parameters for JWS (JSON Web Signature).""" + + kid: str + """ Key identifier. """ + alg: str | None + """ Algorithm used for signing. """ + jku: str | None + """ JSON Web Key Set URL. """ + typ: str | None + """ Token type. + + Best practice: SHOULD be "JOSE" for JWS tokens. + """ + + +def create_agent_card_signer( + signing_key: PyJWK | str | bytes, + protected_header: ProtectedHeader, + header: dict[str, Any] | None = None, +) -> Callable[[AgentCard], AgentCard]: + """Creates a function that signs an AgentCard and adds the signature. + + Args: + signing_key: The private key for signing. + protected_header: The protected header parameters. + header: Unprotected header parameters. + + Returns: + A callable that takes an AgentCard and returns the modified AgentCard with a signature. 
+ """ + + def agent_card_signer(agent_card: AgentCard) -> AgentCard: + """Signs agent card.""" + canonical_payload = canonicalize_agent_card(agent_card) + payload_dict = json.loads(canonical_payload) + + jws_string = jwt.encode( + payload=payload_dict, + key=signing_key, + algorithm=protected_header.get('alg', 'HS256'), + headers=dict(protected_header), + ) + + # The result of jwt.encode is a compact serialization: HEADER.PAYLOAD.SIGNATURE + protected, _, signature = jws_string.split('.') + + agent_card_signature = AgentCardSignature( + header=header, + protected=protected, + signature=signature, + ) + + agent_card.signatures = (agent_card.signatures or []) + [ + agent_card_signature + ] + return agent_card + + return agent_card_signer + + +def create_signature_verifier( + key_provider: Callable[[str | None, str | None], PyJWK | str | bytes], + algorithms: list[str], +) -> Callable[[AgentCard], None]: + """Creates a function that verifies the signatures on an AgentCard. + + The verifier succeeds if at least one signature is valid. Otherwise, it raises an error. + + Args: + key_provider: A callable that accepts a key ID (kid) and a JWK Set URL (jku) and returns the verification key. + This function is responsible for fetching the correct key for a given signature. + algorithms: A list of acceptable algorithms (e.g., ['ES256', 'RS256']) for verification used to prevent algorithm confusion attacks. + + Returns: + A function that takes an AgentCard as input, and raises an error if none of the signatures are valid. 
+ """ + + def signature_verifier( + agent_card: AgentCard, + ) -> None: + """Verifies agent card signatures.""" + if not agent_card.signatures: + raise NoSignatureError('AgentCard has no signatures to verify.') + + for agent_card_signature in agent_card.signatures: + try: + # get verification key + protected_header_json = base64url_decode( + agent_card_signature.protected.encode('utf-8') + ).decode('utf-8') + protected_header = json.loads(protected_header_json) + kid = protected_header.get('kid') + jku = protected_header.get('jku') + verification_key = key_provider(kid, jku) + + canonical_payload = canonicalize_agent_card(agent_card) + encoded_payload = base64url_encode( + canonical_payload.encode('utf-8') + ).decode('utf-8') + + token = f'{agent_card_signature.protected}.{encoded_payload}.{agent_card_signature.signature}' + jwt.decode( + jwt=token, + key=verification_key, + algorithms=algorithms, + ) + # Found a valid signature, exit the loop and function + break + except PyJWTError: + continue + else: + # This block runs only if the loop completes without a break + raise InvalidSignaturesError('No valid signature found') + + return signature_verifier diff --git a/src/a2a/utils/task.py b/src/a2a/utils/task.py index 9cf4df436..d8215cec0 100644 --- a/src/a2a/utils/task.py +++ b/src/a2a/utils/task.py @@ -2,7 +2,7 @@ import uuid -from a2a.types import Artifact, Message, Task, TaskState, TaskStatus +from a2a.types import Artifact, Message, Task, TaskState, TaskStatus, TextPart def new_task(request: Message) -> Task: @@ -15,13 +15,23 @@ def new_task(request: Message) -> Task: Returns: A new `Task` object initialized with 'submitted' status and the input message in history. + + Raises: + TypeError: If the message role is None. + ValueError: If the message parts are empty, if any part has empty content, or if the provided context_id is invalid. 
""" + if not request.role: + raise TypeError('Message role cannot be None') + if not request.parts: + raise ValueError('Message parts cannot be empty') + for part in request.parts: + if isinstance(part.root, TextPart) and not part.root.text: + raise ValueError('TextPart content cannot be empty') + return Task( status=TaskStatus(state=TaskState.submitted), - id=(request.taskId if request.taskId else str(uuid.uuid4())), - contextId=( - request.contextId if request.contextId else str(uuid.uuid4()) - ), + id=request.task_id or str(uuid.uuid4()), + context_id=request.context_id or str(uuid.uuid4()), history=[request], ) @@ -46,12 +56,37 @@ def completed_task( Returns: A `Task` object with status set to 'completed'. """ + if not artifacts or not all(isinstance(a, Artifact) for a in artifacts): + raise ValueError( + 'artifacts must be a non-empty list of Artifact objects' + ) + if history is None: history = [] return Task( status=TaskStatus(state=TaskState.completed), id=task_id, - contextId=context_id, + context_id=context_id, artifacts=artifacts, history=history, ) + + +def apply_history_length(task: Task, history_length: int | None) -> Task: + """Applies history_length parameter on task and returns a new task object. 
+ + Args: + task: The original task object with complete history + history_length: History length configuration value + + Returns: + A new task object with limited history + """ + # Apply historyLength parameter if specified + if history_length is not None and history_length > 0 and task.history: + # Limit history to the most recent N messages + limited_history = task.history[-history_length:] + # Create a new task instance with limited history + return task.model_copy(update={'history': limited_history}) + + return task diff --git a/src/a2a/utils/telemetry.py b/src/a2a/utils/telemetry.py index 0aeee931d..fa8658bf7 100644 --- a/src/a2a/utils/telemetry.py +++ b/src/a2a/utils/telemetry.py @@ -18,6 +18,16 @@ - Automatic recording of exceptions and setting of span status. - Selective method tracing in classes using include/exclude lists. +Configuration: +- Environment Variable Control: OpenTelemetry instrumentation can be + disabled using the `OTEL_INSTRUMENTATION_A2A_SDK_ENABLED` environment + variable. 
+ + - Default: `true` (tracing enabled when OpenTelemetry is installed) + - To disable: Set `OTEL_INSTRUMENTATION_A2A_SDK_ENABLED=false` + - Case insensitive: 'true', 'True', 'TRUE' all enable tracing + - Any other value disables tracing and logs a debug message + Usage: For a single function: ```python @@ -53,31 +63,89 @@ def internal_method(self): ``` """ +import asyncio import functools import inspect import logging +import os -from opentelemetry import trace -from opentelemetry.trace import SpanKind as _SpanKind -from opentelemetry.trace import StatusCode +from collections.abc import Callable +from typing import TYPE_CHECKING, Any +from typing_extensions import Self -SpanKind = _SpanKind -__all__ = ['SpanKind'] + +if TYPE_CHECKING: + from opentelemetry.trace import SpanKind as SpanKindType +else: + SpanKindType = object + +logger = logging.getLogger(__name__) + +try: + from opentelemetry import trace + from opentelemetry.trace import SpanKind as _SpanKind + from opentelemetry.trace import StatusCode + + otel_installed = True + +except ImportError: + logger.debug( + 'OpenTelemetry not found. Tracing will be disabled. ' + 'Install with: \'pip install "a2a-sdk[telemetry]"\'' + ) + otel_installed = False + +ENABLED_ENV_VAR = 'OTEL_INSTRUMENTATION_A2A_SDK_ENABLED' INSTRUMENTING_MODULE_NAME = 'a2a-python-sdk' INSTRUMENTING_MODULE_VERSION = '1.0.0' -logger = logging.getLogger(__name__) +# Check if tracing is enabled via environment variable +env_value = os.getenv(ENABLED_ENV_VAR, 'true') +otel_enabled = env_value.lower() == 'true' +# Log when tracing is explicitly disabled via environment variable +if otel_installed and not otel_enabled: + logger.debug( + 'A2A OTEL instrumentation disabled via environment variable ' + '%s=%r. 
Tracing will be disabled.', + ENABLED_ENV_VAR, + env_value, + ) -def trace_function( - func=None, +if not otel_installed or not otel_enabled: + + class _NoOp: + """A no-op object that absorbs all tracing calls when OpenTelemetry is not installed.""" + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + return self + + def __enter__(self) -> Self: + return self + + def __exit__(self, *args: object, **kwargs: Any) -> None: + pass + + def __getattr__(self, name: str) -> Any: + return self + + trace = _NoOp() # type: ignore + _SpanKind = _NoOp() # type: ignore + StatusCode = _NoOp() # type: ignore + +SpanKind = _SpanKind # type: ignore +__all__ = ['SpanKind'] + + +def trace_function( # noqa: PLR0915 + func: Callable | None = None, *, - span_name=None, - kind=SpanKind.INTERNAL, - attributes=None, - attribute_extractor=None, -): + span_name: str | None = None, + kind: SpanKindType = SpanKind.INTERNAL, + attributes: dict[str, Any] | None = None, + attribute_extractor: Callable | None = None, +) -> Callable: """A decorator to automatically trace a function call with OpenTelemetry. This decorator can be used to wrap both sync and async functions. @@ -135,11 +203,13 @@ def trace_function( is_async_func = inspect.iscoroutinefunction(func) logger.debug( - f'Start tracing for {actual_span_name}, is_async_func {is_async_func}' + 'Start tracing for %s, is_async_func %s', + actual_span_name, + is_async_func, ) @functools.wraps(func) - async def async_wrapper(*args, **kwargs) -> any: + async def async_wrapper(*args, **kwargs) -> Any: """Async Wrapper for the decorator.""" logger.debug('Start async tracer') tracer = trace.get_tracer( @@ -157,8 +227,12 @@ async def async_wrapper(*args, **kwargs) -> any: # Async wrapper, await for the function call to complete. 
result = await func(*args, **kwargs) span.set_status(StatusCode.OK) - return result - + # asyncio.CancelledError extends from BaseException + except asyncio.CancelledError as ce: + exception = None + logger.debug('CancelledError in span %s', actual_span_name) + span.record_exception(ce) + raise except Exception as e: exception = e span.record_exception(e) @@ -170,13 +244,15 @@ async def async_wrapper(*args, **kwargs) -> any: attribute_extractor( span, args, kwargs, result, exception ) - except Exception as attr_e: - logger.error( - f'attribute_extractor error in span {actual_span_name}: {attr_e}' + except Exception: + logger.exception( + 'attribute_extractor error in span %s', + actual_span_name, ) + return result @functools.wraps(func) - def sync_wrapper(*args, **kwargs): + def sync_wrapper(*args, **kwargs) -> Any: """Sync Wrapper for the decorator.""" tracer = trace.get_tracer(INSTRUMENTING_MODULE_NAME) with tracer.start_as_current_span(actual_span_name, kind=kind) as span: @@ -191,7 +267,6 @@ def sync_wrapper(*args, **kwargs): # Sync wrapper, execute the function call. result = func(*args, **kwargs) span.set_status(StatusCode.OK) - return result except Exception as e: exception = e @@ -204,10 +279,12 @@ def sync_wrapper(*args, **kwargs): attribute_extractor( span, args, kwargs, result, exception ) - except Exception as attr_e: - logger.error( - f'attribute_extractor error in span {actual_span_name}: {attr_e}' + except Exception: + logger.exception( + 'attribute_extractor error in span %s', + actual_span_name, ) + return result return async_wrapper if is_async_func else sync_wrapper @@ -215,8 +292,8 @@ def sync_wrapper(*args, **kwargs): def trace_class( include_list: list[str] | None = None, exclude_list: list[str] | None = None, - kind=SpanKind.INTERNAL, -): + kind: SpanKindType = SpanKind.INTERNAL, +) -> Callable: """A class decorator to automatically trace specified methods of a class. 
This decorator iterates over the methods of a class and applies the @@ -266,26 +343,19 @@ def not_traced_method(self): pass ``` """ - logger.debug(f'Trace all class {include_list}, {exclude_list}') + logger.debug('Trace all class %s, %s', include_list, exclude_list) exclude_list = exclude_list or [] - def decorator(cls): - all_methods = {} + def decorator(cls: Any) -> Any: for name, method in inspect.getmembers(cls, inspect.isfunction): - # Skip Dunders if name.startswith('__') and name.endswith('__'): continue - - # Skip if include list is defined but the method not included. if include_list and name not in include_list: continue - # Skip if include list is not defined but the method is in excludes. if not include_list and name in exclude_list: continue - all_methods[name] = method span_name = f'{cls.__module__}.{cls.__name__}.{name}' - # Set the decorator on the method. setattr( cls, name, diff --git a/tck/__init__.py b/tck/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tck/sut_agent.py b/tck/sut_agent.py new file mode 100644 index 000000000..525631ca0 --- /dev/null +++ b/tck/sut_agent.py @@ -0,0 +1,186 @@ +import asyncio +import logging +import os +import uuid + +from datetime import datetime, timezone + +import uvicorn + +from a2a.server.agent_execution.agent_executor import AgentExecutor +from a2a.server.agent_execution.context import RequestContext +from a2a.server.apps import A2AStarletteApplication +from a2a.server.events.event_queue import EventQueue +from a2a.server.request_handlers.default_request_handler import ( + DefaultRequestHandler, +) +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentProvider, + Message, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, + TextPart, +) + + +JSONRPC_URL = '/a2a/jsonrpc' + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger('SUTAgent') + + +class SUTAgentExecutor(AgentExecutor): + 
"""Execution logic for the SUT agent.""" + + def __init__(self) -> None: + """Initializes the SUT agent executor.""" + self.running_tasks = set() + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + """Cancels a task.""" + api_task_id = context.task_id + if api_task_id in self.running_tasks: + self.running_tasks.remove(api_task_id) + + status_update = TaskStatusUpdateEvent( + task_id=api_task_id, + context_id=context.context_id or str(uuid.uuid4()), + status=TaskStatus( + state=TaskState.canceled, + timestamp=datetime.now(timezone.utc).isoformat(), + ), + final=True, + ) + await event_queue.enqueue_event(status_update) + + async def execute( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + """Executes a task.""" + user_message = context.message + task_id = context.task_id + context_id = context.context_id + + self.running_tasks.add(task_id) + + logger.info( + '[SUTAgentExecutor] Processing message %s for task %s (context: %s)', + user_message.message_id, + task_id, + context_id, + ) + + working_status = TaskStatusUpdateEvent( + task_id=task_id, + context_id=context_id, + status=TaskStatus( + state=TaskState.working, + message=Message( + role='agent', + message_id=str(uuid.uuid4()), + parts=[TextPart(text='Processing your question')], + task_id=task_id, + context_id=context_id, + ), + timestamp=datetime.now(timezone.utc).isoformat(), + ), + final=False, + ) + await event_queue.enqueue_event(working_status) + + agent_reply_text = 'Hello world!' 
+ await asyncio.sleep(3) # Simulate processing delay + + if task_id not in self.running_tasks: + logger.info('Task %s was cancelled.', task_id) + return + + logger.info('[SUTAgentExecutor] Response: %s', agent_reply_text) + + agent_message = Message( + role='agent', + message_id=str(uuid.uuid4()), + parts=[TextPart(text=agent_reply_text)], + task_id=task_id, + context_id=context_id, + ) + + final_update = TaskStatusUpdateEvent( + task_id=task_id, + context_id=context_id, + status=TaskStatus( + state=TaskState.input_required, + message=agent_message, + timestamp=datetime.now(timezone.utc).isoformat(), + ), + final=True, + ) + await event_queue.enqueue_event(final_update) + + +def main() -> None: + """Main entrypoint.""" + http_port = int(os.environ.get('HTTP_PORT', '41241')) + + agent_card = AgentCard( + name='SUT Agent', + description='An agent to be used as SUT against TCK tests.', + url=f'http://localhost:{http_port}{JSONRPC_URL}', + provider=AgentProvider( + organization='A2A Samples', + url='https://example.com/a2a-samples', + ), + version='1.0.0', + protocol_version='0.3.0', + capabilities=AgentCapabilities( + streaming=True, + push_notifications=False, + state_transition_history=True, + ), + default_input_modes=['text'], + default_output_modes=['text', 'task-status'], + skills=[ + { + 'id': 'sut_agent', + 'name': 'SUT Agent', + 'description': 'Simulate the general flow of a streaming agent.', + 'tags': ['sut'], + 'examples': ['hi', 'hello world', 'how are you', 'goodbye'], + 'input_modes': ['text'], + 'output_modes': ['text', 'task-status'], + } + ], + supports_authenticated_extended_card=False, + preferred_transport='JSONRPC', + additional_interfaces=[ + { + 'url': f'http://localhost:{http_port}{JSONRPC_URL}', + 'transport': 'JSONRPC', + }, + ], + ) + + request_handler = DefaultRequestHandler( + agent_executor=SUTAgentExecutor(), + task_store=InMemoryTaskStore(), + ) + + server = A2AStarletteApplication( + agent_card=agent_card, + 
http_handler=request_handler, + ) + + app = server.build(rpc_url=JSONRPC_URL) + + logger.info('Starting HTTP server on port %s...', http_port) + uvicorn.run(app, host='127.0.0.1', port=http_port, log_level='info') + + +if __name__ == '__main__': + main() diff --git a/tests/README.md b/tests/README.md index bab99450c..6c70551c7 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,11 +1,59 @@ ## Running the tests -1. Run the tests +1. Run all tests (excluding those requiring real DBs, see item 3): ```bash - uv run pytest -v -s client/test_client.py + uv run pytest ``` -In case of failures, you can cleanup the cache: + ``` + + **Useful Flags:** + - `-v` (verbose): Shows more detailed output, including each test name as it runs. + - `-s` (no capture): Allows stdout (print statements) to show in the console. Useful for debugging. + + Example with flags: + ```bash + uv run pytest -v -s + ``` + + Note: Some tests require external databases (PostgreSQL, MySQL) and will be skipped if the corresponding environment variables (`POSTGRES_TEST_DSN`, `MYSQL_TEST_DSN`) are not set. + +2. Run specific tests: + ```bash + # Run a specific test file + uv run pytest tests/client/test_client_factory.py + + # Run a specific test function + uv run pytest tests/client/test_client_factory.py::test_client_factory_connect_with_url + + # Run tests in a specific folder + uv run pytest tests/client/ + ``` + +3. Run database integration tests (requires Docker): + ```bash + ./scripts/run_db_tests.sh + ``` + + This script will: + - Start PostgreSQL and MySQL containers using Docker Compose. + - Run the database integration tests. + - Stop the containers after tests finish. + + You can also run tests for a specific database: + ```bash + ./scripts/run_db_tests.sh --postgres + # or + ./scripts/run_db_tests.sh --mysql + ``` + + To keep the databases running for debugging: + ```bash + ./scripts/run_db_tests.sh --debug + ``` + (Follow the onscreen instructions to export DSNs and run pytest manually). 
+ +In case of failures, you can clean up the cache: 1. `uv clean` 2. `rm -fR .pytest_cache .venv __pycache__` diff --git a/tests/auth/test_user.py b/tests/auth/test_user.py new file mode 100644 index 000000000..e3bbe2e60 --- /dev/null +++ b/tests/auth/test_user.py @@ -0,0 +1,27 @@ +import unittest + +from inspect import isabstract + +from a2a.auth.user import UnauthenticatedUser, User + + +class TestUser(unittest.TestCase): + def test_is_abstract(self): + self.assertTrue(isabstract(User)) + + +class TestUnauthenticatedUser(unittest.TestCase): + def test_is_user_subclass(self): + self.assertTrue(issubclass(UnauthenticatedUser, User)) + + def test_is_authenticated_returns_false(self): + user = UnauthenticatedUser() + self.assertFalse(user.is_authenticated) + + def test_user_name_returns_empty_string(self): + user = UnauthenticatedUser() + self.assertEqual(user.user_name, '') + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/client/test_auth_middleware.py b/tests/client/test_auth_middleware.py new file mode 100644 index 000000000..c41b45017 --- /dev/null +++ b/tests/client/test_auth_middleware.py @@ -0,0 +1,358 @@ +import json + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +import httpx +import pytest +import respx + +from a2a.client import ( + AuthInterceptor, + Client, + ClientCallContext, + ClientCallInterceptor, + ClientConfig, + ClientFactory, + InMemoryContextCredentialStore, +) +from a2a.types import ( + APIKeySecurityScheme, + AgentCapabilities, + AgentCard, + AuthorizationCodeOAuthFlow, + HTTPAuthSecurityScheme, + In, + Message, + OAuth2SecurityScheme, + OAuthFlows, + OpenIdConnectSecurityScheme, + Role, + SecurityScheme, + SendMessageSuccessResponse, + TransportProtocol, +) + + +class HeaderInterceptor(ClientCallInterceptor): + """A simple mock interceptor for testing basic middleware functionality.""" + + def __init__(self, header_name: str, header_value: str): + self.header_name = 
header_name + self.header_value = header_value + + async def intercept( + self, + method_name: str, + request_payload: dict[str, Any], + http_kwargs: dict[str, Any], + agent_card: AgentCard | None, + context: ClientCallContext | None, + ) -> tuple[dict[str, Any], dict[str, Any]]: + headers = http_kwargs.get('headers', {}) + headers[self.header_name] = self.header_value + http_kwargs['headers'] = headers + return request_payload, http_kwargs + + +def build_success_response(request: httpx.Request) -> httpx.Response: + """Creates a valid JSON-RPC success response based on the request.""" + request_payload = json.loads(request.content) + response_payload = SendMessageSuccessResponse( + id=request_payload['id'], + jsonrpc='2.0', + result=Message( + kind='message', + message_id='message-id', + role=Role.agent, + parts=[], + ), + ).model_dump(mode='json') + return httpx.Response(200, json=response_payload) + + +def build_message() -> Message: + """Builds a minimal Message.""" + return Message( + message_id='msg1', + role=Role.user, + parts=[], + ) + + +async def send_message( + client: Client, + url: str, + session_id: str | None = None, +) -> httpx.Request: + """Mocks the response and sends a message using the client.""" + respx.post(url).mock(side_effect=build_success_response) + context = ClientCallContext( + state={'sessionId': session_id} if session_id else {} + ) + async for _ in client.send_message( + request=build_message(), + context=context, + ): + pass + return respx.calls.last.request + + +@pytest.fixture +def store(): + store = InMemoryContextCredentialStore() + yield store + + +@pytest.mark.asyncio +async def test_auth_interceptor_skips_when_no_agent_card( + store: InMemoryContextCredentialStore, +) -> None: + """Tests that the AuthInterceptor does not modify the request when no AgentCard is provided.""" + request_payload = {'foo': 'bar'} + http_kwargs = {'fizz': 'buzz'} + auth_interceptor = AuthInterceptor(credential_service=store) + + new_payload, 
new_kwargs = await auth_interceptor.intercept( + method_name='message/send', + request_payload=request_payload, + http_kwargs=http_kwargs, + agent_card=None, + context=ClientCallContext(state={}), + ) + assert new_payload == request_payload + assert new_kwargs == http_kwargs + + +@pytest.mark.asyncio +async def test_in_memory_context_credential_store( + store: InMemoryContextCredentialStore, +) -> None: + """Verifies that InMemoryContextCredentialStore correctly stores and retrieves + credentials based on the session ID in the client context. + """ + session_id = 'session-id' + scheme_name = 'test-scheme' + credential = 'test-token' + await store.set_credentials(session_id, scheme_name, credential) + + # Assert: Successful retrieval + context = ClientCallContext(state={'sessionId': session_id}) + retrieved_credential = await store.get_credentials(scheme_name, context) + assert retrieved_credential == credential + # Assert: Retrieval with wrong session ID returns None + wrong_context = ClientCallContext(state={'sessionId': 'wrong-session'}) + retrieved_credential_wrong = await store.get_credentials( + scheme_name, wrong_context + ) + assert retrieved_credential_wrong is None + # Assert: Retrieval with no context returns None + retrieved_credential_none = await store.get_credentials(scheme_name, None) + assert retrieved_credential_none is None + # Assert: Retrieval with context but no sessionId returns None + empty_context = ClientCallContext(state={}) + retrieved_credential_empty = await store.get_credentials( + scheme_name, empty_context + ) + assert retrieved_credential_empty is None + # Assert: Overwrite the credential when session_id already exists + new_credential = 'new-token' + await store.set_credentials(session_id, scheme_name, new_credential) + assert await store.get_credentials(scheme_name, context) == new_credential + + +@pytest.mark.asyncio +@respx.mock +async def test_client_with_simple_interceptor() -> None: + """Ensures that a custom 
HeaderInterceptor correctly injects a static header into outbound HTTP requests from the A2AClient.""" + url = 'http://agent.com/rpc' + interceptor = HeaderInterceptor('X-Test-Header', 'Test-Value-123') + card = AgentCard( + url=url, + name='testbot', + description='test bot', + version='1.0', + default_input_modes=[], + default_output_modes=[], + skills=[], + capabilities=AgentCapabilities(), + preferred_transport=TransportProtocol.jsonrpc, + ) + + async with httpx.AsyncClient() as http_client: + config = ClientConfig( + httpx_client=http_client, + supported_transports=[TransportProtocol.jsonrpc], + ) + factory = ClientFactory(config) + client = factory.create(card, interceptors=[interceptor]) + + request = await send_message(client, url) + assert request.headers['x-test-header'] == 'Test-Value-123' + + +@dataclass +class AuthTestCase: + """Represents a test scenario for verifying authentication behavior in AuthInterceptor.""" + + url: str + """The endpoint URL of the agent to which the request is sent.""" + session_id: str + """The client session ID used to fetch credentials from the credential store.""" + scheme_name: str + """The name of the security scheme defined in the agent card.""" + credential: str + """The actual credential value (e.g., API key, access token) to be injected.""" + security_scheme: Any + """The security scheme object (e.g., APIKeySecurityScheme, OAuth2SecurityScheme, etc.) 
to define behavior.""" + expected_header_key: str + """The expected HTTP header name to be set by the interceptor.""" + expected_header_value_func: Callable[[str], str] + """A function that maps the credential to its expected header value (e.g., lambda c: f"Bearer {c}").""" + + +api_key_test_case = AuthTestCase( + url='http://agent.com/rpc', + session_id='session-id', + scheme_name='apikey', + credential='secret-api-key', + security_scheme=APIKeySecurityScheme( + type='apiKey', + name='X-API-Key', + in_=In.header, + ), + expected_header_key='x-api-key', + expected_header_value_func=lambda c: c, +) + + +oauth2_test_case = AuthTestCase( + url='http://agent.com/rpc', + session_id='session-id', + scheme_name='oauth2', + credential='secret-oauth-access-token', + security_scheme=OAuth2SecurityScheme( + type='oauth2', + flows=OAuthFlows( + authorization_code=AuthorizationCodeOAuthFlow( + authorization_url='http://provider.com/auth', + token_url='http://provider.com/token', + scopes={'read': 'Read scope'}, + ) + ), + ), + expected_header_key='Authorization', + expected_header_value_func=lambda c: f'Bearer {c}', +) + + +oidc_test_case = AuthTestCase( + url='http://agent.com/rpc', + session_id='session-id', + scheme_name='oidc', + credential='secret-oidc-id-token', + security_scheme=OpenIdConnectSecurityScheme( + type='openIdConnect', + open_id_connect_url='http://provider.com/.well-known/openid-configuration', + ), + expected_header_key='Authorization', + expected_header_value_func=lambda c: f'Bearer {c}', +) + + +bearer_test_case = AuthTestCase( + url='http://agent.com/rpc', + session_id='session-id', + scheme_name='bearer', + credential='bearer-token-123', + security_scheme=HTTPAuthSecurityScheme( + scheme='bearer', + ), + expected_header_key='Authorization', + expected_header_value_func=lambda c: f'Bearer {c}', +) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'test_case', + [api_key_test_case, oauth2_test_case, oidc_test_case, bearer_test_case], +) +@respx.mock 
+async def test_auth_interceptor_variants( + test_case: AuthTestCase, store: InMemoryContextCredentialStore +) -> None: + """Parametrized test verifying that AuthInterceptor correctly attaches credentials based on the defined security scheme in the AgentCard.""" + await store.set_credentials( + test_case.session_id, test_case.scheme_name, test_case.credential + ) + auth_interceptor = AuthInterceptor(credential_service=store) + agent_card = AgentCard( + url=test_case.url, + name=f'{test_case.scheme_name}bot', + description=f'A bot that uses {test_case.scheme_name}', + version='1.0', + default_input_modes=[], + default_output_modes=[], + skills=[], + capabilities=AgentCapabilities(), + security=[{test_case.scheme_name: []}], + security_schemes={ + test_case.scheme_name: SecurityScheme( + root=test_case.security_scheme + ) + }, + preferred_transport=TransportProtocol.jsonrpc, + ) + + async with httpx.AsyncClient() as http_client: + config = ClientConfig( + httpx_client=http_client, + supported_transports=[TransportProtocol.jsonrpc], + ) + factory = ClientFactory(config) + client = factory.create(agent_card, interceptors=[auth_interceptor]) + + request = await send_message( + client, test_case.url, test_case.session_id + ) + assert request.headers[ + test_case.expected_header_key + ] == test_case.expected_header_value_func(test_case.credential) + + +@pytest.mark.asyncio +async def test_auth_interceptor_skips_when_scheme_not_in_security_schemes( + store: InMemoryContextCredentialStore, +) -> None: + """Tests that AuthInterceptor skips a scheme if it's listed in security requirements but not defined in security_schemes.""" + scheme_name = 'missing' + session_id = 'session-id' + credential = 'dummy-token' + request_payload = {'foo': 'bar'} + http_kwargs = {'fizz': 'buzz'} + await store.set_credentials(session_id, scheme_name, credential) + auth_interceptor = AuthInterceptor(credential_service=store) + agent_card = AgentCard( + url='http://agent.com/rpc', + 
name='missingbot', + description='A bot that uses missing scheme definition', + version='1.0', + default_input_modes=[], + default_output_modes=[], + skills=[], + capabilities=AgentCapabilities(), + security=[{scheme_name: []}], + security_schemes={}, + ) + + new_payload, new_kwargs = await auth_interceptor.intercept( + method_name='message/send', + request_payload=request_payload, + http_kwargs=http_kwargs, + agent_card=agent_card, + context=ClientCallContext(state={'sessionId': session_id}), + ) + assert new_payload == request_payload + assert new_kwargs == http_kwargs diff --git a/tests/client/test_base_client.py b/tests/client/test_base_client.py new file mode 100644 index 000000000..7aa47902d --- /dev/null +++ b/tests/client/test_base_client.py @@ -0,0 +1,203 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from a2a.client.base_client import BaseClient +from a2a.client.client import ClientConfig +from a2a.client.transports.base import ClientTransport +from a2a.types import ( + AgentCapabilities, + AgentCard, + Message, + MessageSendConfiguration, + Part, + Role, + Task, + TaskState, + TaskStatus, + TextPart, +) + + +@pytest.fixture +def mock_transport() -> AsyncMock: + return AsyncMock(spec=ClientTransport) + + +@pytest.fixture +def sample_agent_card() -> AgentCard: + return AgentCard( + name='Test Agent', + description='An agent for testing', + url='http://test.com', + version='1.0', + capabilities=AgentCapabilities(streaming=True), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + skills=[], + ) + + +@pytest.fixture +def sample_message() -> Message: + return Message( + role=Role.user, + message_id='msg-1', + parts=[Part(root=TextPart(text='Hello'))], + ) + + +@pytest.fixture +def base_client( + sample_agent_card: AgentCard, mock_transport: AsyncMock +) -> BaseClient: + config = ClientConfig(streaming=True) + return BaseClient( + card=sample_agent_card, + config=config, + transport=mock_transport, + 
consumers=[], + middleware=[], + ) + + +@pytest.mark.asyncio +async def test_send_message_streaming( + base_client: BaseClient, mock_transport: MagicMock, sample_message: Message +) -> None: + async def create_stream(*args, **kwargs): + yield Task( + id='task-123', + context_id='ctx-456', + status=TaskStatus(state=TaskState.completed), + ) + + mock_transport.send_message_streaming.return_value = create_stream() + + meta = {'test': 1} + stream = base_client.send_message(sample_message, request_metadata=meta) + events = [event async for event in stream] + + mock_transport.send_message_streaming.assert_called_once() + assert ( + mock_transport.send_message_streaming.call_args[0][0].metadata == meta + ) + assert not mock_transport.send_message.called + assert len(events) == 1 + assert events[0][0].id == 'task-123' + + +@pytest.mark.asyncio +async def test_send_message_non_streaming( + base_client: BaseClient, mock_transport: MagicMock, sample_message: Message +) -> None: + base_client._config.streaming = False + mock_transport.send_message.return_value = Task( + id='task-456', + context_id='ctx-789', + status=TaskStatus(state=TaskState.completed), + ) + + meta = {'test': 1} + stream = base_client.send_message(sample_message, request_metadata=meta) + events = [event async for event in stream] + + mock_transport.send_message.assert_called_once() + assert mock_transport.send_message.call_args[0][0].metadata == meta + assert not mock_transport.send_message_streaming.called + assert len(events) == 1 + assert events[0][0].id == 'task-456' + + +@pytest.mark.asyncio +async def test_send_message_non_streaming_agent_capability_false( + base_client: BaseClient, mock_transport: MagicMock, sample_message: Message +) -> None: + base_client._card.capabilities.streaming = False + mock_transport.send_message.return_value = Task( + id='task-789', + context_id='ctx-101', + status=TaskStatus(state=TaskState.completed), + ) + + events = [event async for event in 
base_client.send_message(sample_message)] + + mock_transport.send_message.assert_called_once() + assert not mock_transport.send_message_streaming.called + assert len(events) == 1 + assert events[0][0].id == 'task-789' + + +@pytest.mark.asyncio +async def test_send_message_callsite_config_overrides_non_streaming( + base_client: BaseClient, mock_transport: MagicMock, sample_message: Message +): + base_client._config.streaming = False + mock_transport.send_message.return_value = Task( + id='task-cfg-ns-1', + context_id='ctx-cfg-ns-1', + status=TaskStatus(state=TaskState.completed), + ) + + cfg = MessageSendConfiguration( + history_length=2, + blocking=False, + accepted_output_modes=['application/json'], + ) + events = [ + event + async for event in base_client.send_message( + sample_message, configuration=cfg + ) + ] + + mock_transport.send_message.assert_called_once() + assert not mock_transport.send_message_streaming.called + assert len(events) == 1 + task, _ = events[0] + assert task.id == 'task-cfg-ns-1' + + params = mock_transport.send_message.call_args[0][0] + assert params.configuration.history_length == 2 + assert params.configuration.blocking is False + assert params.configuration.accepted_output_modes == ['application/json'] + + +@pytest.mark.asyncio +async def test_send_message_callsite_config_overrides_streaming( + base_client: BaseClient, mock_transport: MagicMock, sample_message: Message +): + base_client._config.streaming = True + base_client._card.capabilities.streaming = True + + async def create_stream(*args, **kwargs): + yield Task( + id='task-cfg-s-1', + context_id='ctx-cfg-s-1', + status=TaskStatus(state=TaskState.completed), + ) + + mock_transport.send_message_streaming.return_value = create_stream() + + cfg = MessageSendConfiguration( + history_length=0, + blocking=True, + accepted_output_modes=['text/plain'], + ) + events = [ + event + async for event in base_client.send_message( + sample_message, configuration=cfg + ) + ] + + 
mock_transport.send_message_streaming.assert_called_once() + assert not mock_transport.send_message.called + assert len(events) == 1 + task, _ = events[0] + assert task.id == 'task-cfg-s-1' + + params = mock_transport.send_message_streaming.call_args[0][0] + assert params.configuration.history_length == 0 + assert params.configuration.blocking is True + assert params.configuration.accepted_output_modes == ['text/plain'] diff --git a/tests/client/test_card_resolver.py b/tests/client/test_card_resolver.py new file mode 100644 index 000000000..26f3f106d --- /dev/null +++ b/tests/client/test_card_resolver.py @@ -0,0 +1,400 @@ +import json +import logging + +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +import httpx +import pytest + +from a2a.client import A2ACardResolver, A2AClientHTTPError, A2AClientJSONError +from a2a.types import AgentCard +from a2a.utils import AGENT_CARD_WELL_KNOWN_PATH + + +@pytest.fixture +def mock_httpx_client(): + """Fixture providing a mocked async httpx client.""" + return AsyncMock(spec=httpx.AsyncClient) + + +@pytest.fixture +def base_url(): + """Fixture providing a test base URL.""" + return 'https://example.com' + + +@pytest.fixture +def resolver(mock_httpx_client, base_url): + """Fixture providing an A2ACardResolver instance.""" + return A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=base_url, + ) + + +@pytest.fixture +def mock_response(): + """Fixture providing a mock httpx Response.""" + response = Mock(spec=httpx.Response) + response.raise_for_status = Mock() + return response + + +@pytest.fixture +def valid_agent_card_data(): + """Fixture providing valid agent card data.""" + return { + 'name': 'TestAgent', + 'description': 'A test agent', + 'version': '1.0.0', + 'url': 'https://example.com/a2a', + 'capabilities': {}, + 'default_input_modes': ['text/plain'], + 'default_output_modes': ['text/plain'], + 'skills': [ + { + 'id': 'test-skill', + 'name': 'Test Skill', + 'description': 'A skill for testing', 
+ 'tags': ['test'], + } + ], + } + + +class TestA2ACardResolverInit: + """Tests for A2ACardResolver initialization.""" + + def test_init_with_defaults(self, mock_httpx_client, base_url): + """Test initialization with default agent_card_path.""" + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=base_url, + ) + assert resolver.base_url == base_url + assert resolver.agent_card_path == AGENT_CARD_WELL_KNOWN_PATH[1:] + assert resolver.httpx_client == mock_httpx_client + + def test_init_with_custom_path(self, mock_httpx_client, base_url): + """Test initialization with custom agent_card_path.""" + custom_path = '/custom/agent/card' + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=base_url, + agent_card_path=custom_path, + ) + assert resolver.base_url == base_url + assert resolver.agent_card_path == custom_path[1:] + + def test_init_strips_leading_slash_from_agent_card_path( + self, mock_httpx_client, base_url + ): + """Test that leading slash is stripped from agent_card_path.""" + agent_card_path = '/well-known/agent' + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=base_url, + agent_card_path=agent_card_path, + ) + assert resolver.agent_card_path == agent_card_path[1:] + + +class TestGetAgentCard: + """Tests for get_agent_card methods.""" + + @pytest.mark.asyncio + async def test_get_agent_card_success_default_path( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test successful agent card fetch using default path.""" + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + + with patch.object( + AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) + ) as mock_validate: + result = await resolver.get_agent_card() + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + ) + mock_response.raise_for_status.assert_called_once() + 
mock_response.json.assert_called_once() + mock_validate.assert_called_once_with(valid_agent_card_data) + assert result is not None + + @pytest.mark.asyncio + async def test_get_agent_card_success_custom_path( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test successful agent card fetch using custom relative path.""" + custom_path = 'custom/path/card' + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + with patch.object( + AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) + ): + await resolver.get_agent_card(relative_card_path=custom_path) + + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{custom_path}', + ) + + @pytest.mark.asyncio + async def test_get_agent_card_strips_leading_slash_from_relative_path( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test successful agent card fetch using custom path with leading slash.""" + custom_path = '/custom/path/card' + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + with patch.object( + AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) + ): + await resolver.get_agent_card(relative_card_path=custom_path) + + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{custom_path[1:]}', + ) + + @pytest.mark.asyncio + async def test_get_agent_card_with_http_kwargs( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test that http_kwargs are passed to httpx.get.""" + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + http_kwargs = { + 'timeout': 30, + 'headers': {'Authorization': 'Bearer token'}, + } + with patch.object( + AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) + ): + await 
resolver.get_agent_card(http_kwargs=http_kwargs) + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + timeout=30, + headers={'Authorization': 'Bearer token'}, + ) + + @pytest.mark.asyncio + async def test_get_agent_card_root_path( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test fetching agent card from root path.""" + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + with patch.object( + AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) + ): + await resolver.get_agent_card(relative_card_path='/') + mock_httpx_client.get.assert_called_once_with(f'{base_url}/') + + @pytest.mark.asyncio + async def test_get_agent_card_http_status_error( + self, resolver, mock_httpx_client + ): + """Test A2AClientHTTPError raised on HTTP status error.""" + status_code = 404 + mock_response = Mock(spec=httpx.Response) + mock_response.status_code = status_code + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + 'Not Found', request=Mock(), response=mock_response + ) + mock_httpx_client.get.return_value = mock_response + + with pytest.raises(A2AClientHTTPError) as exc_info: + await resolver.get_agent_card() + + assert exc_info.value.status_code == status_code + assert 'Failed to fetch agent card' in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_agent_card_json_decode_error( + self, resolver, mock_httpx_client, mock_response + ): + """Test A2AClientJSONError raised on JSON decode error.""" + mock_response.json.side_effect = json.JSONDecodeError( + 'Invalid JSON', '', 0 + ) + mock_httpx_client.get.return_value = mock_response + with pytest.raises(A2AClientJSONError) as exc_info: + await resolver.get_agent_card() + assert 'Failed to parse JSON' in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_agent_card_request_error( + self, resolver, 
mock_httpx_client + ): + """Test A2AClientHTTPError raised on network request error.""" + mock_httpx_client.get.side_effect = httpx.RequestError( + 'Connection timeout', request=Mock() + ) + with pytest.raises(A2AClientHTTPError) as exc_info: + await resolver.get_agent_card() + assert exc_info.value.status_code == 503 + assert 'Network communication error' in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_agent_card_validation_error( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test A2AClientJSONError is raised on agent card validation error.""" + return_json = {'invalid': 'data'} + mock_response.json.return_value = return_json + mock_httpx_client.get.return_value = mock_response + with pytest.raises(A2AClientJSONError) as exc_info: + await resolver.get_agent_card() + assert ( + f'Failed to validate agent card structure from {base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}' + in exc_info.value.message + ) + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + ) + + @pytest.mark.asyncio + async def test_get_agent_card_logs_success( # noqa: PLR0913 + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + caplog, + ): + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + with ( + patch.object( + AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) + ), + caplog.at_level(logging.INFO), + ): + await resolver.get_agent_card() + assert ( + f'Successfully fetched agent card data from {base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}' + in caplog.text + ) + + @pytest.mark.asyncio + async def test_get_agent_card_none_relative_path( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test that None relative_card_path uses default path.""" + mock_response.json.return_value = valid_agent_card_data + 
mock_httpx_client.get.return_value = mock_response + + with patch.object( + AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) + ): + await resolver.get_agent_card(relative_card_path=None) + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + ) + + @pytest.mark.asyncio + async def test_get_agent_card_empty_string_relative_path( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test that empty string relative_card_path uses default path.""" + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + + with patch.object( + AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) + ): + await resolver.get_agent_card(relative_card_path='') + + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + ) + + @pytest.mark.parametrize('status_code', [400, 401, 403, 500, 502]) + @pytest.mark.asyncio + async def test_get_agent_card_different_status_codes( + self, resolver, mock_httpx_client, status_code + ): + """Test different HTTP status codes raise appropriate errors.""" + mock_response = Mock(spec=httpx.Response) + mock_response.status_code = status_code + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + f'Status {status_code}', request=Mock(), response=mock_response + ) + mock_httpx_client.get.return_value = mock_response + with pytest.raises(A2AClientHTTPError) as exc_info: + await resolver.get_agent_card() + assert exc_info.value.status_code == status_code + + @pytest.mark.asyncio + async def test_get_agent_card_returns_agent_card_instance( + self, resolver, mock_httpx_client, mock_response, valid_agent_card_data + ): + """Test that get_agent_card returns an AgentCard instance.""" + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + mock_agent_card = Mock(spec=AgentCard) + + 
with patch.object( + AgentCard, 'model_validate', return_value=mock_agent_card + ): + result = await resolver.get_agent_card() + assert result == mock_agent_card + mock_response.raise_for_status.assert_called_once() + + @pytest.mark.asyncio + async def test_get_agent_card_with_signature_verifier( + self, resolver, mock_httpx_client, valid_agent_card_data + ): + """Test that the signature verifier is called if provided.""" + mock_verifier = MagicMock() + + mock_response = MagicMock(spec=httpx.Response) + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + + agent_card = await resolver.get_agent_card( + signature_verifier=mock_verifier + ) + + mock_verifier.assert_called_once_with(agent_card) diff --git a/tests/client/test_client.py b/tests/client/test_client.py deleted file mode 100644 index e7cf5fe79..000000000 --- a/tests/client/test_client.py +++ /dev/null @@ -1,692 +0,0 @@ -import json -from collections.abc import AsyncGenerator -from typing import Any -from unittest.mock import AsyncMock, MagicMock, patch - -import httpx -import pytest -from httpx_sse import EventSource, ServerSentEvent -from pydantic import ValidationError as PydanticValidationError - -from a2a.client import (A2ACardResolver, A2AClient, A2AClientHTTPError, - A2AClientJSONError, create_text_message_object) -from a2a.types import (A2ARequest, AgentCapabilities, AgentCard, AgentSkill, - CancelTaskRequest, CancelTaskResponse, - CancelTaskSuccessResponse, GetTaskRequest, - GetTaskResponse, InvalidParamsError, - JSONRPCErrorResponse, MessageSendParams, Role, - SendMessageRequest, SendMessageResponse, - SendMessageSuccessResponse, SendStreamingMessageRequest, - SendStreamingMessageResponse, TaskIdParams, - TaskNotCancelableError, TaskQueryParams) - -AGENT_CARD = AgentCard( - name='Hello World Agent', - description='Just a hello world agent', - url='http://localhost:9999/', - version='1.0.0', - defaultInputModes=['text'], - 
defaultOutputModes=['text'], - capabilities=AgentCapabilities(), - skills=[ - AgentSkill( - id='hello_world', - name='Returns hello world', - description='just returns hello world', - tags=['hello world'], - examples=['hi', 'hello world'], - ) - ], -) - -AGENT_CARD_EXTENDED = AGENT_CARD.model_copy( - update={ - 'name': 'Hello World Agent - Extended Edition', - 'skills': AGENT_CARD.skills - + [ - AgentSkill( - id='extended_skill', - name='Super Greet', - description='A more enthusiastic greeting.', - tags=['extended'], - examples=['super hi'], - ) - ], - 'version': '1.0.1', - } -) - -AGENT_CARD_SUPPORTS_EXTENDED = AGENT_CARD.model_copy( - update={'supportsAuthenticatedExtendedCard': True} -) -AGENT_CARD_NO_URL_SUPPORTS_EXTENDED = AGENT_CARD_SUPPORTS_EXTENDED.model_copy( - update={'url': ''} -) - -MINIMAL_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'contextId': 'session-xyz', - 'status': {'state': 'working'}, - 'kind': 'task', -} - -MINIMAL_CANCELLED_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'contextId': 'session-xyz', - 'status': {'state': 'canceled'}, - 'kind': 'task', -} - - -@pytest.fixture -def mock_httpx_client() -> AsyncMock: - return AsyncMock(spec=httpx.AsyncClient) - - -@pytest.fixture -def mock_agent_card() -> MagicMock: - return MagicMock(spec=AgentCard, url='http://agent.example.com/api') - - -async def async_iterable_from_list( - items: list[ServerSentEvent], -) -> AsyncGenerator[ServerSentEvent]: - """Helper to create an async iterable from a list.""" - for item in items: - yield item - - -class TestA2ACardResolver: - BASE_URL = 'http://example.com' - AGENT_CARD_PATH = '/.well-known/agent.json' - FULL_AGENT_CARD_URL = f'{BASE_URL}{AGENT_CARD_PATH}' - EXTENDED_AGENT_CARD_PATH = '/agent/authenticatedExtendedCard' # Default path - - @pytest.mark.asyncio - async def test_init_strips_slashes(self, mock_httpx_client: AsyncMock): - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url='http://example.com/', - 
agent_card_path='/.well-known/agent.json/', - ) - assert resolver.base_url == 'http://example.com' - assert ( - resolver.agent_card_path == '.well-known/agent.json/' - ) # Path is only lstrip'd - - @pytest.mark.asyncio - async def test_get_agent_card_success_public_only( - self, mock_httpx_client: AsyncMock - ): - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - mock_response.json.return_value = AGENT_CARD.model_dump(mode='json') - mock_httpx_client.get.return_value = mock_response - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) - agent_card = await resolver.get_agent_card(http_kwargs={'timeout': 10}) - - mock_httpx_client.get.assert_called_once_with( - self.FULL_AGENT_CARD_URL, timeout=10 - ) - mock_response.raise_for_status.assert_called_once() - assert isinstance(agent_card, AgentCard) - assert agent_card == AGENT_CARD - # Ensure only one call was made (for the public card) - assert mock_httpx_client.get.call_count == 1 - - @pytest.mark.asyncio - async def test_get_agent_card_success_with_specified_path_for_extended_card( - self, mock_httpx_client: AsyncMock): - extended_card_response = AsyncMock(spec=httpx.Response) - extended_card_response.status_code = 200 - extended_card_response.json.return_value = AGENT_CARD_EXTENDED.model_dump( - mode='json' - ) - - # Mock the single call for the extended card - mock_httpx_client.get.return_value = extended_card_response - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) - - # Fetch the extended card by providing its relative path and example auth - auth_kwargs = {"headers": {"Authorization": "Bearer test token"}} - agent_card_result = await resolver.get_agent_card( - relative_card_path=self.EXTENDED_AGENT_CARD_PATH, - http_kwargs=auth_kwargs - ) - - expected_extended_url = 
f'{self.BASE_URL}/{self.EXTENDED_AGENT_CARD_PATH.lstrip("/")}' - mock_httpx_client.get.assert_called_once_with(expected_extended_url, **auth_kwargs) - extended_card_response.raise_for_status.assert_called_once() - - assert isinstance(agent_card_result, AgentCard) - assert agent_card_result == AGENT_CARD_EXTENDED # Should return the extended card - - @pytest.mark.asyncio - async def test_get_agent_card_validation_error( - self, mock_httpx_client: AsyncMock - ): - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - # Data that will cause a Pydantic ValidationError - mock_response.json.return_value = {"invalid_field": "value", "name": "Test Agent"} - mock_httpx_client.get.return_value = mock_response - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, base_url=self.BASE_URL - ) - # The call that is expected to raise an error should be within pytest.raises - with pytest.raises(A2AClientJSONError) as exc_info: - await resolver.get_agent_card() # Fetches from default path - - assert f'Failed to validate agent card structure from {self.FULL_AGENT_CARD_URL}' in str(exc_info.value) - assert 'invalid_field' in str(exc_info.value) # Check if Pydantic error details are present - assert mock_httpx_client.get.call_count == 1 # Should only be called once - - @pytest.mark.asyncio - async def test_get_agent_card_http_status_error( - self, mock_httpx_client: AsyncMock - ): - mock_response = MagicMock( - spec=httpx.Response - ) # Use MagicMock for response attribute - mock_response.status_code = 404 - mock_response.text = 'Not Found' - - http_status_error = httpx.HTTPStatusError( - 'Not Found', request=MagicMock(), response=mock_response - ) - mock_httpx_client.get.side_effect = http_status_error - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) - - with pytest.raises(A2AClientHTTPError) as exc_info: - await resolver.get_agent_card() - - assert 
exc_info.value.status_code == 404 - assert f'Failed to fetch agent card from {self.FULL_AGENT_CARD_URL}' in str(exc_info.value) - assert 'Not Found' in str(exc_info.value) - mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) - - @pytest.mark.asyncio - async def test_get_agent_card_json_decode_error( - self, mock_httpx_client: AsyncMock - ): - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - # Define json_error before using it - json_error = json.JSONDecodeError('Expecting value', 'doc', 0) - mock_response.json.side_effect = json_error - mock_httpx_client.get.return_value = mock_response - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) - - with pytest.raises(A2AClientJSONError) as exc_info: - await resolver.get_agent_card() - - # Assertions using exc_info must be after the with block - assert f'Failed to parse JSON for agent card from {self.FULL_AGENT_CARD_URL}' in str(exc_info.value) - assert 'Expecting value' in str(exc_info.value) - mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) - - @pytest.mark.asyncio - async def test_get_agent_card_request_error( - self, mock_httpx_client: AsyncMock - ): - request_error = httpx.RequestError('Network issue', request=MagicMock()) - mock_httpx_client.get.side_effect = request_error - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) - - with pytest.raises(A2AClientHTTPError) as exc_info: - await resolver.get_agent_card() - - assert exc_info.value.status_code == 503 - assert f'Network communication error fetching agent card from {self.FULL_AGENT_CARD_URL}' in str(exc_info.value) - assert 'Network issue' in str(exc_info.value) - mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) - - -class TestA2AClient: - AGENT_URL = 'http://agent.example.com/api' - - def 
test_init_with_agent_card( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - assert client.url == mock_agent_card.url - assert client.httpx_client == mock_httpx_client - - def test_init_with_url(self, mock_httpx_client: AsyncMock): - client = A2AClient(httpx_client=mock_httpx_client, url=self.AGENT_URL) - assert client.url == self.AGENT_URL - assert client.httpx_client == mock_httpx_client - - def test_init_with_agent_card_and_url_prioritizes_agent_card( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = A2AClient( - httpx_client=mock_httpx_client, - agent_card=mock_agent_card, - url='http://otherurl.com', - ) - assert ( - client.url == mock_agent_card.url - ) # Agent card URL should be used - - def test_init_raises_value_error_if_no_card_or_url( - self, mock_httpx_client: AsyncMock - ): - with pytest.raises(ValueError) as exc_info: - A2AClient(httpx_client=mock_httpx_client) - assert 'Must provide either agent_card or url' in str(exc_info.value) - - @pytest.mark.asyncio - async def test_get_client_from_agent_card_url_success( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - base_url = 'http://example.com' - agent_card_path = '/.well-known/custom-agent.json' - resolver_kwargs = {'timeout': 30} - - mock_resolver_instance = AsyncMock(spec=A2ACardResolver) - mock_resolver_instance.get_agent_card.return_value = mock_agent_card - - with patch( - 'a2a.client.client.A2ACardResolver', - return_value=mock_resolver_instance, - ) as mock_resolver_class: - client = await A2AClient.get_client_from_agent_card_url( - httpx_client=mock_httpx_client, - base_url=base_url, - agent_card_path=agent_card_path, - http_kwargs=resolver_kwargs, - ) - - mock_resolver_class.assert_called_once_with( - mock_httpx_client, - base_url=base_url, - agent_card_path=agent_card_path, - ) - 
mock_resolver_instance.get_agent_card.assert_called_once_with( - http_kwargs=resolver_kwargs, - # relative_card_path=None is implied by not passing it - ) - assert isinstance(client, A2AClient) - assert client.url == mock_agent_card.url - assert client.httpx_client == mock_httpx_client - - @pytest.mark.asyncio - async def test_get_client_from_agent_card_url_resolver_error( - self, mock_httpx_client: AsyncMock - ): - error_to_raise = A2AClientHTTPError(404, 'Agent card not found') - with patch( - 'a2a.client.client.A2ACardResolver.get_agent_card', - new_callable=AsyncMock, - side_effect=error_to_raise, - ): - with pytest.raises(A2AClientHTTPError) as exc_info: - await A2AClient.get_client_from_agent_card_url( - httpx_client=mock_httpx_client, - base_url='http://example.com', - ) - assert exc_info.value == error_to_raise - - @pytest.mark.asyncio - async def test_send_message_success_use_request( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - - params = MessageSendParams( - message=create_text_message_object(content='Hello') - ) - - request = SendMessageRequest(id=123, params=params) - - success_response = create_text_message_object( - role=Role.agent, content='Hi there!' 
- ).model_dump(exclude_none=True) - - rpc_response: dict[str, Any] = { - 'id': 123, - 'jsonrpc': '2.0', - 'result': success_response, - } - - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_req: - mock_send_req.return_value = rpc_response - response = await client.send_message( - request=request, http_kwargs={'timeout': 10} - ) - - assert mock_send_req.call_count == 1 - called_args, called_kwargs = mock_send_req.call_args - assert not called_kwargs # no kwargs to _send_request - assert len(called_args) == 2 - json_rpc_request: dict[str, Any] = called_args[0] - assert isinstance(json_rpc_request['id'], int) - http_kwargs: dict[str, Any] = called_args[1] - assert http_kwargs['timeout'] == 10 - - a2a_request_arg = A2ARequest.model_validate(json_rpc_request) - assert isinstance(a2a_request_arg.root, SendMessageRequest) - assert isinstance(a2a_request_arg.root.params, MessageSendParams) - - assert a2a_request_arg.root.params.model_dump( - exclude_none=True - ) == params.model_dump(exclude_none=True) - - assert isinstance(response, SendMessageResponse) - assert isinstance(response.root, SendMessageSuccessResponse) - assert ( - response.root.result.model_dump(exclude_none=True) - == success_response - ) - - @pytest.mark.asyncio - async def test_send_message_error_response( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - - params = MessageSendParams( - message=create_text_message_object(content='Hello') - ) - - request = SendMessageRequest(id=123, params=params) - - error_response = InvalidParamsError() - - rpc_response: dict[str, Any] = { - 'id': 123, - 'jsonrpc': '2.0', - 'error': error_response.model_dump(exclude_none=True), - } - - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_req: - mock_send_req.return_value = rpc_response - response = await client.send_message(request=request) - - 
assert isinstance(response, SendMessageResponse) - assert isinstance(response.root, JSONRPCErrorResponse) - assert response.root.error.model_dump( - exclude_none=True - ) == InvalidParamsError().model_dump(exclude_none=True) - - @pytest.mark.asyncio - @patch('a2a.client.client.aconnect_sse') - async def test_send_message_streaming_success_request( - self, - mock_aconnect_sse: AsyncMock, - mock_httpx_client: AsyncMock, - mock_agent_card: MagicMock, - ): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) - - request = SendStreamingMessageRequest(id=123, params=params) - - mock_stream_response_1_dict: dict[str, Any] = { - 'id': 'stream_id_123', - 'jsonrpc': '2.0', - 'result': create_text_message_object( - content='First part ', role=Role.agent - ).model_dump(mode='json', exclude_none=True), - } - mock_stream_response_2_dict: dict[str, Any] = { - 'id': 'stream_id_123', - 'jsonrpc': '2.0', - 'result': create_text_message_object( - content='second part ', role=Role.agent - ).model_dump(mode='json', exclude_none=True), - } - - sse_event_1 = ServerSentEvent( - data=json.dumps(mock_stream_response_1_dict) - ) - sse_event_2 = ServerSentEvent( - data=json.dumps(mock_stream_response_2_dict) - ) - - mock_event_source = AsyncMock(spec=EventSource) - with patch.object(mock_event_source, 'aiter_sse') as mock_aiter_sse: - mock_aiter_sse.return_value = async_iterable_from_list( - [sse_event_1, sse_event_2] - ) - mock_aconnect_sse.return_value.__aenter__.return_value = ( - mock_event_source - ) - - results: list[Any] = [] - async for response in client.send_message_streaming( - request=request - ): - results.append(response) - - assert len(results) == 2 - assert isinstance(results[0], SendStreamingMessageResponse) - # Assuming SendStreamingMessageResponse is a RootModel like SendMessageResponse - assert results[0].root.id == 'stream_id_123' - assert ( - 
results[0].root.result.model_dump( # type: ignore - mode='json', exclude_none=True - ) - == mock_stream_response_1_dict['result'] - ) - - assert isinstance(results[1], SendStreamingMessageResponse) - assert results[1].root.id == 'stream_id_123' - assert ( - results[1].root.result.model_dump( # type: ignore - mode='json', exclude_none=True - ) - == mock_stream_response_2_dict['result'] - ) - - mock_aconnect_sse.assert_called_once() - call_args, call_kwargs = mock_aconnect_sse.call_args - assert call_args[0] == mock_httpx_client - assert call_args[1] == 'POST' - assert call_args[2] == mock_agent_card.url - - sent_json_payload = call_kwargs['json'] - assert sent_json_payload['method'] == 'message/stream' - assert sent_json_payload['params'] == params.model_dump( - mode='json', exclude_none=True - ) - assert ( - call_kwargs['timeout'] is None - ) # Default timeout for streaming - - @pytest.mark.asyncio - async def test_get_task_success_use_request( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - task_id_val = 'task_for_req_obj' - params_model = TaskQueryParams(id=task_id_val) - request_obj_id = 789 - request = GetTaskRequest(id=request_obj_id, params=params_model) - - rpc_response_payload: dict[str, Any] = { - 'id': request_obj_id, - 'jsonrpc': '2.0', - 'result': MINIMAL_TASK, - } - - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_req: - mock_send_req.return_value = rpc_response_payload - response = await client.get_task( - request=request, http_kwargs={'timeout': 20} - ) - - assert mock_send_req.call_count == 1 - called_args, called_kwargs = mock_send_req.call_args - assert len(called_args) == 2 - json_rpc_request_sent: dict[str, Any] = called_args[0] - assert not called_kwargs # no extra kwargs to _send_request - http_kwargs: dict[str, Any] = called_args[1] - assert http_kwargs['timeout'] == 20 - - assert 
json_rpc_request_sent['method'] == 'tasks/get' - assert json_rpc_request_sent['id'] == request_obj_id - assert json_rpc_request_sent['params'] == params_model.model_dump( - mode='json', exclude_none=True - ) - - assert isinstance(response, GetTaskResponse) - assert hasattr(response.root, 'result') - assert ( - response.root.result.model_dump(mode='json', exclude_none=True) # type: ignore - == MINIMAL_TASK - ) - assert response.root.id == request_obj_id - - @pytest.mark.asyncio - async def test_get_task_error_response( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params_model = TaskQueryParams(id='task_error_case') - request = GetTaskRequest(id='err_req_id', params=params_model) - error_details = InvalidParamsError() - - rpc_response_payload: dict[str, Any] = { - 'id': 'err_req_id', - 'jsonrpc': '2.0', - 'error': error_details.model_dump(mode='json', exclude_none=True), - } - - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_req: - mock_send_req.return_value = rpc_response_payload - response = await client.get_task(request=request) - - assert isinstance(response, GetTaskResponse) - assert isinstance(response.root, JSONRPCErrorResponse) - assert response.root.error.model_dump( - mode='json', exclude_none=True - ) == error_details.model_dump(exclude_none=True) - assert response.root.id == 'err_req_id' - - @pytest.mark.asyncio - async def test_cancel_task_success_use_request( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - task_id_val = MINIMAL_CANCELLED_TASK['id'] - params_model = TaskIdParams(id=task_id_val) - request_obj_id = 'cancel_req_obj_id_001' - request = CancelTaskRequest(id=request_obj_id, params=params_model) - - rpc_response_payload: dict[str, Any] = { - 'id': request_obj_id, - 'jsonrpc': '2.0', - 
'result': MINIMAL_CANCELLED_TASK, - } - - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_req: - mock_send_req.return_value = rpc_response_payload - response = await client.cancel_task( - request=request, http_kwargs={'timeout': 15} - ) - - assert mock_send_req.call_count == 1 - called_args, called_kwargs = mock_send_req.call_args - assert not called_kwargs # no extra kwargs to _send_request - assert len(called_args) == 2 - json_rpc_request_sent: dict[str, Any] = called_args[0] - http_kwargs: dict[str, Any] = called_args[1] - assert http_kwargs['timeout'] == 15 - - assert json_rpc_request_sent['method'] == 'tasks/cancel' - assert json_rpc_request_sent['id'] == request_obj_id - assert json_rpc_request_sent['params'] == params_model.model_dump( - mode='json', exclude_none=True - ) - - assert isinstance(response, CancelTaskResponse) - assert isinstance(response.root, CancelTaskSuccessResponse) - assert ( - response.root.result.model_dump(mode='json', exclude_none=True) # type: ignore - == MINIMAL_CANCELLED_TASK - ) - assert response.root.id == request_obj_id - - @pytest.mark.asyncio - async def test_cancel_task_error_response( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params_model = TaskIdParams(id='task_cancel_error_case') - request = CancelTaskRequest(id='err_cancel_req', params=params_model) - error_details = TaskNotCancelableError() - - rpc_response_payload: dict[str, Any] = { - 'id': 'err_cancel_req', - 'jsonrpc': '2.0', - 'error': error_details.model_dump(mode='json', exclude_none=True), - } - - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_req: - mock_send_req.return_value = rpc_response_payload - response = await client.cancel_task(request=request) - - assert isinstance(response, CancelTaskResponse) - assert isinstance(response.root, JSONRPCErrorResponse) - assert 
response.root.error.model_dump( - mode='json', exclude_none=True - ) == error_details.model_dump(exclude_none=True) - assert response.root.id == 'err_cancel_req' diff --git a/tests/client/test_client_factory.py b/tests/client/test_client_factory.py new file mode 100644 index 000000000..c388974b1 --- /dev/null +++ b/tests/client/test_client_factory.py @@ -0,0 +1,268 @@ +"""Tests for the ClientFactory.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx +import pytest + +from a2a.client import ClientConfig, ClientFactory +from a2a.client.transports import JsonRpcTransport, RestTransport +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentInterface, + TransportProtocol, +) + + +@pytest.fixture +def base_agent_card() -> AgentCard: + """Provides a base AgentCard for tests.""" + return AgentCard( + name='Test Agent', + description='An agent for testing.', + url='http://primary-url.com', + version='1.0.0', + capabilities=AgentCapabilities(), + skills=[], + default_input_modes=[], + default_output_modes=[], + preferred_transport=TransportProtocol.jsonrpc, + ) + + +def test_client_factory_selects_preferred_transport(base_agent_card: AgentCard): + """Verify that the factory selects the preferred transport by default.""" + config = ClientConfig( + httpx_client=httpx.AsyncClient(), + supported_transports=[ + TransportProtocol.jsonrpc, + TransportProtocol.http_json, + ], + extensions=['https://example.com/test-ext/v0'], + ) + factory = ClientFactory(config) + client = factory.create(base_agent_card) + + assert isinstance(client._transport, JsonRpcTransport) + assert client._transport.url == 'http://primary-url.com' + assert ['https://example.com/test-ext/v0'] == client._transport.extensions + + +def test_client_factory_selects_secondary_transport_url( + base_agent_card: AgentCard, +): + """Verify that the factory selects the correct URL for a secondary transport.""" + base_agent_card.additional_interfaces = [ + AgentInterface( + 
transport=TransportProtocol.http_json, + url='http://secondary-url.com', + ) + ] + # Client prefers REST, which is available as a secondary transport + config = ClientConfig( + httpx_client=httpx.AsyncClient(), + supported_transports=[ + TransportProtocol.http_json, + TransportProtocol.jsonrpc, + ], + use_client_preference=True, + extensions=['https://example.com/test-ext/v0'], + ) + factory = ClientFactory(config) + client = factory.create(base_agent_card) + + assert isinstance(client._transport, RestTransport) + assert client._transport.url == 'http://secondary-url.com' + assert ['https://example.com/test-ext/v0'] == client._transport.extensions + + +def test_client_factory_server_preference(base_agent_card: AgentCard): + """Verify that the factory respects server transport preference.""" + base_agent_card.preferred_transport = TransportProtocol.http_json + base_agent_card.additional_interfaces = [ + AgentInterface( + transport=TransportProtocol.jsonrpc, url='http://secondary-url.com' + ) + ] + # Client supports both, but server prefers REST + config = ClientConfig( + httpx_client=httpx.AsyncClient(), + supported_transports=[ + TransportProtocol.jsonrpc, + TransportProtocol.http_json, + ], + ) + factory = ClientFactory(config) + client = factory.create(base_agent_card) + + assert isinstance(client._transport, RestTransport) + assert client._transport.url == 'http://primary-url.com' + + +def test_client_factory_no_compatible_transport(base_agent_card: AgentCard): + """Verify that the factory raises an error if no compatible transport is found.""" + config = ClientConfig( + httpx_client=httpx.AsyncClient(), + supported_transports=[TransportProtocol.grpc], + ) + factory = ClientFactory(config) + with pytest.raises(ValueError, match='no compatible transports found'): + factory.create(base_agent_card) + + +@pytest.mark.asyncio +async def test_client_factory_connect_with_agent_card( + base_agent_card: AgentCard, +): + """Verify that connect works correctly when 
provided with an AgentCard.""" + client = await ClientFactory.connect(base_agent_card) + assert isinstance(client._transport, JsonRpcTransport) + assert client._transport.url == 'http://primary-url.com' + + +@pytest.mark.asyncio +async def test_client_factory_connect_with_url(base_agent_card: AgentCard): + """Verify that connect works correctly when provided with a URL.""" + with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: + mock_resolver.return_value.get_agent_card = AsyncMock( + return_value=base_agent_card + ) + + agent_url = 'http://example.com' + client = await ClientFactory.connect(agent_url) + + mock_resolver.assert_called_once() + assert mock_resolver.call_args[0][1] == agent_url + mock_resolver.return_value.get_agent_card.assert_awaited_once() + + assert isinstance(client._transport, JsonRpcTransport) + assert client._transport.url == 'http://primary-url.com' + + +@pytest.mark.asyncio +async def test_client_factory_connect_with_url_and_client_config( + base_agent_card: AgentCard, +): + """Verify connect with a URL and a pre-configured httpx client.""" + with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: + mock_resolver.return_value.get_agent_card = AsyncMock( + return_value=base_agent_card + ) + + agent_url = 'http://example.com' + mock_httpx_client = httpx.AsyncClient() + config = ClientConfig(httpx_client=mock_httpx_client) + + client = await ClientFactory.connect(agent_url, client_config=config) + + mock_resolver.assert_called_once_with(mock_httpx_client, agent_url) + mock_resolver.return_value.get_agent_card.assert_awaited_once() + + assert isinstance(client._transport, JsonRpcTransport) + assert client._transport.url == 'http://primary-url.com' + + +@pytest.mark.asyncio +async def test_client_factory_connect_with_resolver_args( + base_agent_card: AgentCard, +): + """Verify connect passes resolver arguments correctly.""" + with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: + 
mock_resolver.return_value.get_agent_card = AsyncMock( + return_value=base_agent_card + ) + + agent_url = 'http://example.com' + relative_path = '/card' + http_kwargs = {'headers': {'X-Test': 'true'}} + + # The resolver args are only passed if an httpx_client is provided in config + config = ClientConfig(httpx_client=httpx.AsyncClient()) + + await ClientFactory.connect( + agent_url, + client_config=config, + relative_card_path=relative_path, + resolver_http_kwargs=http_kwargs, + ) + + mock_resolver.return_value.get_agent_card.assert_awaited_once_with( + relative_card_path=relative_path, + http_kwargs=http_kwargs, + signature_verifier=None, + ) + + +@pytest.mark.asyncio +async def test_client_factory_connect_resolver_args_without_client( + base_agent_card: AgentCard, +): + """Verify resolver args are ignored if no httpx_client is provided.""" + with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: + mock_resolver.return_value.get_agent_card = AsyncMock( + return_value=base_agent_card + ) + + agent_url = 'http://example.com' + relative_path = '/card' + http_kwargs = {'headers': {'X-Test': 'true'}} + + await ClientFactory.connect( + agent_url, + relative_card_path=relative_path, + resolver_http_kwargs=http_kwargs, + ) + + mock_resolver.return_value.get_agent_card.assert_awaited_once_with( + relative_card_path=relative_path, + http_kwargs=http_kwargs, + signature_verifier=None, + ) + + +@pytest.mark.asyncio +async def test_client_factory_connect_with_extra_transports( + base_agent_card: AgentCard, +): + """Verify that connect can register and use extra transports.""" + + class CustomTransport: + pass + + def custom_transport_producer(*args, **kwargs): + return CustomTransport() + + base_agent_card.preferred_transport = 'custom' + base_agent_card.url = 'custom://foo' + + config = ClientConfig(supported_transports=['custom']) + + client = await ClientFactory.connect( + base_agent_card, + client_config=config, + extra_transports={'custom': 
custom_transport_producer}, + ) + + assert isinstance(client._transport, CustomTransport) + + +@pytest.mark.asyncio +async def test_client_factory_connect_with_consumers_and_interceptors( + base_agent_card: AgentCard, +): + """Verify consumers and interceptors are passed through correctly.""" + consumer1 = MagicMock() + interceptor1 = MagicMock() + + with patch('a2a.client.client_factory.BaseClient') as mock_base_client: + await ClientFactory.connect( + base_agent_card, + consumers=[consumer1], + interceptors=[interceptor1], + ) + + mock_base_client.assert_called_once() + call_args = mock_base_client.call_args[0] + assert call_args[3] == [consumer1] + assert call_args[4] == [interceptor1] diff --git a/tests/client/test_client_task_manager.py b/tests/client/test_client_task_manager.py new file mode 100644 index 000000000..63f98d8b9 --- /dev/null +++ b/tests/client/test_client_task_manager.py @@ -0,0 +1,178 @@ +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from a2a.client.client_task_manager import ClientTaskManager +from a2a.client.errors import ( + A2AClientInvalidArgsError, + A2AClientInvalidStateError, +) +from a2a.types import ( + Artifact, + Message, + Part, + Role, + Task, + TaskArtifactUpdateEvent, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, + TextPart, +) + + +@pytest.fixture +def task_manager() -> ClientTaskManager: + return ClientTaskManager() + + +@pytest.fixture +def sample_task() -> Task: + return Task( + id='task123', + context_id='context456', + status=TaskStatus(state=TaskState.working), + history=[], + artifacts=[], + ) + + +@pytest.fixture +def sample_message() -> Message: + return Message( + message_id='msg1', + role=Role.user, + parts=[Part(root=TextPart(text='Hello'))], + ) + + +def test_get_task_no_task_id_returns_none( + task_manager: ClientTaskManager, +) -> None: + assert task_manager.get_task() is None + + +def test_get_task_or_raise_no_task_raises_error( + task_manager: ClientTaskManager, +) -> None: + with 
pytest.raises(A2AClientInvalidStateError, match='no current Task'): + task_manager.get_task_or_raise() + + +@pytest.mark.asyncio +async def test_save_task_event_with_task( + task_manager: ClientTaskManager, sample_task: Task +) -> None: + await task_manager.save_task_event(sample_task) + assert task_manager.get_task() == sample_task + assert task_manager._task_id == sample_task.id + assert task_manager._context_id == sample_task.context_id + + +@pytest.mark.asyncio +async def test_save_task_event_with_task_already_set_raises_error( + task_manager: ClientTaskManager, sample_task: Task +) -> None: + await task_manager.save_task_event(sample_task) + with pytest.raises( + A2AClientInvalidArgsError, + match='Task is already set, create new manager for new tasks.', + ): + await task_manager.save_task_event(sample_task) + + +@pytest.mark.asyncio +async def test_save_task_event_with_status_update( + task_manager: ClientTaskManager, sample_task: Task, sample_message: Message +) -> None: + await task_manager.save_task_event(sample_task) + status_update = TaskStatusUpdateEvent( + task_id=sample_task.id, + context_id=sample_task.context_id, + status=TaskStatus(state=TaskState.completed, message=sample_message), + final=True, + ) + updated_task = await task_manager.save_task_event(status_update) + assert updated_task.status.state == TaskState.completed + assert updated_task.history == [sample_message] + + +@pytest.mark.asyncio +async def test_save_task_event_with_artifact_update( + task_manager: ClientTaskManager, sample_task: Task +) -> None: + await task_manager.save_task_event(sample_task) + artifact = Artifact( + artifact_id='art1', parts=[Part(root=TextPart(text='artifact content'))] + ) + artifact_update = TaskArtifactUpdateEvent( + task_id=sample_task.id, + context_id=sample_task.context_id, + artifact=artifact, + ) + + with patch( + 'a2a.client.client_task_manager.append_artifact_to_task' + ) as mock_append: + updated_task = await 
task_manager.save_task_event(artifact_update) + mock_append.assert_called_once_with(updated_task, artifact_update) + + +@pytest.mark.asyncio +async def test_save_task_event_creates_task_if_not_exists( + task_manager: ClientTaskManager, +) -> None: + status_update = TaskStatusUpdateEvent( + task_id='new_task', + context_id='new_context', + status=TaskStatus(state=TaskState.working), + final=False, + ) + updated_task = await task_manager.save_task_event(status_update) + assert updated_task is not None + assert updated_task.id == 'new_task' + assert updated_task.status.state == TaskState.working + + +@pytest.mark.asyncio +async def test_process_with_task_event( + task_manager: ClientTaskManager, sample_task: Task +) -> None: + with patch.object( + task_manager, 'save_task_event', new_callable=AsyncMock + ) as mock_save: + await task_manager.process(sample_task) + mock_save.assert_called_once_with(sample_task) + + +@pytest.mark.asyncio +async def test_process_with_non_task_event( + task_manager: ClientTaskManager, +) -> None: + with patch.object( + task_manager, 'save_task_event', new_callable=Mock + ) as mock_save: + non_task_event = 'not a task event' + await task_manager.process(non_task_event) + mock_save.assert_not_called() + + +def test_update_with_message( + task_manager: ClientTaskManager, sample_task: Task, sample_message: Message +) -> None: + updated_task = task_manager.update_with_message(sample_message, sample_task) + assert updated_task.history == [sample_message] + + +def test_update_with_message_moves_status_message( + task_manager: ClientTaskManager, sample_task: Task, sample_message: Message +) -> None: + status_message = Message( + message_id='status_msg', + role=Role.agent, + parts=[Part(root=TextPart(text='Status'))], + ) + sample_task.status.message = status_message + updated_task = task_manager.update_with_message(sample_message, sample_task) + assert updated_task.history == [status_message, sample_message] + assert updated_task.status.message is 
None diff --git a/tests/client/test_errors.py b/tests/client/test_errors.py index 30c4468dd..60636bd37 100644 --- a/tests/client/test_errors.py +++ b/tests/client/test_errors.py @@ -1,3 +1,5 @@ +from typing import NoReturn + import pytest from a2a.client import A2AClientError, A2AClientHTTPError, A2AClientJSONError @@ -6,13 +8,13 @@ class TestA2AClientError: """Test cases for the base A2AClientError class.""" - def test_instantiation(self): + def test_instantiation(self) -> None: """Test that A2AClientError can be instantiated.""" error = A2AClientError('Test error message') assert isinstance(error, Exception) assert str(error) == 'Test error message' - def test_inheritance(self): + def test_inheritance(self) -> None: """Test that A2AClientError inherits from Exception.""" error = A2AClientError() assert isinstance(error, Exception) @@ -21,31 +23,31 @@ def test_inheritance(self): class TestA2AClientHTTPError: """Test cases for A2AClientHTTPError class.""" - def test_instantiation(self): + def test_instantiation(self) -> None: """Test that A2AClientHTTPError can be instantiated with status_code and message.""" error = A2AClientHTTPError(404, 'Not Found') assert isinstance(error, A2AClientError) assert error.status_code == 404 assert error.message == 'Not Found' - def test_message_formatting(self): + def test_message_formatting(self) -> None: """Test that the error message is formatted correctly.""" error = A2AClientHTTPError(500, 'Internal Server Error') assert str(error) == 'HTTP Error 500: Internal Server Error' - def test_inheritance(self): + def test_inheritance(self) -> None: """Test that A2AClientHTTPError inherits from A2AClientError.""" error = A2AClientHTTPError(400, 'Bad Request') assert isinstance(error, A2AClientError) - def test_with_empty_message(self): + def test_with_empty_message(self) -> None: """Test behavior with an empty message.""" error = A2AClientHTTPError(403, '') assert error.status_code == 403 assert error.message == '' assert str(error) 
== 'HTTP Error 403: ' - def test_with_various_status_codes(self): + def test_with_various_status_codes(self) -> None: """Test with different HTTP status codes.""" test_cases = [ (200, 'OK'), @@ -68,29 +70,29 @@ def test_with_various_status_codes(self): class TestA2AClientJSONError: """Test cases for A2AClientJSONError class.""" - def test_instantiation(self): + def test_instantiation(self) -> None: """Test that A2AClientJSONError can be instantiated with a message.""" error = A2AClientJSONError('Invalid JSON format') assert isinstance(error, A2AClientError) assert error.message == 'Invalid JSON format' - def test_message_formatting(self): + def test_message_formatting(self) -> None: """Test that the error message is formatted correctly.""" error = A2AClientJSONError('Missing required field') assert str(error) == 'JSON Error: Missing required field' - def test_inheritance(self): + def test_inheritance(self) -> None: """Test that A2AClientJSONError inherits from A2AClientError.""" error = A2AClientJSONError('Parsing error') assert isinstance(error, A2AClientError) - def test_with_empty_message(self): + def test_with_empty_message(self) -> None: """Test behavior with an empty message.""" error = A2AClientJSONError('') assert error.message == '' assert str(error) == 'JSON Error: ' - def test_with_various_messages(self): + def test_with_various_messages(self) -> None: """Test with different error messages.""" test_messages = [ 'Malformed JSON', @@ -109,13 +111,13 @@ def test_with_various_messages(self): class TestExceptionHierarchy: """Test the exception hierarchy and relationships.""" - def test_exception_hierarchy(self): + def test_exception_hierarchy(self) -> None: """Test that the exception hierarchy is correct.""" assert issubclass(A2AClientError, Exception) assert issubclass(A2AClientHTTPError, A2AClientError) assert issubclass(A2AClientJSONError, A2AClientError) - def test_catch_specific_exception(self): + def test_catch_specific_exception(self) -> None: """Test 
that specific exceptions can be caught.""" try: raise A2AClientHTTPError(404, 'Not Found') @@ -123,7 +125,7 @@ def test_catch_specific_exception(self): assert e.status_code == 404 assert e.message == 'Not Found' - def test_catch_base_exception(self): + def test_catch_base_exception(self) -> None: """Test that derived exceptions can be caught as base exception.""" exceptions = [ A2AClientHTTPError(404, 'Not Found'), @@ -140,7 +142,7 @@ def test_catch_base_exception(self): class TestExceptionRaising: """Test cases for raising and handling the exceptions.""" - def test_raising_http_error(self): + def test_raising_http_error(self) -> NoReturn: """Test raising an HTTP error and checking its properties.""" with pytest.raises(A2AClientHTTPError) as excinfo: raise A2AClientHTTPError(429, 'Too Many Requests') @@ -150,7 +152,7 @@ def test_raising_http_error(self): assert error.message == 'Too Many Requests' assert str(error) == 'HTTP Error 429: Too Many Requests' - def test_raising_json_error(self): + def test_raising_json_error(self) -> NoReturn: """Test raising a JSON error and checking its properties.""" with pytest.raises(A2AClientJSONError) as excinfo: raise A2AClientJSONError('Invalid format') @@ -159,7 +161,7 @@ def test_raising_json_error(self): assert error.message == 'Invalid format' assert str(error) == 'JSON Error: Invalid format' - def test_raising_base_error(self): + def test_raising_base_error(self) -> NoReturn: """Test raising the base error.""" with pytest.raises(A2AClientError) as excinfo: raise A2AClientError('Generic client error') @@ -178,7 +180,9 @@ def test_raising_base_error(self): (500, 'Server Error', 'HTTP Error 500: Server Error'), ], ) -def test_http_error_parametrized(status_code, message, expected): +def test_http_error_parametrized( + status_code: int, message: str, expected: str +) -> None: """Parametrized test for HTTP errors with different status codes.""" error = A2AClientHTTPError(status_code, message) assert error.status_code == 
status_code @@ -194,7 +198,7 @@ def test_http_error_parametrized(status_code, message, expected): ('Parsing failed', 'JSON Error: Parsing failed'), ], ) -def test_json_error_parametrized(message, expected): +def test_json_error_parametrized(message: str, expected: str) -> None: """Parametrized test for JSON errors with different messages.""" error = A2AClientJSONError(message) assert error.message == message diff --git a/tests/client/test_legacy_client.py b/tests/client/test_legacy_client.py new file mode 100644 index 000000000..1bd9e4ae2 --- /dev/null +++ b/tests/client/test_legacy_client.py @@ -0,0 +1,115 @@ +"""Tests for the legacy client compatibility layer.""" + +from unittest.mock import AsyncMock, MagicMock + +import httpx +import pytest + +from a2a.client import A2AClient, A2AGrpcClient +from a2a.types import ( + AgentCapabilities, + AgentCard, + Message, + MessageSendParams, + Part, + Role, + SendMessageRequest, + Task, + TaskQueryParams, + TaskState, + TaskStatus, + TextPart, +) + + +@pytest.fixture +def mock_httpx_client() -> AsyncMock: + return AsyncMock(spec=httpx.AsyncClient) + + +@pytest.fixture +def mock_grpc_stub() -> AsyncMock: + stub = AsyncMock() + stub._channel = MagicMock() + return stub + + +@pytest.fixture +def jsonrpc_agent_card() -> AgentCard: + return AgentCard( + name='Test Agent', + description='A test agent', + url='http://test.agent.com/rpc', + version='1.0.0', + capabilities=AgentCapabilities(streaming=True), + skills=[], + default_input_modes=[], + default_output_modes=[], + preferred_transport='jsonrpc', + ) + + +@pytest.fixture +def grpc_agent_card() -> AgentCard: + return AgentCard( + name='Test Agent', + description='A test agent', + url='http://test.agent.com/rpc', + version='1.0.0', + capabilities=AgentCapabilities(streaming=True), + skills=[], + default_input_modes=[], + default_output_modes=[], + preferred_transport='grpc', + ) + + +@pytest.mark.asyncio +async def test_a2a_client_send_message( + mock_httpx_client: AsyncMock, 
jsonrpc_agent_card: AgentCard +): + client = A2AClient( + httpx_client=mock_httpx_client, agent_card=jsonrpc_agent_card + ) + + # Mock the underlying transport's send_message method + mock_response_task = Task( + id='task-123', + context_id='ctx-456', + status=TaskStatus(state=TaskState.completed), + ) + + client._transport.send_message = AsyncMock(return_value=mock_response_task) + + message = Message( + message_id='msg-123', + role=Role.user, + parts=[Part(root=TextPart(text='Hello'))], + ) + request = SendMessageRequest( + id='req-123', params=MessageSendParams(message=message) + ) + response = await client.send_message(request) + + assert response.root.result.id == 'task-123' + + +@pytest.mark.asyncio +async def test_a2a_grpc_client_get_task( + mock_grpc_stub: AsyncMock, grpc_agent_card: AgentCard +): + client = A2AGrpcClient(grpc_stub=mock_grpc_stub, agent_card=grpc_agent_card) + + mock_response_task = Task( + id='task-456', + context_id='ctx-789', + status=TaskStatus(state=TaskState.working), + ) + + client.get_task = AsyncMock(return_value=mock_response_task) + + params = TaskQueryParams(id='task-456') + response = await client.get_task(params) + + assert response.id == 'task-456' + client.get_task.assert_awaited_once_with(params) diff --git a/tests/client/test_optionals.py b/tests/client/test_optionals.py new file mode 100644 index 000000000..81cbd387d --- /dev/null +++ b/tests/client/test_optionals.py @@ -0,0 +1,16 @@ +"""Tests for a2a.client.optionals module.""" + +import importlib +import sys + +from unittest.mock import patch + + +def test_channel_import_failure(): + """Test Channel behavior when grpc is not available.""" + with patch.dict('sys.modules', {'grpc': None, 'grpc.aio': None}): + if 'a2a.client.optionals' in sys.modules: + del sys.modules['a2a.client.optionals'] + + optionals = importlib.import_module('a2a.client.optionals') + assert optionals.Channel is None diff --git a/tests/client/transports/test_grpc_client.py 
b/tests/client/transports/test_grpc_client.py new file mode 100644 index 000000000..111e44ba6 --- /dev/null +++ b/tests/client/transports/test_grpc_client.py @@ -0,0 +1,542 @@ +from unittest.mock import AsyncMock, MagicMock + +import grpc +import pytest + +from a2a.client.transports.grpc import GrpcTransport +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.grpc import a2a_pb2, a2a_pb2_grpc +from a2a.types import ( + AgentCapabilities, + AgentCard, + Artifact, + GetTaskPushNotificationConfigParams, + Message, + MessageSendParams, + Part, + PushNotificationAuthenticationInfo, + PushNotificationConfig, + Role, + Task, + TaskArtifactUpdateEvent, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, + TextPart, +) +from a2a.utils import get_text_parts, proto_utils +from a2a.utils.errors import ServerError + + +@pytest.fixture +def mock_grpc_stub() -> AsyncMock: + """Provides a mock gRPC stub with methods mocked.""" + stub = AsyncMock(spec=a2a_pb2_grpc.A2AServiceStub) + stub.SendMessage = AsyncMock() + stub.SendStreamingMessage = MagicMock() + stub.GetTask = AsyncMock() + stub.CancelTask = AsyncMock() + stub.CreateTaskPushNotificationConfig = AsyncMock() + stub.GetTaskPushNotificationConfig = AsyncMock() + return stub + + +@pytest.fixture +def sample_agent_card() -> AgentCard: + """Provides a minimal agent card for initialization.""" + return AgentCard( + name='gRPC Test Agent', + description='Agent for testing gRPC client', + url='grpc://localhost:50051', + version='1.0', + capabilities=AgentCapabilities(streaming=True, push_notifications=True), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + skills=[], + ) + + +@pytest.fixture +def grpc_transport( + mock_grpc_stub: AsyncMock, sample_agent_card: AgentCard +) -> GrpcTransport: + """Provides a GrpcTransport instance.""" + channel = AsyncMock() + transport = GrpcTransport( + channel=channel, + 
agent_card=sample_agent_card, + extensions=[ + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + ], + ) + transport.stub = mock_grpc_stub + return transport + + +@pytest.fixture +def sample_message_send_params() -> MessageSendParams: + """Provides a sample MessageSendParams object.""" + return MessageSendParams( + message=Message( + role=Role.user, + message_id='msg-1', + parts=[Part(root=TextPart(text='Hello'))], + ) + ) + + +@pytest.fixture +def sample_task() -> Task: + """Provides a sample Task object.""" + return Task( + id='task-1', + context_id='ctx-1', + status=TaskStatus(state=TaskState.completed), + ) + + +@pytest.fixture +def sample_message() -> Message: + """Provides a sample Message object.""" + return Message( + role=Role.agent, + message_id='msg-response', + parts=[Part(root=TextPart(text='Hi there'))], + ) + + +@pytest.fixture +def sample_artifact() -> Artifact: + """Provides a sample Artifact object.""" + return Artifact( + artifact_id='artifact-1', + name='example.txt', + description='An example artifact', + parts=[Part(root=TextPart(text='Hi there'))], + metadata={}, + extensions=[], + ) + + +@pytest.fixture +def sample_task_status_update_event() -> TaskStatusUpdateEvent: + """Provides a sample TaskStatusUpdateEvent.""" + return TaskStatusUpdateEvent( + task_id='task-1', + context_id='ctx-1', + status=TaskStatus(state=TaskState.working), + final=False, + metadata={}, + ) + + +@pytest.fixture +def sample_task_artifact_update_event( + sample_artifact: Artifact, +) -> TaskArtifactUpdateEvent: + """Provides a sample TaskArtifactUpdateEvent.""" + return TaskArtifactUpdateEvent( + task_id='task-1', + context_id='ctx-1', + artifact=sample_artifact, + append=True, + last_chunk=True, + metadata={}, + ) + + +@pytest.fixture +def sample_authentication_info() -> PushNotificationAuthenticationInfo: + """Provides a sample AuthenticationInfo object.""" + return PushNotificationAuthenticationInfo( + schemes=['apikey', 'oauth2'], 
credentials='secret-token' + ) + + +@pytest.fixture +def sample_push_notification_config( + sample_authentication_info: PushNotificationAuthenticationInfo, +) -> PushNotificationConfig: + """Provides a sample PushNotificationConfig object.""" + return PushNotificationConfig( + id='config-1', + url='https://example.com/notify', + token='example-token', + authentication=sample_authentication_info, + ) + + +@pytest.fixture +def sample_task_push_notification_config( + sample_push_notification_config: PushNotificationConfig, +) -> TaskPushNotificationConfig: + """Provides a sample TaskPushNotificationConfig object.""" + return TaskPushNotificationConfig( + task_id='task-1', + push_notification_config=sample_push_notification_config, + ) + + +@pytest.mark.asyncio +async def test_send_message_task_response( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_message_send_params: MessageSendParams, + sample_task: Task, +) -> None: + """Test send_message that returns a Task.""" + mock_grpc_stub.SendMessage.return_value = a2a_pb2.SendMessageResponse( + task=proto_utils.ToProto.task(sample_task) + ) + + response = await grpc_transport.send_message( + sample_message_send_params, + extensions=['https://example.com/test-ext/v3'], + ) + + mock_grpc_stub.SendMessage.assert_awaited_once() + _, kwargs = mock_grpc_stub.SendMessage.call_args + assert kwargs['metadata'] == [ + ( + HTTP_EXTENSION_HEADER, + 'https://example.com/test-ext/v3', + ) + ] + assert isinstance(response, Task) + assert response.id == sample_task.id + + +@pytest.mark.asyncio +async def test_send_message_message_response( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_message_send_params: MessageSendParams, + sample_message: Message, +) -> None: + """Test send_message that returns a Message.""" + mock_grpc_stub.SendMessage.return_value = a2a_pb2.SendMessageResponse( + msg=proto_utils.ToProto.message(sample_message) + ) + + response = await 
grpc_transport.send_message(sample_message_send_params) + + mock_grpc_stub.SendMessage.assert_awaited_once() + _, kwargs = mock_grpc_stub.SendMessage.call_args + assert kwargs['metadata'] == [ + ( + HTTP_EXTENSION_HEADER, + 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', + ) + ] + assert isinstance(response, Message) + assert response.message_id == sample_message.message_id + assert get_text_parts(response.parts) == get_text_parts( + sample_message.parts + ) + + +@pytest.mark.asyncio +async def test_send_message_streaming( # noqa: PLR0913 + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_message_send_params: MessageSendParams, + sample_message: Message, + sample_task: Task, + sample_task_status_update_event: TaskStatusUpdateEvent, + sample_task_artifact_update_event: TaskArtifactUpdateEvent, +) -> None: + """Test send_message_streaming that yields responses.""" + stream = MagicMock() + stream.read = AsyncMock( + side_effect=[ + a2a_pb2.StreamResponse( + msg=proto_utils.ToProto.message(sample_message) + ), + a2a_pb2.StreamResponse(task=proto_utils.ToProto.task(sample_task)), + a2a_pb2.StreamResponse( + status_update=proto_utils.ToProto.task_status_update_event( + sample_task_status_update_event + ) + ), + a2a_pb2.StreamResponse( + artifact_update=proto_utils.ToProto.task_artifact_update_event( + sample_task_artifact_update_event + ) + ), + grpc.aio.EOF, + ] + ) + mock_grpc_stub.SendStreamingMessage.return_value = stream + + responses = [ + response + async for response in grpc_transport.send_message_streaming( + sample_message_send_params + ) + ] + + mock_grpc_stub.SendStreamingMessage.assert_called_once() + _, kwargs = mock_grpc_stub.SendStreamingMessage.call_args + assert kwargs['metadata'] == [ + ( + HTTP_EXTENSION_HEADER, + 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', + ) + ] + assert isinstance(responses[0], Message) + assert responses[0].message_id == sample_message.message_id + assert 
isinstance(responses[1], Task) + assert responses[1].id == sample_task.id + assert isinstance(responses[2], TaskStatusUpdateEvent) + assert responses[2].task_id == sample_task_status_update_event.task_id + assert isinstance(responses[3], TaskArtifactUpdateEvent) + assert responses[3].task_id == sample_task_artifact_update_event.task_id + + +@pytest.mark.asyncio +async def test_get_task( + grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task: Task +) -> None: + """Test retrieving a task.""" + mock_grpc_stub.GetTask.return_value = proto_utils.ToProto.task(sample_task) + params = TaskQueryParams(id=sample_task.id) + + response = await grpc_transport.get_task(params) + + mock_grpc_stub.GetTask.assert_awaited_once_with( + a2a_pb2.GetTaskRequest( + name=f'tasks/{sample_task.id}', history_length=None + ), + metadata=[ + ( + HTTP_EXTENSION_HEADER, + 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', + ) + ], + ) + assert response.id == sample_task.id + + +@pytest.mark.asyncio +async def test_get_task_with_history( + grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task: Task +) -> None: + """Test retrieving a task with history.""" + mock_grpc_stub.GetTask.return_value = proto_utils.ToProto.task(sample_task) + history_len = 10 + params = TaskQueryParams(id=sample_task.id, history_length=history_len) + + await grpc_transport.get_task(params) + + mock_grpc_stub.GetTask.assert_awaited_once_with( + a2a_pb2.GetTaskRequest( + name=f'tasks/{sample_task.id}', history_length=history_len + ), + metadata=[ + ( + HTTP_EXTENSION_HEADER, + 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', + ) + ], + ) + + +@pytest.mark.asyncio +async def test_cancel_task( + grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task: Task +) -> None: + """Test cancelling a task.""" + cancelled_task = sample_task.model_copy() + cancelled_task.status.state = TaskState.canceled + mock_grpc_stub.CancelTask.return_value = 
proto_utils.ToProto.task( + cancelled_task + ) + params = TaskIdParams(id=sample_task.id) + extensions = [ + 'https://example.com/test-ext/v3', + ] + response = await grpc_transport.cancel_task(params, extensions=extensions) + + mock_grpc_stub.CancelTask.assert_awaited_once_with( + a2a_pb2.CancelTaskRequest(name=f'tasks/{sample_task.id}'), + metadata=[(HTTP_EXTENSION_HEADER, 'https://example.com/test-ext/v3')], + ) + assert response.status.state == TaskState.canceled + + +@pytest.mark.asyncio +async def test_set_task_callback_with_valid_task( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_task_push_notification_config: TaskPushNotificationConfig, +) -> None: + """Test setting a task push notification config with a valid task id.""" + mock_grpc_stub.CreateTaskPushNotificationConfig.return_value = ( + proto_utils.ToProto.task_push_notification_config( + sample_task_push_notification_config + ) + ) + + response = await grpc_transport.set_task_callback( + sample_task_push_notification_config + ) + + mock_grpc_stub.CreateTaskPushNotificationConfig.assert_awaited_once_with( + a2a_pb2.CreateTaskPushNotificationConfigRequest( + parent=f'tasks/{sample_task_push_notification_config.task_id}', + config_id=sample_task_push_notification_config.push_notification_config.id, + config=proto_utils.ToProto.task_push_notification_config( + sample_task_push_notification_config + ), + ), + metadata=[ + ( + HTTP_EXTENSION_HEADER, + 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', + ) + ], + ) + assert response.task_id == sample_task_push_notification_config.task_id + + +@pytest.mark.asyncio +async def test_set_task_callback_with_invalid_task( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_task_push_notification_config: TaskPushNotificationConfig, +) -> None: + """Test setting a task push notification config with an invalid task id.""" + mock_grpc_stub.CreateTaskPushNotificationConfig.return_value = 
a2a_pb2.TaskPushNotificationConfig( + name=( + f'invalid-path-to-tasks/{sample_task_push_notification_config.task_id}/' + f'pushNotificationConfigs/{sample_task_push_notification_config.push_notification_config.id}' + ), + push_notification_config=proto_utils.ToProto.push_notification_config( + sample_task_push_notification_config.push_notification_config + ), + ) + + with pytest.raises(ServerError) as exc_info: + await grpc_transport.set_task_callback( + sample_task_push_notification_config + ) + assert ( + 'Bad TaskPushNotificationConfig resource name' + in exc_info.value.error.message + ) + + +@pytest.mark.asyncio +async def test_get_task_callback_with_valid_task( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_task_push_notification_config: TaskPushNotificationConfig, +) -> None: + """Test retrieving a task push notification config with a valid task id.""" + mock_grpc_stub.GetTaskPushNotificationConfig.return_value = ( + proto_utils.ToProto.task_push_notification_config( + sample_task_push_notification_config + ) + ) + params = GetTaskPushNotificationConfigParams( + id=sample_task_push_notification_config.task_id, + push_notification_config_id=sample_task_push_notification_config.push_notification_config.id, + ) + + response = await grpc_transport.get_task_callback(params) + + mock_grpc_stub.GetTaskPushNotificationConfig.assert_awaited_once_with( + a2a_pb2.GetTaskPushNotificationConfigRequest( + name=( + f'tasks/{params.id}/' + f'pushNotificationConfigs/{params.push_notification_config_id}' + ), + ), + metadata=[ + ( + HTTP_EXTENSION_HEADER, + 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', + ) + ], + ) + assert response.task_id == sample_task_push_notification_config.task_id + + +@pytest.mark.asyncio +async def test_get_task_callback_with_invalid_task( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_task_push_notification_config: TaskPushNotificationConfig, +) -> None: + """Test retrieving a task 
push notification config with an invalid task id.""" + mock_grpc_stub.GetTaskPushNotificationConfig.return_value = a2a_pb2.TaskPushNotificationConfig( + name=( + f'invalid-path-to-tasks/{sample_task_push_notification_config.task_id}/' + f'pushNotificationConfigs/{sample_task_push_notification_config.push_notification_config.id}' + ), + push_notification_config=proto_utils.ToProto.push_notification_config( + sample_task_push_notification_config.push_notification_config + ), + ) + params = GetTaskPushNotificationConfigParams( + id=sample_task_push_notification_config.task_id, + push_notification_config_id=sample_task_push_notification_config.push_notification_config.id, + ) + + with pytest.raises(ServerError) as exc_info: + await grpc_transport.get_task_callback(params) + assert ( + 'Bad TaskPushNotificationConfig resource name' + in exc_info.value.error.message + ) + + +@pytest.mark.parametrize( + 'initial_extensions, input_extensions, expected_metadata', + [ + ( + None, + None, + None, + ), # Case 1: No initial, No input + ( + ['ext1'], + None, + [(HTTP_EXTENSION_HEADER, 'ext1')], + ), # Case 2: Initial, No input + ( + None, + ['ext2'], + [(HTTP_EXTENSION_HEADER, 'ext2')], + ), # Case 3: No initial, Input + ( + ['ext1'], + ['ext2'], + [(HTTP_EXTENSION_HEADER, 'ext2')], + ), # Case 4: Initial, Input (override) + ( + ['ext1'], + ['ext2', 'ext3'], + [(HTTP_EXTENSION_HEADER, 'ext2,ext3')], + ), # Case 5: Initial, Multiple inputs (override) + ( + ['ext1', 'ext2'], + ['ext3'], + [(HTTP_EXTENSION_HEADER, 'ext3')], + ), # Case 6: Multiple initial, Single input (override) + ], +) +def test_get_grpc_metadata( + grpc_transport: GrpcTransport, + initial_extensions: list[str] | None, + input_extensions: list[str] | None, + expected_metadata: list[tuple[str, str]] | None, +) -> None: + """Tests _get_grpc_metadata for correct metadata generation and self.extensions update.""" + grpc_transport.extensions = initial_extensions + metadata = 
grpc_transport._get_grpc_metadata(input_extensions) + assert metadata == expected_metadata diff --git a/tests/client/transports/test_jsonrpc_client.py b/tests/client/transports/test_jsonrpc_client.py new file mode 100644 index 000000000..0f6bba5b0 --- /dev/null +++ b/tests/client/transports/test_jsonrpc_client.py @@ -0,0 +1,1051 @@ +import json + +from collections.abc import AsyncGenerator +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx +import pytest +import respx + +from httpx_sse import EventSource, SSEError, ServerSentEvent + +from a2a.client import ( + A2ACardResolver, + A2AClientHTTPError, + A2AClientJSONError, + A2AClientTimeoutError, + create_text_message_object, +) +from a2a.client.transports.jsonrpc import JsonRpcTransport +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentSkill, + InvalidParamsError, + Message, + MessageSendParams, + PushNotificationConfig, + Role, + SendMessageSuccessResponse, + Task, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, +) +from a2a.utils import AGENT_CARD_WELL_KNOWN_PATH + + +AGENT_CARD = AgentCard( + name='Hello World Agent', + description='Just a hello world agent', + url='http://localhost:9999/', + version='1.0.0', + default_input_modes=['text'], + default_output_modes=['text'], + capabilities=AgentCapabilities(), + skills=[ + AgentSkill( + id='hello_world', + name='Returns hello world', + description='just returns hello world', + tags=['hello world'], + examples=['hi', 'hello world'], + ) + ], +) + +AGENT_CARD_EXTENDED = AGENT_CARD.model_copy( + update={ + 'name': 'Hello World Agent - Extended Edition', + 'skills': [ + *AGENT_CARD.skills, + AgentSkill( + id='extended_skill', + name='Super Greet', + description='A more enthusiastic greeting.', + tags=['extended'], + examples=['super hi'], + ), + ], + 'version': '1.0.1', + } +) + +AGENT_CARD_SUPPORTS_EXTENDED = AGENT_CARD.model_copy( + 
update={'supports_authenticated_extended_card': True} +) +AGENT_CARD_NO_URL_SUPPORTS_EXTENDED = AGENT_CARD_SUPPORTS_EXTENDED.model_copy( + update={'url': ''} +) + +MINIMAL_TASK: dict[str, Any] = { + 'id': 'task-abc', + 'contextId': 'session-xyz', + 'status': {'state': 'working'}, + 'kind': 'task', +} + +MINIMAL_CANCELLED_TASK: dict[str, Any] = { + 'id': 'task-abc', + 'contextId': 'session-xyz', + 'status': {'state': 'canceled'}, + 'kind': 'task', +} + + +@pytest.fixture +def mock_httpx_client() -> AsyncMock: + return AsyncMock(spec=httpx.AsyncClient) + + +@pytest.fixture +def mock_agent_card() -> MagicMock: + mock = MagicMock(spec=AgentCard, url='http://agent.example.com/api') + mock.supports_authenticated_extended_card = False + return mock + + +async def async_iterable_from_list( + items: list[ServerSentEvent], +) -> AsyncGenerator[ServerSentEvent, None]: + """Helper to create an async iterable from a list.""" + for item in items: + yield item + + +def _assert_extensions_header(mock_kwargs: dict, expected_extensions: set[str]): + headers = mock_kwargs.get('headers', {}) + assert HTTP_EXTENSION_HEADER in headers + header_value = headers[HTTP_EXTENSION_HEADER] + actual_extensions = {e.strip() for e in header_value.split(',')} + assert actual_extensions == expected_extensions + + +class TestA2ACardResolver: + BASE_URL = 'http://example.com' + AGENT_CARD_PATH = AGENT_CARD_WELL_KNOWN_PATH + FULL_AGENT_CARD_URL = f'{BASE_URL}{AGENT_CARD_PATH}' + EXTENDED_AGENT_CARD_PATH = '/agent/authenticatedExtendedCard' + + @pytest.mark.asyncio + async def test_init_parameters_stored_correctly( + self, mock_httpx_client: AsyncMock + ): + base_url = 'http://example.com' + custom_path = '/custom/agent-card.json' + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=base_url, + agent_card_path=custom_path, + ) + assert resolver.base_url == base_url + assert resolver.agent_card_path == custom_path.lstrip('/') + assert resolver.httpx_client == mock_httpx_client + + 
resolver_default_path = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=base_url, + ) + assert ( + '/' + resolver_default_path.agent_card_path + == AGENT_CARD_WELL_KNOWN_PATH + ) + + @pytest.mark.asyncio + async def test_init_strips_slashes(self, mock_httpx_client: AsyncMock): + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url='http://example.com/', + agent_card_path='/.well-known/agent-card.json/', + ) + assert resolver.base_url == 'http://example.com' + assert resolver.agent_card_path == '.well-known/agent-card.json/' + + @pytest.mark.asyncio + async def test_get_agent_card_success_public_only( + self, mock_httpx_client: AsyncMock + ): + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = AGENT_CARD.model_dump(mode='json') + mock_httpx_client.get.return_value = mock_response + + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=self.BASE_URL, + agent_card_path=self.AGENT_CARD_PATH, + ) + agent_card = await resolver.get_agent_card(http_kwargs={'timeout': 10}) + + mock_httpx_client.get.assert_called_once_with( + self.FULL_AGENT_CARD_URL, timeout=10 + ) + mock_response.raise_for_status.assert_called_once() + assert isinstance(agent_card, AgentCard) + assert agent_card == AGENT_CARD + assert mock_httpx_client.get.call_count == 1 + + @pytest.mark.asyncio + async def test_get_agent_card_success_with_specified_path_for_extended_card( + self, mock_httpx_client: AsyncMock + ): + extended_card_response = AsyncMock(spec=httpx.Response) + extended_card_response.status_code = 200 + extended_card_response.json.return_value = ( + AGENT_CARD_EXTENDED.model_dump(mode='json') + ) + mock_httpx_client.get.return_value = extended_card_response + + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=self.BASE_URL, + agent_card_path=self.AGENT_CARD_PATH, + ) + + auth_kwargs = {'headers': {'Authorization': 'Bearer test token'}} + 
agent_card_result = await resolver.get_agent_card( + relative_card_path=self.EXTENDED_AGENT_CARD_PATH, + http_kwargs=auth_kwargs, + ) + + expected_extended_url = ( + f'{self.BASE_URL}/{self.EXTENDED_AGENT_CARD_PATH.lstrip("/")}' + ) + mock_httpx_client.get.assert_called_once_with( + expected_extended_url, **auth_kwargs + ) + extended_card_response.raise_for_status.assert_called_once() + assert isinstance(agent_card_result, AgentCard) + assert agent_card_result == AGENT_CARD_EXTENDED + + @pytest.mark.asyncio + async def test_get_agent_card_validation_error( + self, mock_httpx_client: AsyncMock + ): + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = { + 'invalid_field': 'value', + 'name': 'Test Agent', + } + mock_httpx_client.get.return_value = mock_response + + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, base_url=self.BASE_URL + ) + with pytest.raises(A2AClientJSONError) as exc_info: + await resolver.get_agent_card() + + assert ( + f'Failed to validate agent card structure from {self.FULL_AGENT_CARD_URL}' + in str(exc_info.value) + ) + assert 'invalid_field' in str(exc_info.value) + assert mock_httpx_client.get.call_count == 1 + + @pytest.mark.asyncio + async def test_get_agent_card_http_status_error( + self, mock_httpx_client: AsyncMock + ): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 404 + mock_response.text = 'Not Found' + http_status_error = httpx.HTTPStatusError( + 'Not Found', request=MagicMock(), response=mock_response + ) + mock_httpx_client.get.side_effect = http_status_error + + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=self.BASE_URL, + agent_card_path=self.AGENT_CARD_PATH, + ) + + with pytest.raises(A2AClientHTTPError) as exc_info: + await resolver.get_agent_card() + + assert exc_info.value.status_code == 404 + assert ( + f'Failed to fetch agent card from {self.FULL_AGENT_CARD_URL}' + in 
str(exc_info.value) + ) + assert 'Not Found' in str(exc_info.value) + mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) + + @pytest.mark.asyncio + async def test_get_agent_card_json_decode_error( + self, mock_httpx_client: AsyncMock + ): + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + json_error = json.JSONDecodeError('Expecting value', 'doc', 0) + mock_response.json.side_effect = json_error + mock_httpx_client.get.return_value = mock_response + + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=self.BASE_URL, + agent_card_path=self.AGENT_CARD_PATH, + ) + + with pytest.raises(A2AClientJSONError) as exc_info: + await resolver.get_agent_card() + + assert ( + f'Failed to parse JSON for agent card from {self.FULL_AGENT_CARD_URL}' + in str(exc_info.value) + ) + assert 'Expecting value' in str(exc_info.value) + mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) + + @pytest.mark.asyncio + async def test_get_agent_card_request_error( + self, mock_httpx_client: AsyncMock + ): + request_error = httpx.RequestError('Network issue', request=MagicMock()) + mock_httpx_client.get.side_effect = request_error + + resolver = A2ACardResolver( + httpx_client=mock_httpx_client, + base_url=self.BASE_URL, + agent_card_path=self.AGENT_CARD_PATH, + ) + + with pytest.raises(A2AClientHTTPError) as exc_info: + await resolver.get_agent_card() + + assert exc_info.value.status_code == 503 + assert ( + f'Network communication error fetching agent card from {self.FULL_AGENT_CARD_URL}' + in str(exc_info.value) + ) + assert 'Network issue' in str(exc_info.value) + mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) + + +class TestJsonRpcTransport: + AGENT_URL = 'http://agent.example.com/api' + + def test_init_with_agent_card( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, 
agent_card=mock_agent_card + ) + assert client.url == mock_agent_card.url + assert client.httpx_client == mock_httpx_client + + def test_init_with_url(self, mock_httpx_client: AsyncMock): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, url=self.AGENT_URL + ) + assert client.url == self.AGENT_URL + assert client.httpx_client == mock_httpx_client + + def test_init_with_agent_card_and_url_prioritizes_url( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://otherurl.com', + ) + assert client.url == 'http://otherurl.com' + + def test_init_raises_value_error_if_no_card_or_url( + self, mock_httpx_client: AsyncMock + ): + with pytest.raises(ValueError) as exc_info: + JsonRpcTransport(httpx_client=mock_httpx_client) + assert 'Must provide either agent_card or url' in str(exc_info.value) + + @pytest.mark.asyncio + async def test_send_message_success( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello') + ) + success_response = create_text_message_object( + role=Role.agent, content='Hi there!' 
+ ) + rpc_response = SendMessageSuccessResponse( + id='123', jsonrpc='2.0', result=success_response + ) + response = httpx.Response( + 200, json=rpc_response.model_dump(mode='json') + ) + response.request = httpx.Request('POST', 'http://agent.example.com/api') + mock_httpx_client.post.return_value = response + + response = await client.send_message(request=params) + + assert isinstance(response, Message) + assert response.model_dump() == success_response.model_dump() + + @pytest.mark.asyncio + async def test_send_message_error_response( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello') + ) + error_response = InvalidParamsError() + rpc_response = { + 'id': '123', + 'jsonrpc': '2.0', + 'error': error_response.model_dump(exclude_none=True), + } + mock_httpx_client.post.return_value.json.return_value = rpc_response + + with pytest.raises(Exception): + await client.send_message(request=params) + + @pytest.mark.asyncio + @patch('a2a.client.transports.jsonrpc.aconnect_sse') + async def test_send_message_streaming_success( + self, + mock_aconnect_sse: AsyncMock, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello stream') + ) + mock_stream_response_1 = SendMessageSuccessResponse( + id='stream_id_123', + jsonrpc='2.0', + result=create_text_message_object( + content='First part ', role=Role.agent + ), + ) + mock_stream_response_2 = SendMessageSuccessResponse( + id='stream_id_123', + jsonrpc='2.0', + result=create_text_message_object( + content='second part ', role=Role.agent + ), + ) + sse_event_1 = ServerSentEvent( + data=mock_stream_response_1.model_dump_json() + ) + sse_event_2 = 
ServerSentEvent( + data=mock_stream_response_2.model_dump_json() + ) + mock_event_source = AsyncMock(spec=EventSource) + mock_event_source.aiter_sse.return_value = async_iterable_from_list( + [sse_event_1, sse_event_2] + ) + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + results = [ + item async for item in client.send_message_streaming(request=params) + ] + + assert len(results) == 2 + assert isinstance(results[0], Message) + assert ( + results[0].model_dump() + == mock_stream_response_1.result.model_dump() + ) + assert isinstance(results[1], Message) + assert ( + results[1].model_dump() + == mock_stream_response_2.result.model_dump() + ) + + # Repro of https://github.com/a2aproject/a2a-python/issues/540 + @pytest.mark.asyncio + @respx.mock + async def test_send_message_streaming_comment_success( + self, + mock_agent_card: MagicMock, + ): + async with httpx.AsyncClient() as client: + transport = JsonRpcTransport( + httpx_client=client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello stream') + ) + mock_stream_response_1 = SendMessageSuccessResponse( + id='stream_id_123', + jsonrpc='2.0', + result=create_text_message_object( + content='First part', role=Role.agent + ), + ) + mock_stream_response_2 = SendMessageSuccessResponse( + id='stream_id_123', + jsonrpc='2.0', + result=create_text_message_object( + content='Second part', role=Role.agent + ), + ) + + sse_content = ( + 'id: stream_id_1\n' + f'data: {mock_stream_response_1.model_dump_json()}\n\n' + ': keep-alive\n\n' + 'id: stream_id_2\n' + f'data: {mock_stream_response_2.model_dump_json()}\n\n' + ': keep-alive\n\n' + ) + + respx.post(mock_agent_card.url).mock( + return_value=httpx.Response( + 200, + headers={'Content-Type': 'text/event-stream'}, + content=sse_content, + ) + ) + + results = [ + item + async for item in transport.send_message_streaming( + request=params + ) + ] + + assert len(results) == 2 + 
assert results[0] == mock_stream_response_1.result + assert results[1] == mock_stream_response_2.result + + @pytest.mark.asyncio + async def test_send_request_http_status_error( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 404 + mock_response.text = 'Not Found' + http_error = httpx.HTTPStatusError( + 'Not Found', request=MagicMock(), response=mock_response + ) + mock_httpx_client.post.side_effect = http_error + + with pytest.raises(A2AClientHTTPError) as exc_info: + await client._send_request({}, {}) + + assert exc_info.value.status_code == 404 + assert 'Not Found' in str(exc_info.value) + + @pytest.mark.asyncio + async def test_send_request_json_decode_error( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + json_error = json.JSONDecodeError('Expecting value', 'doc', 0) + mock_response.json.side_effect = json_error + mock_httpx_client.post.return_value = mock_response + + with pytest.raises(A2AClientJSONError) as exc_info: + await client._send_request({}, {}) + + assert 'Expecting value' in str(exc_info.value) + + @pytest.mark.asyncio + async def test_send_request_httpx_request_error( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + request_error = httpx.RequestError('Network issue', request=MagicMock()) + mock_httpx_client.post.side_effect = request_error + + with pytest.raises(A2AClientHTTPError) as exc_info: + await client._send_request({}, {}) + + assert exc_info.value.status_code == 503 + assert 'Network communication error' in str(exc_info.value) + 
assert 'Network issue' in str(exc_info.value) + + @pytest.mark.asyncio + async def test_send_message_client_timeout( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + mock_httpx_client.post.side_effect = httpx.ReadTimeout( + 'Request timed out' + ) + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello') + ) + + with pytest.raises(A2AClientTimeoutError) as exc_info: + await client.send_message(request=params) + + assert 'Client Request timed out' in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_task_success( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = TaskQueryParams(id='task-abc') + rpc_response = { + 'id': '123', + 'jsonrpc': '2.0', + 'result': MINIMAL_TASK, + } + with patch.object( + client, '_send_request', new_callable=AsyncMock + ) as mock_send_request: + mock_send_request.return_value = rpc_response + response = await client.get_task(request=params) + + assert isinstance(response, Task) + assert ( + response.model_dump() + == Task.model_validate(MINIMAL_TASK).model_dump() + ) + mock_send_request.assert_called_once() + sent_payload = mock_send_request.call_args.args[0] + assert sent_payload['method'] == 'tasks/get' + + @pytest.mark.asyncio + async def test_cancel_task_success( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = TaskIdParams(id='task-abc') + rpc_response = { + 'id': '123', + 'jsonrpc': '2.0', + 'result': MINIMAL_CANCELLED_TASK, + } + with patch.object( + client, '_send_request', new_callable=AsyncMock + ) as mock_send_request: + mock_send_request.return_value = rpc_response + response = await client.cancel_task(request=params) + + 
assert isinstance(response, Task) + assert ( + response.model_dump() + == Task.model_validate(MINIMAL_CANCELLED_TASK).model_dump() + ) + mock_send_request.assert_called_once() + sent_payload = mock_send_request.call_args.args[0] + assert sent_payload['method'] == 'tasks/cancel' + + @pytest.mark.asyncio + async def test_set_task_callback_success( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = TaskPushNotificationConfig( + task_id='task-abc', + push_notification_config=PushNotificationConfig( + url='http://callback.com' + ), + ) + rpc_response = { + 'id': '123', + 'jsonrpc': '2.0', + 'result': params.model_dump(mode='json'), + } + with patch.object( + client, '_send_request', new_callable=AsyncMock + ) as mock_send_request: + mock_send_request.return_value = rpc_response + response = await client.set_task_callback(request=params) + + assert isinstance(response, TaskPushNotificationConfig) + assert response.model_dump() == params.model_dump() + mock_send_request.assert_called_once() + sent_payload = mock_send_request.call_args.args[0] + assert sent_payload['method'] == 'tasks/pushNotificationConfig/set' + + @pytest.mark.asyncio + async def test_get_task_callback_success( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = TaskIdParams(id='task-abc') + expected_response = TaskPushNotificationConfig( + task_id='task-abc', + push_notification_config=PushNotificationConfig( + url='http://callback.com' + ), + ) + rpc_response = { + 'id': '123', + 'jsonrpc': '2.0', + 'result': expected_response.model_dump(mode='json'), + } + with patch.object( + client, '_send_request', new_callable=AsyncMock + ) as mock_send_request: + mock_send_request.return_value = rpc_response + response = await client.get_task_callback(request=params) + + 
assert isinstance(response, TaskPushNotificationConfig) + assert response.model_dump() == expected_response.model_dump() + mock_send_request.assert_called_once() + sent_payload = mock_send_request.call_args.args[0] + assert sent_payload['method'] == 'tasks/pushNotificationConfig/get' + + @pytest.mark.asyncio + @patch('a2a.client.transports.jsonrpc.aconnect_sse') + async def test_send_message_streaming_sse_error( + self, + mock_aconnect_sse: AsyncMock, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello stream') + ) + mock_event_source = AsyncMock(spec=EventSource) + mock_event_source.aiter_sse.side_effect = SSEError( + 'Simulated SSE error' + ) + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + with pytest.raises(A2AClientHTTPError): + _ = [ + item + async for item in client.send_message_streaming(request=params) + ] + + @pytest.mark.asyncio + @patch('a2a.client.transports.jsonrpc.aconnect_sse') + async def test_send_message_streaming_json_error( + self, + mock_aconnect_sse: AsyncMock, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello stream') + ) + sse_event = ServerSentEvent(data='{invalid json') + mock_event_source = AsyncMock(spec=EventSource) + mock_event_source.aiter_sse.return_value = async_iterable_from_list( + [sse_event] + ) + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + with pytest.raises(A2AClientJSONError): + _ = [ + item + async for item in client.send_message_streaming(request=params) + ] + + @pytest.mark.asyncio + @patch('a2a.client.transports.jsonrpc.aconnect_sse') + async def 
test_send_message_streaming_request_error( + self, + mock_aconnect_sse: AsyncMock, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello stream') + ) + mock_event_source = AsyncMock(spec=EventSource) + mock_event_source.aiter_sse.side_effect = httpx.RequestError( + 'Simulated request error', request=MagicMock() + ) + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + with pytest.raises(A2AClientHTTPError): + _ = [ + item + async for item in client.send_message_streaming(request=params) + ] + + @pytest.mark.asyncio + async def test_get_card_no_card_provided( + self, mock_httpx_client: AsyncMock + ): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, url=self.AGENT_URL + ) + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = AGENT_CARD.model_dump(mode='json') + mock_httpx_client.get.return_value = mock_response + + card = await client.get_card() + + assert card == AGENT_CARD + mock_httpx_client.get.assert_called_once() + + @pytest.mark.asyncio + async def test_get_card_with_extended_card_support( + self, mock_httpx_client: AsyncMock + ): + agent_card = AGENT_CARD.model_copy( + update={'supports_authenticated_extended_card': True} + ) + client = JsonRpcTransport( + httpx_client=mock_httpx_client, agent_card=agent_card + ) + + rpc_response = { + 'id': '123', + 'jsonrpc': '2.0', + 'result': AGENT_CARD_EXTENDED.model_dump(mode='json'), + } + with patch.object( + client, '_send_request', new_callable=AsyncMock + ) as mock_send_request: + mock_send_request.return_value = rpc_response + card = await client.get_card() + + assert card == AGENT_CARD_EXTENDED + mock_send_request.assert_called_once() + sent_payload = mock_send_request.call_args.args[0] + assert 
sent_payload['method'] == 'agent/getAuthenticatedExtendedCard' + + @pytest.mark.asyncio + async def test_close(self, mock_httpx_client: AsyncMock): + client = JsonRpcTransport( + httpx_client=mock_httpx_client, url=self.AGENT_URL + ) + await client.close() + mock_httpx_client.aclose.assert_called_once() + + +class TestJsonRpcTransportExtensions: + @pytest.mark.asyncio + async def test_send_message_with_default_extensions( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + """Test that send_message adds extension headers when extensions are provided.""" + extensions = [ + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + ] + client = JsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + extensions=extensions, + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello') + ) + success_response = create_text_message_object( + role=Role.agent, content='Hi there!' + ) + rpc_response = SendMessageSuccessResponse( + id='123', jsonrpc='2.0', result=success_response + ) + # Mock the response from httpx_client.post + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = rpc_response.model_dump(mode='json') + mock_httpx_client.post.return_value = mock_response + + await client.send_message(request=params) + + mock_httpx_client.post.assert_called_once() + _, mock_kwargs = mock_httpx_client.post.call_args + + _assert_extensions_header( + mock_kwargs, + { + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + }, + ) + + @pytest.mark.asyncio + @patch('a2a.client.transports.jsonrpc.aconnect_sse') + async def test_send_message_streaming_with_new_extensions( + self, + mock_aconnect_sse: AsyncMock, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + ): + """Test X-A2A-Extensions header in send_message_streaming.""" + new_extensions = ['https://example.com/test-ext/v2'] + extensions = 
['https://example.com/test-ext/v1'] + client = JsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + extensions=extensions, + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello stream') + ) + + mock_event_source = AsyncMock(spec=EventSource) + mock_event_source.aiter_sse.return_value = async_iterable_from_list([]) + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + async for _ in client.send_message_streaming( + request=params, extensions=new_extensions + ): + pass + + mock_aconnect_sse.assert_called_once() + _, kwargs = mock_aconnect_sse.call_args + + _assert_extensions_header( + kwargs, + { + 'https://example.com/test-ext/v2', + }, + ) + + @pytest.mark.asyncio + @patch('a2a.client.transports.jsonrpc.aconnect_sse') + async def test_send_message_streaming_server_error_propagates( + self, + mock_aconnect_sse: AsyncMock, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + ): + """Test that send_message_streaming propagates server errors (e.g., 403, 500) directly.""" + client = JsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + ) + params = MessageSendParams( + message=create_text_message_object(content='Error stream') + ) + + mock_event_source = AsyncMock(spec=EventSource) + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 403 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + 'Forbidden', + request=httpx.Request('POST', 'http://test.url'), + response=mock_response, + ) + mock_event_source.response = mock_response + mock_event_source.aiter_sse.return_value = async_iterable_from_list([]) + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + with pytest.raises(A2AClientHTTPError) as exc_info: + async for _ in client.send_message_streaming(request=params): + pass + + assert exc_info.value.status_code == 403 + 
mock_aconnect_sse.assert_called_once() + + @pytest.mark.asyncio + async def test_get_card_no_card_provided_with_extensions( + self, mock_httpx_client: AsyncMock + ): + """Test get_card with extensions set in Client when no card is initially provided. + Tests that the extensions are added to the HTTP GET request.""" + extensions = [ + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + ] + client = JsonRpcTransport( + httpx_client=mock_httpx_client, + url=TestJsonRpcTransport.AGENT_URL, + extensions=extensions, + ) + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = AGENT_CARD.model_dump(mode='json') + mock_httpx_client.get.return_value = mock_response + + await client.get_card() + + mock_httpx_client.get.assert_called_once() + _, mock_kwargs = mock_httpx_client.get.call_args + + _assert_extensions_header( + mock_kwargs, + { + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + }, + ) + + @pytest.mark.asyncio + async def test_get_card_with_extended_card_support_with_extensions( + self, mock_httpx_client: AsyncMock + ): + """Test get_card with extensions passed to get_card call when extended card support is enabled. 
+ Tests that the extensions are added to the RPC request.""" + extensions = [ + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + ] + agent_card = AGENT_CARD.model_copy( + update={'supports_authenticated_extended_card': True} + ) + client = JsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=agent_card, + extensions=extensions, + ) + + rpc_response = { + 'id': '123', + 'jsonrpc': '2.0', + 'result': AGENT_CARD_EXTENDED.model_dump(mode='json'), + } + with patch.object( + client, '_send_request', new_callable=AsyncMock + ) as mock_send_request: + mock_send_request.return_value = rpc_response + await client.get_card(extensions=extensions) + + mock_send_request.assert_called_once() + _, mock_kwargs = mock_send_request.call_args[0] + + _assert_extensions_header( + mock_kwargs, + { + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + }, + ) diff --git a/tests/client/transports/test_rest_client.py b/tests/client/transports/test_rest_client.py new file mode 100644 index 000000000..c889ebaff --- /dev/null +++ b/tests/client/transports/test_rest_client.py @@ -0,0 +1,325 @@ +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx +import pytest +import respx + +from google.protobuf.json_format import MessageToJson +from httpx_sse import EventSource, ServerSentEvent + +from a2a.client import create_text_message_object +from a2a.client.errors import A2AClientHTTPError +from a2a.client.transports.rest import RestTransport +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.grpc import a2a_pb2 +from a2a.types import ( + AgentCapabilities, + AgentCard, + MessageSendParams, + Role, +) +from a2a.utils import proto_utils + + +@pytest.fixture +def mock_httpx_client() -> AsyncMock: + return AsyncMock(spec=httpx.AsyncClient) + + +@pytest.fixture +def mock_agent_card() -> MagicMock: + mock = MagicMock(spec=AgentCard, url='http://agent.example.com/api') + 
mock.supports_authenticated_extended_card = False + return mock + + +async def async_iterable_from_list( + items: list[ServerSentEvent], +) -> AsyncGenerator[ServerSentEvent, None]: + """Helper to create an async iterable from a list.""" + for item in items: + yield item + + +def _assert_extensions_header(mock_kwargs: dict, expected_extensions: set[str]): + headers = mock_kwargs.get('headers', {}) + assert HTTP_EXTENSION_HEADER in headers + header_value = headers[HTTP_EXTENSION_HEADER] + actual_extensions = {e.strip() for e in header_value.split(',')} + assert actual_extensions == expected_extensions + + +class TestRestTransportExtensions: + @pytest.mark.asyncio + async def test_send_message_with_default_extensions( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + """Test that send_message adds extensions to headers.""" + extensions = [ + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + ] + client = RestTransport( + httpx_client=mock_httpx_client, + extensions=extensions, + agent_card=mock_agent_card, + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello') + ) + + # Mock the build_request method to capture its inputs + mock_build_request = MagicMock( + return_value=AsyncMock(spec=httpx.Request) + ) + mock_httpx_client.build_request = mock_build_request + + # Mock the send method + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_httpx_client.send.return_value = mock_response + + await client.send_message(request=params) + + mock_build_request.assert_called_once() + _, kwargs = mock_build_request.call_args + + _assert_extensions_header( + kwargs, + { + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + }, + ) + + # Repro of https://github.com/a2aproject/a2a-python/issues/540 + @pytest.mark.asyncio + @respx.mock + async def test_send_message_streaming_comment_success( + self, + mock_agent_card: MagicMock, + ): + """Test that 
SSE comments are ignored.""" + async with httpx.AsyncClient() as client: + transport = RestTransport( + httpx_client=client, agent_card=mock_agent_card + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello stream') + ) + + mock_stream_response_1 = a2a_pb2.StreamResponse( + msg=proto_utils.ToProto.message( + create_text_message_object( + content='First part', role=Role.agent + ) + ) + ) + mock_stream_response_2 = a2a_pb2.StreamResponse( + msg=proto_utils.ToProto.message( + create_text_message_object( + content='Second part', role=Role.agent + ) + ) + ) + + sse_content = ( + 'id: stream_id_1\n' + f'data: {MessageToJson(mock_stream_response_1, indent=None)}\n\n' + ': keep-alive\n\n' + 'id: stream_id_2\n' + f'data: {MessageToJson(mock_stream_response_2, indent=None)}\n\n' + ': keep-alive\n\n' + ) + + respx.post( + f'{mock_agent_card.url.rstrip("/")}/v1/message:stream' + ).mock( + return_value=httpx.Response( + 200, + headers={'Content-Type': 'text/event-stream'}, + content=sse_content, + ) + ) + + results = [] + async for item in transport.send_message_streaming(request=params): + results.append(item) + + assert len(results) == 2 + assert results[0].parts[0].root.text == 'First part' + assert results[1].parts[0].root.text == 'Second part' + + @pytest.mark.asyncio + @patch('a2a.client.transports.rest.aconnect_sse') + async def test_send_message_streaming_with_new_extensions( + self, + mock_aconnect_sse: AsyncMock, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + ): + """Test X-A2A-Extensions header in send_message_streaming.""" + new_extensions = ['https://example.com/test-ext/v2'] + extensions = ['https://example.com/test-ext/v1'] + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + extensions=extensions, + ) + params = MessageSendParams( + message=create_text_message_object(content='Hello stream') + ) + + mock_event_source = AsyncMock(spec=EventSource) + 
mock_event_source.aiter_sse.return_value = async_iterable_from_list([]) + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + async for _ in client.send_message_streaming( + request=params, extensions=new_extensions + ): + pass + + mock_aconnect_sse.assert_called_once() + _, kwargs = mock_aconnect_sse.call_args + + _assert_extensions_header( + kwargs, + { + 'https://example.com/test-ext/v2', + }, + ) + + @pytest.mark.asyncio + @patch('a2a.client.transports.rest.aconnect_sse') + async def test_send_message_streaming_server_error_propagates( + self, + mock_aconnect_sse: AsyncMock, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + ): + """Test that send_message_streaming propagates server errors (e.g., 403, 500) directly.""" + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + ) + params = MessageSendParams( + message=create_text_message_object(content='Error stream') + ) + + mock_event_source = AsyncMock(spec=EventSource) + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 403 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + 'Forbidden', + request=httpx.Request('POST', 'http://test.url'), + response=mock_response, + ) + mock_event_source.response = mock_response + mock_event_source.aiter_sse.return_value = async_iterable_from_list([]) + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + with pytest.raises(A2AClientHTTPError) as exc_info: + async for _ in client.send_message_streaming(request=params): + pass + + assert exc_info.value.status_code == 403 + + mock_aconnect_sse.assert_called_once() + + @pytest.mark.asyncio + async def test_get_card_no_card_provided_with_extensions( + self, mock_httpx_client: AsyncMock + ): + """Test get_card with extensions set in Client when no card is initially provided. 
+ Tests that the extensions are added to the HTTP GET request.""" + extensions = [ + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + ] + client = RestTransport( + httpx_client=mock_httpx_client, + url='http://agent.example.com/api', + extensions=extensions, + ) + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = { + 'name': 'Test Agent', + 'description': 'Test Agent Description', + 'url': 'http://agent.example.com/api', + 'version': '1.0.0', + 'default_input_modes': ['text'], + 'default_output_modes': ['text'], + 'capabilities': AgentCapabilities().model_dump(), + 'skills': [], + } + mock_httpx_client.get.return_value = mock_response + + await client.get_card() + + mock_httpx_client.get.assert_called_once() + _, mock_kwargs = mock_httpx_client.get.call_args + + _assert_extensions_header( + mock_kwargs, + { + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + }, + ) + + @pytest.mark.asyncio + async def test_get_card_with_extended_card_support_with_extensions( + self, mock_httpx_client: AsyncMock + ): + """Test get_card with extensions passed to get_card call when extended card support is enabled. 
+ Tests that the extensions are added to the GET request.""" + extensions = [ + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + ] + agent_card = AgentCard( + name='Test Agent', + description='Test Agent Description', + url='http://agent.example.com/api', + version='1.0.0', + default_input_modes=['text'], + default_output_modes=['text'], + capabilities=AgentCapabilities(), + skills=[], + supports_authenticated_extended_card=True, + ) + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=agent_card, + ) + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = agent_card.model_dump(mode='json') + mock_httpx_client.send.return_value = mock_response + + with patch.object( + client, '_send_get_request', new_callable=AsyncMock + ) as mock_send_get_request: + mock_send_get_request.return_value = agent_card.model_dump( + mode='json' + ) + await client.get_card(extensions=extensions) + + mock_send_get_request.assert_called_once() + _, _, mock_kwargs = mock_send_get_request.call_args[0] + + _assert_extensions_header( + mock_kwargs, + { + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + }, + ) diff --git a/tests/e2e/push_notifications/agent_app.py b/tests/e2e/push_notifications/agent_app.py new file mode 100644 index 000000000..1fa9bc546 --- /dev/null +++ b/tests/e2e/push_notifications/agent_app.py @@ -0,0 +1,145 @@ +import httpx + +from fastapi import FastAPI + +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.apps import A2ARESTFastAPIApplication +from a2a.server.events import EventQueue +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.tasks import ( + BasePushNotificationSender, + InMemoryPushNotificationConfigStore, + InMemoryTaskStore, + TaskUpdater, +) +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentSkill, + InvalidParamsError, + Message, + Task, +) +from 
a2a.utils import ( + new_agent_text_message, + new_task, +) +from a2a.utils.errors import ServerError + + +def test_agent_card(url: str) -> AgentCard: + """Returns an agent card for the test agent.""" + return AgentCard( + name='Test Agent', + description='Just a test agent', + url=url, + version='1.0.0', + default_input_modes=['text'], + default_output_modes=['text'], + capabilities=AgentCapabilities(streaming=True, push_notifications=True), + skills=[ + AgentSkill( + id='greeting', + name='Greeting Agent', + description='just greets the user', + tags=['greeting'], + examples=['Hello Agent!', 'How are you?'], + ) + ], + supports_authenticated_extended_card=True, + ) + + +class TestAgent: + """Agent for push notification testing.""" + + async def invoke( + self, updater: TaskUpdater, msg: Message, task: Task + ) -> None: + # Fail for unsupported messages. + if ( + not msg.parts + or len(msg.parts) != 1 + or msg.parts[0].root.kind != 'text' + ): + await updater.failed( + new_agent_text_message( + 'Unsupported message.', task.context_id, task.id + ) + ) + return + text_message = msg.parts[0].root.text + + # Simple request-response flow. + if text_message == 'Hello Agent!': + await updater.complete( + new_agent_text_message('Hello User!', task.context_id, task.id) + ) + + # Flow with user input required: "How are you?" -> "Good! How are you?" -> "Good" -> "Amazing". + elif text_message == 'How are you?': + await updater.requires_input( + new_agent_text_message( + 'Good! How are you?', task.context_id, task.id + ) + ) + elif text_message == 'Good': + await updater.complete( + new_agent_text_message('Amazing', task.context_id, task.id) + ) + + # Fail for unsupported messages. 
+ else: + await updater.failed( + new_agent_text_message( + 'Unsupported message.', task.context_id, task.id + ) + ) + + +class TestAgentExecutor(AgentExecutor): + """Test AgentExecutor implementation.""" + + def __init__(self) -> None: + self.agent = TestAgent() + + async def execute( + self, + context: RequestContext, + event_queue: EventQueue, + ) -> None: + if not context.message: + raise ServerError(error=InvalidParamsError(message='No message')) + + task = context.current_task + if not task: + task = new_task(context.message) + await event_queue.enqueue_event(task) + updater = TaskUpdater(event_queue, task.id, task.context_id) + + await self.agent.invoke(updater, context.message, task) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + raise NotImplementedError('cancel not supported') + + +def create_agent_app( + url: str, notification_client: httpx.AsyncClient +) -> FastAPI: + """Creates a new HTTP+REST FastAPI application for the test agent.""" + push_config_store = InMemoryPushNotificationConfigStore() + app = A2ARESTFastAPIApplication( + agent_card=test_agent_card(url), + http_handler=DefaultRequestHandler( + agent_executor=TestAgentExecutor(), + task_store=InMemoryTaskStore(), + push_config_store=push_config_store, + push_sender=BasePushNotificationSender( + httpx_client=notification_client, + config_store=push_config_store, + ), + ), + ) + return app.build() diff --git a/tests/e2e/push_notifications/notifications_app.py b/tests/e2e/push_notifications/notifications_app.py new file mode 100644 index 000000000..c12e98096 --- /dev/null +++ b/tests/e2e/push_notifications/notifications_app.py @@ -0,0 +1,69 @@ +import asyncio + +from typing import Annotated + +from fastapi import FastAPI, HTTPException, Path, Request +from pydantic import BaseModel, ValidationError + +from a2a.types import Task + + +class Notification(BaseModel): + """Encapsulates default push notification data.""" + + task: Task + token: str + + +def 
create_notifications_app() -> FastAPI: + """Creates a simple push notification ingesting HTTP+REST application.""" + app = FastAPI() + store_lock = asyncio.Lock() + store: dict[str, list[Notification]] = {} + + @app.post('/notifications') + async def add_notification(request: Request): + """Endpoint for ingesting notifications from agents. It receives a JSON + payload and stores it in-memory. + """ + token = request.headers.get('x-a2a-notification-token') + if not token: + raise HTTPException( + status_code=400, + detail='Missing "x-a2a-notification-token" header.', + ) + try: + task = Task.model_validate(await request.json()) + except ValidationError as e: + raise HTTPException(status_code=400, detail=str(e)) + + async with store_lock: + if task.id not in store: + store[task.id] = [] + store[task.id].append( + Notification( + task=task, + token=token, + ) + ) + return { + 'status': 'received', + } + + @app.get('/tasks/{task_id}/notifications') + async def list_notifications_by_task( + task_id: Annotated[ + str, Path(title='The ID of the task to list the notifications for.') + ], + ): + """Helper endpoint for retrieving ingested notifications for a given task.""" + async with store_lock: + notifications = store.get(task_id, []) + return {'notifications': notifications} + + @app.get('/health') + def health_check(): + """Helper endpoint for checking if the server is up.""" + return {'status': 'ok'} + + return app diff --git a/tests/e2e/push_notifications/test_default_push_notification_support.py b/tests/e2e/push_notifications/test_default_push_notification_support.py new file mode 100644 index 000000000..d7364b840 --- /dev/null +++ b/tests/e2e/push_notifications/test_default_push_notification_support.py @@ -0,0 +1,244 @@ +import asyncio +import time +import uuid + +import httpx +import pytest +import pytest_asyncio + +from agent_app import create_agent_app +from notifications_app import Notification, create_notifications_app +from utils import ( + create_app_process, 
+ find_free_port, + wait_for_server_ready, +) + +from a2a.client import ( + ClientConfig, + ClientFactory, + minimal_agent_card, +) +from a2a.types import ( + Message, + Part, + PushNotificationConfig, + Role, + Task, + TaskPushNotificationConfig, + TaskState, + TextPart, + TransportProtocol, +) + + +@pytest.fixture(scope='module') +def notifications_server(): + """ + Starts a simple push notifications ingesting server and yields its URL. + """ + host = '127.0.0.1' + port = find_free_port() + url = f'http://{host}:{port}' + + process = create_app_process(create_notifications_app(), host, port) + process.start() + try: + wait_for_server_ready(f'{url}/health') + except TimeoutError as e: + process.terminate() + raise e + + yield url + + process.terminate() + process.join() + + +@pytest_asyncio.fixture(scope='module') +async def notifications_client(): + """An async client fixture for calling the notifications server.""" + async with httpx.AsyncClient() as client: + yield client + + +@pytest.fixture(scope='module') +def agent_server(notifications_client: httpx.AsyncClient): + """Starts a test agent server and yields its URL.""" + host = '127.0.0.1' + port = find_free_port() + url = f'http://{host}:{port}' + + process = create_app_process( + create_agent_app(url, notifications_client), host, port + ) + process.start() + try: + wait_for_server_ready(f'{url}/v1/card') + except TimeoutError as e: + process.terminate() + raise e + + yield url + + process.terminate() + process.join() + + +@pytest_asyncio.fixture(scope='function') +async def http_client(): + """An async client fixture for test functions.""" + async with httpx.AsyncClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_notification_triggering_with_in_message_config_e2e( + notifications_server: str, + agent_server: str, + http_client: httpx.AsyncClient, +): + """ + Tests push notification triggering for in-message push notification config. 
+ """ + # Create an A2A client with a push notification config. + token = uuid.uuid4().hex + a2a_client = ClientFactory( + ClientConfig( + supported_transports=[TransportProtocol.http_json], + push_notification_configs=[ + PushNotificationConfig( + id='in-message-config', + url=f'{notifications_server}/notifications', + token=token, + ) + ], + ) + ).create(minimal_agent_card(agent_server, [TransportProtocol.http_json])) + + # Send a message and extract the returned task. + responses = [ + response + async for response in a2a_client.send_message( + Message( + message_id='hello-agent', + parts=[Part(root=TextPart(text='Hello Agent!'))], + role=Role.user, + ) + ) + ] + assert len(responses) == 1 + assert isinstance(responses[0], tuple) + assert isinstance(responses[0][0], Task) + task = responses[0][0] + + # Verify a single notification was sent. + notifications = await wait_for_n_notifications( + http_client, + f'{notifications_server}/tasks/{task.id}/notifications', + n=1, + ) + assert notifications[0].token == token + assert notifications[0].task.id == task.id + assert notifications[0].task.status.state == 'completed' + + +@pytest.mark.asyncio +async def test_notification_triggering_after_config_change_e2e( + notifications_server: str, agent_server: str, http_client: httpx.AsyncClient +): + """ + Tests notification triggering after setting the push notification config in a separate call. + """ + # Configure an A2A client without a push notification config. + a2a_client = ClientFactory( + ClientConfig( + supported_transports=[TransportProtocol.http_json], + ) + ).create(minimal_agent_card(agent_server, [TransportProtocol.http_json])) + + # Send a message and extract the returned task. 
+ responses = [ + response + async for response in a2a_client.send_message( + Message( + message_id='how-are-you', + parts=[Part(root=TextPart(text='How are you?'))], + role=Role.user, + ) + ) + ] + assert len(responses) == 1 + assert isinstance(responses[0], tuple) + assert isinstance(responses[0][0], Task) + task = responses[0][0] + assert task.status.state == TaskState.input_required + + # Verify that no notification has been sent yet. + response = await http_client.get( + f'{notifications_server}/tasks/{task.id}/notifications' + ) + assert response.status_code == 200 + assert len(response.json().get('notifications', [])) == 0 + + # Set the push notification config. + token = uuid.uuid4().hex + await a2a_client.set_task_callback( + TaskPushNotificationConfig( + task_id=task.id, + push_notification_config=PushNotificationConfig( + id='after-config-change', + url=f'{notifications_server}/notifications', + token=token, + ), + ) + ) + + # Send another message that should trigger a push notification. + responses = [ + response + async for response in a2a_client.send_message( + Message( + task_id=task.id, + message_id='good', + parts=[Part(root=TextPart(text='Good'))], + role=Role.user, + ) + ) + ] + assert len(responses) == 1 + + # Verify that the push notification was sent. + notifications = await wait_for_n_notifications( + http_client, + f'{notifications_server}/tasks/{task.id}/notifications', + n=1, + ) + assert notifications[0].task.id == task.id + assert notifications[0].task.status.state == 'completed' + assert notifications[0].token == token + + +async def wait_for_n_notifications( + http_client: httpx.AsyncClient, + url: str, + n: int, + timeout: int = 3, +) -> list[Notification]: + """ + Queries the notification URL until the desired number of notifications + is received or the timeout is reached. 
+ """ + start_time = time.time() + notifications = [] + while True: + response = await http_client.get(url) + assert response.status_code == 200 + notifications = response.json()['notifications'] + if len(notifications) == n: + return [Notification.model_validate(n) for n in notifications] + if time.time() - start_time > timeout: + raise TimeoutError( + f'Notification retrieval timed out. Got {len(notifications)} notification(s), want {n}. Retrieved notifications: {notifications}.' + ) + await asyncio.sleep(0.1) diff --git a/tests/e2e/push_notifications/utils.py b/tests/e2e/push_notifications/utils.py new file mode 100644 index 000000000..01d84a30f --- /dev/null +++ b/tests/e2e/push_notifications/utils.py @@ -0,0 +1,45 @@ +import contextlib +import socket +import time + +from multiprocessing import Process + +import httpx +import uvicorn + + +def find_free_port(): + """Finds and returns an available ephemeral localhost port.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(('127.0.0.1', 0)) + return s.getsockname()[1] + + +def run_server(app, host, port) -> None: + """Runs a uvicorn server.""" + uvicorn.run(app, host=host, port=port, log_level='warning') + + +def wait_for_server_ready(url: str, timeout: int = 10) -> None: + """Polls the provided URL endpoint until the server is up.""" + start_time = time.time() + while True: + with contextlib.suppress(httpx.ConnectError): + with httpx.Client() as client: + response = client.get(url) + if response.status_code == 200: + return + if time.time() - start_time > timeout: + raise TimeoutError( + f'Server at {url} failed to start after {timeout}s' + ) + time.sleep(0.1) + + +def create_app_process(app, host, port) -> Process: + """Creates a separate process for a given application.""" + return Process( + target=run_server, + args=(app, host, port), + daemon=True, + ) diff --git a/tests/extensions/test_common.py b/tests/extensions/test_common.py new file mode 100644 index 000000000..b3123028a --- 
/dev/null +++ b/tests/extensions/test_common.py @@ -0,0 +1,146 @@ +import pytest +from a2a.extensions.common import ( + HTTP_EXTENSION_HEADER, + find_extension_by_uri, + get_requested_extensions, + update_extension_header, +) +from a2a.types import AgentCapabilities, AgentCard, AgentExtension + + +def test_get_requested_extensions(): + assert get_requested_extensions([]) == set() + assert get_requested_extensions(['foo']) == {'foo'} + assert get_requested_extensions(['foo', 'bar']) == {'foo', 'bar'} + assert get_requested_extensions(['foo, bar']) == {'foo', 'bar'} + assert get_requested_extensions(['foo,bar']) == {'foo', 'bar'} + assert get_requested_extensions(['foo', 'bar,baz']) == {'foo', 'bar', 'baz'} + assert get_requested_extensions(['foo,, bar', 'baz']) == { + 'foo', + 'bar', + 'baz', + } + assert get_requested_extensions([' foo , bar ', 'baz']) == { + 'foo', + 'bar', + 'baz', + } + + +def test_find_extension_by_uri(): + ext1 = AgentExtension(uri='foo', description='The Foo extension') + ext2 = AgentExtension(uri='bar', description='The Bar extension') + card = AgentCard( + name='Test Agent', + description='Test Agent Description', + version='1.0', + url='http://test.com', + skills=[], + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + capabilities=AgentCapabilities(extensions=[ext1, ext2]), + ) + + assert find_extension_by_uri(card, 'foo') == ext1 + assert find_extension_by_uri(card, 'bar') == ext2 + assert find_extension_by_uri(card, 'baz') is None + + +def test_find_extension_by_uri_no_extensions(): + card = AgentCard( + name='Test Agent', + description='Test Agent Description', + version='1.0', + url='http://test.com', + skills=[], + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + capabilities=AgentCapabilities(extensions=None), + ) + + assert find_extension_by_uri(card, 'foo') is None + + +@pytest.mark.parametrize( + 'extensions, header, expected_extensions', + [ + ( + ['ext1', 'ext2'], # 
extensions
+            '',  # header
+            {
+                'ext1',
+                'ext2',
+            },  # expected_extensions
+        ),  # Case 1: New extensions provided, empty header.
+        (
+            None,  # extensions
+            'ext1, ext2',  # header
+            {
+                'ext1',
+                'ext2',
+            },  # expected_extensions
+        ),  # Case 2: Extensions is None, existing header extensions.
+        (
+            [],  # extensions
+            'ext1',  # header
+            set(),  # expected_extensions
+        ),  # Case 3: New extensions is empty list, existing header extensions.
+        (
+            ['ext1', 'ext2'],  # extensions
+            'ext3',  # header
+            {
+                'ext1',
+                'ext2',
+            },  # expected_extensions
+        ),  # Case 4: New extensions provided, and an existing header. New extensions should override active extensions.
+    ],
+)
+def test_update_extension_header_merge_with_existing_extensions(
+    extensions: list[str],
+    header: str,
+    expected_extensions: set[str],
+):
+    http_kwargs = {'headers': {HTTP_EXTENSION_HEADER: header}}
+    result_kwargs = update_extension_header(http_kwargs, extensions)
+    header_value = result_kwargs['headers'][HTTP_EXTENSION_HEADER]
+    if not header_value:
+        actual_extensions = set()
+    else:
+        actual_extensions_list = [e.strip() for e in header_value.split(',')]
+        actual_extensions = set(actual_extensions_list)
+    assert actual_extensions == expected_extensions
+
+
+def test_update_extension_header_with_other_headers():
+    extensions = ['ext']
+    http_kwargs = {'headers': {'X_Other': 'Test'}}
+    result_kwargs = update_extension_header(http_kwargs, extensions)
+    headers = result_kwargs.get('headers', {})
+    assert HTTP_EXTENSION_HEADER in headers
+    assert headers[HTTP_EXTENSION_HEADER] == 'ext'
+    assert headers['X_Other'] == 'Test'
+
+
+@pytest.mark.parametrize(
+    'http_kwargs',
+    [
+        None,
+        {},
+    ],
+)
+def test_update_extension_header_headers_not_in_kwargs(
+    http_kwargs: dict[str, str] | None,
+):
+    extensions = ['ext']
+    # Use the parametrized http_kwargs directly (None or {}), so both cases are exercised.
+    result_kwargs = update_extension_header(http_kwargs, extensions)
+    headers = result_kwargs.get('headers', {})
+    assert HTTP_EXTENSION_HEADER in headers
+    assert 
headers[HTTP_EXTENSION_HEADER] == 'ext' + + +def test_update_extension_header_with_other_headers_extensions_none(): + http_kwargs = {'headers': {'X_Other': 'Test'}} + result_kwargs = update_extension_header(http_kwargs, None) + assert HTTP_EXTENSION_HEADER not in result_kwargs['headers'] + assert result_kwargs['headers']['X_Other'] == 'Test' diff --git a/tests/integration/test_client_server_integration.py b/tests/integration/test_client_server_integration.py new file mode 100644 index 000000000..e6552fcb9 --- /dev/null +++ b/tests/integration/test_client_server_integration.py @@ -0,0 +1,1143 @@ +import asyncio +from collections.abc import AsyncGenerator +from typing import NamedTuple, Any +from unittest.mock import ANY, AsyncMock, patch + +import grpc +import httpx +import pytest +import pytest_asyncio +from grpc.aio import Channel + +from jwt.api_jwk import PyJWK +from a2a.client import ClientConfig +from a2a.client.base_client import BaseClient +from a2a.client.transports import JsonRpcTransport, RestTransport +from a2a.client.transports.base import ClientTransport +from a2a.client.transports.grpc import GrpcTransport +from a2a.grpc import a2a_pb2_grpc +from a2a.server.apps import A2AFastAPIApplication, A2ARESTFastAPIApplication +from a2a.server.request_handlers import GrpcHandler, RequestHandler +from a2a.utils.signing import ( + create_agent_card_signer, + create_signature_verifier, +) +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentInterface, + GetTaskPushNotificationConfigParams, + Message, + MessageSendParams, + Part, + PushNotificationConfig, + Role, + Task, + TaskIdParams, + TaskPushNotificationConfig, + TaskQueryParams, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, + TextPart, + TransportProtocol, +) +from cryptography.hazmat.primitives import asymmetric + +# --- Test Constants --- + +TASK_FROM_STREAM = Task( + id='task-123-stream', + context_id='ctx-456-stream', + status=TaskStatus(state=TaskState.completed), + kind='task', +) + 
+TASK_FROM_BLOCKING = Task( + id='task-789-blocking', + context_id='ctx-101-blocking', + status=TaskStatus(state=TaskState.completed), + kind='task', +) + +GET_TASK_RESPONSE = Task( + id='task-get-456', + context_id='ctx-get-789', + status=TaskStatus(state=TaskState.working), + kind='task', +) + +CANCEL_TASK_RESPONSE = Task( + id='task-cancel-789', + context_id='ctx-cancel-101', + status=TaskStatus(state=TaskState.canceled), + kind='task', +) + +CALLBACK_CONFIG = TaskPushNotificationConfig( + task_id='task-callback-123', + push_notification_config=PushNotificationConfig( + id='pnc-abc', url='http://callback.example.com', token='' + ), +) + +RESUBSCRIBE_EVENT = TaskStatusUpdateEvent( + task_id='task-resub-456', + context_id='ctx-resub-789', + status=TaskStatus(state=TaskState.working), + final=False, +) + + +def create_key_provider(verification_key: PyJWK | str | bytes): + """Creates a key provider function for testing.""" + + def key_provider(kid: str | None, jku: str | None): + return verification_key + + return key_provider + + +# --- Test Fixtures --- + + +@pytest.fixture +def mock_request_handler() -> AsyncMock: + """Provides a mock RequestHandler for the server-side handlers.""" + handler = AsyncMock(spec=RequestHandler) + + # Configure on_message_send for non-streaming calls + handler.on_message_send.return_value = TASK_FROM_BLOCKING + + # Configure on_message_send_stream for streaming calls + async def stream_side_effect(*args, **kwargs): + yield TASK_FROM_STREAM + + handler.on_message_send_stream.side_effect = stream_side_effect + + # Configure other methods + handler.on_get_task.return_value = GET_TASK_RESPONSE + handler.on_cancel_task.return_value = CANCEL_TASK_RESPONSE + handler.on_set_task_push_notification_config.side_effect = ( + lambda params, context: params + ) + handler.on_get_task_push_notification_config.return_value = CALLBACK_CONFIG + + async def resubscribe_side_effect(*args, **kwargs): + yield RESUBSCRIBE_EVENT + + 
handler.on_resubscribe_to_task.side_effect = resubscribe_side_effect + + return handler + + +@pytest.fixture +def agent_card() -> AgentCard: + """Provides a sample AgentCard for tests.""" + return AgentCard( + name='Test Agent', + description='An agent for integration testing.', + url='http://testserver', + version='1.0.0', + capabilities=AgentCapabilities(streaming=True, push_notifications=True), + skills=[], + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + preferred_transport=TransportProtocol.jsonrpc, + supports_authenticated_extended_card=False, + additional_interfaces=[ + AgentInterface( + transport=TransportProtocol.http_json, url='http://testserver' + ), + AgentInterface( + transport=TransportProtocol.grpc, url='localhost:50051' + ), + ], + ) + + +class TransportSetup(NamedTuple): + """Holds the transport and handler for a given test.""" + + transport: ClientTransport + handler: AsyncMock + + +# --- HTTP/JSON-RPC/REST Setup --- + + +@pytest.fixture +def http_base_setup(mock_request_handler: AsyncMock, agent_card: AgentCard): + """A base fixture to patch the sse-starlette event loop issue.""" + from sse_starlette import sse + + sse.AppStatus.should_exit_event = asyncio.Event() + yield mock_request_handler, agent_card + + +@pytest.fixture +def jsonrpc_setup(http_base_setup) -> TransportSetup: + """Sets up the JsonRpcTransport and in-memory server.""" + mock_request_handler, agent_card = http_base_setup + app_builder = A2AFastAPIApplication( + agent_card, mock_request_handler, extended_agent_card=agent_card + ) + app = app_builder.build() + httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + transport = JsonRpcTransport( + httpx_client=httpx_client, agent_card=agent_card + ) + return TransportSetup(transport=transport, handler=mock_request_handler) + + +@pytest.fixture +def rest_setup(http_base_setup) -> TransportSetup: + """Sets up the RestTransport and in-memory server.""" + mock_request_handler, agent_card 
= http_base_setup + app_builder = A2ARESTFastAPIApplication(agent_card, mock_request_handler) + app = app_builder.build() + httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + transport = RestTransport(httpx_client=httpx_client, agent_card=agent_card) + return TransportSetup(transport=transport, handler=mock_request_handler) + + +# --- gRPC Setup --- + + +@pytest_asyncio.fixture +async def grpc_server_and_handler( + mock_request_handler: AsyncMock, agent_card: AgentCard +) -> AsyncGenerator[tuple[str, AsyncMock], None]: + """Creates and manages an in-process gRPC test server.""" + server = grpc.aio.server() + port = server.add_insecure_port('[::]:0') + server_address = f'localhost:{port}' + servicer = GrpcHandler(agent_card, mock_request_handler) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + await server.start() + yield server_address, mock_request_handler + await server.stop(0) + + +# --- The Integration Tests --- + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'transport_setup_fixture', + [ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ], +) +async def test_http_transport_sends_message_streaming( + transport_setup_fixture: str, request +) -> None: + """ + Integration test for HTTP-based transports (JSON-RPC, REST) streaming. 
+ """ + transport_setup: TransportSetup = request.getfixturevalue( + transport_setup_fixture + ) + transport = transport_setup.transport + handler = transport_setup.handler + + message_to_send = Message( + role=Role.user, + message_id='msg-integration-test', + parts=[Part(root=TextPart(text='Hello, integration test!'))], + ) + params = MessageSendParams(message=message_to_send) + + stream = transport.send_message_streaming(request=params) + first_event = await anext(stream) + + assert first_event.id == TASK_FROM_STREAM.id + assert first_event.context_id == TASK_FROM_STREAM.context_id + + handler.on_message_send_stream.assert_called_once() + call_args, _ = handler.on_message_send_stream.call_args + received_params: MessageSendParams = call_args[0] + + assert received_params.message.message_id == message_to_send.message_id + assert ( + received_params.message.parts[0].root.text + == message_to_send.parts[0].root.text + ) + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_sends_message_streaming( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> None: + """ + Integration test specifically for the gRPC transport streaming. 
+ """ + server_address, handler = grpc_server_and_handler + agent_card.url = server_address + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + + message_to_send = Message( + role=Role.user, + message_id='msg-grpc-integration-test', + parts=[Part(root=TextPart(text='Hello, gRPC integration test!'))], + ) + params = MessageSendParams(message=message_to_send) + + stream = transport.send_message_streaming(request=params) + first_event = await anext(stream) + + assert first_event.id == TASK_FROM_STREAM.id + assert first_event.context_id == TASK_FROM_STREAM.context_id + + handler.on_message_send_stream.assert_called_once() + call_args, _ = handler.on_message_send_stream.call_args + received_params: MessageSendParams = call_args[0] + + assert received_params.message.message_id == message_to_send.message_id + assert ( + received_params.message.parts[0].root.text + == message_to_send.parts[0].root.text + ) + + await transport.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'transport_setup_fixture', + [ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ], +) +async def test_http_transport_sends_message_blocking( + transport_setup_fixture: str, request +) -> None: + """ + Integration test for HTTP-based transports (JSON-RPC, REST) blocking. 
+ """ + transport_setup: TransportSetup = request.getfixturevalue( + transport_setup_fixture + ) + transport = transport_setup.transport + handler = transport_setup.handler + + message_to_send = Message( + role=Role.user, + message_id='msg-integration-test-blocking', + parts=[Part(root=TextPart(text='Hello, blocking test!'))], + ) + params = MessageSendParams(message=message_to_send) + + result = await transport.send_message(request=params) + + assert result.id == TASK_FROM_BLOCKING.id + assert result.context_id == TASK_FROM_BLOCKING.context_id + + handler.on_message_send.assert_awaited_once() + call_args, _ = handler.on_message_send.call_args + received_params: MessageSendParams = call_args[0] + + assert received_params.message.message_id == message_to_send.message_id + assert ( + received_params.message.parts[0].root.text + == message_to_send.parts[0].root.text + ) + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_sends_message_blocking( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> None: + """ + Integration test specifically for the gRPC transport blocking. 
+ """ + server_address, handler = grpc_server_and_handler + agent_card.url = server_address + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + + message_to_send = Message( + role=Role.user, + message_id='msg-grpc-integration-test-blocking', + parts=[Part(root=TextPart(text='Hello, gRPC blocking test!'))], + ) + params = MessageSendParams(message=message_to_send) + + result = await transport.send_message(request=params) + + assert result.id == TASK_FROM_BLOCKING.id + assert result.context_id == TASK_FROM_BLOCKING.context_id + + handler.on_message_send.assert_awaited_once() + call_args, _ = handler.on_message_send.call_args + received_params: MessageSendParams = call_args[0] + + assert received_params.message.message_id == message_to_send.message_id + assert ( + received_params.message.parts[0].root.text + == message_to_send.parts[0].root.text + ) + + await transport.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'transport_setup_fixture', + [ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ], +) +async def test_http_transport_get_task( + transport_setup_fixture: str, request +) -> None: + transport_setup: TransportSetup = request.getfixturevalue( + transport_setup_fixture + ) + transport = transport_setup.transport + handler = transport_setup.handler + + params = TaskQueryParams(id=GET_TASK_RESPONSE.id) + result = await transport.get_task(request=params) + + assert result.id == GET_TASK_RESPONSE.id + handler.on_get_task.assert_awaited_once_with(params, ANY) + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_get_task( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> None: + server_address, handler = grpc_server_and_handler + agent_card.url = 
server_address + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + + params = TaskQueryParams(id=GET_TASK_RESPONSE.id) + result = await transport.get_task(request=params) + + assert result.id == GET_TASK_RESPONSE.id + handler.on_get_task.assert_awaited_once() + assert handler.on_get_task.call_args[0][0].id == GET_TASK_RESPONSE.id + + await transport.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'transport_setup_fixture', + [ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ], +) +async def test_http_transport_cancel_task( + transport_setup_fixture: str, request +) -> None: + transport_setup: TransportSetup = request.getfixturevalue( + transport_setup_fixture + ) + transport = transport_setup.transport + handler = transport_setup.handler + + params = TaskIdParams(id=CANCEL_TASK_RESPONSE.id) + result = await transport.cancel_task(request=params) + + assert result.id == CANCEL_TASK_RESPONSE.id + handler.on_cancel_task.assert_awaited_once_with(params, ANY) + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_cancel_task( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> None: + server_address, handler = grpc_server_and_handler + agent_card.url = server_address + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + + params = TaskIdParams(id=CANCEL_TASK_RESPONSE.id) + result = await transport.cancel_task(request=params) + + assert result.id == CANCEL_TASK_RESPONSE.id + handler.on_cancel_task.assert_awaited_once() + assert handler.on_cancel_task.call_args[0][0].id == CANCEL_TASK_RESPONSE.id + + await 
transport.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'transport_setup_fixture', + [ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ], +) +async def test_http_transport_set_task_callback( + transport_setup_fixture: str, request +) -> None: + transport_setup: TransportSetup = request.getfixturevalue( + transport_setup_fixture + ) + transport = transport_setup.transport + handler = transport_setup.handler + + params = CALLBACK_CONFIG + result = await transport.set_task_callback(request=params) + + assert result.task_id == CALLBACK_CONFIG.task_id + assert ( + result.push_notification_config.id + == CALLBACK_CONFIG.push_notification_config.id + ) + assert ( + result.push_notification_config.url + == CALLBACK_CONFIG.push_notification_config.url + ) + handler.on_set_task_push_notification_config.assert_awaited_once_with( + params, ANY + ) + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_set_task_callback( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> None: + server_address, handler = grpc_server_and_handler + agent_card.url = server_address + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + + params = CALLBACK_CONFIG + result = await transport.set_task_callback(request=params) + + assert result.task_id == CALLBACK_CONFIG.task_id + assert ( + result.push_notification_config.id + == CALLBACK_CONFIG.push_notification_config.id + ) + assert ( + result.push_notification_config.url + == CALLBACK_CONFIG.push_notification_config.url + ) + handler.on_set_task_push_notification_config.assert_awaited_once() + assert ( + handler.on_set_task_push_notification_config.call_args[0][0].task_id + == CALLBACK_CONFIG.task_id + ) + + await transport.close() + + 
+@pytest.mark.asyncio +@pytest.mark.parametrize( + 'transport_setup_fixture', + [ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ], +) +async def test_http_transport_get_task_callback( + transport_setup_fixture: str, request +) -> None: + transport_setup: TransportSetup = request.getfixturevalue( + transport_setup_fixture + ) + transport = transport_setup.transport + handler = transport_setup.handler + + params = GetTaskPushNotificationConfigParams( + id=CALLBACK_CONFIG.task_id, + push_notification_config_id=CALLBACK_CONFIG.push_notification_config.id, + ) + result = await transport.get_task_callback(request=params) + + assert result.task_id == CALLBACK_CONFIG.task_id + assert ( + result.push_notification_config.id + == CALLBACK_CONFIG.push_notification_config.id + ) + assert ( + result.push_notification_config.url + == CALLBACK_CONFIG.push_notification_config.url + ) + handler.on_get_task_push_notification_config.assert_awaited_once_with( + params, ANY + ) + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_get_task_callback( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> None: + server_address, handler = grpc_server_and_handler + agent_card.url = server_address + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + + params = GetTaskPushNotificationConfigParams( + id=CALLBACK_CONFIG.task_id, + push_notification_config_id=CALLBACK_CONFIG.push_notification_config.id, + ) + result = await transport.get_task_callback(request=params) + + assert result.task_id == CALLBACK_CONFIG.task_id + assert ( + result.push_notification_config.id + == CALLBACK_CONFIG.push_notification_config.id + ) + assert ( + result.push_notification_config.url + == 
CALLBACK_CONFIG.push_notification_config.url + ) + handler.on_get_task_push_notification_config.assert_awaited_once() + assert ( + handler.on_get_task_push_notification_config.call_args[0][0].id + == CALLBACK_CONFIG.task_id + ) + + await transport.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'transport_setup_fixture', + [ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ], +) +async def test_http_transport_resubscribe( + transport_setup_fixture: str, request +) -> None: + transport_setup: TransportSetup = request.getfixturevalue( + transport_setup_fixture + ) + transport = transport_setup.transport + handler = transport_setup.handler + + params = TaskIdParams(id=RESUBSCRIBE_EVENT.task_id) + stream = transport.resubscribe(request=params) + first_event = await anext(stream) + + assert first_event.task_id == RESUBSCRIBE_EVENT.task_id + handler.on_resubscribe_to_task.assert_called_once_with(params, ANY) + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_resubscribe( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> None: + server_address, handler = grpc_server_and_handler + agent_card.url = server_address + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + + params = TaskIdParams(id=RESUBSCRIBE_EVENT.task_id) + stream = transport.resubscribe(request=params) + first_event = await anext(stream) + + assert first_event.task_id == RESUBSCRIBE_EVENT.task_id + handler.on_resubscribe_to_task.assert_called_once() + assert ( + handler.on_resubscribe_to_task.call_args[0][0].id + == RESUBSCRIBE_EVENT.task_id + ) + + await transport.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'transport_setup_fixture', + [ + pytest.param('jsonrpc_setup', 
id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ], +) +async def test_http_transport_get_card( + transport_setup_fixture: str, request, agent_card: AgentCard +) -> None: + transport_setup: TransportSetup = request.getfixturevalue( + transport_setup_fixture + ) + transport = transport_setup.transport + # Get the base card. + result = await transport.get_card() + + assert result.name == agent_card.name + assert transport.agent_card.name == agent_card.name + assert transport._needs_extended_card is False + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_http_transport_get_authenticated_card( + agent_card: AgentCard, + mock_request_handler: AsyncMock, +) -> None: + agent_card.supports_authenticated_extended_card = True + extended_agent_card = agent_card.model_copy(deep=True) + extended_agent_card.name = 'Extended Agent Card' + + app_builder = A2ARESTFastAPIApplication( + agent_card, + mock_request_handler, + extended_agent_card=extended_agent_card, + ) + app = app_builder.build() + httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + + transport = RestTransport(httpx_client=httpx_client, agent_card=agent_card) + result = await transport.get_card() + assert result.name == extended_agent_card.name + assert transport.agent_card is not None + assert transport.agent_card.name == extended_agent_card.name + assert transport._needs_extended_card is False + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_get_card( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> None: + server_address, _ = grpc_server_and_handler + agent_card.url = server_address + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + + # The transport starts with a minimal 
card, get_card() fetches the full one + assert transport.agent_card is not None + transport.agent_card.supports_authenticated_extended_card = True + result = await transport.get_card() + + assert result.name == agent_card.name + assert transport.agent_card.name == agent_card.name + assert transport._needs_extended_card is False + + await transport.close() + + +@pytest.mark.asyncio +async def test_json_transport_base_client_send_message_with_extensions( + jsonrpc_setup: TransportSetup, agent_card: AgentCard +) -> None: + """ + Integration test for BaseClient with JSON-RPC transport to ensure extensions are included in headers. + """ + transport = jsonrpc_setup.transport + agent_card.capabilities.streaming = False + + # Create a BaseClient instance + client = BaseClient( + card=agent_card, + config=ClientConfig(streaming=False), + transport=transport, + consumers=[], + middleware=[], + ) + + message_to_send = Message( + role=Role.user, + message_id='msg-integration-test-extensions', + parts=[Part(root=TextPart(text='Hello, extensions test!'))], + ) + extensions = [ + 'https://example.com/test-ext/v1', + 'https://example.com/test-ext/v2', + ] + + with patch.object( + transport, '_send_request', new_callable=AsyncMock + ) as mock_send_request: + mock_send_request.return_value = { + 'id': '123', + 'jsonrpc': '2.0', + 'result': TASK_FROM_BLOCKING.model_dump(mode='json'), + } + + # Call send_message on the BaseClient + async for _ in client.send_message( + request=message_to_send, extensions=extensions + ): + pass + + mock_send_request.assert_called_once() + call_args, _ = mock_send_request.call_args + kwargs = call_args[1] + headers = kwargs.get('headers', {}) + assert 'X-A2A-Extensions' in headers + assert ( + headers['X-A2A-Extensions'] + == 'https://example.com/test-ext/v1,https://example.com/test-ext/v2' + ) + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_json_transport_get_signed_base_card( + jsonrpc_setup: 
TransportSetup, agent_card: AgentCard +) -> None: + """Tests fetching and verifying a symmetrically signed AgentCard via JSON-RPC. + + The client transport is initialized without a card, forcing it to fetch + the base card from the server. The server signs the card using HS384. + The client then verifies the signature. + """ + mock_request_handler = jsonrpc_setup.handler + agent_card.supports_authenticated_extended_card = False + + # Setup signing on the server side + key = 'key12345' + signer = create_agent_card_signer( + signing_key=key, + protected_header={ + 'alg': 'HS384', + 'kid': 'testkey', + 'jku': None, + 'typ': 'JOSE', + }, + ) + + app_builder = A2AFastAPIApplication( + agent_card, + mock_request_handler, + card_modifier=signer, # Sign the base card + ) + app = app_builder.build() + httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + + transport = JsonRpcTransport( + httpx_client=httpx_client, + url=agent_card.url, + agent_card=None, + ) + + # Get the card, this will trigger verification in get_card + signature_verifier = create_signature_verifier( + create_key_provider(key), ['HS384'] + ) + result = await transport.get_card(signature_verifier=signature_verifier) + assert result.name == agent_card.name + assert result.signatures is not None + assert len(result.signatures) == 1 + assert transport.agent_card is not None + assert transport.agent_card.name == agent_card.name + assert transport._needs_extended_card is False + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_json_transport_get_signed_extended_card( + jsonrpc_setup: TransportSetup, agent_card: AgentCard +) -> None: + """Tests fetching and verifying an asymmetrically signed extended AgentCard via JSON-RPC. + + The client has a base card and fetches the extended card, which is signed + by the server using ES256. The client verifies the signature on the + received extended card. 
+ """ + mock_request_handler = jsonrpc_setup.handler + agent_card.supports_authenticated_extended_card = True + extended_agent_card = agent_card.model_copy(deep=True) + extended_agent_card.name = 'Extended Agent Card' + + # Setup signing on the server side + private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) + public_key = private_key.public_key() + signer = create_agent_card_signer( + signing_key=private_key, + protected_header={ + 'alg': 'ES256', + 'kid': 'testkey', + 'jku': None, + 'typ': 'JOSE', + }, + ) + + app_builder = A2AFastAPIApplication( + agent_card, + mock_request_handler, + extended_agent_card=extended_agent_card, + extended_card_modifier=lambda card, ctx: signer( + card + ), # Sign the extended card + ) + app = app_builder.build() + httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + + transport = JsonRpcTransport( + httpx_client=httpx_client, agent_card=agent_card + ) + + # Get the card, this will trigger verification in get_card + signature_verifier = create_signature_verifier( + create_key_provider(public_key), ['HS384', 'ES256'] + ) + result = await transport.get_card(signature_verifier=signature_verifier) + assert result.name == extended_agent_card.name + assert result.signatures is not None + assert len(result.signatures) == 1 + assert transport.agent_card is not None + assert transport.agent_card.name == extended_agent_card.name + assert transport._needs_extended_card is False + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_json_transport_get_signed_base_and_extended_cards( + jsonrpc_setup: TransportSetup, agent_card: AgentCard +) -> None: + """Tests fetching and verifying both base and extended cards via JSON-RPC when no card is initially provided. + + The client starts with no card. It first fetches the base card, which is + signed. It then fetches the extended card, which is also signed. 
Both signatures + are verified independently upon retrieval. + """ + mock_request_handler = jsonrpc_setup.handler + assert agent_card.signatures is None + agent_card.supports_authenticated_extended_card = True + extended_agent_card = agent_card.model_copy(deep=True) + extended_agent_card.name = 'Extended Agent Card' + + # Setup signing on the server side + private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) + public_key = private_key.public_key() + signer = create_agent_card_signer( + signing_key=private_key, + protected_header={ + 'alg': 'ES256', + 'kid': 'testkey', + 'jku': None, + 'typ': 'JOSE', + }, + ) + + app_builder = A2AFastAPIApplication( + agent_card, + mock_request_handler, + extended_agent_card=extended_agent_card, + card_modifier=signer, # Sign the base card + extended_card_modifier=lambda card, ctx: signer( + card + ), # Sign the extended card + ) + app = app_builder.build() + httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + + transport = JsonRpcTransport( + httpx_client=httpx_client, + url=agent_card.url, + agent_card=None, + ) + + # Get the card, this will trigger verification in get_card + signature_verifier = create_signature_verifier( + create_key_provider(public_key), ['HS384', 'ES256', 'RS256'] + ) + result = await transport.get_card(signature_verifier=signature_verifier) + assert result.name == extended_agent_card.name + assert result.signatures is not None + assert len(result.signatures) == 1 + assert transport.agent_card is not None + assert transport.agent_card.name == extended_agent_card.name + assert transport._needs_extended_card is False + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_rest_transport_get_signed_card( + rest_setup: TransportSetup, agent_card: AgentCard +) -> None: + """Tests fetching and verifying signed base and extended cards via REST. + + The client starts with no card. 
It first fetches the base card, which is + signed. It then fetches the extended card, which is also signed. Both signatures + are verified independently upon retrieval. + """ + mock_request_handler = rest_setup.handler + agent_card.supports_authenticated_extended_card = True + extended_agent_card = agent_card.model_copy(deep=True) + extended_agent_card.name = 'Extended Agent Card' + + # Setup signing on the server side + private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) + public_key = private_key.public_key() + signer = create_agent_card_signer( + signing_key=private_key, + protected_header={ + 'alg': 'ES256', + 'kid': 'testkey', + 'jku': None, + 'typ': 'JOSE', + }, + ) + + app_builder = A2ARESTFastAPIApplication( + agent_card, + mock_request_handler, + extended_agent_card=extended_agent_card, + card_modifier=signer, # Sign the base card + extended_card_modifier=lambda card, ctx: signer( + card + ), # Sign the extended card + ) + app = app_builder.build() + httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + + transport = RestTransport( + httpx_client=httpx_client, + url=agent_card.url, + agent_card=None, + ) + + # Get the card, this will trigger verification in get_card + signature_verifier = create_signature_verifier( + create_key_provider(public_key), ['HS384', 'ES256', 'RS256'] + ) + result = await transport.get_card(signature_verifier=signature_verifier) + assert result.name == extended_agent_card.name + assert result.signatures is not None + assert len(result.signatures) == 1 + assert transport.agent_card is not None + assert transport.agent_card.name == extended_agent_card.name + assert transport._needs_extended_card is False + + if hasattr(transport, 'close'): + await transport.close() + + +@pytest.mark.asyncio +async def test_grpc_transport_get_signed_card( + mock_request_handler: AsyncMock, agent_card: AgentCard +) -> None: + """Tests fetching and verifying a signed AgentCard via gRPC.""" + # Setup signing 
on the server side + agent_card.supports_authenticated_extended_card = True + + private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) + public_key = private_key.public_key() + signer = create_agent_card_signer( + signing_key=private_key, + protected_header={ + 'alg': 'ES256', + 'kid': 'testkey', + 'jku': None, + 'typ': 'JOSE', + }, + ) + + server = grpc.aio.server() + port = server.add_insecure_port('[::]:0') + server_address = f'localhost:{port}' + agent_card.url = server_address + + servicer = GrpcHandler( + agent_card, + mock_request_handler, + card_modifier=signer, + ) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + await server.start() + + transport = None # Initialize transport + try: + + def channel_factory(address: str) -> Channel: + return grpc.aio.insecure_channel(address) + + channel = channel_factory(server_address) + transport = GrpcTransport(channel=channel, agent_card=agent_card) + transport.agent_card = None + assert transport._needs_extended_card is True + + # Get the card, this will trigger verification in get_card + signature_verifier = create_signature_verifier( + create_key_provider(public_key), ['HS384', 'ES256', 'RS256'] + ) + result = await transport.get_card(signature_verifier=signature_verifier) + assert result.signatures is not None + assert len(result.signatures) == 1 + assert transport._needs_extended_card is False + finally: + if transport: + await transport.close() + await server.stop(0) # Gracefully stop the server diff --git a/tests/server/agent_execution/test_context.py b/tests/server/agent_execution/test_context.py index 92d097073..979978add 100644 --- a/tests/server/agent_execution/test_context.py +++ b/tests/server/agent_execution/test_context.py @@ -5,32 +5,35 @@ import pytest from a2a.server.agent_execution import RequestContext +from a2a.server.context import ServerCallContext +from a2a.server.id_generator import IDGenerator from a2a.types import ( Message, MessageSendParams, Task, ) 
+from a2a.utils.errors import ServerError class TestRequestContext: """Tests for the RequestContext class.""" @pytest.fixture - def mock_message(self): + def mock_message(self) -> Mock: """Fixture for a mock Message.""" - return Mock(spec=Message, taskId=None, contextId=None) + return Mock(spec=Message, task_id=None, context_id=None) @pytest.fixture - def mock_params(self, mock_message): + def mock_params(self, mock_message: Mock) -> Mock: """Fixture for a mock MessageSendParams.""" return Mock(spec=MessageSendParams, message=mock_message) @pytest.fixture - def mock_task(self): + def mock_task(self) -> Mock: """Fixture for a mock Task.""" - return Mock(spec=Task, id='task-123', contextId='context-456') + return Mock(spec=Task, id='task-123', context_id='context-456') - def test_init_without_params(self): + def test_init_without_params(self) -> None: """Test initialization without parameters.""" context = RequestContext() assert context.message is None @@ -39,7 +42,7 @@ def test_init_without_params(self): assert context.current_task is None assert context.related_tasks == [] - def test_init_with_params_no_ids(self, mock_params): + def test_init_with_params_no_ids(self, mock_params: Mock) -> None: """Test initialization with params but no task or context IDs.""" with patch( 'uuid.uuid4', @@ -53,31 +56,32 @@ def test_init_with_params_no_ids(self, mock_params): assert context.message == mock_params.message assert context.task_id == '00000000-0000-0000-0000-000000000001' assert ( - mock_params.message.taskId == '00000000-0000-0000-0000-000000000001' + mock_params.message.task_id + == '00000000-0000-0000-0000-000000000001' ) assert context.context_id == '00000000-0000-0000-0000-000000000002' assert ( - mock_params.message.contextId + mock_params.message.context_id == '00000000-0000-0000-0000-000000000002' ) - def test_init_with_task_id(self, mock_params): + def test_init_with_task_id(self, mock_params: Mock) -> None: """Test initialization with task ID provided.""" 
task_id = 'task-123' context = RequestContext(request=mock_params, task_id=task_id) assert context.task_id == task_id - assert mock_params.message.taskId == task_id + assert mock_params.message.task_id == task_id - def test_init_with_context_id(self, mock_params): + def test_init_with_context_id(self, mock_params: Mock) -> None: """Test initialization with context ID provided.""" context_id = 'context-456' context = RequestContext(request=mock_params, context_id=context_id) assert context.context_id == context_id - assert mock_params.message.contextId == context_id + assert mock_params.message.context_id == context_id - def test_init_with_both_ids(self, mock_params): + def test_init_with_both_ids(self, mock_params: Mock) -> None: """Test initialization with both task and context IDs provided.""" task_id = 'task-123' context_id = 'context-456' @@ -86,22 +90,22 @@ def test_init_with_both_ids(self, mock_params): ) assert context.task_id == task_id - assert mock_params.message.taskId == task_id + assert mock_params.message.task_id == task_id assert context.context_id == context_id - assert mock_params.message.contextId == context_id + assert mock_params.message.context_id == context_id - def test_init_with_task(self, mock_params, mock_task): + def test_init_with_task(self, mock_params: Mock, mock_task: Mock) -> None: """Test initialization with a task object.""" context = RequestContext(request=mock_params, task=mock_task) assert context.current_task == mock_task - def test_get_user_input_no_params(self): + def test_get_user_input_no_params(self) -> None: """Test get_user_input with no params returns empty string.""" context = RequestContext() assert context.get_user_input() == '' - def test_attach_related_task(self, mock_task): + def test_attach_related_task(self, mock_task: Mock) -> None: """Test attach_related_task adds a task to related_tasks.""" context = RequestContext() assert len(context.related_tasks) == 0 @@ -116,7 +120,7 @@ def test_attach_related_task(self, 
mock_task): assert len(context.related_tasks) == 2 assert context.related_tasks[1] == another_task - def test_current_task_property(self, mock_task): + def test_current_task_property(self, mock_task: Mock) -> None: """Test current_task getter and setter.""" context = RequestContext() assert context.current_task is None @@ -129,43 +133,100 @@ def test_current_task_property(self, mock_task): context.current_task = new_task assert context.current_task == new_task - def test_check_or_generate_task_id_no_params(self): + def test_check_or_generate_task_id_no_params(self) -> None: """Test _check_or_generate_task_id with no params does nothing.""" context = RequestContext() context._check_or_generate_task_id() assert context.task_id is None - def test_check_or_generate_task_id_with_existing_task_id(self, mock_params): + def test_check_or_generate_task_id_with_existing_task_id( + self, mock_params: Mock + ) -> None: """Test _check_or_generate_task_id with existing task ID.""" existing_id = 'existing-task-id' - mock_params.message.taskId = existing_id + mock_params.message.task_id = existing_id context = RequestContext(request=mock_params) # The method is called during initialization assert context.task_id == existing_id - assert mock_params.message.taskId == existing_id + assert mock_params.message.task_id == existing_id - def test_check_or_generate_context_id_no_params(self): + def test_check_or_generate_task_id_with_custom_id_generator( + self, mock_params: Mock + ) -> None: + """Test _check_or_generate_task_id uses custom ID generator when provided.""" + id_generator = Mock(spec=IDGenerator) + id_generator.generate.return_value = 'custom-task-id' + + context = RequestContext( + request=mock_params, task_id_generator=id_generator + ) + # The method is called during initialization + + assert context.task_id == 'custom-task-id' + + def test_check_or_generate_context_id_no_params(self) -> None: """Test _check_or_generate_context_id with no params does nothing.""" context = 
RequestContext() context._check_or_generate_context_id() assert context.context_id is None def test_check_or_generate_context_id_with_existing_context_id( - self, mock_params - ): + self, mock_params: Mock + ) -> None: """Test _check_or_generate_context_id with existing context ID.""" existing_id = 'existing-context-id' - mock_params.message.contextId = existing_id + mock_params.message.context_id = existing_id context = RequestContext(request=mock_params) # The method is called during initialization assert context.context_id == existing_id - assert mock_params.message.contextId == existing_id + assert mock_params.message.context_id == existing_id + + def test_check_or_generate_context_id_with_custom_id_generator( + self, mock_params: Mock + ) -> None: + """Test _check_or_generate_context_id uses custom ID generator when provided.""" + id_generator = Mock(spec=IDGenerator) + id_generator.generate.return_value = 'custom-context-id' - def test_with_related_tasks_provided(self, mock_task): + context = RequestContext( + request=mock_params, context_id_generator=id_generator + ) + # The method is called during initialization + + assert context.context_id == 'custom-context-id' + + def test_init_raises_error_on_task_id_mismatch( + self, mock_params: Mock, mock_task: Mock + ) -> None: + """Test that an error is raised if provided task_id mismatches task.id.""" + with pytest.raises(ServerError) as exc_info: + RequestContext( + request=mock_params, task_id='wrong-task-id', task=mock_task + ) + assert 'bad task id' in str(exc_info.value.error.message) + + def test_init_raises_error_on_context_id_mismatch( + self, mock_params: Mock, mock_task: Mock + ) -> None: + """Test that an error is raised if provided context_id mismatches task.context_id.""" + # Set a valid task_id to avoid that error + mock_params.message.task_id = mock_task.id + + with pytest.raises(ServerError) as exc_info: + RequestContext( + request=mock_params, + task_id=mock_task.id, + 
context_id='wrong-context-id', + task=mock_task, + ) + + assert 'bad context id' in str(exc_info.value.error.message) + + def test_with_related_tasks_provided(self, mock_task: Mock) -> None: """Test initialization with related tasks provided.""" related_tasks = [mock_task, Mock(spec=Task)] context = RequestContext(related_tasks=related_tasks) @@ -173,20 +234,33 @@ def test_with_related_tasks_provided(self, mock_task): assert context.related_tasks == related_tasks assert len(context.related_tasks) == 2 - def test_message_property_without_params(self): + def test_message_property_without_params(self) -> None: """Test message property returns None when no params are provided.""" context = RequestContext() assert context.message is None - def test_message_property_with_params(self, mock_params): + def test_message_property_with_params(self, mock_params: Mock) -> None: """Test message property returns the message from params.""" context = RequestContext(request=mock_params) assert context.message == mock_params.message - def test_init_with_existing_ids_in_message(self, mock_message, mock_params): + def test_metadata_property_without_content(self) -> None: + """Test metadata property returns empty dict when no content are provided.""" + context = RequestContext() + assert context.metadata == {} + + def test_metadata_property_with_content(self, mock_params: Mock) -> None: + """Test metadata property returns the metadata from params.""" + mock_params.metadata = {'key': 'value'} + context = RequestContext(request=mock_params) + assert context.metadata == {'key': 'value'} + + def test_init_with_existing_ids_in_message( + self, mock_message: Mock, mock_params: Mock + ) -> None: """Test initialization with existing IDs in the message.""" - mock_message.taskId = 'existing-task-id' - mock_message.contextId = 'existing-context-id' + mock_message.task_id = 'existing-task-id' + mock_message.context_id = 'existing-context-id' context = RequestContext(request=mock_params) @@ -195,10 
+269,10 @@ def test_init_with_existing_ids_in_message(self, mock_message, mock_params): # No new UUIDs should be generated def test_init_with_task_id_and_existing_task_id_match( - self, mock_params, mock_task - ): + self, mock_params: Mock, mock_task: Mock + ) -> None: """Test initialization succeeds when task_id matches task.id.""" - mock_params.message.taskId = mock_task.id + mock_params.message.task_id = mock_task.id context = RequestContext( request=mock_params, task_id=mock_task.id, task=mock_task @@ -208,18 +282,31 @@ def test_init_with_task_id_and_existing_task_id_match( assert context.current_task == mock_task def test_init_with_context_id_and_existing_context_id_match( - self, mock_params, mock_task - ): - """Test initialization succeeds when context_id matches task.contextId.""" - mock_params.message.taskId = mock_task.id # Set matching task ID - mock_params.message.contextId = mock_task.contextId + self, mock_params: Mock, mock_task: Mock + ) -> None: + """Test initialization succeeds when context_id matches task.context_id.""" + mock_params.message.task_id = mock_task.id # Set matching task ID + mock_params.message.context_id = mock_task.context_id context = RequestContext( request=mock_params, task_id=mock_task.id, - context_id=mock_task.contextId, + context_id=mock_task.context_id, task=mock_task, ) - assert context.context_id == mock_task.contextId + assert context.context_id == mock_task.context_id assert context.current_task == mock_task + + def test_extension_handling(self) -> None: + """Test extension handling in RequestContext.""" + call_context = ServerCallContext(requested_extensions={'foo', 'bar'}) + context = RequestContext(call_context=call_context) + + assert context.requested_extensions == {'foo', 'bar'} + + context.add_activated_extension('foo') + assert call_context.activated_extensions == {'foo'} + + context.add_activated_extension('baz') + assert call_context.activated_extensions == {'foo', 'baz'} diff --git 
a/tests/server/agent_execution/test_simple_request_context_builder.py b/tests/server/agent_execution/test_simple_request_context_builder.py new file mode 100644 index 000000000..c1cbcf051 --- /dev/null +++ b/tests/server/agent_execution/test_simple_request_context_builder.py @@ -0,0 +1,340 @@ +import unittest + +from unittest.mock import AsyncMock + +from a2a.auth.user import UnauthenticatedUser # Import User types +from a2a.server.agent_execution.context import ( + RequestContext, # Corrected import path +) +from a2a.server.agent_execution.simple_request_context_builder import ( + SimpleRequestContextBuilder, +) +from a2a.server.context import ServerCallContext +from a2a.server.id_generator import IDGenerator +from a2a.server.tasks.task_store import TaskStore +from a2a.types import ( + Message, + MessageSendParams, + Part, + # ServerCallContext, # Removed from a2a.types + Role, + Task, + TaskState, + TaskStatus, + TextPart, +) + + +# Helper to create a simple message +def create_sample_message( + content: str = 'test message', + msg_id: str = 'msg1', + role: Role = Role.user, + reference_task_ids: list[str] | None = None, +) -> Message: + return Message( + message_id=msg_id, + role=role, + parts=[Part(root=TextPart(text=content))], + reference_task_ids=reference_task_ids if reference_task_ids else [], + ) + + +# Helper to create a simple task +def create_sample_task( + task_id: str = 'task1', + status_state: TaskState = TaskState.submitted, + context_id: str = 'ctx1', +) -> Task: + return Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=status_state), + ) + + +class TestSimpleRequestContextBuilder(unittest.IsolatedAsyncioTestCase): + def setUp(self) -> None: + self.mock_task_store = AsyncMock(spec=TaskStore) + + def test_init_with_populate_true_and_task_store(self) -> None: + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=True, task_store=self.mock_task_store + ) + 
self.assertTrue(builder._should_populate_referred_tasks) + self.assertEqual(builder._task_store, self.mock_task_store) + + def test_init_with_populate_false_task_store_none(self) -> None: + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=False, task_store=None + ) + self.assertFalse(builder._should_populate_referred_tasks) + self.assertIsNone(builder._task_store) + + def test_init_with_populate_false_task_store_provided(self) -> None: + # Even if populate is false, task_store might still be provided (though not used by build for related_tasks) + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=False, + task_store=self.mock_task_store, + ) + self.assertFalse(builder._should_populate_referred_tasks) + self.assertEqual(builder._task_store, self.mock_task_store) + + async def test_build_basic_context_no_populate(self) -> None: + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=False, + task_store=self.mock_task_store, + ) + + params = MessageSendParams(message=create_sample_message()) + task_id = 'test_task_id_1' + context_id = 'test_context_id_1' + current_task = create_sample_task( + task_id=task_id, context_id=context_id + ) + # Pass a valid User instance, e.g., UnauthenticatedUser or a mock spec'd as User + server_call_context = ServerCallContext( + user=UnauthenticatedUser(), auth_token='dummy_token' + ) + + request_context = await builder.build( + params=params, + task_id=task_id, + context_id=context_id, + task=current_task, + context=server_call_context, + ) + + self.assertIsInstance(request_context, RequestContext) + # Access params via its properties message and configuration + self.assertEqual(request_context.message, params.message) + self.assertEqual(request_context.configuration, params.configuration) + self.assertEqual(request_context.task_id, task_id) + self.assertEqual(request_context.context_id, context_id) + self.assertEqual( + request_context.current_task, current_task + ) # 
Property is current_task + self.assertEqual( + request_context.call_context, server_call_context + ) # Property is call_context + self.assertEqual(request_context.related_tasks, []) # Initialized to [] + self.mock_task_store.get.assert_not_called() + + async def test_build_populate_true_with_reference_task_ids(self) -> None: + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=True, task_store=self.mock_task_store + ) + ref_task_id1 = 'ref_task1' + ref_task_id2 = 'ref_task2_missing' + ref_task_id3 = 'ref_task3' + + mock_ref_task1 = create_sample_task(task_id=ref_task_id1) + mock_ref_task3 = create_sample_task(task_id=ref_task_id3) + + # Configure task_store.get mock + # Note: AsyncMock side_effect needs to handle multiple calls if they have different args. + # A simple way is a list of return values, or a function. + async def get_side_effect(task_id): + if task_id == ref_task_id1: + return mock_ref_task1 + if task_id == ref_task_id3: + return mock_ref_task3 + return None + + self.mock_task_store.get = AsyncMock(side_effect=get_side_effect) + + params = MessageSendParams( + message=create_sample_message( + reference_task_ids=[ref_task_id1, ref_task_id2, ref_task_id3] + ) + ) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) + + request_context = await builder.build( + params=params, + task_id='t1', + context_id='c1', + task=None, + context=server_call_context, + ) + + self.assertEqual(self.mock_task_store.get.call_count, 3) + self.mock_task_store.get.assert_any_call(ref_task_id1) + self.mock_task_store.get.assert_any_call(ref_task_id2) + self.mock_task_store.get.assert_any_call(ref_task_id3) + + self.assertIsNotNone(request_context.related_tasks) + self.assertEqual( + len(request_context.related_tasks), 2 + ) # Only non-None tasks + self.assertIn(mock_ref_task1, request_context.related_tasks) + self.assertIn(mock_ref_task3, request_context.related_tasks) + + async def test_build_populate_true_params_none(self) -> None: + 
builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=True, task_store=self.mock_task_store + ) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) + request_context = await builder.build( + params=None, + task_id='t1', + context_id='c1', + task=None, + context=server_call_context, + ) + self.assertEqual(request_context.related_tasks, []) + self.mock_task_store.get.assert_not_called() + + async def test_build_populate_true_reference_ids_empty_or_none( + self, + ) -> None: + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=True, task_store=self.mock_task_store + ) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) + + # Test with empty list + params_empty_refs = MessageSendParams( + message=create_sample_message(reference_task_ids=[]) + ) + request_context_empty = await builder.build( + params=params_empty_refs, + task_id='t1', + context_id='c1', + task=None, + context=server_call_context, + ) + self.assertEqual( + request_context_empty.related_tasks, [] + ) # Should be [] if list is empty + self.mock_task_store.get.assert_not_called() + + self.mock_task_store.get.reset_mock() # Reset for next call + + # Test with referenceTaskIds=None (Pydantic model might default it to empty list or handle it) + # create_sample_message defaults to [] if None is passed, so this tests the same as above. + # To explicitly test None in Message, we'd have to bypass Pydantic default or modify helper. + # For now, this covers the "no IDs to process" case. 
+ msg_with_no_refs = Message( + message_id='m2', role=Role.user, parts=[], referenceTaskIds=None + ) + params_none_refs = MessageSendParams(message=msg_with_no_refs) + request_context_none = await builder.build( + params=params_none_refs, + task_id='t2', + context_id='c2', + task=None, + context=server_call_context, + ) + self.assertEqual(request_context_none.related_tasks, []) + self.mock_task_store.get.assert_not_called() + + async def test_build_populate_true_task_store_none(self) -> None: + # This scenario might be prevented by constructor logic if should_populate_referred_tasks is True, + # but testing defensively. The builder might allow task_store=None if it's set post-init, + # or if constructor logic changes. Current SimpleRequestContextBuilder takes it at init. + # If task_store is None, it should not attempt to call get. + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=True, + task_store=None, # Explicitly None + ) + params = MessageSendParams( + message=create_sample_message(reference_task_ids=['ref1']) + ) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) + + request_context = await builder.build( + params=params, + task_id='t1', + context_id='c1', + task=None, + context=server_call_context, + ) + # Expect related_tasks to be an empty list as task_store is None + self.assertEqual(request_context.related_tasks, []) + # No mock_task_store to check calls on, this test is mostly for graceful handling. 
+ + async def test_build_populate_false_with_reference_task_ids(self) -> None: + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=False, + task_store=self.mock_task_store, + ) + params = MessageSendParams( + message=create_sample_message( + reference_task_ids=['ref_task_should_not_be_fetched'] + ) + ) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) + + request_context = await builder.build( + params=params, + task_id='t1', + context_id='c1', + task=None, + context=server_call_context, + ) + self.assertEqual(request_context.related_tasks, []) + self.mock_task_store.get.assert_not_called() + + async def test_build_with_custom_id_generators(self) -> None: + mock_task_id_generator = AsyncMock(spec=IDGenerator) + mock_context_id_generator = AsyncMock(spec=IDGenerator) + mock_task_id_generator.generate.return_value = 'custom_task_id' + mock_context_id_generator.generate.return_value = 'custom_context_id' + + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=False, + task_store=self.mock_task_store, + task_id_generator=mock_task_id_generator, + context_id_generator=mock_context_id_generator, + ) + params = MessageSendParams(message=create_sample_message()) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) + + request_context = await builder.build( + params=params, + task_id=None, + context_id=None, + task=None, + context=server_call_context, + ) + + mock_task_id_generator.generate.assert_called_once() + mock_context_id_generator.generate.assert_called_once() + self.assertEqual(request_context.task_id, 'custom_task_id') + self.assertEqual(request_context.context_id, 'custom_context_id') + + async def test_build_with_provided_ids_and_custom_id_generators( + self, + ) -> None: + mock_task_id_generator = AsyncMock(spec=IDGenerator) + mock_context_id_generator = AsyncMock(spec=IDGenerator) + + builder = SimpleRequestContextBuilder( + should_populate_referred_tasks=False, + 
task_store=self.mock_task_store, + task_id_generator=mock_task_id_generator, + context_id_generator=mock_context_id_generator, + ) + params = MessageSendParams(message=create_sample_message()) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) + + provided_task_id = 'provided_task_id' + provided_context_id = 'provided_context_id' + + request_context = await builder.build( + params=params, + task_id=provided_task_id, + context_id=provided_context_id, + task=None, + context=server_call_context, + ) + + mock_task_id_generator.generate.assert_not_called() + mock_context_id_generator.generate.assert_not_called() + self.assertEqual(request_context.task_id, provided_task_id) + self.assertEqual(request_context.context_id, provided_context_id) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/server/apps/jsonrpc/test_fastapi_app.py b/tests/server/apps/jsonrpc/test_fastapi_app.py new file mode 100644 index 000000000..ddb68691f --- /dev/null +++ b/tests/server/apps/jsonrpc/test_fastapi_app.py @@ -0,0 +1,80 @@ +from typing import Any +from unittest.mock import MagicMock + +import pytest + +from a2a.server.apps.jsonrpc import fastapi_app +from a2a.server.apps.jsonrpc.fastapi_app import A2AFastAPIApplication +from a2a.server.request_handlers.request_handler import ( + RequestHandler, # For mock spec +) +from a2a.types import AgentCard # For mock spec + + +# --- A2AFastAPIApplication Tests --- + + +class TestA2AFastAPIApplicationOptionalDeps: + # Running tests in this class requires the optional dependency fastapi to be + # present in the test environment. + + @pytest.fixture(scope='class', autouse=True) + def ensure_pkg_fastapi_is_present(self): + try: + import fastapi as _fastapi # noqa: F401 + except ImportError: + pytest.fail( + f'Running tests in {self.__class__.__name__} requires' + ' the optional dependency fastapi to be present in the test' + ' environment. Run `uv sync --dev ...` before running the test' + ' suite.' 
+ ) + + @pytest.fixture(scope='class') + def mock_app_params(self) -> dict: + # Mock http_handler + mock_handler = MagicMock(spec=RequestHandler) + # Mock agent_card with essential attributes accessed in __init__ + mock_agent_card = MagicMock(spec=AgentCard) + # Ensure 'url' attribute exists on the mock_agent_card, as it's accessed + # in __init__ + mock_agent_card.url = 'http://example.com' + # Ensure 'supports_authenticated_extended_card' attribute exists + mock_agent_card.supports_authenticated_extended_card = False + return {'agent_card': mock_agent_card, 'http_handler': mock_handler} + + @pytest.fixture(scope='class') + def mark_pkg_fastapi_not_installed(self): + pkg_fastapi_installed_flag = fastapi_app._package_fastapi_installed + fastapi_app._package_fastapi_installed = False + yield + fastapi_app._package_fastapi_installed = pkg_fastapi_installed_flag + + def test_create_a2a_fastapi_app_with_present_deps_succeeds( + self, mock_app_params: dict + ): + try: + _app = A2AFastAPIApplication(**mock_app_params) + except ImportError: + pytest.fail( + 'With the fastapi package present, creating a' + ' A2AFastAPIApplication instance should not raise ImportError' + ) + + def test_create_a2a_fastapi_app_with_missing_deps_raises_importerror( + self, + mock_app_params: dict, + mark_pkg_fastapi_not_installed: Any, + ): + with pytest.raises( + ImportError, + match=( + 'The `fastapi` package is required to use the' + ' `A2AFastAPIApplication`' + ), + ): + _app = A2AFastAPIApplication(**mock_app_params) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/tests/server/apps/jsonrpc/test_jsonrpc_app.py b/tests/server/apps/jsonrpc/test_jsonrpc_app.py new file mode 100644 index 000000000..36309872e --- /dev/null +++ b/tests/server/apps/jsonrpc/test_jsonrpc_app.py @@ -0,0 +1,380 @@ +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from starlette.testclient import TestClient + + +# Attempt to import StarletteBaseUser, 
fallback to MagicMock if not available +try: + from starlette.authentication import BaseUser as StarletteBaseUser +except ImportError: + StarletteBaseUser = MagicMock() # type: ignore + +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.server.apps.jsonrpc import ( + jsonrpc_app, # Keep this import for optional deps test +) +from a2a.server.apps.jsonrpc.jsonrpc_app import ( + JSONRPCApplication, + StarletteUserProxy, +) +from a2a.server.apps.jsonrpc.starlette_app import A2AStarletteApplication +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import ( + RequestHandler, +) # For mock spec +from a2a.types import ( + AgentCard, + Message, + MessageSendParams, + Part, + Role, + SendMessageRequest, + SendMessageResponse, + SendMessageSuccessResponse, + TextPart, +) + + +# --- StarletteUserProxy Tests --- + + +class TestStarletteUserProxy: + def test_starlette_user_proxy_is_authenticated_true(self): + starlette_user_mock = MagicMock(spec=StarletteBaseUser) + starlette_user_mock.is_authenticated = True + proxy = StarletteUserProxy(starlette_user_mock) + assert proxy.is_authenticated is True + + def test_starlette_user_proxy_is_authenticated_false(self): + starlette_user_mock = MagicMock(spec=StarletteBaseUser) + starlette_user_mock.is_authenticated = False + proxy = StarletteUserProxy(starlette_user_mock) + assert proxy.is_authenticated is False + + def test_starlette_user_proxy_user_name(self): + starlette_user_mock = MagicMock(spec=StarletteBaseUser) + starlette_user_mock.display_name = 'Test User DisplayName' + proxy = StarletteUserProxy(starlette_user_mock) + assert proxy.user_name == 'Test User DisplayName' + + def test_starlette_user_proxy_user_name_raises_attribute_error(self): + """ + Tests that if the underlying starlette user object is missing the + display_name attribute, the proxy currently raises an AttributeError. 
+ """ + starlette_user_mock = MagicMock(spec=StarletteBaseUser) + # Ensure display_name is not present on the mock to trigger AttributeError + del starlette_user_mock.display_name + + proxy = StarletteUserProxy(starlette_user_mock) + with pytest.raises(AttributeError, match='display_name'): + _ = proxy.user_name + + +# --- JSONRPCApplication Tests (Selected) --- + + +class TestJSONRPCApplicationSetup: # Renamed to avoid conflict + def test_jsonrpc_app_build_method_abstract_raises_typeerror( + self, + ): # Renamed test + mock_handler = MagicMock(spec=RequestHandler) + # Mock agent_card with essential attributes accessed in JSONRPCApplication.__init__ + mock_agent_card = MagicMock(spec=AgentCard) + # Ensure 'url' attribute exists on the mock_agent_card, as it's accessed in __init__ + mock_agent_card.url = 'http://mockurl.com' + # Ensure 'supportsAuthenticatedExtendedCard' attribute exists + mock_agent_card.supports_authenticated_extended_card = False + + # This will fail at definition time if an abstract method is not implemented + with pytest.raises( + TypeError, + match=".*abstract class IncompleteJSONRPCApp .* abstract method '?build'?", + ): + + class IncompleteJSONRPCApp(JSONRPCApplication): + # Intentionally not implementing 'build' + def some_other_method(self): + pass + + IncompleteJSONRPCApp( + agent_card=mock_agent_card, http_handler=mock_handler + ) + + +class TestJSONRPCApplicationOptionalDeps: + # Running tests in this class requires optional dependencies starlette and + # sse-starlette to be present in the test environment. + + @pytest.fixture(scope='class', autouse=True) + def ensure_pkg_starlette_is_present(self): + try: + import sse_starlette as _sse_starlette # noqa: F401 + import starlette as _starlette # noqa: F401 + except ImportError: + pytest.fail( + f'Running tests in {self.__class__.__name__} requires' + ' optional dependencies starlette and sse-starlette to be' + ' present in the test environment. 
Run `uv sync --dev ...`' + ' before running the test suite.' + ) + + @pytest.fixture(scope='class') + def mock_app_params(self) -> dict: + # Mock http_handler + mock_handler = MagicMock(spec=RequestHandler) + # Mock agent_card with essential attributes accessed in __init__ + mock_agent_card = MagicMock(spec=AgentCard) + # Ensure 'url' attribute exists on the mock_agent_card, as it's accessed + # in __init__ + mock_agent_card.url = 'http://example.com' + # Ensure 'supportsAuthenticatedExtendedCard' attribute exists + mock_agent_card.supports_authenticated_extended_card = False + return {'agent_card': mock_agent_card, 'http_handler': mock_handler} + + @pytest.fixture(scope='class') + def mark_pkg_starlette_not_installed(self): + pkg_starlette_installed_flag = jsonrpc_app._package_starlette_installed + jsonrpc_app._package_starlette_installed = False + yield + jsonrpc_app._package_starlette_installed = pkg_starlette_installed_flag + + def test_create_jsonrpc_based_app_with_present_deps_succeeds( + self, mock_app_params: dict + ): + class DummyJSONRPCApp(JSONRPCApplication): + def build( + self, + agent_card_url='/.well-known/agent.json', + rpc_url='/', + **kwargs, + ): + return object() + + try: + _app = DummyJSONRPCApp(**mock_app_params) + except ImportError: + pytest.fail( + 'With packages starlette and sse-starlette present, creating a' + ' JSONRPCApplication-based instance should not raise' + ' ImportError' + ) + + def test_create_jsonrpc_based_app_with_missing_deps_raises_importerror( + self, mock_app_params: dict, mark_pkg_starlette_not_installed: Any + ): + class DummyJSONRPCApp(JSONRPCApplication): + def build( + self, + agent_card_url='/.well-known/agent.json', + rpc_url='/', + **kwargs, + ): + return object() + + with pytest.raises( + ImportError, + match=( + 'Packages `starlette` and `sse-starlette` are required to use' + ' the `JSONRPCApplication`' + ), + ): + _app = DummyJSONRPCApp(**mock_app_params) + + +class TestJSONRPCExtensions: + @pytest.fixture + 
def mock_handler(self): + handler = AsyncMock(spec=RequestHandler) + handler.on_message_send.return_value = SendMessageResponse( + root=SendMessageSuccessResponse( + id='1', + result=Message( + message_id='test', + role=Role.agent, + parts=[Part(TextPart(text='response message'))], + ), + ) + ) + return handler + + @pytest.fixture + def test_app(self, mock_handler): + mock_agent_card = MagicMock(spec=AgentCard) + mock_agent_card.url = 'http://mockurl.com' + mock_agent_card.supports_authenticated_extended_card = False + + return A2AStarletteApplication( + agent_card=mock_agent_card, http_handler=mock_handler + ) + + @pytest.fixture + def client(self, test_app): + return TestClient(test_app.build()) + + def test_request_with_single_extension(self, client, mock_handler): + headers = {HTTP_EXTENSION_HEADER: 'foo'} + response = client.post( + '/', + headers=headers, + json=SendMessageRequest( + id='1', + params=MessageSendParams( + message=Message( + message_id='1', + role=Role.user, + parts=[Part(TextPart(text='hi'))], + ) + ), + ).model_dump(), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert isinstance(call_context, ServerCallContext) + assert call_context.requested_extensions == {'foo'} + + def test_request_with_comma_separated_extensions( + self, client, mock_handler + ): + headers = {HTTP_EXTENSION_HEADER: 'foo, bar'} + response = client.post( + '/', + headers=headers, + json=SendMessageRequest( + id='1', + params=MessageSendParams( + message=Message( + message_id='1', + role=Role.user, + parts=[Part(TextPart(text='hi'))], + ) + ), + ).model_dump(), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert call_context.requested_extensions == {'foo', 'bar'} + + def test_request_with_comma_separated_extensions_no_space( + self, client, mock_handler + ): + 
headers = [ + (HTTP_EXTENSION_HEADER, 'foo, bar'), + (HTTP_EXTENSION_HEADER, 'baz'), + ] + response = client.post( + '/', + headers=headers, + json=SendMessageRequest( + id='1', + params=MessageSendParams( + message=Message( + message_id='1', + role=Role.user, + parts=[Part(TextPart(text='hi'))], + ) + ), + ).model_dump(), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert call_context.requested_extensions == {'foo', 'bar', 'baz'} + + def test_method_added_to_call_context_state(self, client, mock_handler): + response = client.post( + '/', + json=SendMessageRequest( + id='1', + params=MessageSendParams( + message=Message( + message_id='1', + role=Role.user, + parts=[Part(TextPart(text='hi'))], + ) + ), + ).model_dump(), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert call_context.state['method'] == 'message/send' + + def test_request_with_multiple_extension_headers( + self, client, mock_handler + ): + headers = [ + (HTTP_EXTENSION_HEADER, 'foo'), + (HTTP_EXTENSION_HEADER, 'bar'), + ] + response = client.post( + '/', + headers=headers, + json=SendMessageRequest( + id='1', + params=MessageSendParams( + message=Message( + message_id='1', + role=Role.user, + parts=[Part(TextPart(text='hi'))], + ) + ), + ).model_dump(), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert call_context.requested_extensions == {'foo', 'bar'} + + def test_response_with_activated_extensions(self, client, mock_handler): + def side_effect(request, context: ServerCallContext): + context.activated_extensions.add('foo') + context.activated_extensions.add('baz') + return SendMessageResponse( + root=SendMessageSuccessResponse( + id='1', + result=Message( + message_id='test', + 
role=Role.agent, + parts=[Part(TextPart(text='response message'))], + ), + ) + ) + + mock_handler.on_message_send.side_effect = side_effect + + response = client.post( + '/', + json=SendMessageRequest( + id='1', + params=MessageSendParams( + message=Message( + message_id='1', + role=Role.user, + parts=[Part(TextPart(text='hi'))], + ) + ), + ).model_dump(), + ) + response.raise_for_status() + + assert response.status_code == 200 + assert HTTP_EXTENSION_HEADER in response.headers + assert set(response.headers[HTTP_EXTENSION_HEADER].split(', ')) == { + 'foo', + 'baz', + } + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/tests/server/apps/jsonrpc/test_serialization.py b/tests/server/apps/jsonrpc/test_serialization.py new file mode 100644 index 000000000..f67780461 --- /dev/null +++ b/tests/server/apps/jsonrpc/test_serialization.py @@ -0,0 +1,228 @@ +from unittest import mock + +import pytest + +from fastapi import FastAPI +from pydantic import ValidationError +from starlette.testclient import TestClient + +from a2a.server.apps import A2AFastAPIApplication, A2AStarletteApplication +from a2a.types import ( + APIKeySecurityScheme, + AgentCapabilities, + AgentCard, + In, + InvalidRequestError, + JSONParseError, + Message, + Part, + Role, + SecurityScheme, + TextPart, +) + + +@pytest.fixture +def agent_card_with_api_key(): + """Provides an AgentCard with an APIKeySecurityScheme for testing serialization.""" + # This data uses the alias 'in', which is correct for creating the model. 
+ api_key_scheme_data = { + 'type': 'apiKey', + 'name': 'X-API-KEY', + 'in': 'header', + } + api_key_scheme = APIKeySecurityScheme.model_validate(api_key_scheme_data) + + return AgentCard( + name='APIKeyAgent', + description='An agent that uses API Key auth.', + url='http://example.com/apikey-agent', + version='1.0.0', + capabilities=AgentCapabilities(), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + skills=[], + security_schemes={'api_key_auth': SecurityScheme(root=api_key_scheme)}, + security=[{'api_key_auth': []}], + ) + + +def test_starlette_agent_card_with_api_key_scheme_alias( + agent_card_with_api_key: AgentCard, +): + """ + Tests that the A2AStarletteApplication endpoint correctly serializes aliased fields. + + This verifies the fix for `APIKeySecurityScheme.in_` being serialized as `in_` instead of `in`. + """ + handler = mock.AsyncMock() + app_instance = A2AStarletteApplication(agent_card_with_api_key, handler) + client = TestClient(app_instance.build()) + + response = client.get('/.well-known/agent-card.json') + assert response.status_code == 200 + response_data = response.json() + + security_scheme_json = response_data['securitySchemes']['api_key_auth'] + assert 'in' in security_scheme_json + assert security_scheme_json['in'] == 'header' + assert 'in_' not in security_scheme_json + + try: + parsed_card = AgentCard.model_validate(response_data) + parsed_scheme_wrapper = parsed_card.security_schemes['api_key_auth'] + assert isinstance(parsed_scheme_wrapper.root, APIKeySecurityScheme) + assert parsed_scheme_wrapper.root.in_ == In.header + except ValidationError as e: + pytest.fail( + f"AgentCard.model_validate failed on the server's response: {e}" + ) + + +def test_fastapi_agent_card_with_api_key_scheme_alias( + agent_card_with_api_key: AgentCard, +): + """ + Tests that the A2AFastAPIApplication endpoint correctly serializes aliased fields. 
+ + This verifies the fix for `APIKeySecurityScheme.in_` being serialized as `in_` instead of `in`. + """ + handler = mock.AsyncMock() + app_instance = A2AFastAPIApplication(agent_card_with_api_key, handler) + client = TestClient(app_instance.build()) + + response = client.get('/.well-known/agent-card.json') + assert response.status_code == 200 + response_data = response.json() + + security_scheme_json = response_data['securitySchemes']['api_key_auth'] + assert 'in' in security_scheme_json + assert 'in_' not in security_scheme_json + assert security_scheme_json['in'] == 'header' + + +def test_handle_invalid_json(agent_card_with_api_key: AgentCard): + """Test handling of malformed JSON.""" + handler = mock.AsyncMock() + app_instance = A2AStarletteApplication(agent_card_with_api_key, handler) + client = TestClient(app_instance.build()) + + response = client.post( + '/', + content='{ "jsonrpc": "2.0", "method": "test", "id": 1, "params": { "key": "value" }', + ) + assert response.status_code == 200 + data = response.json() + assert data['error']['code'] == JSONParseError().code + + +def test_handle_oversized_payload(agent_card_with_api_key: AgentCard): + """Test handling of oversized JSON payloads.""" + handler = mock.AsyncMock() + app_instance = A2AStarletteApplication(agent_card_with_api_key, handler) + client = TestClient(app_instance.build()) + + large_string = 'a' * 11 * 1_000_000 # 11MB string + payload = { + 'jsonrpc': '2.0', + 'method': 'test', + 'id': 1, + 'params': {'data': large_string}, + } + + response = client.post('/', json=payload) + assert response.status_code == 200 + data = response.json() + assert data['error']['code'] == InvalidRequestError().code + + +@pytest.mark.parametrize( + 'max_content_length', + [ + None, + 11 * 1024 * 1024, + 30 * 1024 * 1024, + ], +) +def test_handle_oversized_payload_with_max_content_length( + agent_card_with_api_key: AgentCard, + max_content_length: int | None, +): + """Test handling of JSON payloads with sizes within 
custom max_content_length.""" + handler = mock.AsyncMock() + app_instance = A2AStarletteApplication( + agent_card_with_api_key, handler, max_content_length=max_content_length + ) + client = TestClient(app_instance.build()) + + large_string = 'a' * 11 * 1_000_000 # 11MB string + payload = { + 'jsonrpc': '2.0', + 'method': 'test', + 'id': 1, + 'params': {'data': large_string}, + } + + response = client.post('/', json=payload) + assert response.status_code == 200 + data = response.json() + # When max_content_length is set, requests up to that size should not be + # rejected due to payload size. The request might fail for other reasons, + # but it shouldn't be an InvalidRequestError related to the content length. + assert data['error']['code'] != InvalidRequestError().code + + +def test_handle_unicode_characters(agent_card_with_api_key: AgentCard): + """Test handling of unicode characters in JSON payload.""" + handler = mock.AsyncMock() + app_instance = A2AStarletteApplication(agent_card_with_api_key, handler) + client = TestClient(app_instance.build()) + + unicode_text = 'こんにちは世界' # "Hello world" in Japanese + unicode_payload = { + 'jsonrpc': '2.0', + 'method': 'message/send', + 'id': 'unicode_test', + 'params': { + 'message': { + 'role': 'user', + 'parts': [{'kind': 'text', 'text': unicode_text}], + 'message_id': 'msg-unicode', + } + }, + } + + # Mock a handler for this method + handler.on_message_send.return_value = Message( + role=Role.agent, + parts=[Part(root=TextPart(text=f'Received: {unicode_text}'))], + message_id='response-unicode', + ) + + response = client.post('/', json=unicode_payload) + + # We are not testing the handler logic here, just that the server can correctly + # deserialize the unicode payload without errors. A 200 response with any valid + # JSON-RPC response indicates success. 
+ assert response.status_code == 200 + data = response.json() + assert 'error' not in data or data['error'] is None + assert data['result']['parts'][0]['text'] == f'Received: {unicode_text}' + + +def test_fastapi_sub_application(agent_card_with_api_key: AgentCard): + """ + Tests that the A2AFastAPIApplication endpoint correctly passes the url in sub-application. + """ + handler = mock.AsyncMock() + sub_app_instance = A2AFastAPIApplication(agent_card_with_api_key, handler) + app_instance = FastAPI() + app_instance.mount('/a2a', sub_app_instance.build()) + client = TestClient(app_instance) + + response = client.get('/a2a/openapi.json') + assert response.status_code == 200 + response_data = response.json() + + assert 'servers' in response_data + assert response_data['servers'] == [{'url': '/a2a'}] diff --git a/tests/server/apps/jsonrpc/test_starlette_app.py b/tests/server/apps/jsonrpc/test_starlette_app.py new file mode 100644 index 000000000..6a1472c8c --- /dev/null +++ b/tests/server/apps/jsonrpc/test_starlette_app.py @@ -0,0 +1,82 @@ +from typing import Any +from unittest.mock import MagicMock + +import pytest + +from a2a.server.apps.jsonrpc import starlette_app +from a2a.server.apps.jsonrpc.starlette_app import A2AStarletteApplication +from a2a.server.request_handlers.request_handler import ( + RequestHandler, # For mock spec +) +from a2a.types import AgentCard # For mock spec + + +# --- A2AStarletteApplication Tests --- + + +class TestA2AStarletteApplicationOptionalDeps: + # Running tests in this class requires optional dependencies starlette and + # sse-starlette to be present in the test environment. 
+
+    @pytest.fixture(scope='class', autouse=True)
+    def ensure_pkg_starlette_is_present(self):
+        try:
+            import sse_starlette as _sse_starlette  # noqa: F401
+            import starlette as _starlette  # noqa: F401
+        except ImportError:
+            pytest.fail(
+                f'Running tests in {self.__class__.__name__} requires'
+                ' optional dependencies starlette and sse-starlette to be'
+                ' present in the test environment. Run `uv sync --dev ...`'
+                ' before running the test suite.'
+            )
+
+    @pytest.fixture(scope='class')
+    def mock_app_params(self) -> dict:
+        # Mock http_handler
+        mock_handler = MagicMock(spec=RequestHandler)
+        # Mock agent_card with essential attributes accessed in __init__
+        mock_agent_card = MagicMock(spec=AgentCard)
+        # Ensure 'url' attribute exists on the mock_agent_card, as it's accessed
+        # in __init__
+        mock_agent_card.url = 'http://example.com'
+        # Ensure 'supports_authenticated_extended_card' attribute exists
+        mock_agent_card.supports_authenticated_extended_card = False
+        return {'agent_card': mock_agent_card, 'http_handler': mock_handler}
+
+    @pytest.fixture(scope='class')
+    def mark_pkg_starlette_not_installed(self):
+        pkg_starlette_installed_flag = (
+            starlette_app._package_starlette_installed
+        )
+        starlette_app._package_starlette_installed = False
+        yield
+        starlette_app._package_starlette_installed = (
+            pkg_starlette_installed_flag
+        )
+
+    def test_create_a2a_starlette_app_with_present_deps_succeeds(
+        self, mock_app_params: dict
+    ):
+        try:
+            _app = A2AStarletteApplication(**mock_app_params)
+        except ImportError:
+            pytest.fail(
+                'With packages starlette and sse-starlette present, creating an'
+                ' A2AStarletteApplication instance should not raise ImportError'
+            )
+
+    def test_create_a2a_starlette_app_with_missing_deps_raises_importerror(
+        self,
+        mock_app_params: dict,
+        mark_pkg_starlette_not_installed: Any,
+    ):
+        with pytest.raises(
+            ImportError,
+            match='Packages `starlette` and `sse-starlette` are required',
+        ):
+            _app = 
A2AStarletteApplication(**mock_app_params) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/tests/server/apps/rest/test_rest_fastapi_app.py b/tests/server/apps/rest/test_rest_fastapi_app.py new file mode 100644 index 000000000..9ea8c9686 --- /dev/null +++ b/tests/server/apps/rest/test_rest_fastapi_app.py @@ -0,0 +1,403 @@ +import logging + +from typing import Any +from unittest.mock import MagicMock + +import pytest + +from fastapi import FastAPI +from google.protobuf import json_format +from httpx import ASGITransport, AsyncClient + +from a2a.grpc import a2a_pb2 +from a2a.server.apps.rest import fastapi_app, rest_adapter +from a2a.server.apps.rest.fastapi_app import A2ARESTFastAPIApplication +from a2a.server.apps.rest.rest_adapter import RESTAdapter +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types import ( + AgentCard, + Message, + Part, + Role, + Task, + TaskState, + TaskStatus, + TextPart, +) + + +logger = logging.getLogger(__name__) + + +@pytest.fixture +async def agent_card() -> AgentCard: + mock_agent_card = MagicMock(spec=AgentCard) + mock_agent_card.url = 'http://mockurl.com' + mock_agent_card.supports_authenticated_extended_card = False + + # Mock the capabilities object with streaming disabled + mock_capabilities = MagicMock() + mock_capabilities.streaming = False + mock_agent_card.capabilities = mock_capabilities + + return mock_agent_card + + +@pytest.fixture +async def streaming_agent_card() -> AgentCard: + """Agent card that supports streaming for testing streaming endpoints.""" + mock_agent_card = MagicMock(spec=AgentCard) + mock_agent_card.url = 'http://mockurl.com' + mock_agent_card.supports_authenticated_extended_card = False + + # Mock the capabilities object with streaming enabled + mock_capabilities = MagicMock() + mock_capabilities.streaming = True + mock_agent_card.capabilities = mock_capabilities + + return mock_agent_card + + +@pytest.fixture +async def request_handler() -> 
RequestHandler:
+    return MagicMock(spec=RequestHandler)
+
+
+@pytest.fixture
+async def streaming_app(
+    streaming_agent_card: AgentCard, request_handler: RequestHandler
+) -> FastAPI:
+    """Builds the FastAPI application for testing streaming endpoints."""
+
+    return A2ARESTFastAPIApplication(
+        streaming_agent_card, request_handler
+    ).build(agent_card_url='/well-known/agent-card.json', rpc_url='')
+
+
+@pytest.fixture
+async def streaming_client(streaming_app: FastAPI) -> AsyncClient:
+    """HTTP client for the streaming FastAPI application."""
+    return AsyncClient(
+        transport=ASGITransport(app=streaming_app), base_url='http://test'
+    )
+
+
+@pytest.fixture
+async def app(
+    agent_card: AgentCard, request_handler: RequestHandler
+) -> FastAPI:
+    """Builds the FastAPI application for testing."""
+
+    return A2ARESTFastAPIApplication(agent_card, request_handler).build(
+        agent_card_url='/well-known/agent.json', rpc_url=''
+    )
+
+
+@pytest.fixture
+async def client(app: FastAPI) -> AsyncClient:
+    return AsyncClient(
+        transport=ASGITransport(app=app), base_url='http://testapp'
+    )
+
+
+@pytest.fixture
+def mark_pkg_starlette_not_installed():
+    pkg_starlette_installed_flag = rest_adapter._package_starlette_installed
+    rest_adapter._package_starlette_installed = False
+    yield
+    rest_adapter._package_starlette_installed = pkg_starlette_installed_flag
+
+
+@pytest.fixture
+def mark_pkg_fastapi_not_installed():
+    pkg_fastapi_installed_flag = fastapi_app._package_fastapi_installed
+    fastapi_app._package_fastapi_installed = False
+    yield
+    fastapi_app._package_fastapi_installed = pkg_fastapi_installed_flag
+
+
+@pytest.mark.anyio
+async def test_create_rest_adapter_with_present_deps_succeeds(
+    agent_card: AgentCard, request_handler: RequestHandler
+):
+    try:
+        _app = RESTAdapter(agent_card, request_handler)
+    except ImportError:
+        pytest.fail(
+            'With packages starlette and sse-starlette present, creating a'
+            ' RESTAdapter instance should not raise ImportError'
+        )
+
+
+@pytest.mark.anyio +async def test_create_rest_adapter_with_missing_deps_raises_importerror( + agent_card: AgentCard, + request_handler: RequestHandler, + mark_pkg_starlette_not_installed: Any, +): + with pytest.raises( + ImportError, + match=( + 'Packages `starlette` and `sse-starlette` are required to use' + ' the `RESTAdapter`.' + ), + ): + _app = RESTAdapter(agent_card, request_handler) + + +@pytest.mark.anyio +async def test_create_a2a_rest_fastapi_app_with_present_deps_succeeds( + agent_card: AgentCard, request_handler: RequestHandler +): + try: + _app = A2ARESTFastAPIApplication(agent_card, request_handler).build( + agent_card_url='/well-known/agent.json', rpc_url='' + ) + except ImportError: + pytest.fail( + 'With the fastapi package present, creating a' + ' A2ARESTFastAPIApplication instance should not raise ImportError' + ) + + +@pytest.mark.anyio +async def test_create_a2a_rest_fastapi_app_with_missing_deps_raises_importerror( + agent_card: AgentCard, + request_handler: RequestHandler, + mark_pkg_fastapi_not_installed: Any, +): + with pytest.raises( + ImportError, + match=( + 'The `fastapi` package is required to use the' + ' `A2ARESTFastAPIApplication`' + ), + ): + _app = A2ARESTFastAPIApplication(agent_card, request_handler).build( + agent_card_url='/well-known/agent.json', rpc_url='' + ) + + +@pytest.mark.anyio +async def test_send_message_success_message( + client: AsyncClient, request_handler: MagicMock +) -> None: + expected_response = a2a_pb2.SendMessageResponse( + msg=a2a_pb2.Message( + message_id='test', + role=a2a_pb2.Role.ROLE_AGENT, + content=[ + a2a_pb2.Part(text='response message'), + ], + ), + ) + request_handler.on_message_send.return_value = Message( + message_id='test', + role=Role.agent, + parts=[Part(TextPart(text='response message'))], + ) + + request = a2a_pb2.SendMessageRequest( + request=a2a_pb2.Message(), + configuration=a2a_pb2.SendMessageConfiguration(), + ) + # To see log output, run pytest with '--log-cli=true 
--log-cli-level=INFO' + response = await client.post( + '/v1/message:send', json=json_format.MessageToDict(request) + ) + # request should always be successful + response.raise_for_status() + + actual_response = a2a_pb2.SendMessageResponse() + json_format.Parse(response.text, actual_response) + assert expected_response == actual_response + + +@pytest.mark.anyio +async def test_send_message_success_task( + client: AsyncClient, request_handler: MagicMock +) -> None: + expected_response = a2a_pb2.SendMessageResponse( + task=a2a_pb2.Task( + id='test_task_id', + context_id='test_context_id', + status=a2a_pb2.TaskStatus( + state=a2a_pb2.TaskState.TASK_STATE_COMPLETED, + update=a2a_pb2.Message( + message_id='test', + role=a2a_pb2.ROLE_AGENT, + content=[ + a2a_pb2.Part(text='response task message'), + ], + ), + ), + ), + ) + request_handler.on_message_send.return_value = Task( + id='test_task_id', + context_id='test_context_id', + status=TaskStatus( + state=TaskState.completed, + message=Message( + message_id='test', + role=Role.agent, + parts=[Part(TextPart(text='response task message'))], + ), + ), + ) + + request = a2a_pb2.SendMessageRequest( + request=a2a_pb2.Message(), + configuration=a2a_pb2.SendMessageConfiguration(), + ) + # To see log output, run pytest with '--log-cli=true --log-cli-level=INFO' + response = await client.post( + '/v1/message:send', json=json_format.MessageToDict(request) + ) + # request should always be successful + response.raise_for_status() + + actual_response = a2a_pb2.SendMessageResponse() + json_format.Parse(response.text, actual_response) + assert expected_response == actual_response + + +@pytest.mark.anyio +async def test_streaming_message_request_body_consumption( + streaming_client: AsyncClient, request_handler: MagicMock +) -> None: + """Test that streaming endpoint properly handles request body consumption. 
+ + This test verifies the fix for the deadlock issue where request.body() + was being consumed inside the EventSourceResponse context, causing + the application to hang indefinitely. + """ + + # Mock the async generator response from the request handler + async def mock_stream_response(): + """Mock streaming response generator.""" + yield Message( + message_id='stream_msg_1', + role=Role.agent, + parts=[Part(TextPart(text='First streaming response'))], + ) + yield Message( + message_id='stream_msg_2', + role=Role.agent, + parts=[Part(TextPart(text='Second streaming response'))], + ) + + request_handler.on_message_send_stream.return_value = mock_stream_response() + + # Create a valid streaming request + request = a2a_pb2.SendMessageRequest( + request=a2a_pb2.Message( + message_id='test_stream_msg', + role=a2a_pb2.ROLE_USER, + content=[a2a_pb2.Part(text='Test streaming message')], + ), + configuration=a2a_pb2.SendMessageConfiguration(), + ) + + # This should not hang indefinitely (previously it would due to the deadlock) + response = await streaming_client.post( + '/v1/message:stream', + json=json_format.MessageToDict(request), + headers={'Accept': 'text/event-stream'}, + timeout=10.0, # Reasonable timeout to prevent hanging in tests + ) + + # The response should be successful + response.raise_for_status() + assert response.status_code == 200 + assert 'text/event-stream' in response.headers.get('content-type', '') + + # Verify that the request handler was called + request_handler.on_message_send_stream.assert_called_once() + + +@pytest.mark.anyio +async def test_streaming_endpoint_with_invalid_content_type( + streaming_client: AsyncClient, request_handler: MagicMock +) -> None: + """Test streaming endpoint behavior with invalid content type.""" + + async def mock_stream_response(): + yield Message( + message_id='stream_msg_1', + role=Role.agent, + parts=[Part(TextPart(text='Response'))], + ) + + request_handler.on_message_send_stream.return_value = 
mock_stream_response() + + request = a2a_pb2.SendMessageRequest( + request=a2a_pb2.Message( + message_id='test_stream_msg', + role=a2a_pb2.ROLE_USER, + content=[a2a_pb2.Part(text='Test message')], + ), + configuration=a2a_pb2.SendMessageConfiguration(), + ) + + # Send request without proper event-stream headers + response = await streaming_client.post( + '/v1/message:stream', + json=json_format.MessageToDict(request), + timeout=10.0, + ) + + # Should still succeed (the adapter handles content-type internally) + response.raise_for_status() + assert response.status_code == 200 + + +@pytest.mark.anyio +async def test_send_message_rejected_task( + client: AsyncClient, request_handler: MagicMock +) -> None: + expected_response = a2a_pb2.SendMessageResponse( + task=a2a_pb2.Task( + id='test_task_id', + context_id='test_context_id', + status=a2a_pb2.TaskStatus( + state=a2a_pb2.TaskState.TASK_STATE_REJECTED, + update=a2a_pb2.Message( + message_id='test', + role=a2a_pb2.ROLE_AGENT, + content=[ + a2a_pb2.Part(text="I don't want to work"), + ], + ), + ), + ), + ) + request_handler.on_message_send.return_value = Task( + id='test_task_id', + context_id='test_context_id', + status=TaskStatus( + state=TaskState.rejected, + message=Message( + message_id='test', + role=Role.agent, + parts=[Part(TextPart(text="I don't want to work"))], + ), + ), + ) + request = a2a_pb2.SendMessageRequest( + request=a2a_pb2.Message(), + configuration=a2a_pb2.SendMessageConfiguration(), + ) + + response = await client.post( + '/v1/message:send', json=json_format.MessageToDict(request) + ) + + response.raise_for_status() + actual_response = a2a_pb2.SendMessageResponse() + json_format.Parse(response.text, actual_response) + assert expected_response == actual_response + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/tests/server/events/test_event_consumer.py b/tests/server/events/test_event_consumer.py index 08111a2bd..d306418ec 100644 --- a/tests/server/events/test_event_consumer.py 
+++ b/tests/server/events/test_event_consumer.py @@ -1,11 +1,13 @@ import asyncio from typing import Any -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, patch import pytest -from a2a.server.events.event_consumer import EventConsumer +from pydantic import ValidationError + +from a2a.server.events.event_consumer import EventConsumer, QueueClosed from a2a.server.events.event_queue import EventQueue from a2a.types import ( A2AError, @@ -26,7 +28,7 @@ MINIMAL_TASK: dict[str, Any] = { 'id': '123', - 'contextId': 'session-xyz', + 'context_id': 'session-xyz', 'status': {'state': 'submitted'}, 'kind': 'task', } @@ -34,7 +36,7 @@ MESSAGE_PAYLOAD: dict[str, Any] = { 'role': 'agent', 'parts': [{'text': 'test message'}], - 'messageId': '111', + 'message_id': '111', } @@ -48,6 +50,14 @@ def event_consumer(mock_event_queue: EventQueue): return EventConsumer(queue=mock_event_queue) +def test_init_logs_debug_message(mock_event_queue: EventQueue): + """Test that __init__ logs a debug message.""" + # Patch the logger instance within the module where EventConsumer is defined + with patch('a2a.server.events.event_consumer.logger') as mock_logger: + EventConsumer(queue=mock_event_queue) # Instantiate to trigger __init__ + mock_logger.debug.assert_called_once_with('EventConsumer initialized') + + @pytest.mark.asyncio async def test_consume_one_task_event( event_consumer: MagicMock, @@ -118,15 +128,15 @@ async def test_consume_all_multiple_events( events: list[Any] = [ Task(**MINIMAL_TASK), TaskArtifactUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', artifact=Artifact( - artifactId='11', parts=[Part(TextPart(text='text'))] + artifact_id='11', parts=[Part(TextPart(text='text'))] ), ), TaskStatusUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', status=TaskStatus(state=TaskState.working), final=True, ), @@ -139,6 +149,7 @@ 
async def mock_dequeue() -> Any: event = events[cursor] cursor += 1 return event + return None mock_event_queue.dequeue_event = mock_dequeue consumed_events: list[Any] = [] @@ -159,16 +170,16 @@ async def test_consume_until_message( events: list[Any] = [ Task(**MINIMAL_TASK), TaskArtifactUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', artifact=Artifact( - artifactId='11', parts=[Part(TextPart(text='text'))] + artifact_id='11', parts=[Part(TextPart(text='text'))] ), ), Message(**MESSAGE_PAYLOAD), TaskStatusUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', status=TaskStatus(state=TaskState.working), final=True, ), @@ -181,6 +192,7 @@ async def mock_dequeue() -> Any: event = events[cursor] cursor += 1 return event + return None mock_event_queue.dequeue_event = mock_dequeue consumed_events: list[Any] = [] @@ -210,6 +222,7 @@ async def mock_dequeue() -> Any: event = events[cursor] cursor += 1 return event + return None mock_event_queue.dequeue_event = mock_dequeue consumed_events: list[Any] = [] @@ -219,3 +232,226 @@ async def mock_dequeue() -> Any: assert len(consumed_events) == 1 assert consumed_events[0] == events[0] assert mock_event_queue.task_done.call_count == 1 + + +@pytest.mark.asyncio +async def test_consume_all_raises_stored_exception( + event_consumer: EventConsumer, +): + """Test that consume_all raises an exception if _exception is set.""" + sample_exception = RuntimeError('Simulated agent error') + event_consumer._exception = sample_exception + + with pytest.raises(RuntimeError, match='Simulated agent error'): + async for _ in event_consumer.consume_all(): + pass # Should not reach here + + +@pytest.mark.asyncio +async def test_consume_all_stops_on_queue_closed_and_confirmed_closed( + event_consumer: EventConsumer, mock_event_queue: AsyncMock +): + """Test consume_all stops if QueueClosed is raised and queue.is_closed() is True.""" + 
# Simulate the queue raising QueueClosed (which is asyncio.QueueEmpty or QueueShutdown) + mock_event_queue.dequeue_event.side_effect = QueueClosed( + 'Queue is empty/closed' + ) + # Simulate the queue confirming it's closed + mock_event_queue.is_closed.return_value = True + + consumed_events = [] + async for event in event_consumer.consume_all(): + consumed_events.append(event) # Should not happen + + assert ( + len(consumed_events) == 0 + ) # No events should be consumed as it breaks on QueueClosed + mock_event_queue.dequeue_event.assert_called_once() # Should attempt to dequeue once + mock_event_queue.is_closed.assert_called_once() # Should check if closed + + +@pytest.mark.asyncio +async def test_consume_all_continues_on_queue_empty_if_not_really_closed( + event_consumer: EventConsumer, mock_event_queue: AsyncMock +): + """Test that QueueClosed with is_closed=False allows loop to continue via timeout.""" + payload = MESSAGE_PAYLOAD.copy() + payload['message_id'] = 'final_event_id' + final_event = Message(**payload) + + # Setup dequeue_event behavior: + # 1. Raise QueueClosed (e.g., asyncio.QueueEmpty) + # 2. Return the final_event + # 3. Raise QueueClosed again (to terminate after final_event) + dequeue_effects = [ + QueueClosed('Simulated temporary empty'), + final_event, + QueueClosed('Queue closed after final event'), + ] + mock_event_queue.dequeue_event.side_effect = dequeue_effects + + # Setup is_closed behavior: + # 1. False when QueueClosed is first raised (so loop doesn't break) + # 2. True after final_event is processed and QueueClosed is raised again + is_closed_effects = [False, True] + mock_event_queue.is_closed.side_effect = is_closed_effects + + # Patch asyncio.wait_for used inside consume_all + # The goal is that the first QueueClosed leads to a TimeoutError inside consume_all, + # the loop continues, and then the final_event is fetched. 
+ + # To reliably test the timeout behavior within consume_all, we adjust the consumer's + # internal timeout to be very short for the test. + event_consumer._timeout = 0.001 + + consumed_events = [] + async for event in event_consumer.consume_all(): + consumed_events.append(event) + + assert len(consumed_events) == 1 + assert consumed_events[0] == final_event + + # Dequeue attempts: + # 1. Raises QueueClosed (is_closed=False, leads to TimeoutError, loop continues) + # 2. Returns final_event (which is a Message, causing consume_all to break) + assert ( + mock_event_queue.dequeue_event.call_count == 2 + ) # Only two calls needed + + # is_closed calls: + # 1. After first QueueClosed (returns False) + # The second QueueClosed is not reached because Message breaks the loop. + assert mock_event_queue.is_closed.call_count == 1 + + +@pytest.mark.asyncio +async def test_consume_all_handles_queue_empty_when_closed_python_version_agnostic( + event_consumer: EventConsumer, mock_event_queue: AsyncMock, monkeypatch +): + """Ensure consume_all stops with no events when queue is closed and dequeue_event raises asyncio.QueueEmpty (Python version-agnostic).""" + # Make QueueClosed a distinct exception (not QueueEmpty) to emulate py3.13 semantics + from a2a.server.events import event_consumer as ec + + class QueueShutDown(Exception): + pass + + monkeypatch.setattr(ec, 'QueueClosed', QueueShutDown, raising=True) + + # Simulate queue reporting closed while dequeue raises QueueEmpty + mock_event_queue.dequeue_event.side_effect = asyncio.QueueEmpty( + 'closed/empty' + ) + mock_event_queue.is_closed.return_value = True + + consumed_events = [] + async for event in event_consumer.consume_all(): + consumed_events.append(event) + + assert consumed_events == [] + mock_event_queue.dequeue_event.assert_called_once() + mock_event_queue.is_closed.assert_called_once() + + +@pytest.mark.asyncio +async def test_consume_all_continues_on_queue_empty_when_not_closed( + event_consumer: EventConsumer, 
mock_event_queue: AsyncMock, monkeypatch +): + """Ensure consume_all continues after asyncio.QueueEmpty when queue is open, yielding the next (final) event.""" + # First dequeue raises QueueEmpty (transient empty), then a final Message arrives + final = Message(role='agent', parts=[{'text': 'done'}], message_id='final') + mock_event_queue.dequeue_event.side_effect = [ + asyncio.QueueEmpty('temporarily empty'), + final, + ] + mock_event_queue.is_closed.return_value = False + + # Make the polling responsive in tests + event_consumer._timeout = 0.001 + + consumed = [] + async for ev in event_consumer.consume_all(): + consumed.append(ev) + + assert consumed == [final] + assert mock_event_queue.dequeue_event.call_count == 2 + mock_event_queue.is_closed.assert_called_once() + + +def test_agent_task_callback_sets_exception(event_consumer: EventConsumer): + """Test that agent_task_callback sets _exception if the task had one.""" + mock_task = MagicMock(spec=asyncio.Task) + mock_task.cancelled.return_value = False + mock_task.done.return_value = True + sample_exception = ValueError('Task failed') + mock_task.exception.return_value = sample_exception + + event_consumer.agent_task_callback(mock_task) + + assert event_consumer._exception == sample_exception + mock_task.exception.assert_called_once() + + +def test_agent_task_callback_no_exception(event_consumer: EventConsumer): + """Test that agent_task_callback does nothing if the task has no exception.""" + mock_task = MagicMock(spec=asyncio.Task) + mock_task.cancelled.return_value = False + mock_task.done.return_value = True + mock_task.exception.return_value = None # No exception + + event_consumer.agent_task_callback(mock_task) + + assert event_consumer._exception is None # Should remain None + mock_task.exception.assert_called_once() + + +def test_agent_task_callback_cancelled_task(event_consumer: EventConsumer): + """Test that agent_task_callback does nothing if the task has no exception.""" + mock_task = 
MagicMock(spec=asyncio.Task) + mock_task.cancelled.return_value = True + mock_task.done.return_value = True + sample_exception = ValueError('Task still running') + mock_task.exception.return_value = sample_exception + + event_consumer.agent_task_callback(mock_task) + + assert event_consumer._exception is None # Should remain None + mock_task.exception.assert_not_called() + + +def test_agent_task_callback_not_done_task(event_consumer: EventConsumer): + """Test that agent_task_callback does nothing if the task has no exception.""" + mock_task = MagicMock(spec=asyncio.Task) + mock_task.cancelled.return_value = False + mock_task.done.return_value = False + sample_exception = ValueError('Task is cancelled') + mock_task.exception.return_value = sample_exception + + event_consumer.agent_task_callback(mock_task) + + assert event_consumer._exception is None # Should remain None + mock_task.exception.assert_not_called() + + +@pytest.mark.asyncio +async def test_consume_all_handles_validation_error( + event_consumer: EventConsumer, mock_event_queue: AsyncMock +): + """Test that consume_all gracefully handles a pydantic.ValidationError.""" + # Simulate dequeue_event raising a ValidationError + mock_event_queue.dequeue_event.side_effect = [ + ValidationError.from_exception_data(title='Test Error', line_errors=[]), + asyncio.CancelledError, # To stop the loop for the test + ] + + with patch( + 'a2a.server.events.event_consumer.logger.error' + ) as logger_error_mock: + with pytest.raises(asyncio.CancelledError): + async for _ in event_consumer.consume_all(): + pass + + # Check that the specific error was logged and the consumer continued + logger_error_mock.assert_called_once() + assert ( + 'Invalid event format received' in logger_error_mock.call_args[0][0] + ) diff --git a/tests/server/events/test_event_queue.py b/tests/server/events/test_event_queue.py index 8a9c163e8..96ded9580 100644 --- a/tests/server/events/test_event_queue.py +++ b/tests/server/events/test_event_queue.py 
@@ -1,32 +1,42 @@ import asyncio +import sys + +from typing import Any +from unittest.mock import ( + AsyncMock, + MagicMock, + patch, +) + import pytest -from a2a.server.events.event_queue import EventQueue + +from a2a.server.events.event_queue import DEFAULT_MAX_QUEUE_SIZE, EventQueue from a2a.types import ( A2AError, + Artifact, JSONRPCError, Message, + Part, Task, TaskArtifactUpdateEvent, - TaskStatusUpdateEvent, - TaskStatus, + TaskNotFoundError, TaskState, - Artifact, - Part, + TaskStatus, + TaskStatusUpdateEvent, TextPart, - TaskNotFoundError, ) -from typing import Any + MINIMAL_TASK: dict[str, Any] = { 'id': '123', - 'contextId': 'session-xyz', + 'context_id': 'session-xyz', 'status': {'state': 'submitted'}, 'kind': 'task', } MESSAGE_PAYLOAD: dict[str, Any] = { 'role': 'agent', 'parts': [{'text': 'test message'}], - 'messageId': '111', + 'message_id': '111', } @@ -35,11 +45,36 @@ def event_queue() -> EventQueue: return EventQueue() +def test_constructor_default_max_queue_size() -> None: + """Test that the queue is created with the default max size.""" + eq = EventQueue() + assert eq.queue.maxsize == DEFAULT_MAX_QUEUE_SIZE + + +def test_constructor_max_queue_size() -> None: + """Test that the asyncio.Queue is created with the specified max_queue_size.""" + custom_size = 123 + eq = EventQueue(max_queue_size=custom_size) + assert eq.queue.maxsize == custom_size + + +def test_constructor_invalid_max_queue_size() -> None: + """Test that a ValueError is raised for non-positive max_queue_size.""" + with pytest.raises( + ValueError, match='max_queue_size must be greater than 0' + ): + EventQueue(max_queue_size=0) + with pytest.raises( + ValueError, match='max_queue_size must be greater than 0' + ): + EventQueue(max_queue_size=-10) + + @pytest.mark.asyncio async def test_enqueue_and_dequeue_event(event_queue: EventQueue) -> None: """Test that an event can be enqueued and dequeued.""" event = Message(**MESSAGE_PAYLOAD) - event_queue.enqueue_event(event) + await 
event_queue.enqueue_event(event) dequeued_event = await event_queue.dequeue_event() assert dequeued_event == event @@ -48,7 +83,7 @@ async def test_enqueue_and_dequeue_event(event_queue: EventQueue) -> None: async def test_dequeue_event_no_wait(event_queue: EventQueue) -> None: """Test dequeue_event with no_wait=True.""" event = Task(**MINIMAL_TASK) - event_queue.enqueue_event(event) + await event_queue.enqueue_event(event) dequeued_event = await event_queue.dequeue_event(no_wait=True) assert dequeued_event == event @@ -66,12 +101,12 @@ async def test_dequeue_event_empty_queue_no_wait( async def test_dequeue_event_wait(event_queue: EventQueue) -> None: """Test dequeue_event with the default wait behavior.""" event = TaskStatusUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', status=TaskStatus(state=TaskState.working), final=True, ) - event_queue.enqueue_event(event) + await event_queue.enqueue_event(event) dequeued_event = await event_queue.dequeue_event() assert dequeued_event == event @@ -80,11 +115,13 @@ async def test_dequeue_event_wait(event_queue: EventQueue) -> None: async def test_task_done(event_queue: EventQueue) -> None: """Test the task_done method.""" event = TaskArtifactUpdateEvent( - taskId='task_123', - contextId='session-xyz', - artifact=Artifact(artifactId='11', parts=[Part(TextPart(text='text'))]), + task_id='task_123', + context_id='session-xyz', + artifact=Artifact( + artifact_id='11', parts=[Part(TextPart(text='text'))] + ), ) - event_queue.enqueue_event(event) + await event_queue.enqueue_event(event) _ = await event_queue.dequeue_event() event_queue.task_done() @@ -99,6 +136,373 @@ async def test_enqueue_different_event_types( JSONRPCError(code=111, message='rpc error'), ] for event in events: - event_queue.enqueue_event(event) + await event_queue.enqueue_event(event) dequeued_event = await event_queue.dequeue_event() assert dequeued_event == event + + +@pytest.mark.asyncio +async 
def test_enqueue_event_propagates_to_children( + event_queue: EventQueue, +) -> None: + """Test that events are enqueued to tapped child queues.""" + child_queue1 = event_queue.tap() + child_queue2 = event_queue.tap() + + event1 = Message(**MESSAGE_PAYLOAD) + event2 = Task(**MINIMAL_TASK) + + await event_queue.enqueue_event(event1) + await event_queue.enqueue_event(event2) + + # Check parent queue + assert await event_queue.dequeue_event(no_wait=True) == event1 + assert await event_queue.dequeue_event(no_wait=True) == event2 + + # Check child queue 1 + assert await child_queue1.dequeue_event(no_wait=True) == event1 + assert await child_queue1.dequeue_event(no_wait=True) == event2 + + # Check child queue 2 + assert await child_queue2.dequeue_event(no_wait=True) == event1 + assert await child_queue2.dequeue_event(no_wait=True) == event2 + + +@pytest.mark.asyncio +async def test_enqueue_event_when_closed( + event_queue: EventQueue, expected_queue_closed_exception: type[Exception] +) -> None: + """Test that no event is enqueued if the parent queue is closed.""" + await event_queue.close() # Close the queue first + + event = Message(**MESSAGE_PAYLOAD) + # Attempt to enqueue, should do nothing or log a warning as per implementation + await event_queue.enqueue_event(event) + + # Verify the queue is still empty + with pytest.raises(expected_queue_closed_exception): + await event_queue.dequeue_event(no_wait=True) + + # Also verify child queues are not affected directly by parent's enqueue attempt when closed + # (though they would be closed too by propagation) + child_queue = ( + event_queue.tap() + ) # Tap after close might be weird, but let's see + # The current implementation would add it to _children + # and then child.close() would be called. 
+ # A more robust test for child propagation is in test_close_propagates + await ( + child_queue.close() + ) # ensure child is also seen as closed for this test's purpose + with pytest.raises(expected_queue_closed_exception): + await child_queue.dequeue_event(no_wait=True) + + +@pytest.fixture +def expected_queue_closed_exception() -> type[Exception]: + if sys.version_info < (3, 13): + return asyncio.QueueEmpty + return asyncio.QueueShutDown + + +@pytest.mark.asyncio +async def test_dequeue_event_closed_and_empty_no_wait( + event_queue: EventQueue, expected_queue_closed_exception: type[Exception] +) -> None: + """Test dequeue_event raises QueueEmpty when closed, empty, and no_wait=True.""" + await event_queue.close() + assert event_queue.is_closed() + # Ensure queue is actually empty (e.g. by trying a non-blocking get on internal queue) + with pytest.raises(expected_queue_closed_exception): + event_queue.queue.get_nowait() + + with pytest.raises(expected_queue_closed_exception): + await event_queue.dequeue_event(no_wait=True) + + +@pytest.mark.asyncio +async def test_dequeue_event_closed_and_empty_waits_then_raises( + event_queue: EventQueue, expected_queue_closed_exception: type[Exception] +) -> None: + """Test dequeue_event raises QueueEmpty eventually when closed, empty, and no_wait=False.""" + await event_queue.close() + assert event_queue.is_closed() + with pytest.raises(expected_queue_closed_exception): + event_queue.queue.get_nowait() # verify internal queue is empty + + # This test is tricky because await event_queue.dequeue_event() would hang if not for the close check. + # The current implementation's dequeue_event checks `is_closed` first. + # If closed and empty, it raises QueueEmpty immediately (on Python <= 3.12). + # On Python 3.13+, this check is skipped and asyncio.Queue.get() raises QueueShutDown instead. + # The "waits_then_raises" scenario described in the subtask implies the `get()` might wait. 
+ # However, the current code: + # async with self._lock: + # if self._is_closed and self.queue.empty(): + # logger.warning('Queue is closed. Event will not be dequeued.') + # raise asyncio.QueueEmpty('Queue is closed.') + # event = await self.queue.get() -> this line is not reached if closed and empty. + + # So, for the current implementation, it will raise QueueEmpty immediately. + with pytest.raises(expected_queue_closed_exception): + await event_queue.dequeue_event(no_wait=False) + + # If the implementation were to change to allow `await self.queue.get()` + # to be called even when closed (to drain it), then a timeout test would be needed. + # For now, testing the current behavior. + # Example of a timeout test if it were to wait: + # with pytest.raises(asyncio.TimeoutError): # Or QueueEmpty if that's what join/shutdown causes get() to raise + # await asyncio.wait_for(event_queue.dequeue_event(no_wait=False), timeout=0.01) + + +@pytest.mark.asyncio +async def test_tap_creates_child_queue(event_queue: EventQueue) -> None: + """Test that tap creates a new EventQueue and adds it to children.""" + initial_children_count = len(event_queue._children) + + child_queue = event_queue.tap() + + assert isinstance(child_queue, EventQueue) + assert child_queue != event_queue # Ensure it's a new instance + assert len(event_queue._children) == initial_children_count + 1 + assert child_queue in event_queue._children + + # Test that the new child queue has the default max size (or specific if tap could configure it) + assert child_queue.queue.maxsize == DEFAULT_MAX_QUEUE_SIZE + + +@pytest.mark.asyncio +async def test_close_sets_flag_and_handles_internal_queue_old_python( + event_queue: EventQueue, +) -> None: + """Test close behavior on Python < 3.13 (using queue.join).""" + with patch('sys.version_info', (3, 12, 0)): # Simulate older Python + # Mock queue.join as it's called in older versions + event_queue.queue.join = AsyncMock() + + await event_queue.close() + + assert 
event_queue.is_closed() is True + event_queue.queue.join.assert_awaited_once() # waited for drain + + +@pytest.mark.asyncio +async def test_close_sets_flag_and_handles_internal_queue_new_python( + event_queue: EventQueue, +) -> None: + """Test close behavior on Python >= 3.13 (using queue.shutdown).""" + with patch('sys.version_info', (3, 13, 0)): + # Inject a dummy shutdown method for non-3.13 runtimes + from typing import cast + + queue = cast('Any', event_queue.queue) + queue.shutdown = MagicMock() # type: ignore[attr-defined] + await event_queue.close() + assert event_queue.is_closed() is True + queue.shutdown.assert_called_once_with(False) + + +@pytest.mark.asyncio +async def test_close_graceful_py313_waits_for_join_and_children( + event_queue: EventQueue, +) -> None: + """For Python >=3.13 and immediate=False, close should shut down(False), then wait for join and children.""" + with patch('sys.version_info', (3, 13, 0)): + # Arrange + from typing import cast + + q_any = cast('Any', event_queue.queue) + q_any.shutdown = MagicMock() # type: ignore[attr-defined] + event_queue.queue.join = AsyncMock() + + child = event_queue.tap() + child.close = AsyncMock() + + # Act + await event_queue.close(immediate=False) + + # Assert + event_queue.queue.join.assert_awaited_once() + child.close.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_close_propagates_to_children(event_queue: EventQueue) -> None: + """Test that close() is called on all child queues.""" + child_queue1 = event_queue.tap() + child_queue2 = event_queue.tap() + + # Mock the close method of children to verify they are called + child_queue1.close = AsyncMock() + child_queue2.close = AsyncMock() + + await event_queue.close() + + child_queue1.close.assert_awaited_once() + child_queue2.close.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_close_idempotent(event_queue: EventQueue) -> None: + """Test that calling close() multiple times doesn't cause errors and only acts once.""" + 
# Mock the internal queue's join or shutdown to see how many times it's effectively called + with patch( + 'sys.version_info', (3, 12, 0) + ): # Test with older version logic first + event_queue.queue.join = AsyncMock() + await event_queue.close() + assert event_queue.is_closed() is True + event_queue.queue.join.assert_called_once() # Called first time + + # Call close again + await event_queue.close() + assert event_queue.is_closed() is True + event_queue.queue.join.assert_called_once() # Still only called once + + # Reset for new Python version test + event_queue_new = EventQueue() # New queue for fresh state + with patch('sys.version_info', (3, 13, 0)): + from typing import cast + + queue = cast('Any', event_queue_new.queue) + queue.shutdown = MagicMock() # type: ignore[attr-defined] + await event_queue_new.close() + assert event_queue_new.is_closed() is True + queue.shutdown.assert_called_once() + + await event_queue_new.close() + assert event_queue_new.is_closed() is True + queue.shutdown.assert_called_once() # Still only called once + + +@pytest.mark.asyncio +async def test_is_closed_reflects_state(event_queue: EventQueue) -> None: + """Test that is_closed() returns the correct state before and after closing.""" + assert event_queue.is_closed() is False # Initially open + + await event_queue.close() + + assert event_queue.is_closed() is True # Closed after calling close() + + +@pytest.mark.asyncio +async def test_close_with_immediate_true(event_queue: EventQueue) -> None: + """Test close with immediate=True clears events immediately.""" + # Add some events to the queue + event1 = Message(**MESSAGE_PAYLOAD) + event2 = Task(**MINIMAL_TASK) + await event_queue.enqueue_event(event1) + await event_queue.enqueue_event(event2) + + # Verify events are in queue + assert not event_queue.queue.empty() + + # Close with immediate=True + await event_queue.close(immediate=True) + + # Verify queue is closed and empty + assert event_queue.is_closed() is True + assert 
event_queue.queue.empty() + + +@pytest.mark.asyncio +async def test_close_immediate_propagates_to_children( + event_queue: EventQueue, +) -> None: + """Test that immediate parameter is propagated to child queues.""" + child_queue = event_queue.tap() + + # Add events to both parent and child + event = Message(**MESSAGE_PAYLOAD) + await event_queue.enqueue_event(event) + + assert child_queue.is_closed() is False + assert child_queue.queue.empty() is False + + # close event queue + await event_queue.close(immediate=True) + + # Verify child queue was called and empty with immediate=True + assert child_queue.is_closed() is True + assert child_queue.queue.empty() + + +@pytest.mark.asyncio +async def test_clear_events_current_queue_only(event_queue: EventQueue) -> None: + """Test clear_events clears only the current queue when clear_child_queues=False.""" + child_queue = event_queue.tap() + event1 = Message(**MESSAGE_PAYLOAD) + event2 = Task(**MINIMAL_TASK) + await event_queue.enqueue_event(event1) + await event_queue.enqueue_event(event2) + + # Clear only parent queue + await event_queue.clear_events(clear_child_queues=False) + + # Verify parent queue is empty + assert event_queue.queue.empty() + + # Verify child queue still has its event + assert not child_queue.queue.empty() + assert child_queue.is_closed() is False + + dequeued_child_event = await child_queue.dequeue_event(no_wait=True) + assert dequeued_child_event == event1 + + +@pytest.mark.asyncio +async def test_clear_events_with_children(event_queue: EventQueue) -> None: + """Test clear_events clears both current queue and child queues.""" + # Create child queues and add events + child_queue1 = event_queue.tap() + child_queue2 = event_queue.tap() + + # Add events to parent queue + event1 = Message(**MESSAGE_PAYLOAD) + event2 = Task(**MINIMAL_TASK) + await event_queue.enqueue_event(event1) + await event_queue.enqueue_event(event2) + + # Clear all queues + await event_queue.clear_events(clear_child_queues=True) + 
+ # Verify all queues are empty + assert event_queue.queue.empty() + assert child_queue1.queue.empty() + assert child_queue2.queue.empty() + + +@pytest.mark.asyncio +async def test_clear_events_empty_queue(event_queue: EventQueue) -> None: + """Test clear_events works correctly with empty queue.""" + # Verify queue is initially empty + assert event_queue.queue.empty() + + # Clear events from empty queue + await event_queue.clear_events() + + # Verify queue remains empty + assert event_queue.queue.empty() + + +@pytest.mark.asyncio +async def test_clear_events_closed_queue(event_queue: EventQueue) -> None: + """Test clear_events works correctly with closed queue.""" + # Add events and close queue + + with patch('sys.version_info', (3, 12, 0)): # Simulate older Python + # Mock queue.join as it's called in older versions + event_queue.queue.join = AsyncMock() + + event = Message(**MESSAGE_PAYLOAD) + await event_queue.enqueue_event(event) + await event_queue.close() + + # Verify queue is closed but not empty + assert event_queue.is_closed() is True + assert not event_queue.queue.empty() + + # Clear events from closed queue + await event_queue.clear_events() + + # Verify queue is now empty + assert event_queue.queue.empty() diff --git a/tests/server/events/test_inmemory_queue_manager.py b/tests/server/events/test_inmemory_queue_manager.py index 8371903ca..b51334a95 100644 --- a/tests/server/events/test_inmemory_queue_manager.py +++ b/tests/server/events/test_inmemory_queue_manager.py @@ -14,34 +14,38 @@ class TestInMemoryQueueManager: @pytest.fixture - def queue_manager(self): + def queue_manager(self) -> InMemoryQueueManager: """Fixture to create a fresh InMemoryQueueManager for each test.""" - manager = InMemoryQueueManager() - return manager + return InMemoryQueueManager() @pytest.fixture - def event_queue(self): + def event_queue(self) -> MagicMock: """Fixture to create a mock EventQueue.""" queue = MagicMock(spec=EventQueue) + # Mock the tap method to return itself 
queue.tap.return_value = queue return queue @pytest.mark.asyncio - async def test_init(self, queue_manager): + async def test_init(self, queue_manager: InMemoryQueueManager) -> None: """Test that the InMemoryQueueManager initializes with empty task queue and a lock.""" assert queue_manager._task_queue == {} assert isinstance(queue_manager._lock, asyncio.Lock) @pytest.mark.asyncio - async def test_add_new_queue(self, queue_manager, event_queue): + async def test_add_new_queue( + self, queue_manager: InMemoryQueueManager, event_queue: MagicMock + ) -> None: """Test adding a new queue to the manager.""" task_id = 'test_task_id' await queue_manager.add(task_id, event_queue) assert queue_manager._task_queue[task_id] == event_queue @pytest.mark.asyncio - async def test_add_existing_queue(self, queue_manager, event_queue): + async def test_add_existing_queue( + self, queue_manager: InMemoryQueueManager, event_queue: MagicMock + ) -> None: """Test adding a queue with an existing task_id raises TaskQueueExists.""" task_id = 'test_task_id' await queue_manager.add(task_id, event_queue) @@ -50,7 +54,9 @@ async def test_add_existing_queue(self, queue_manager, event_queue): await queue_manager.add(task_id, event_queue) @pytest.mark.asyncio - async def test_get_existing_queue(self, queue_manager, event_queue): + async def test_get_existing_queue( + self, queue_manager: InMemoryQueueManager, event_queue: MagicMock + ) -> None: """Test getting an existing queue returns the queue.""" task_id = 'test_task_id' await queue_manager.add(task_id, event_queue) @@ -59,13 +65,17 @@ async def test_get_existing_queue(self, queue_manager, event_queue): assert result == event_queue @pytest.mark.asyncio - async def test_get_nonexistent_queue(self, queue_manager): + async def test_get_nonexistent_queue( + self, queue_manager: InMemoryQueueManager + ) -> None: """Test getting a nonexistent queue returns None.""" result = await queue_manager.get('nonexistent_task_id') assert result is None 
@pytest.mark.asyncio - async def test_tap_existing_queue(self, queue_manager, event_queue): + async def test_tap_existing_queue( + self, queue_manager: InMemoryQueueManager, event_queue: MagicMock + ) -> None: """Test tapping an existing queue returns the tapped queue.""" task_id = 'test_task_id' await queue_manager.add(task_id, event_queue) @@ -75,13 +85,17 @@ async def test_tap_existing_queue(self, queue_manager, event_queue): event_queue.tap.assert_called_once() @pytest.mark.asyncio - async def test_tap_nonexistent_queue(self, queue_manager): + async def test_tap_nonexistent_queue( + self, queue_manager: InMemoryQueueManager + ) -> None: """Test tapping a nonexistent queue returns None.""" result = await queue_manager.tap('nonexistent_task_id') assert result is None @pytest.mark.asyncio - async def test_close_existing_queue(self, queue_manager, event_queue): + async def test_close_existing_queue( + self, queue_manager: InMemoryQueueManager, event_queue: MagicMock + ) -> None: """Test closing an existing queue removes it from the manager.""" task_id = 'test_task_id' await queue_manager.add(task_id, event_queue) @@ -90,13 +104,17 @@ async def test_close_existing_queue(self, queue_manager, event_queue): assert task_id not in queue_manager._task_queue @pytest.mark.asyncio - async def test_close_nonexistent_queue(self, queue_manager): + async def test_close_nonexistent_queue( + self, queue_manager: InMemoryQueueManager + ) -> None: """Test closing a nonexistent queue raises NoTaskQueue.""" with pytest.raises(NoTaskQueue): await queue_manager.close('nonexistent_task_id') @pytest.mark.asyncio - async def test_create_or_tap_new_queue(self, queue_manager): + async def test_create_or_tap_new_queue( + self, queue_manager: InMemoryQueueManager + ) -> None: """Test create_or_tap with a new task_id creates and returns a new queue.""" task_id = 'test_task_id' @@ -106,8 +124,8 @@ async def test_create_or_tap_new_queue(self, queue_manager): @pytest.mark.asyncio async def 
test_create_or_tap_existing_queue( - self, queue_manager, event_queue - ): + self, queue_manager: InMemoryQueueManager, event_queue: MagicMock + ) -> None: """Test create_or_tap with an existing task_id taps and returns the existing queue.""" task_id = 'test_task_id' await queue_manager.add(task_id, event_queue) @@ -118,7 +136,9 @@ async def test_create_or_tap_existing_queue( event_queue.tap.assert_called_once() @pytest.mark.asyncio - async def test_concurrency(self, queue_manager): + async def test_concurrency( + self, queue_manager: InMemoryQueueManager + ) -> None: """Test concurrent access to the queue manager.""" async def add_task(task_id): diff --git a/tests/server/request_handlers/test_default_request_handler.py b/tests/server/request_handlers/test_default_request_handler.py new file mode 100644 index 000000000..88dd77ab4 --- /dev/null +++ b/tests/server/request_handlers/test_default_request_handler.py @@ -0,0 +1,2646 @@ +import asyncio +import contextlib +import logging +import time + +from unittest.mock import ( + AsyncMock, + MagicMock, + PropertyMock, + patch, +) + +import pytest + +from a2a.server.agent_execution import ( + AgentExecutor, + RequestContext, + RequestContextBuilder, + SimpleRequestContextBuilder, +) +from a2a.server.context import ServerCallContext +from a2a.server.events import EventQueue, InMemoryQueueManager, QueueManager +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.tasks import ( + InMemoryPushNotificationConfigStore, + InMemoryTaskStore, + PushNotificationConfigStore, + PushNotificationSender, + ResultAggregator, + TaskStore, + TaskUpdater, +) +from a2a.types import ( + DeleteTaskPushNotificationConfigParams, + GetTaskPushNotificationConfigParams, + InternalError, + InvalidParamsError, + ListTaskPushNotificationConfigParams, + Message, + MessageSendConfiguration, + MessageSendParams, + Part, + PushNotificationConfig, + Role, + Task, + TaskIdParams, + TaskNotFoundError, + TaskPushNotificationConfig, 
+ TaskQueryParams, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, + TextPart, + UnsupportedOperationError, +) +from a2a.utils import ( + new_task, +) + + +class DummyAgentExecutor(AgentExecutor): + async def execute(self, context: RequestContext, event_queue: EventQueue): + task_updater = TaskUpdater( + event_queue, context.task_id, context.context_id + ) + async for i in self._run(): + parts = [Part(root=TextPart(text=f'Event {i}'))] + try: + await task_updater.update_status( + TaskState.working, + message=task_updater.new_agent_message(parts), + ) + except RuntimeError: + # Stop processing when the event loop is closed + break + + async def _run(self): + for i in range(1_000_000): # Simulate a long-running stream + yield i + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + pass + + +# Helper to create a simple task for tests +def create_sample_task( + task_id='task1', status_state=TaskState.submitted, context_id='ctx1' +) -> Task: + return Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=status_state), + ) + + +# Helper to create ServerCallContext +def create_server_call_context() -> ServerCallContext: + # Assuming UnauthenticatedUser is available or can be imported + from a2a.auth.user import UnauthenticatedUser + + return ServerCallContext(user=UnauthenticatedUser()) + + +def test_init_default_dependencies(): + """Test that default dependencies are created if not provided.""" + agent_executor = DummyAgentExecutor() + task_store = InMemoryTaskStore() + + handler = DefaultRequestHandler( + agent_executor=agent_executor, task_store=task_store + ) + + assert isinstance(handler._queue_manager, InMemoryQueueManager) + assert isinstance( + handler._request_context_builder, SimpleRequestContextBuilder + ) + assert handler._push_config_store is None + assert handler._push_sender is None + assert ( + handler._request_context_builder._should_populate_referred_tasks + is False + ) + assert 
handler._request_context_builder._task_store == task_store + + +@pytest.mark.asyncio +async def test_on_get_task_not_found(): + """Test on_get_task when task_store.get returns None.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), task_store=mock_task_store + ) + + params = TaskQueryParams(id='non_existent_task') + + from a2a.utils.errors import ServerError # Local import for ServerError + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + await request_handler.on_get_task(params, context) + + assert isinstance(exc_info.value.error, TaskNotFoundError) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + + +@pytest.mark.asyncio +async def test_on_cancel_task_task_not_found(): + """Test on_cancel_task when the task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), task_store=mock_task_store + ) + params = TaskIdParams(id='task_not_found_for_cancel') + + from a2a.utils.errors import ServerError # Local import + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + await request_handler.on_cancel_task(params, context) + + assert isinstance(exc_info.value.error, TaskNotFoundError) + mock_task_store.get.assert_awaited_once_with( + 'task_not_found_for_cancel', context + ) + + +@pytest.mark.asyncio +async def test_on_cancel_task_queue_tap_returns_none(): + """Test on_cancel_task when queue_manager.tap returns None.""" + mock_task_store = AsyncMock(spec=TaskStore) + sample_task = create_sample_task(task_id='tap_none_task') + mock_task_store.get.return_value = sample_task + + mock_queue_manager = AsyncMock(spec=QueueManager) + mock_queue_manager.tap.return_value = ( + None # Simulate queue not found / tap returns 
None + ) + + mock_agent_executor = AsyncMock( + spec=AgentExecutor + ) # Use AsyncMock for agent_executor + + # Mock ResultAggregator and its consume_all method + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + mock_result_aggregator_instance.consume_all.return_value = ( + create_sample_task( + task_id='tap_none_task', + status_state=TaskState.canceled, # Expected final state + ) + ) + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + queue_manager=mock_queue_manager, + ) + + context = create_server_call_context() + with patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ): + params = TaskIdParams(id='tap_none_task') + result_task = await request_handler.on_cancel_task(params, context) + + mock_task_store.get.assert_awaited_once_with('tap_none_task', context) + mock_queue_manager.tap.assert_awaited_once_with('tap_none_task') + # agent_executor.cancel should be called with a new EventQueue if tap returned None + mock_agent_executor.cancel.assert_awaited_once() + # Verify the EventQueue passed to cancel was a new one + call_args_list = mock_agent_executor.cancel.call_args_list + args, _ = call_args_list[0] + assert isinstance( + args[1], EventQueue + ) # args[1] is the event_queue argument + + mock_result_aggregator_instance.consume_all.assert_awaited_once() + assert result_task is not None + assert result_task.status.state == TaskState.canceled + + +@pytest.mark.asyncio +async def test_on_cancel_task_cancels_running_agent(): + """Test on_cancel_task cancels a running agent task.""" + task_id = 'running_agent_task_to_cancel' + sample_task = create_sample_task(task_id=task_id) + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = sample_task + + mock_queue_manager = AsyncMock(spec=QueueManager) + mock_event_queue = AsyncMock(spec=EventQueue) + mock_queue_manager.tap.return_value 
= mock_event_queue + + mock_agent_executor = AsyncMock(spec=AgentExecutor) + + # Mock ResultAggregator + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + mock_result_aggregator_instance.consume_all.return_value = ( + create_sample_task(task_id=task_id, status_state=TaskState.canceled) + ) + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + queue_manager=mock_queue_manager, + ) + + # Simulate a running agent task + mock_producer_task = AsyncMock(spec=asyncio.Task) + request_handler._running_agents[task_id] = mock_producer_task + + context = create_server_call_context() + with patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ): + params = TaskIdParams(id=task_id) + await request_handler.on_cancel_task(params, context) + + mock_producer_task.cancel.assert_called_once() + mock_agent_executor.cancel.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_on_cancel_task_completes_during_cancellation(): + """Test on_cancel_task fails to cancel a task due to concurrent task completion.""" + task_id = 'running_agent_task_to_cancel' + sample_task = create_sample_task(task_id=task_id) + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = sample_task + + mock_queue_manager = AsyncMock(spec=QueueManager) + mock_event_queue = AsyncMock(spec=EventQueue) + mock_queue_manager.tap.return_value = mock_event_queue + + mock_agent_executor = AsyncMock(spec=AgentExecutor) + + # Mock ResultAggregator + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + mock_result_aggregator_instance.consume_all.return_value = ( + create_sample_task(task_id=task_id, status_state=TaskState.completed) + ) + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + queue_manager=mock_queue_manager, + ) + + # Simulate a running agent task 
+ mock_producer_task = AsyncMock(spec=asyncio.Task) + request_handler._running_agents[task_id] = mock_producer_task + + from a2a.utils.errors import ( + ServerError, # Local import + TaskNotCancelableError, # Local import + ) + + with patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ): + params = TaskIdParams(id=task_id) + with pytest.raises(ServerError) as exc_info: + await request_handler.on_cancel_task( + params, create_server_call_context() + ) + + mock_producer_task.cancel.assert_called_once() + mock_agent_executor.cancel.assert_awaited_once() + assert isinstance(exc_info.value.error, TaskNotCancelableError) + + +@pytest.mark.asyncio +async def test_on_cancel_task_invalid_result_type(): + """Test on_cancel_task when result_aggregator returns a Message instead of a Task.""" + task_id = 'cancel_invalid_result_task' + sample_task = create_sample_task(task_id=task_id) + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = sample_task + + mock_queue_manager = AsyncMock(spec=QueueManager) + mock_event_queue = AsyncMock(spec=EventQueue) + mock_queue_manager.tap.return_value = mock_event_queue + + mock_agent_executor = AsyncMock(spec=AgentExecutor) + + # Mock ResultAggregator to return a Message + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + mock_result_aggregator_instance.consume_all.return_value = Message( + message_id='unexpected_msg', role=Role.agent, parts=[] + ) + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + queue_manager=mock_queue_manager, + ) + + from a2a.utils.errors import ServerError # Local import + + with patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ): + params = TaskIdParams(id=task_id) + with pytest.raises(ServerError) as exc_info: + await 
request_handler.on_cancel_task( + params, create_server_call_context() + ) + + assert isinstance(exc_info.value.error, InternalError) + assert ( + 'Agent did not return valid response for cancel' + in exc_info.value.error.message + ) # type: ignore + + +@pytest.mark.asyncio +async def test_on_message_send_with_push_notification(): + """Test on_message_send sets push notification info if provided.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_push_notification_store = AsyncMock(spec=PushNotificationConfigStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + task_id = 'push_task_1' + context_id = 'push_ctx_1' + sample_initial_task = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.submitted + ) + + # TaskManager will be created inside on_message_send. + # We need to mock task_store.get to return None initially for TaskManager to create a new task. + # Then, TaskManager.update_with_message will be called. 
+ # For simplicity in this unit test, let's assume TaskManager correctly sets up the task + # and the task object (with IDs) is available for _request_context_builder.build + + mock_task_store.get.return_value = ( + None # Simulate new task scenario for TaskManager + ) + + # Mock _request_context_builder.build to return a context with the generated/confirmed IDs + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = task_id + mock_request_context.context_id = context_id + mock_request_context_builder.build.return_value = mock_request_context + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + push_config_store=mock_push_notification_store, + request_context_builder=mock_request_context_builder, + ) + + push_config = PushNotificationConfig(url='http://callback.com/push') + message_config = MessageSendConfiguration( + push_notification_config=push_config, + accepted_output_modes=['text/plain'], # Added required field + ) + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_push', + parts=[], + task_id=task_id, + context_id=context_id, + ), + configuration=message_config, + ) + + # Mock ResultAggregator and its consume_and_break_on_interrupt + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + final_task_result = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.completed + ) + mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( + final_task_result, + False, + ) + + # Mock the current_result property to return the final task result + async def get_current_result(): + return final_task_result + + # Configure the 'current_result' property on the type of the mock instance + type(mock_result_aggregator_instance).current_result = PropertyMock( + return_value=get_current_result() + ) + + with ( + patch( + 
'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=sample_initial_task, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.update_with_message', + return_value=sample_initial_task, + ), + ): # Ensure task object is returned + await request_handler.on_message_send( + params, create_server_call_context() + ) + + mock_push_notification_store.set_info.assert_awaited_once_with( + task_id, push_config + ) + # Other assertions for full flow if needed (e.g., agent execution) + mock_agent_executor.execute.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_on_message_send_with_push_notification_in_non_blocking_request(): + """Test that push notification callback is called during background event processing for non-blocking requests.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_push_notification_store = AsyncMock(spec=PushNotificationConfigStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + mock_push_sender = AsyncMock() + + task_id = 'non_blocking_task_1' + context_id = 'non_blocking_ctx_1' + + # Create a task that will be returned after the first event + initial_task = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.working + ) + + # Create a final task that will be available during background processing + final_task = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.completed + ) + + mock_task_store.get.return_value = None + + # Mock request context + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = task_id + mock_request_context.context_id = context_id + mock_request_context_builder.build.return_value = mock_request_context + + request_handler = 
DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + push_config_store=mock_push_notification_store, + request_context_builder=mock_request_context_builder, + push_sender=mock_push_sender, + ) + + # Configure push notification + push_config = PushNotificationConfig(url='http://callback.com/push') + message_config = MessageSendConfiguration( + push_notification_config=push_config, + accepted_output_modes=['text/plain'], + blocking=False, # Non-blocking request + ) + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_non_blocking', + parts=[], + task_id=task_id, + context_id=context_id, + ), + configuration=message_config, + ) + + # Mock ResultAggregator with custom behavior + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + + # First call returns the initial task and indicates interruption (non-blocking) + mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( + initial_task, + True, # interrupted = True for non-blocking + ) + + # Mock the current_result property to return the final task + async def get_current_result(): + return final_task + + type(mock_result_aggregator_instance).current_result = PropertyMock( + return_value=get_current_result() + ) + + # Track if the event_callback was passed to consume_and_break_on_interrupt + event_callback_passed = False + event_callback_received = None + + async def mock_consume_and_break_on_interrupt( + consumer, blocking=True, event_callback=None + ): + nonlocal event_callback_passed, event_callback_received + event_callback_passed = event_callback is not None + event_callback_received = event_callback + return initial_task, True # interrupted = True for non-blocking + + mock_result_aggregator_instance.consume_and_break_on_interrupt = ( + mock_consume_and_break_on_interrupt + ) + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + 
return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=initial_task, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.update_with_message', + return_value=initial_task, + ), + ): + # Execute the non-blocking request + result = await request_handler.on_message_send( + params, create_server_call_context() + ) + + # Verify the result is the initial task (non-blocking behavior) + assert result == initial_task + + # Verify that the event_callback was passed to consume_and_break_on_interrupt + assert event_callback_passed, ( + 'event_callback should have been passed to consume_and_break_on_interrupt' + ) + assert event_callback_received is not None, ( + 'event_callback should not be None' + ) + + # Verify that the push notification was sent with the final task + mock_push_sender.send_notification.assert_called_with(final_task) + + # Verify that the push notification config was stored + mock_push_notification_store.set_info.assert_awaited_once_with( + task_id, push_config + ) + + +@pytest.mark.asyncio +async def test_on_message_send_with_push_notification_no_existing_Task(): + """Test on_message_send for new task sets push notification info if provided.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_push_notification_store = AsyncMock(spec=PushNotificationConfigStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + task_id = 'push_task_1' + context_id = 'push_ctx_1' + + mock_task_store.get.return_value = ( + None # Simulate new task scenario for TaskManager + ) + + # Mock _request_context_builder.build to return a context with the generated/confirmed IDs + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = task_id + mock_request_context.context_id = context_id + mock_request_context_builder.build.return_value = 
mock_request_context + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + push_config_store=mock_push_notification_store, + request_context_builder=mock_request_context_builder, + ) + + push_config = PushNotificationConfig(url='http://callback.com/push') + message_config = MessageSendConfiguration( + push_notification_config=push_config, + accepted_output_modes=['text/plain'], # Added required field + ) + params = MessageSendParams( + message=Message(role=Role.user, message_id='msg_push', parts=[]), + configuration=message_config, + ) + + # Mock ResultAggregator and its consume_and_break_on_interrupt + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + final_task_result = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.completed + ) + mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( + final_task_result, + False, + ) + + # Mock the current_result property to return the final task result + async def get_current_result(): + return final_task_result + + # Configure the 'current_result' property on the type of the mock instance + type(mock_result_aggregator_instance).current_result = PropertyMock( + return_value=get_current_result() + ) + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ), + ): + await request_handler.on_message_send( + params, create_server_call_context() + ) + + mock_push_notification_store.set_info.assert_awaited_once_with( + task_id, push_config + ) + # Other assertions for full flow if needed (e.g., agent execution) + mock_agent_executor.execute.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_on_message_send_no_result_from_aggregator(): + """Test on_message_send when aggregator returns 
(None, False).""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + task_id = 'no_result_task' + # Mock _request_context_builder.build + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = task_id + mock_request_context_builder.build.return_value = mock_request_context + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + request_context_builder=mock_request_context_builder, + ) + params = MessageSendParams( + message=Message(role=Role.user, message_id='msg_no_res', parts=[]) + ) + + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( + None, + False, + ) + + from a2a.utils.errors import ServerError # Local import + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ), + ): # TaskManager.get_task for initial task + with pytest.raises(ServerError) as exc_info: + await request_handler.on_message_send( + params, create_server_call_context() + ) + + assert isinstance(exc_info.value.error, InternalError) + + +@pytest.mark.asyncio +async def test_on_message_send_task_id_mismatch(): + """Test on_message_send when result task ID doesn't match request context task ID.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + context_task_id = 'context_task_id_1' + result_task_id = 'DIFFERENT_task_id_1' # Mismatch + + # Mock _request_context_builder.build + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = 
context_task_id + mock_request_context_builder.build.return_value = mock_request_context + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + request_context_builder=mock_request_context_builder, + ) + params = MessageSendParams( + message=Message(role=Role.user, message_id='msg_id_mismatch', parts=[]) + ) + + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + mismatched_task = create_sample_task(task_id=result_task_id) + mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( + mismatched_task, + False, + ) + + from a2a.utils.errors import ServerError # Local import + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ), + ): + with pytest.raises(ServerError) as exc_info: + await request_handler.on_message_send( + params, create_server_call_context() + ) + + assert isinstance(exc_info.value.error, InternalError) + assert 'Task ID mismatch' in exc_info.value.error.message # type: ignore + + +class HelloAgentExecutor(AgentExecutor): + async def execute(self, context: RequestContext, event_queue: EventQueue): + task = context.current_task + if not task: + assert context.message is not None, ( + 'A message is required to create a new task' + ) + task = new_task(context.message) # type: ignore + await event_queue.enqueue_event(task) + updater = TaskUpdater(event_queue, task.id, task.context_id) + + try: + parts = [Part(root=TextPart(text='I am working'))] + await updater.update_status( + TaskState.working, + message=updater.new_agent_message(parts), + ) + except Exception as e: + # Stop processing when the event loop is closed + logging.warning('Error: %s', e) + return + await updater.add_artifact( + [Part(root=TextPart(text='Hello world!'))], + 
name='conversion_result', + ) + await updater.complete() + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + pass + + +@pytest.mark.asyncio +async def test_on_message_send_non_blocking(): + task_store = InMemoryTaskStore() + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandler( + agent_executor=HelloAgentExecutor(), + task_store=task_store, + push_config_store=push_store, + ) + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_push', + parts=[Part(root=TextPart(text='Hi'))], + ), + configuration=MessageSendConfiguration( + blocking=False, accepted_output_modes=['text/plain'] + ), + ) + + result = await request_handler.on_message_send( + params, create_server_call_context() + ) + + assert result is not None + assert isinstance(result, Task) + assert result.status.state == TaskState.submitted + + # Polling for 500ms until task is completed. + task: Task | None = None + for _ in range(5): + await asyncio.sleep(0.1) + task = await task_store.get(result.id) + assert task is not None + if task.status.state == TaskState.completed: + break + + assert task is not None + assert task.status.state == TaskState.completed + assert ( + result.history + and task.history + and len(result.history) == len(task.history) + ) + + +@pytest.mark.asyncio +async def test_on_message_send_limit_history(): + task_store = InMemoryTaskStore() + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandler( + agent_executor=HelloAgentExecutor(), + task_store=task_store, + push_config_store=push_store, + ) + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_push', + parts=[Part(root=TextPart(text='Hi'))], + ), + configuration=MessageSendConfiguration( + blocking=True, + accepted_output_modes=['text/plain'], + history_length=1, + ), + ) + + result = await request_handler.on_message_send( + params, create_server_call_context() + ) + 
+ # verify that history_length is honored + assert result is not None + assert isinstance(result, Task) + assert result.history is not None and len(result.history) == 1 + assert result.status.state == TaskState.completed + + # verify that history is still persisted to the store + task = await task_store.get(result.id) + assert task is not None + assert task.history is not None and len(task.history) > 1 + + +@pytest.mark.asyncio +async def test_on_get_task_limit_history(): + task_store = InMemoryTaskStore() + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandler( + agent_executor=HelloAgentExecutor(), + task_store=task_store, + push_config_store=push_store, + ) + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_push', + parts=[Part(root=TextPart(text='Hi'))], + ), + configuration=MessageSendConfiguration( + blocking=True, + accepted_output_modes=['text/plain'], + ), + ) + + result = await request_handler.on_message_send( + params, create_server_call_context() + ) + + assert result is not None + assert isinstance(result, Task) + + get_task_result = await request_handler.on_get_task( + TaskQueryParams(id=result.id, history_length=1), + create_server_call_context(), + ) + assert get_task_result is not None + assert isinstance(get_task_result, Task) + assert ( + get_task_result.history is not None + and len(get_task_result.history) == 1 + ) + + +@pytest.mark.asyncio +async def test_on_message_send_interrupted_flow(): + """Test on_message_send when flow is interrupted (e.g., auth_required).""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + task_id = 'interrupted_task_1' + # Mock _request_context_builder.build + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = task_id + mock_request_context_builder.build.return_value = 
mock_request_context + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + request_context_builder=mock_request_context_builder, + ) + params = MessageSendParams( + message=Message(role=Role.user, message_id='msg_interrupt', parts=[]) + ) + + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + interrupt_task_result = create_sample_task( + task_id=task_id, status_state=TaskState.auth_required + ) + mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( + interrupt_task_result, + True, + ) # Interrupted = True + + # Patch asyncio.create_task to verify _cleanup_producer is scheduled + with ( + patch('asyncio.create_task') as mock_asyncio_create_task, + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ), + ): + result = await request_handler.on_message_send( + params, create_server_call_context() + ) + + assert result == interrupt_task_result + assert ( + mock_asyncio_create_task.call_count == 2 + ) # First for _run_event_stream, second for _cleanup_producer + + # Check that the second call to create_task was for _cleanup_producer + found_cleanup_call = False + for call_args_tuple in mock_asyncio_create_task.call_args_list: + created_coro = call_args_tuple[0][0] + if ( + hasattr(created_coro, '__name__') + and created_coro.__name__ == '_cleanup_producer' + ): + found_cleanup_call = True + break + assert found_cleanup_call, ( + '_cleanup_producer was not scheduled with asyncio.create_task' + ) + + +@pytest.mark.asyncio +async def test_on_message_send_stream_with_push_notification(): + """Test on_message_send_stream sets and uses push notification info.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_push_config_store = AsyncMock(spec=PushNotificationConfigStore) + 
mock_push_sender = AsyncMock(spec=PushNotificationSender) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + task_id = 'stream_push_task_1' + context_id = 'stream_push_ctx_1' + + # Initial task state for TaskManager + initial_task_for_tm = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.submitted + ) + + # Task state for RequestContext + task_for_rc = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.working + ) # Example state after message update + + mock_task_store.get.return_value = None # New task for TaskManager + + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = task_id + mock_request_context.context_id = context_id + mock_request_context_builder.build.return_value = mock_request_context + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + push_config_store=mock_push_config_store, + push_sender=mock_push_sender, + request_context_builder=mock_request_context_builder, + ) + + push_config = PushNotificationConfig(url='http://callback.stream.com/push') + message_config = MessageSendConfiguration( + push_notification_config=push_config, + accepted_output_modes=['text/plain'], # Added required field + ) + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_stream_push', + parts=[], + task_id=task_id, + context_id=context_id, + ), + configuration=message_config, + ) + + # Latch to ensure background execute is scheduled before asserting + execute_called = asyncio.Event() + + async def exec_side_effect(*args, **kwargs): + execute_called.set() + + mock_agent_executor.execute.side_effect = exec_side_effect + + # Mock ResultAggregator and its consume_and_emit + mock_result_aggregator_instance = MagicMock( + spec=ResultAggregator + ) # Use MagicMock for easier property mocking + + # Events to 
be yielded by consume_and_emit + event1_task_update = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.working + ) + event2_final_task = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.completed + ) + + async def event_stream_gen(): + yield event1_task_update + yield event2_final_task + + # consume_and_emit is called by `async for ... in result_aggregator.consume_and_emit(consumer)` + # This means result_aggregator.consume_and_emit(consumer) must directly return an async iterable. + # If consume_and_emit is an async method, this is problematic in the product code. + # For the test, we make the mock of consume_and_emit a synchronous method + # that returns the async generator object. + def sync_get_event_stream_gen(*args, **kwargs): + return event_stream_gen() + + mock_result_aggregator_instance.consume_and_emit = MagicMock( + side_effect=sync_get_event_stream_gen + ) + + # Mock current_result property to return appropriate awaitables + # Coroutines that will be returned by successive accesses to current_result + async def current_result_coro1(): + return event1_task_update + + async def current_result_coro2(): + return event2_final_task + + # Use unittest.mock.PropertyMock for async property + # We need to patch 'ResultAggregator.current_result' when this instance is used. + # This is complex because ResultAggregator is instantiated inside the handler. + # Easier: If mock_result_aggregator_instance is a MagicMock, we can assign a callable. + # This part is tricky. Let's assume current_result is an async method for easier mocking first. + # If it's truly a property, the mocking is harder with instance mocks. 
+ # Let's adjust the mock_result_aggregator_instance.current_result to be an AsyncMock directly + # This means the code would call `await result_aggregator.current_result()` + # But the actual code is `await result_aggregator.current_result` + # This implies `result_aggregator.current_result` IS an awaitable. + # So, we can mock it with a side_effect that returns awaitables (coroutines). + + # Create simple awaitables (coroutines) for side_effect + async def get_event1(): + return event1_task_update + + async def get_event2(): + return event2_final_task + + # Make the current_result attribute of the mock instance itself an awaitable + # This still means current_result is not callable. + # For an async property, the mock needs to have current_result as a non-AsyncMock attribute + # that is itself an awaitable. + + # Let's try to mock the property at the type level for ResultAggregator temporarily + # This is not ideal as it affects all instances. + + # Alternative: Configure the AsyncMock for current_result to return a coroutine + # when it's awaited. This is not directly supported by AsyncMock for property access. + + # Simplest for now: Assume `current_result` attribute of the mocked `ResultAggregator` instance + # can be sequentially awaited if it's a list of awaitables that a test runner can handle. + # This is likely to fail again but will clarify the exact point of await. + # The error "TypeError: object AsyncMock can't be used in 'await' expression" means + # `mock_result_aggregator_instance.current_result` is an AsyncMock, and that's what's awaited. + # This AsyncMock needs to have a __await__ method. + + # Let's make the side_effect of the AsyncMock `current_result` provide the values. + # This assumes that `await mock.property` somehow triggers a call to the mock. + # This is not how AsyncMock works. + + # The code is `await result_aggregator.current_result`. + # `result_aggregator` is an instance of `ResultAggregator`. 
+ # `current_result` is an async property. + # So `result_aggregator.current_result` evaluates to a coroutine. + # We need `mock_result_aggregator_instance.current_result` to be a coroutine, + # or a list of coroutines if accessed multiple times. + # This is best done by mocking the property itself. + # Let's assume it's called twice. + + # We will patch ResultAggregator to be our mock_result_aggregator_instance + # Then, we need to control what its `current_result` property returns. + # We can use a PropertyMock for this, attached to the type of mock_result_aggregator_instance. + + # For this specific test, let's make current_result a simple async def method on the mock instance + # This means we are slightly diverging from the "property" nature just for this mock. + # Mock current_result property to return appropriate awaitables (coroutines) sequentially. + async def get_event1_coro(): + return event1_task_update + + async def get_event2_coro(): + return event2_final_task + + # Configure the 'current_result' property on the type of the mock instance + # This makes accessing `instance.current_result` call the side_effect function, + # which then cycles through our list of coroutines. + # We need a new PropertyMock for each instance, or patch the class. + # Since mock_result_aggregator_instance is already created, we attach to its type. + # This can be tricky. A more direct way is to ensure the instance's attribute `current_result` + # behaves as desired. If `mock_result_aggregator_instance` is a `MagicMock`, its attributes are also mocks. + + # Let's make `current_result` a MagicMock whose side_effect returns the coroutines. + # This means when `result_aggregator.current_result` is accessed, this mock is "called". + # This isn't quite right for a property. A property isn't "called" on access. 
+ + # Correct approach for mocking an async property on an instance mock: + # Set the attribute `current_result` on the instance `mock_result_aggregator_instance` + # to be a `PropertyMock` if we were patching the class. + # Since we have the instance, we can try to replace its `current_result` attribute. + # The instance `mock_result_aggregator_instance` is a `MagicMock`. + # We can make `mock_result_aggregator_instance.current_result` a `PropertyMock` + # that returns a coroutine. For multiple calls, `side_effect` on `PropertyMock` is a list of return_values. + + # Create a PropertyMock that will cycle through coroutines + # This requires Python 3.8+ for PropertyMock to be directly usable with side_effect list for properties. + # For older versions or for clarity with async properties, directly mocking the attribute + # to be a series of awaitables is hard. + # The easiest is to ensure `current_result` is an AsyncMock that returns the values. + # The product code `await result_aggregator.current_result` means `current_result` must be an awaitable. + + # Let's make current_result an AsyncMock whose __call__ returns the sequence. + # Mock current_result as an async property + # Create coroutines that will be the "result" of awaiting the property + async def get_current_result_coro1(): + return event1_task_update + + async def get_current_result_coro2(): + return event2_final_task + + # Configure the 'current_result' property on the mock_result_aggregator_instance + # using PropertyMock attached to its type. This makes instance.current_result return + # items from side_effect sequentially on each access. + # Since current_result is an async property, these items should be coroutines. + # We need to ensure that mock_result_aggregator_instance itself is the one patched. + # The patch for ResultAggregator returns this instance. + # So, we configure PropertyMock on the type of this specific mock instance. 
+ # This is slightly unusual; typically PropertyMock is used when patching a class. + # A more straightforward approach for an instance is if its type is already a mock. + # As mock_result_aggregator_instance is a MagicMock, we can configure its 'current_result' + # attribute to be a PropertyMock. + + # Let's directly assign a PropertyMock to the type of the instance for `current_result` + # This ensures that when `instance.current_result` is accessed, the PropertyMock's logic is triggered. + # However, PropertyMock is usually used with `patch.object` or by setting it on the class. + # + # A simpler way for MagicMock instance: + # `mock_result_aggregator_instance.current_result` is already a MagicMock (or AsyncMock if spec'd). + # We need to make it return a coroutine upon access. + # The most direct way to mock an async property on a MagicMock instance + # such that it returns a sequence of awaitables: + async def side_effect_current_result(): + yield event1_task_update + yield event2_final_task + + # Create an async generator from the side effect + current_result_gen = side_effect_current_result() + + # Make current_result return the next item from this generator (wrapped in a coroutine) + # each time it's accessed. + async def get_next_current_result(): + try: + return await current_result_gen.__anext__() + except StopAsyncIteration: + # Handle case where it's awaited more times than values provided + return None # Or raise an error + + # Since current_result is a property, accessing it should return a coroutine. + # We can achieve this by making mock_result_aggregator_instance.current_result + # a MagicMock whose side_effect returns these coroutines. + # This is still tricky because it's a property access. + + # Let's use the PropertyMock on the class being mocked via the patch. 
+ # Setup for consume_and_emit + def sync_get_event_stream_gen_for_prop_test(*args, **kwargs): + return event_stream_gen() + + mock_result_aggregator_instance.consume_and_emit = MagicMock( + side_effect=sync_get_event_stream_gen_for_prop_test + ) + + # Configure current_result on the type of the mock_result_aggregator_instance + # This makes it behave like a property that returns items from side_effect on access. + type(mock_result_aggregator_instance).current_result = PropertyMock( + side_effect=[get_current_result_coro1(), get_current_result_coro2()] + ) + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=initial_task_for_tm, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.update_with_message', + return_value=task_for_rc, + ), + ): + # Consume the stream + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass + + await asyncio.wait_for(execute_called.wait(), timeout=0.1) + + # Assertions + # 1. set_info called once at the beginning if task exists (or after task is created from message) + mock_push_config_store.set_info.assert_any_call(task_id, push_config) + + # 2. 
send_notification called for each task event yielded by aggregator + assert mock_push_sender.send_notification.await_count == 2 + mock_push_sender.send_notification.assert_any_await(event1_task_update) + mock_push_sender.send_notification.assert_any_await(event2_final_task) + + mock_agent_executor.execute.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_stream_disconnect_then_resubscribe_receives_future_events(): + """Start streaming, disconnect, then resubscribe and ensure subsequent events are streamed.""" + # Arrange + mock_task_store = AsyncMock(spec=TaskStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + + # Use a real queue manager so taps receive future events + queue_manager = InMemoryQueueManager() + + task_id = 'reconn_task_1' + context_id = 'reconn_ctx_1' + + # Task exists and is non-final + task_for_resub = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.working + ) + mock_task_store.get.return_value = task_for_resub + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + queue_manager=queue_manager, + ) + + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_reconn', + parts=[], + task_id=task_id, + context_id=context_id, + ) + ) + + # Producer behavior: emit one event, then later emit second event + exec_started = asyncio.Event() + allow_second_event = asyncio.Event() + allow_finish = asyncio.Event() + + first_event = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.working + ) + second_event = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.completed + ) + + async def exec_side_effect(_request, queue: EventQueue): + exec_started.set() + await queue.enqueue_event(first_event) + await allow_second_event.wait() + await queue.enqueue_event(second_event) + await allow_finish.wait() + + mock_agent_executor.execute.side_effect = 
exec_side_effect + + # Start streaming and consume first event + agen = request_handler.on_message_send_stream( + params, create_server_call_context() + ) + first = await agen.__anext__() + assert first == first_event + + # Simulate client disconnect + await asyncio.wait_for(agen.aclose(), timeout=0.1) + + # Resubscribe and start consuming future events + resub_gen = request_handler.on_resubscribe_to_task( + TaskIdParams(id=task_id), create_server_call_context() + ) + + # Allow producer to emit the next event + allow_second_event.set() + + received = await resub_gen.__anext__() + assert received == second_event + + # Finish producer to allow cleanup paths to complete + allow_finish.set() + + +@pytest.mark.asyncio +async def test_on_message_send_stream_client_disconnect_triggers_background_cleanup_and_producer_continues(): + """Simulate client disconnect: stream stops early, cleanup is scheduled in background, + producer keeps running, and cleanup completes after producer finishes.""" + # Arrange + mock_task_store = AsyncMock(spec=TaskStore) + mock_queue_manager = AsyncMock(spec=QueueManager) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + task_id = 'disc_task_1' + context_id = 'disc_ctx_1' + + # RequestContext with IDs + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = task_id + mock_request_context.context_id = context_id + mock_request_context_builder.build.return_value = mock_request_context + + # Queue used by _run_event_stream; must support close() + mock_queue = AsyncMock(spec=EventQueue) + mock_queue_manager.create_or_tap.return_value = mock_queue + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + queue_manager=mock_queue_manager, + request_context_builder=mock_request_context_builder, + ) + + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='mid', + 
parts=[], + task_id=task_id, + context_id=context_id, + ) + ) + + # Agent executor runs in background until we allow it to finish + execute_started = asyncio.Event() + execute_finish = asyncio.Event() + + async def exec_side_effect(*_args, **_kwargs): + execute_started.set() + await execute_finish.wait() + + mock_agent_executor.execute.side_effect = exec_side_effect + + # ResultAggregator emits one Task event (so the stream yields once) + first_event = create_sample_task(task_id=task_id, context_id=context_id) + + async def single_event_stream(): + yield first_event + # will never yield again; client will disconnect + + mock_result_aggregator_instance = MagicMock(spec=ResultAggregator) + mock_result_aggregator_instance.consume_and_emit.return_value = ( + single_event_stream() + ) + # Signal when background consume_all is started + bg_started = asyncio.Event() + + async def mock_consume_all(_consumer): + bg_started.set() + # emulate short-running background work + await asyncio.sleep(0) + + mock_result_aggregator_instance.consume_all = mock_consume_all + + produced_task: asyncio.Task | None = None + cleanup_task: asyncio.Task | None = None + + orig_create_task = asyncio.create_task + + def create_task_spy(coro): + nonlocal produced_task, cleanup_task + task = orig_create_task(coro) + # Inspect the coroutine name to make the spy more robust + if coro.__name__ == '_run_event_stream': + produced_task = task + elif coro.__name__ == '_cleanup_producer': + cleanup_task = task + return task + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch('asyncio.create_task', side_effect=create_task_spy), + ): + # Act: start stream and consume only the first event, then disconnect + agen = request_handler.on_message_send_stream( + params, create_server_call_context() + ) + first = await agen.__anext__() + assert first == first_event + # Simulate client disconnect + await 
asyncio.wait_for(agen.aclose(), timeout=0.1) + + # Assert cleanup was scheduled and producer was started + assert produced_task is not None + assert cleanup_task is not None + + # Assert background consume_all started + await asyncio.wait_for(bg_started.wait(), timeout=0.2) + + # execute should have started + await asyncio.wait_for(execute_started.wait(), timeout=0.1) + + # Producer should still be running (not finished immediately on disconnect) + assert not produced_task.done() + + # Allow executor to finish, which should complete producer and then cleanup + execute_finish.set() + await asyncio.wait_for(produced_task, timeout=0.2) + await asyncio.wait_for(cleanup_task, timeout=0.2) + + # Queue close awaited by _run_event_stream + mock_queue.close.assert_awaited_once() + # QueueManager close called by _cleanup_producer + mock_queue_manager.close.assert_awaited_once_with(task_id) + # Running agents is cleared + assert task_id not in request_handler._running_agents + + # Cleanup any lingering background tasks started by on_message_send_stream + # (e.g., background_consume) + for t in list(request_handler._background_tasks): + t.cancel() + with contextlib.suppress(asyncio.CancelledError): + await t + + +@pytest.mark.asyncio +async def test_disconnect_persists_final_task_to_store(): + """After client disconnect, ensure background consumer persists final Task to store.""" + task_store = InMemoryTaskStore() + queue_manager = InMemoryQueueManager() + + # Custom agent that emits a working update then a completed final update + class FinishingAgent(AgentExecutor): + def __init__(self): + self.allow_finish = asyncio.Event() + + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + from typing import cast + + updater = TaskUpdater( + event_queue, + cast('str', context.task_id), + cast('str', context.context_id), + ) + await updater.update_status(TaskState.working) + await self.allow_finish.wait() + await updater.update_status(TaskState.completed) 
+ + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + return None + + agent = FinishingAgent() + + handler = DefaultRequestHandler( + agent_executor=agent, task_store=task_store, queue_manager=queue_manager + ) + + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_persist', + parts=[], + ) + ) + + # Start streaming and consume the first event (working) + agen = handler.on_message_send_stream(params, create_server_call_context()) + first = await agen.__anext__() + if isinstance(first, TaskStatusUpdateEvent): + assert first.status.state == TaskState.working + task_id = first.task_id + else: + assert ( + isinstance(first, Task) and first.status.state == TaskState.working + ) + task_id = first.id + + # Disconnect client + await asyncio.wait_for(agen.aclose(), timeout=0.1) + + # Finish agent and allow background consumer to persist final state + agent.allow_finish.set() + + # Wait until background_consume task for this task_id is gone + await wait_until( + lambda: all( + not t.get_name().startswith(f'background_consume:{task_id}') + for t in handler._background_tasks + ), + timeout=1.0, + interval=0.01, + ) + + # Verify task is persisted as completed + persisted = await task_store.get(task_id, create_server_call_context()) + assert persisted is not None + assert persisted.status.state == TaskState.completed + + +async def wait_until(predicate, timeout: float = 0.2, interval: float = 0.0): + """Await until predicate() is True or timeout elapses.""" + loop = asyncio.get_running_loop() + end = loop.time() + timeout + while True: + if predicate(): + return + if loop.time() >= end: + raise AssertionError('condition not met within timeout') + await asyncio.sleep(interval) + + +@pytest.mark.asyncio +async def test_background_cleanup_task_is_tracked_and_cleared(): + """Ensure background cleanup task is tracked while pending and removed when done.""" + # Arrange + mock_task_store = AsyncMock(spec=TaskStore) + 
mock_queue_manager = AsyncMock(spec=QueueManager) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + task_id = 'track_task_1' + context_id = 'track_ctx_1' + + # RequestContext with IDs + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = task_id + mock_request_context.context_id = context_id + mock_request_context_builder.build.return_value = mock_request_context + + mock_queue = AsyncMock(spec=EventQueue) + mock_queue_manager.create_or_tap.return_value = mock_queue + + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + queue_manager=mock_queue_manager, + request_context_builder=mock_request_context_builder, + ) + + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='mid_track', + parts=[], + task_id=task_id, + context_id=context_id, + ) + ) + + # Agent executor runs in background until we allow it to finish + execute_started = asyncio.Event() + execute_finish = asyncio.Event() + + async def exec_side_effect(*_args, **_kwargs): + execute_started.set() + await execute_finish.wait() + + mock_agent_executor.execute.side_effect = exec_side_effect + + # ResultAggregator emits one Task event (so the stream yields once) + first_event = create_sample_task(task_id=task_id, context_id=context_id) + + async def single_event_stream(): + yield first_event + + mock_result_aggregator_instance = MagicMock(spec=ResultAggregator) + mock_result_aggregator_instance.consume_and_emit.return_value = ( + single_event_stream() + ) + + produced_task: asyncio.Task | None = None + cleanup_task: asyncio.Task | None = None + + orig_create_task = asyncio.create_task + + def create_task_spy(coro): + nonlocal produced_task, cleanup_task + task = orig_create_task(coro) + if coro.__name__ == '_run_event_stream': + produced_task = task + elif coro.__name__ == '_cleanup_producer': + cleanup_task = task + 
return task + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch('asyncio.create_task', side_effect=create_task_spy), + ): + # Act: start stream and consume only the first event, then disconnect + agen = request_handler.on_message_send_stream( + params, create_server_call_context() + ) + first = await agen.__anext__() + assert first == first_event + # Simulate client disconnect + await asyncio.wait_for(agen.aclose(), timeout=0.1) + + assert produced_task is not None + assert cleanup_task is not None + + # Background cleanup task should be tracked while producer is still running + await asyncio.wait_for(execute_started.wait(), timeout=0.1) + assert cleanup_task in request_handler._background_tasks + + # Allow executor to finish; this should complete producer, then cleanup + execute_finish.set() + await asyncio.wait_for(produced_task, timeout=0.1) + await asyncio.wait_for(cleanup_task, timeout=0.1) + + # Wait for callback to remove task from tracking + await wait_until( + lambda: cleanup_task not in request_handler._background_tasks, + timeout=0.1, + ) + + # Cleanup any lingering background tasks + for t in list(request_handler._background_tasks): + t.cancel() + with contextlib.suppress(asyncio.CancelledError): + await t + + +@pytest.mark.asyncio +async def test_on_message_send_stream_task_id_mismatch(): + """Test on_message_send_stream raises error if yielded task ID mismatches.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_agent_executor = AsyncMock( + spec=AgentExecutor + ) # Only need a basic mock + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + context_task_id = 'stream_task_id_ctx' + mismatched_task_id = 'DIFFERENT_stream_task_id' + + mock_request_context = MagicMock(spec=RequestContext) + mock_request_context.task_id = context_task_id + mock_request_context_builder.build.return_value = mock_request_context + + 
request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + request_context_builder=mock_request_context_builder, + ) + params = MessageSendParams( + message=Message( + role=Role.user, message_id='msg_stream_mismatch', parts=[] + ) + ) + + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + mismatched_task_event = create_sample_task( + task_id=mismatched_task_id + ) # Task with different ID + + async def event_stream_gen_mismatch(): + yield mismatched_task_event + + mock_result_aggregator_instance.consume_and_emit.return_value = ( + event_stream_gen_mismatch() + ) + + from a2a.utils.errors import ServerError # Local import + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ), + ): + with pytest.raises(ServerError) as exc_info: + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass # Consume the stream to trigger the error + + assert isinstance(exc_info.value.error, InternalError) + assert 'Task ID mismatch' in exc_info.value.error.message # type: ignore + + +@pytest.mark.asyncio +async def test_cleanup_producer_task_id_not_in_running_agents(): + """Test _cleanup_producer when task_id is not in _running_agents (e.g., already cleaned up).""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_queue_manager = AsyncMock(spec=QueueManager) + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + queue_manager=mock_queue_manager, + ) + + task_id = 'task_already_cleaned' + + # Create a real, completed asyncio.Task for the test + async def dummy_coro_for_task(): + pass + + mock_producer_task = asyncio.create_task(dummy_coro_for_task()) + await asyncio.sleep( + 0 + ) # Ensure the task has a 
chance to complete/be scheduled + + # Call cleanup directly, ensuring task_id is NOT in _running_agents + # This simulates a race condition or double cleanup. + if task_id in request_handler._running_agents: + del request_handler._running_agents[task_id] # Ensure it's not there + + try: + await request_handler._cleanup_producer(mock_producer_task, task_id) + except Exception as e: + pytest.fail(f'_cleanup_producer raised an exception unexpectedly: {e}') + + # Verify queue_manager.close was still called + mock_queue_manager.close.assert_awaited_once_with(task_id) + # No error should be raised by pop if key is missing and default is None. + + +@pytest.mark.asyncio +async def test_set_task_push_notification_config_no_notifier(): + """Test on_set_task_push_notification_config when _push_config_store is None.""" + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=AsyncMock(spec=TaskStore), + push_config_store=None, # Explicitly None + ) + params = TaskPushNotificationConfig( + task_id='task1', + push_notification_config=PushNotificationConfig( + url='http://example.com' + ), + ) + from a2a.utils.errors import ServerError # Local import + + with pytest.raises(ServerError) as exc_info: + await request_handler.on_set_task_push_notification_config( + params, create_server_call_context() + ) + assert isinstance(exc_info.value.error, UnsupportedOperationError) + + +@pytest.mark.asyncio +async def test_set_task_push_notification_config_task_not_found(): + """Test on_set_task_push_notification_config when task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None # Task not found + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + mock_push_sender = AsyncMock(spec=PushNotificationSender) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + push_sender=mock_push_sender, + ) 
+ params = TaskPushNotificationConfig( + task_id='non_existent_task', + push_notification_config=PushNotificationConfig( + url='http://example.com' + ), + ) + from a2a.utils.errors import ServerError # Local import + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + await request_handler.on_set_task_push_notification_config( + params, context + ) + + assert isinstance(exc_info.value.error, TaskNotFoundError) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.set_info.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_no_store(): + """Test on_get_task_push_notification_config when _push_config_store is None.""" + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=AsyncMock(spec=TaskStore), + push_config_store=None, # Explicitly None + ) + params = GetTaskPushNotificationConfigParams(id='task1') + from a2a.utils.errors import ServerError # Local import + + with pytest.raises(ServerError) as exc_info: + await request_handler.on_get_task_push_notification_config( + params, create_server_call_context() + ) + assert isinstance(exc_info.value.error, UnsupportedOperationError) + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_task_not_found(): + """Test on_get_task_push_notification_config when task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None # Task not found + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + ) + params = GetTaskPushNotificationConfigParams(id='non_existent_task') + from a2a.utils.errors import ServerError # Local import + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + await 
request_handler.on_get_task_push_notification_config( + params, context + ) + + assert isinstance(exc_info.value.error, TaskNotFoundError) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.get_info.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_info_not_found(): + """Test on_get_task_push_notification_config when push_config_store.get_info returns None.""" + mock_task_store = AsyncMock(spec=TaskStore) + + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + mock_push_store.get_info.return_value = None # Info not found + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + ) + params = GetTaskPushNotificationConfigParams(id='non_existent_task') + from a2a.utils.errors import ServerError # Local import + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + await request_handler.on_get_task_push_notification_config( + params, context + ) + + assert isinstance( + exc_info.value.error, InternalError + ) # Current code raises InternalError + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.get_info.assert_awaited_once_with('non_existent_task') + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_info_with_config(): + """Test on_get_task_push_notification_config with valid push config id""" + mock_task_store = AsyncMock(spec=TaskStore) + + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + ) + + set_config_params = TaskPushNotificationConfig( + task_id='task_1', + push_notification_config=PushNotificationConfig( 
+ id='config_id', url='http://1.example.com' + ), + ) + context = create_server_call_context() + await request_handler.on_set_task_push_notification_config( + set_config_params, context + ) + + params = GetTaskPushNotificationConfigParams( + id='task_1', push_notification_config_id='config_id' + ) + + result: TaskPushNotificationConfig = ( + await request_handler.on_get_task_push_notification_config( + params, context + ) + ) + + assert result is not None + assert result.task_id == 'task_1' + assert ( + result.push_notification_config.url + == set_config_params.push_notification_config.url + ) + assert result.push_notification_config.id == 'config_id' + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_info_with_config_no_id(): + """Test on_get_task_push_notification_config with no push config id""" + mock_task_store = AsyncMock(spec=TaskStore) + + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + ) + + set_config_params = TaskPushNotificationConfig( + task_id='task_1', + push_notification_config=PushNotificationConfig( + url='http://1.example.com' + ), + ) + await request_handler.on_set_task_push_notification_config( + set_config_params, create_server_call_context() + ) + + params = TaskIdParams(id='task_1') + + result: TaskPushNotificationConfig = ( + await request_handler.on_get_task_push_notification_config( + params, create_server_call_context() + ) + ) + + assert result is not None + assert result.task_id == 'task_1' + assert ( + result.push_notification_config.url + == set_config_params.push_notification_config.url + ) + assert result.push_notification_config.id == 'task_1' + + +@pytest.mark.asyncio +async def test_on_resubscribe_to_task_task_not_found(): + """Test on_resubscribe_to_task when the task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + 
mock_task_store.get.return_value = None # Task not found + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), task_store=mock_task_store + ) + params = TaskIdParams(id='resub_task_not_found') + + from a2a.utils.errors import ServerError # Local import + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + # Need to consume the async generator to trigger the error + async for _ in request_handler.on_resubscribe_to_task(params, context): + pass + + assert isinstance(exc_info.value.error, TaskNotFoundError) + mock_task_store.get.assert_awaited_once_with( + 'resub_task_not_found', context + ) + + +@pytest.mark.asyncio +async def test_on_resubscribe_to_task_queue_not_found(): + """Test on_resubscribe_to_task when the queue is not found by queue_manager.tap.""" + mock_task_store = AsyncMock(spec=TaskStore) + sample_task = create_sample_task(task_id='resub_queue_not_found') + mock_task_store.get.return_value = sample_task + + mock_queue_manager = AsyncMock(spec=QueueManager) + mock_queue_manager.tap.return_value = None # Queue not found + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + queue_manager=mock_queue_manager, + ) + params = TaskIdParams(id='resub_queue_not_found') + + from a2a.utils.errors import ServerError # Local import + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + async for _ in request_handler.on_resubscribe_to_task(params, context): + pass + + assert isinstance( + exc_info.value.error, TaskNotFoundError + ) # Should be TaskNotFoundError as per spec + mock_task_store.get.assert_awaited_once_with( + 'resub_queue_not_found', context + ) + mock_queue_manager.tap.assert_awaited_once_with('resub_queue_not_found') + + +@pytest.mark.asyncio +async def test_on_message_send_stream(): + request_handler = DefaultRequestHandler( + DummyAgentExecutor(), InMemoryTaskStore() + ) + message_params 
= MessageSendParams( + message=Message( + role=Role.user, + message_id='msg-123', + parts=[Part(root=TextPart(text='How are you?'))], + ), + ) + + async def consume_stream(): + events = [] + async for event in request_handler.on_message_send_stream( + message_params + ): + events.append(event) + if len(events) >= 3: + break # Stop after a few events + + return events + + # Consume first 3 events from the stream and measure time + start = time.perf_counter() + events = await consume_stream() + elapsed = time.perf_counter() - start + + # Assert we received events quickly + assert len(events) == 3 + assert elapsed < 0.5 + + texts = [p.root.text for e in events for p in e.status.message.parts] + assert texts == ['Event 0', 'Event 1', 'Event 2'] + + +@pytest.mark.asyncio +async def test_list_task_push_notification_config_no_store(): + """Test on_list_task_push_notification_config when _push_config_store is None.""" + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=AsyncMock(spec=TaskStore), + push_config_store=None, # Explicitly None + ) + params = ListTaskPushNotificationConfigParams(id='task1') + from a2a.utils.errors import ServerError # Local import + + with pytest.raises(ServerError) as exc_info: + await request_handler.on_list_task_push_notification_config( + params, create_server_call_context() + ) + assert isinstance(exc_info.value.error, UnsupportedOperationError) + + +@pytest.mark.asyncio +async def test_list_task_push_notification_config_task_not_found(): + """Test on_list_task_push_notification_config when task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None # Task not found + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + ) + params = 
ListTaskPushNotificationConfigParams(id='non_existent_task') + from a2a.utils.errors import ServerError # Local import + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + await request_handler.on_list_task_push_notification_config( + params, context + ) + + assert isinstance(exc_info.value.error, TaskNotFoundError) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.get_info.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_list_no_task_push_notification_config_info(): + """Test on_list_task_push_notification_config when push_config_store.get_info returns []""" + mock_task_store = AsyncMock(spec=TaskStore) + + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + ) + params = ListTaskPushNotificationConfigParams(id='non_existent_task') + + result = await request_handler.on_list_task_push_notification_config( + params, create_server_call_context() + ) + assert result == [] + + +@pytest.mark.asyncio +async def test_list_task_push_notification_config_info_with_config(): + """Test on_list_task_push_notification_config with push config+id""" + mock_task_store = AsyncMock(spec=TaskStore) + + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + + push_config1 = PushNotificationConfig( + id='config_1', url='http://example.com' + ) + push_config2 = PushNotificationConfig( + id='config_2', url='http://example.com' + ) + + push_store = InMemoryPushNotificationConfigStore() + await push_store.set_info('task_1', push_config1) + await push_store.set_info('task_1', push_config2) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + 
task_store=mock_task_store, + push_config_store=push_store, + ) + params = ListTaskPushNotificationConfigParams(id='task_1') + + result: list[ + TaskPushNotificationConfig + ] = await request_handler.on_list_task_push_notification_config( + params, create_server_call_context() + ) + + assert len(result) == 2 + assert result[0].task_id == 'task_1' + assert result[0].push_notification_config == push_config1 + assert result[1].task_id == 'task_1' + assert result[1].push_notification_config == push_config2 + + +@pytest.mark.asyncio +async def test_list_task_push_notification_config_info_with_config_and_no_id(): + """Test on_list_task_push_notification_config with no push config id""" + mock_task_store = AsyncMock(spec=TaskStore) + + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + ) + + # multiple calls without config id should replace the existing + set_config_params1 = TaskPushNotificationConfig( + task_id='task_1', + push_notification_config=PushNotificationConfig( + url='http://1.example.com' + ), + ) + await request_handler.on_set_task_push_notification_config( + set_config_params1, create_server_call_context() + ) + + set_config_params2 = TaskPushNotificationConfig( + task_id='task_1', + push_notification_config=PushNotificationConfig( + url='http://2.example.com' + ), + ) + await request_handler.on_set_task_push_notification_config( + set_config_params2, create_server_call_context() + ) + + params = ListTaskPushNotificationConfigParams(id='task_1') + + result: list[ + TaskPushNotificationConfig + ] = await request_handler.on_list_task_push_notification_config( + params, create_server_call_context() + ) + + assert len(result) == 1 + assert result[0].task_id == 'task_1' + assert ( + result[0].push_notification_config.url + == set_config_params2.push_notification_config.url + ) + assert 
result[0].push_notification_config.id == 'task_1' + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config_no_store(): + """Test on_delete_task_push_notification_config when _push_config_store is None.""" + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=AsyncMock(spec=TaskStore), + push_config_store=None, # Explicitly None + ) + params = DeleteTaskPushNotificationConfigParams( + id='task1', push_notification_config_id='config1' + ) + from a2a.utils.errors import ServerError # Local import + + with pytest.raises(ServerError) as exc_info: + await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + assert isinstance(exc_info.value.error, UnsupportedOperationError) + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config_task_not_found(): + """Test on_delete_task_push_notification_config when task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None # Task not found + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + ) + params = DeleteTaskPushNotificationConfigParams( + id='non_existent_task', push_notification_config_id='config1' + ) + from a2a.utils.errors import ServerError # Local import + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + await request_handler.on_delete_task_push_notification_config( + params, context + ) + + assert isinstance(exc_info.value.error, TaskNotFoundError) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.get_info.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_delete_no_task_push_notification_config_info(): + """Test on_delete_task_push_notification_config without config info""" + 
mock_task_store = AsyncMock(spec=TaskStore) + + sample_task = create_sample_task(task_id='task_1') + mock_task_store.get.return_value = sample_task + + push_store = InMemoryPushNotificationConfigStore() + await push_store.set_info( + 'task_2', + PushNotificationConfig(id='config_1', url='http://example.com'), + ) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + ) + params = DeleteTaskPushNotificationConfigParams( + id='task1', push_notification_config_id='config_non_existant' + ) + + result = await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + assert result is None + + params = DeleteTaskPushNotificationConfigParams( + id='task2', push_notification_config_id='config_non_existant' + ) + + result = await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + assert result is None + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config_info_with_config(): + """Test on_delete_task_push_notification_config with push config+id""" + mock_task_store = AsyncMock(spec=TaskStore) + + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + + push_config1 = PushNotificationConfig( + id='config_1', url='http://example.com' + ) + push_config2 = PushNotificationConfig( + id='config_2', url='http://example.com' + ) + + push_store = InMemoryPushNotificationConfigStore() + await push_store.set_info('task_1', push_config1) + await push_store.set_info('task_1', push_config2) + await push_store.set_info('task_2', push_config1) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + ) + params = DeleteTaskPushNotificationConfigParams( + id='task_1', push_notification_config_id='config_1' + ) + + result1 = await 
request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + + assert result1 is None + + result2 = await request_handler.on_list_task_push_notification_config( + ListTaskPushNotificationConfigParams(id='task_1'), + create_server_call_context(), + ) + + assert len(result2) == 1 + assert result2[0].task_id == 'task_1' + assert result2[0].push_notification_config == push_config2 + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config_info_with_config_and_no_id(): + """Test on_delete_task_push_notification_config with no push config id""" + mock_task_store = AsyncMock(spec=TaskStore) + + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + + push_config = PushNotificationConfig(url='http://example.com') + + # insertion without id should replace the existing config + push_store = InMemoryPushNotificationConfigStore() + await push_store.set_info('task_1', push_config) + await push_store.set_info('task_1', push_config) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + ) + params = DeleteTaskPushNotificationConfigParams( + id='task_1', push_notification_config_id='task_1' + ) + + result = await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + + assert result is None + + result2 = await request_handler.on_list_task_push_notification_config( + ListTaskPushNotificationConfigParams(id='task_1'), + create_server_call_context(), + ) + + assert len(result2) == 0 + + +TERMINAL_TASK_STATES = { + TaskState.completed, + TaskState.canceled, + TaskState.failed, + TaskState.rejected, +} + + +@pytest.mark.asyncio +@pytest.mark.parametrize('terminal_state', TERMINAL_TASK_STATES) +async def test_on_message_send_task_in_terminal_state(terminal_state): + """Test on_message_send when task is already in a terminal state.""" 
+ task_id = f'terminal_task_{terminal_state.value}' + terminal_task = create_sample_task( + task_id=task_id, status_state=terminal_state + ) + + mock_task_store = AsyncMock(spec=TaskStore) + # The get method of TaskManager calls task_store.get. + # We mock TaskManager.get_task which is an async method. + # So we should patch that instead. + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), task_store=mock_task_store + ) + + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_terminal', + parts=[], + task_id=task_id, + ) + ) + + from a2a.utils.errors import ServerError + + # Patch the TaskManager's get_task method to return our terminal task + with patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=terminal_task, + ): + with pytest.raises(ServerError) as exc_info: + await request_handler.on_message_send( + params, create_server_call_context() + ) + + assert isinstance(exc_info.value.error, InvalidParamsError) + assert exc_info.value.error.message + assert ( + f'Task {task_id} is in terminal state: {terminal_state.value}' + in exc_info.value.error.message + ) + + +@pytest.mark.asyncio +@pytest.mark.parametrize('terminal_state', TERMINAL_TASK_STATES) +async def test_on_message_send_stream_task_in_terminal_state(terminal_state): + """Test on_message_send_stream when task is already in a terminal state.""" + task_id = f'terminal_stream_task_{terminal_state.value}' + terminal_task = create_sample_task( + task_id=task_id, status_state=terminal_state + ) + + mock_task_store = AsyncMock(spec=TaskStore) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), task_store=mock_task_store + ) + + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_terminal_stream', + parts=[], + task_id=task_id, + ) + ) + + from a2a.utils.errors import ServerError + + with patch( + 
'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=terminal_task, + ): + with pytest.raises(ServerError) as exc_info: + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass # pragma: no cover + + assert isinstance(exc_info.value.error, InvalidParamsError) + assert exc_info.value.error.message + assert ( + f'Task {task_id} is in terminal state: {terminal_state.value}' + in exc_info.value.error.message + ) + + +@pytest.mark.asyncio +@pytest.mark.parametrize('terminal_state', TERMINAL_TASK_STATES) +async def test_on_resubscribe_to_task_in_terminal_state(terminal_state): + """Test on_resubscribe_to_task when task is in a terminal state.""" + task_id = f'resub_terminal_task_{terminal_state.value}' + terminal_task = create_sample_task( + task_id=task_id, status_state=terminal_state + ) + + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = terminal_task + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=mock_task_store, + queue_manager=AsyncMock(spec=QueueManager), + ) + params = TaskIdParams(id=task_id) + + from a2a.utils.errors import ServerError + + context = create_server_call_context() + with pytest.raises(ServerError) as exc_info: + async for _ in request_handler.on_resubscribe_to_task(params, context): + pass # pragma: no cover + + assert isinstance(exc_info.value.error, InvalidParamsError) + assert exc_info.value.error.message + assert ( + f'Task {task_id} is in terminal state: {terminal_state.value}' + in exc_info.value.error.message + ) + mock_task_store.get.assert_awaited_once_with(task_id, context) + + +@pytest.mark.asyncio +async def test_on_message_send_task_id_provided_but_task_not_found(): + """Test on_message_send when task_id is provided but task doesn't exist.""" + task_id = 'nonexistent_task' + mock_task_store = AsyncMock(spec=TaskStore) + + request_handler = DefaultRequestHandler( + 
agent_executor=DummyAgentExecutor(), task_store=mock_task_store + ) + + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_nonexistent', + parts=[Part(root=TextPart(text='Hello'))], + task_id=task_id, + context_id='ctx1', + ) + ) + + from a2a.utils.errors import ServerError + + # Mock TaskManager.get_task to return None (task not found) + with patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ): + with pytest.raises(ServerError) as exc_info: + await request_handler.on_message_send( + params, create_server_call_context() + ) + + assert isinstance(exc_info.value.error, TaskNotFoundError) + assert exc_info.value.error.message + assert ( + f'Task {task_id} was specified but does not exist' + in exc_info.value.error.message + ) + + +@pytest.mark.asyncio +async def test_on_message_send_stream_task_id_provided_but_task_not_found(): + """Test on_message_send_stream when task_id is provided but task doesn't exist.""" + task_id = 'nonexistent_stream_task' + mock_task_store = AsyncMock(spec=TaskStore) + + request_handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), task_store=mock_task_store + ) + + params = MessageSendParams( + message=Message( + role=Role.user, + message_id='msg_nonexistent_stream', + parts=[Part(root=TextPart(text='Hello'))], + task_id=task_id, + context_id='ctx1', + ) + ) + + from a2a.utils.errors import ServerError + + # Mock TaskManager.get_task to return None (task not found) + with patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ): + with pytest.raises(ServerError) as exc_info: + # Need to consume the async generator to trigger the error + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass + + assert isinstance(exc_info.value.error, TaskNotFoundError) + assert exc_info.value.error.message + assert ( + f'Task {task_id} was specified 
but does not exist' + in exc_info.value.error.message + ) diff --git a/tests/server/request_handlers/test_grpc_handler.py b/tests/server/request_handlers/test_grpc_handler.py new file mode 100644 index 000000000..647d9e86f --- /dev/null +++ b/tests/server/request_handlers/test_grpc_handler.py @@ -0,0 +1,455 @@ +from unittest.mock import AsyncMock, MagicMock + +import grpc +import grpc.aio +import pytest + +from a2a import types +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.grpc import a2a_pb2 +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers import GrpcHandler, RequestHandler +from a2a.utils.errors import ServerError + + +# --- Fixtures --- + + +@pytest.fixture +def mock_request_handler() -> AsyncMock: + return AsyncMock(spec=RequestHandler) + + +@pytest.fixture +def mock_grpc_context() -> AsyncMock: + context = AsyncMock(spec=grpc.aio.ServicerContext) + context.abort = AsyncMock() + context.set_trailing_metadata = MagicMock() + return context + + +@pytest.fixture +def sample_agent_card() -> types.AgentCard: + return types.AgentCard( + name='Test Agent', + description='A test agent', + url='http://localhost', + version='1.0.0', + capabilities=types.AgentCapabilities( + streaming=True, push_notifications=True + ), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + skills=[], + ) + + +@pytest.fixture +def grpc_handler( + mock_request_handler: AsyncMock, sample_agent_card: types.AgentCard +) -> GrpcHandler: + return GrpcHandler( + agent_card=sample_agent_card, request_handler=mock_request_handler + ) + + +# --- Test Cases --- + + +@pytest.mark.asyncio +async def test_send_message_success( + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + """Test successful SendMessage call.""" + request_proto = a2a_pb2.SendMessageRequest( + request=a2a_pb2.Message(message_id='msg-1') + ) + response_model = types.Task( + id='task-1', + 
context_id='ctx-1', + status=types.TaskStatus(state=types.TaskState.completed), + ) + mock_request_handler.on_message_send.return_value = response_model + + response = await grpc_handler.SendMessage(request_proto, mock_grpc_context) + + mock_request_handler.on_message_send.assert_awaited_once() + assert isinstance(response, a2a_pb2.SendMessageResponse) + assert response.HasField('task') + assert response.task.id == 'task-1' + + +@pytest.mark.asyncio +async def test_send_message_server_error( + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + """Test SendMessage call when handler raises a ServerError.""" + request_proto = a2a_pb2.SendMessageRequest() + error = ServerError(error=types.InvalidParamsError(message='Bad params')) + mock_request_handler.on_message_send.side_effect = error + + await grpc_handler.SendMessage(request_proto, mock_grpc_context) + + mock_grpc_context.abort.assert_awaited_once_with( + grpc.StatusCode.INVALID_ARGUMENT, 'InvalidParamsError: Bad params' + ) + + +@pytest.mark.asyncio +async def test_get_task_success( + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + """Test successful GetTask call.""" + request_proto = a2a_pb2.GetTaskRequest(name='tasks/task-1') + response_model = types.Task( + id='task-1', + context_id='ctx-1', + status=types.TaskStatus(state=types.TaskState.working), + ) + mock_request_handler.on_get_task.return_value = response_model + + response = await grpc_handler.GetTask(request_proto, mock_grpc_context) + + mock_request_handler.on_get_task.assert_awaited_once() + assert isinstance(response, a2a_pb2.Task) + assert response.id == 'task-1' + + +@pytest.mark.asyncio +async def test_get_task_not_found( + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + """Test GetTask call when task is not found.""" + request_proto = 
a2a_pb2.GetTaskRequest(name='tasks/task-1') + mock_request_handler.on_get_task.return_value = None + + await grpc_handler.GetTask(request_proto, mock_grpc_context) + + mock_grpc_context.abort.assert_awaited_once_with( + grpc.StatusCode.NOT_FOUND, 'TaskNotFoundError: Task not found' + ) + + +@pytest.mark.asyncio +async def test_cancel_task_server_error( + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + """Test CancelTask call when handler raises ServerError.""" + request_proto = a2a_pb2.CancelTaskRequest(name='tasks/task-1') + error = ServerError(error=types.TaskNotCancelableError()) + mock_request_handler.on_cancel_task.side_effect = error + + await grpc_handler.CancelTask(request_proto, mock_grpc_context) + + mock_grpc_context.abort.assert_awaited_once_with( + grpc.StatusCode.UNIMPLEMENTED, + 'TaskNotCancelableError: Task cannot be canceled', + ) + + +@pytest.mark.asyncio +async def test_send_streaming_message( + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + """Test successful SendStreamingMessage call.""" + + async def mock_stream(): + yield types.Task( + id='task-1', + context_id='ctx-1', + status=types.TaskStatus(state=types.TaskState.working), + ) + + mock_request_handler.on_message_send_stream.return_value = mock_stream() + request_proto = a2a_pb2.SendMessageRequest() + + results = [ + result + async for result in grpc_handler.SendStreamingMessage( + request_proto, mock_grpc_context + ) + ] + + assert len(results) == 1 + assert results[0].HasField('task') + assert results[0].task.id == 'task-1' + + +@pytest.mark.asyncio +async def test_get_agent_card( + grpc_handler: GrpcHandler, + sample_agent_card: types.AgentCard, + mock_grpc_context: AsyncMock, +) -> None: + """Test GetAgentCard call.""" + request_proto = a2a_pb2.GetAgentCardRequest() + response = await grpc_handler.GetAgentCard(request_proto, mock_grpc_context) + + assert 
response.name == sample_agent_card.name + assert response.version == sample_agent_card.version + + +@pytest.mark.asyncio +async def test_get_agent_card_with_modifier( + mock_request_handler: AsyncMock, + sample_agent_card: types.AgentCard, + mock_grpc_context: AsyncMock, +) -> None: + """Test GetAgentCard call with a card_modifier.""" + + async def modifier(card: types.AgentCard) -> types.AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.name = 'Modified gRPC Agent' + return modified_card + + grpc_handler_modified = GrpcHandler( + agent_card=sample_agent_card, + request_handler=mock_request_handler, + card_modifier=modifier, + ) + + request_proto = a2a_pb2.GetAgentCardRequest() + response = await grpc_handler_modified.GetAgentCard( + request_proto, mock_grpc_context + ) + + assert response.name == 'Modified gRPC Agent' + assert response.version == sample_agent_card.version + + +@pytest.mark.asyncio +async def test_get_agent_card_with_modifier_sync( + mock_request_handler: AsyncMock, + sample_agent_card: types.AgentCard, + mock_grpc_context: AsyncMock, +) -> None: + """Test GetAgentCard call with a synchronous card_modifier.""" + + def modifier(card: types.AgentCard) -> types.AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.name = 'Modified gRPC Agent' + return modified_card + + grpc_handler_modified = GrpcHandler( + agent_card=sample_agent_card, + request_handler=mock_request_handler, + card_modifier=modifier, + ) + + request_proto = a2a_pb2.GetAgentCardRequest() + response = await grpc_handler_modified.GetAgentCard( + request_proto, mock_grpc_context + ) + + assert response.name == 'Modified gRPC Agent' + assert response.version == sample_agent_card.version + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'server_error, grpc_status_code, error_message_part', + [ + ( + ServerError(error=types.JSONParseError()), + grpc.StatusCode.INTERNAL, + 'JSONParseError', + ), + ( + 
ServerError(error=types.InvalidRequestError()), + grpc.StatusCode.INVALID_ARGUMENT, + 'InvalidRequestError', + ), + ( + ServerError(error=types.MethodNotFoundError()), + grpc.StatusCode.NOT_FOUND, + 'MethodNotFoundError', + ), + ( + ServerError(error=types.InvalidParamsError()), + grpc.StatusCode.INVALID_ARGUMENT, + 'InvalidParamsError', + ), + ( + ServerError(error=types.InternalError()), + grpc.StatusCode.INTERNAL, + 'InternalError', + ), + ( + ServerError(error=types.TaskNotFoundError()), + grpc.StatusCode.NOT_FOUND, + 'TaskNotFoundError', + ), + ( + ServerError(error=types.TaskNotCancelableError()), + grpc.StatusCode.UNIMPLEMENTED, + 'TaskNotCancelableError', + ), + ( + ServerError(error=types.PushNotificationNotSupportedError()), + grpc.StatusCode.UNIMPLEMENTED, + 'PushNotificationNotSupportedError', + ), + ( + ServerError(error=types.UnsupportedOperationError()), + grpc.StatusCode.UNIMPLEMENTED, + 'UnsupportedOperationError', + ), + ( + ServerError(error=types.ContentTypeNotSupportedError()), + grpc.StatusCode.UNIMPLEMENTED, + 'ContentTypeNotSupportedError', + ), + ( + ServerError(error=types.InvalidAgentResponseError()), + grpc.StatusCode.INTERNAL, + 'InvalidAgentResponseError', + ), + ( + ServerError(error=types.JSONRPCError(code=99, message='Unknown')), + grpc.StatusCode.UNKNOWN, + 'Unknown error', + ), + ], +) +async def test_abort_context_error_mapping( # noqa: PLR0913 + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + server_error: ServerError, + grpc_status_code: grpc.StatusCode, + error_message_part: str, +) -> None: + mock_request_handler.on_get_task.side_effect = server_error + request_proto = a2a_pb2.GetTaskRequest(name='tasks/any') + await grpc_handler.GetTask(request_proto, mock_grpc_context) + + mock_grpc_context.abort.assert_awaited_once() + call_args, _ = mock_grpc_context.abort.call_args + assert call_args[0] == grpc_status_code + assert error_message_part in call_args[1] + + 
+@pytest.mark.asyncio +class TestGrpcExtensions: + async def test_send_message_with_extensions( + self, + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + ) -> None: + mock_grpc_context.invocation_metadata = grpc.aio.Metadata( + (HTTP_EXTENSION_HEADER, 'foo'), + (HTTP_EXTENSION_HEADER, 'bar'), + ) + + def side_effect(request, context: ServerCallContext): + context.activated_extensions.add('foo') + context.activated_extensions.add('baz') + return types.Task( + id='task-1', + context_id='ctx-1', + status=types.TaskStatus(state=types.TaskState.completed), + ) + + mock_request_handler.on_message_send.side_effect = side_effect + + await grpc_handler.SendMessage( + a2a_pb2.SendMessageRequest(), mock_grpc_context + ) + + mock_request_handler.on_message_send.assert_awaited_once() + call_context = mock_request_handler.on_message_send.call_args[0][1] + assert isinstance(call_context, ServerCallContext) + assert call_context.requested_extensions == {'foo', 'bar'} + + mock_grpc_context.set_trailing_metadata.assert_called_once() + called_metadata = ( + mock_grpc_context.set_trailing_metadata.call_args.args[0] + ) + assert set(called_metadata) == { + (HTTP_EXTENSION_HEADER, 'foo'), + (HTTP_EXTENSION_HEADER, 'baz'), + } + + async def test_send_message_with_comma_separated_extensions( + self, + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + ) -> None: + mock_grpc_context.invocation_metadata = grpc.aio.Metadata( + (HTTP_EXTENSION_HEADER, 'foo ,, bar,'), + (HTTP_EXTENSION_HEADER, 'baz , bar'), + ) + mock_request_handler.on_message_send.return_value = types.Message( + message_id='1', + role=types.Role.agent, + parts=[types.Part(root=types.TextPart(text='test'))], + ) + + await grpc_handler.SendMessage( + a2a_pb2.SendMessageRequest(), mock_grpc_context + ) + + mock_request_handler.on_message_send.assert_awaited_once() + call_context = mock_request_handler.on_message_send.call_args[0][1] + 
assert isinstance(call_context, ServerCallContext) + assert call_context.requested_extensions == {'foo', 'bar', 'baz'} + + async def test_send_streaming_message_with_extensions( + self, + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + ) -> None: + mock_grpc_context.invocation_metadata = grpc.aio.Metadata( + (HTTP_EXTENSION_HEADER, 'foo'), + (HTTP_EXTENSION_HEADER, 'bar'), + ) + + async def side_effect(request, context: ServerCallContext): + context.activated_extensions.add('foo') + context.activated_extensions.add('baz') + yield types.Task( + id='task-1', + context_id='ctx-1', + status=types.TaskStatus(state=types.TaskState.working), + ) + + mock_request_handler.on_message_send_stream.side_effect = side_effect + + results = [ + result + async for result in grpc_handler.SendStreamingMessage( + a2a_pb2.SendMessageRequest(), mock_grpc_context + ) + ] + assert results + + mock_request_handler.on_message_send_stream.assert_called_once() + call_context = mock_request_handler.on_message_send_stream.call_args[0][ + 1 + ] + assert isinstance(call_context, ServerCallContext) + assert call_context.requested_extensions == {'foo', 'bar'} + + mock_grpc_context.set_trailing_metadata.assert_called_once() + called_metadata = ( + mock_grpc_context.set_trailing_metadata.call_args.args[0] + ) + assert set(called_metadata) == { + (HTTP_EXTENSION_HEADER, 'foo'), + (HTTP_EXTENSION_HEADER, 'baz'), + } diff --git a/tests/server/request_handlers/test_jsonrpc_handler.py b/tests/server/request_handlers/test_jsonrpc_handler.py index 459b6e290..4ed6e7025 100644 --- a/tests/server/request_handlers/test_jsonrpc_handler.py +++ b/tests/server/request_handlers/test_jsonrpc_handler.py @@ -1,7 +1,9 @@ +import asyncio import unittest import unittest.async_case + from collections.abc import AsyncGenerator -from typing import Any +from typing import Any, NoReturn from unittest.mock import AsyncMock, MagicMock, call, patch import httpx @@ -15,13 +17,26 @@ 
from a2a.server.events import QueueManager from a2a.server.events.event_queue import EventQueue from a2a.server.request_handlers import DefaultRequestHandler, JSONRPCHandler -from a2a.server.tasks import InMemoryPushNotifier, PushNotifier, TaskStore +from a2a.server.tasks import ( + BasePushNotificationSender, + InMemoryPushNotificationConfigStore, + PushNotificationConfigStore, + PushNotificationSender, + TaskStore, +) from a2a.types import ( AgentCapabilities, AgentCard, Artifact, CancelTaskRequest, CancelTaskSuccessResponse, + DeleteTaskPushNotificationConfigParams, + DeleteTaskPushNotificationConfigRequest, + DeleteTaskPushNotificationConfigSuccessResponse, + GetAuthenticatedExtendedCardRequest, + GetAuthenticatedExtendedCardResponse, + GetAuthenticatedExtendedCardSuccessResponse, + GetTaskPushNotificationConfigParams, GetTaskPushNotificationConfigRequest, GetTaskPushNotificationConfigResponse, GetTaskPushNotificationConfigSuccessResponse, @@ -30,6 +45,9 @@ GetTaskSuccessResponse, InternalError, JSONRPCErrorResponse, + ListTaskPushNotificationConfigParams, + ListTaskPushNotificationConfigRequest, + ListTaskPushNotificationConfigSuccessResponse, Message, MessageSendConfiguration, MessageSendParams, @@ -57,6 +75,7 @@ ) from a2a.utils.errors import ServerError + MINIMAL_TASK: dict[str, Any] = { 'id': 'task_123', 'contextId': 'session-xyz', @@ -74,7 +93,9 @@ class TestJSONRPCtHandler(unittest.async_case.IsolatedAsyncioTestCase): @pytest.fixture(autouse=True) def init_fixtures(self) -> None: self.mock_agent_card = MagicMock( - spec=AgentCard, url='http://agent.example.com/api' + spec=AgentCard, + url='http://agent.example.com/api', + supports_authenticated_extended_card=True, ) async def test_on_get_task_success(self) -> None: @@ -94,7 +115,7 @@ async def test_on_get_task_success(self) -> None: ) self.assertIsInstance(response.root, GetTaskSuccessResponse) assert response.root.result == mock_task # type: ignore - mock_task_store.get.assert_called_once_with(task_id) 
+ mock_task_store.get.assert_called_once_with(task_id, unittest.mock.ANY) async def test_on_get_task_not_found(self) -> None: mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -130,6 +151,7 @@ async def test_on_cancel_task_success(self) -> None: call_context = ServerCallContext(state={'foo': 'bar'}) async def streaming_coro(): + mock_task.status.state = TaskState.canceled yield mock_task with patch( @@ -141,6 +163,7 @@ async def streaming_coro(): assert mock_agent_executor.cancel.call_count == 1 self.assertIsInstance(response.root, CancelTaskSuccessResponse) assert response.root.result == mock_task # type: ignore + assert response.root.result.status.state == TaskState.canceled mock_agent_executor.cancel.assert_called_once() async def test_on_cancel_task_not_supported(self) -> None: @@ -187,7 +210,9 @@ async def test_on_cancel_task_not_found(self) -> None: response = await handler.on_cancel_task(request) self.assertIsInstance(response.root, JSONRPCErrorResponse) assert response.root.error == TaskNotFoundError() # type: ignore - mock_task_store.get.assert_called_once_with('nonexistent_id') + mock_task_store.get.assert_called_once_with( + 'nonexistent_id', unittest.mock.ANY + ) mock_agent_executor.cancel.assert_not_called() @patch( @@ -256,8 +281,8 @@ async def streaming_coro(): params=MessageSendParams( message=Message( **MESSAGE_PAYLOAD, - taskId=mock_task.id, - contextId=mock_task.contextId, + task_id=mock_task.id, + context_id=mock_task.context_id, ) ), ) @@ -324,15 +349,15 @@ async def test_on_message_stream_new_message_success( events: list[Any] = [ Task(**MINIMAL_TASK), TaskArtifactUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', artifact=Artifact( - artifactId='11', parts=[Part(TextPart(text='text'))] + artifact_id='11', parts=[Part(TextPart(text='text'))] ), ), TaskStatusUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', 
status=TaskStatus(state=TaskState.completed), final=True, ), @@ -342,6 +367,14 @@ async def streaming_coro(): for event in events: yield event + # Latch to ensure background execute is scheduled before asserting + execute_called = asyncio.Event() + + async def exec_side_effect(*args, **kwargs): + execute_called.set() + + mock_agent_executor.execute.side_effect = exec_side_effect + with patch( 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', return_value=streaming_coro(), @@ -363,6 +396,7 @@ async def streaming_coro(): event.root, SendStreamingMessageSuccessResponse ) assert event.root.result == events[i] + await asyncio.wait_for(execute_called.wait(), timeout=0.1) mock_agent_executor.execute.assert_called_once() async def test_on_message_stream_new_message_existing_task_success( @@ -381,15 +415,15 @@ async def test_on_message_stream_new_message_existing_task_success( events: list[Any] = [ mock_task, TaskArtifactUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', artifact=Artifact( - artifactId='11', parts=[Part(TextPart(text='text'))] + artifact_id='11', parts=[Part(TextPart(text='text'))] ), ), TaskStatusUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', status=TaskStatus(state=TaskState.working), final=True, ), @@ -399,6 +433,14 @@ async def streaming_coro(): for event in events: yield event + # Latch to ensure background execute is scheduled before asserting + execute_called = asyncio.Event() + + async def exec_side_effect(*args, **kwargs): + execute_called.set() + + mock_agent_executor.execute.side_effect = exec_side_effect + with patch( 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', return_value=streaming_coro(), @@ -410,8 +452,8 @@ async def streaming_coro(): params=MessageSendParams( message=Message( **MESSAGE_PAYLOAD, - taskId=mock_task.id, - contextId=mock_task.contextId, + 
task_id=mock_task.id, + context_id=mock_task.context_id, ) ), ) @@ -419,27 +461,31 @@ async def streaming_coro(): assert isinstance(response, AsyncGenerator) collected_events = [item async for item in response] assert len(collected_events) == len(events) + await asyncio.wait_for(execute_called.wait(), timeout=0.1) mock_agent_executor.execute.assert_called_once() assert mock_task.history is not None and len(mock_task.history) == 1 async def test_set_push_notification_success(self) -> None: mock_agent_executor = AsyncMock(spec=AgentExecutor) mock_task_store = AsyncMock(spec=TaskStore) - mock_push_notifier = AsyncMock(spec=PushNotifier) + mock_push_notification_store = AsyncMock( + spec=PushNotificationConfigStore + ) + request_handler = DefaultRequestHandler( mock_agent_executor, mock_task_store, - push_notifier=mock_push_notifier, + push_config_store=mock_push_notification_store, ) self.mock_agent_card.capabilities = AgentCapabilities( - streaming=True, pushNotifications=True + streaming=True, push_notifications=True ) handler = JSONRPCHandler(self.mock_agent_card, request_handler) mock_task = Task(**MINIMAL_TASK) mock_task_store.get.return_value = mock_task task_push_config = TaskPushNotificationConfig( - taskId=mock_task.id, - pushNotificationConfig=PushNotificationConfig( + task_id=mock_task.id, + push_notification_config=PushNotificationConfig( url='http://example.com' ), ) @@ -447,40 +493,41 @@ async def test_set_push_notification_success(self) -> None: id='1', params=task_push_config ) response: SetTaskPushNotificationConfigResponse = ( - await handler.set_push_notification(request) + await handler.set_push_notification_config(request) ) self.assertIsInstance( response.root, SetTaskPushNotificationConfigSuccessResponse ) assert response.root.result == task_push_config # type: ignore - mock_push_notifier.set_info.assert_called_once_with( - mock_task.id, task_push_config.pushNotificationConfig + mock_push_notification_store.set_info.assert_called_once_with( + 
mock_task.id, task_push_config.push_notification_config ) async def test_get_push_notification_success(self) -> None: mock_agent_executor = AsyncMock(spec=AgentExecutor) mock_task_store = AsyncMock(spec=TaskStore) - mock_httpx_client = AsyncMock(spec=httpx.AsyncClient) - push_notifier = InMemoryPushNotifier(httpx_client=mock_httpx_client) + push_notification_store = InMemoryPushNotificationConfigStore() request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store, push_notifier=push_notifier + mock_agent_executor, + mock_task_store, + push_config_store=push_notification_store, ) self.mock_agent_card.capabilities = AgentCapabilities( - streaming=True, pushNotifications=True + streaming=True, push_notifications=True ) handler = JSONRPCHandler(self.mock_agent_card, request_handler) mock_task = Task(**MINIMAL_TASK) mock_task_store.get.return_value = mock_task task_push_config = TaskPushNotificationConfig( - taskId=mock_task.id, - pushNotificationConfig=PushNotificationConfig( + task_id=mock_task.id, + push_notification_config=PushNotificationConfig( url='http://example.com' ), ) request = SetTaskPushNotificationConfigRequest( id='1', params=task_push_config ) - await handler.set_push_notification(request) + await handler.set_push_notification_config(request) get_request: GetTaskPushNotificationConfigRequest = ( GetTaskPushNotificationConfigRequest( @@ -488,7 +535,7 @@ async def test_get_push_notification_success(self) -> None: ) ) get_response: GetTaskPushNotificationConfigResponse = ( - await handler.get_push_notification(get_request) + await handler.get_push_notification_config(get_request) ) self.assertIsInstance( get_response.root, GetTaskPushNotificationConfigSuccessResponse @@ -504,12 +551,18 @@ async def test_on_message_stream_new_message_send_push_notification_success( mock_agent_executor = AsyncMock(spec=AgentExecutor) mock_task_store = AsyncMock(spec=TaskStore) mock_httpx_client = AsyncMock(spec=httpx.AsyncClient) - push_notifier = 
InMemoryPushNotifier(httpx_client=mock_httpx_client) + push_notification_store = InMemoryPushNotificationConfigStore() + push_notification_sender = BasePushNotificationSender( + mock_httpx_client, push_notification_store + ) request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store, push_notifier=push_notifier + mock_agent_executor, + mock_task_store, + push_config_store=push_notification_store, + push_sender=push_notification_sender, ) self.mock_agent_card.capabilities = AgentCapabilities( - streaming=True, pushNotifications=True + streaming=True, push_notifications=True ) _mock_builder_build.return_value = RequestContext( request=MagicMock(), @@ -523,15 +576,15 @@ async def test_on_message_stream_new_message_send_push_notification_success( events: list[Any] = [ Task(**MINIMAL_TASK), TaskArtifactUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', artifact=Artifact( - artifactId='11', parts=[Part(TextPart(text='text'))] + artifact_id='11', parts=[Part(TextPart(text='text'))] ), ), TaskStatusUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', status=TaskStatus(state=TaskState.completed), final=True, ), @@ -553,8 +606,8 @@ async def streaming_coro(): params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), ) request.params.configuration = MessageSendConfiguration( - acceptedOutputModes=['text'], - pushNotificationConfig=PushNotificationConfig( + accepted_output_modes=['text'], + push_notification_config=PushNotificationConfig( url='http://example.com' ), ) @@ -573,6 +626,7 @@ async def streaming_coro(): 'kind': 'task', 'status': {'state': 'submitted'}, }, + headers=None, ), call( 'http://example.com', @@ -593,6 +647,7 @@ async def streaming_coro(): 'kind': 'task', 'status': {'state': 'submitted'}, }, + headers=None, ), call( 'http://example.com', @@ -613,6 +668,7 @@ async def streaming_coro(): 'kind': 'task', 'status': 
{'state': 'completed'}, }, + headers=None, ), ] mock_httpx_client.post.assert_has_calls(calls) @@ -631,15 +687,15 @@ async def test_on_resubscribe_existing_task_success( mock_task = Task(**MINIMAL_TASK, history=[]) events: list[Any] = [ TaskArtifactUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', artifact=Artifact( - artifactId='11', parts=[Part(TextPart(text='text'))] + artifact_id='11', parts=[Part(TextPart(text='text'))] ), ), TaskStatusUpdateEvent( - taskId='task_123', - contextId='session-xyz', + task_id='task_123', + context_id='session-xyz', status=TaskStatus(state=TaskState.completed), final=True, ), @@ -711,9 +767,8 @@ async def test_streaming_not_supported_error( async for _ in handler.on_message_send_stream(request): pass - aaa = context.exception self.assertEqual( - str(context.exception.error.message), + str(context.exception.error.message), # type: ignore 'Streaming is not supported by the agent', ) @@ -727,14 +782,14 @@ async def test_push_notifications_not_supported_error(self) -> None: ) # Create agent card with push notifications capability disabled self.mock_agent_card.capabilities = AgentCapabilities( - pushNotifications=False, streaming=True + push_notifications=False, streaming=True ) handler = JSONRPCHandler(self.mock_agent_card, request_handler) # Act & Assert task_push_config = TaskPushNotificationConfig( - taskId='task_123', - pushNotificationConfig=PushNotificationConfig( + task_id='task_123', + push_notification_config=PushNotificationConfig( url='http://example.com' ), ) @@ -744,14 +799,14 @@ async def test_push_notifications_not_supported_error(self) -> None: # Should raise ServerError about push notifications not supported with self.assertRaises(ServerError) as context: - await handler.set_push_notification(request) + await handler.set_push_notification_config(request) self.assertEqual( - str(context.exception.error.message), + str(context.exception.error.message), # type: 
ignore 'Push notifications are not supported by the agent', ) - async def test_on_get_push_notification_no_push_notifier(self) -> None: + async def test_on_get_push_notification_no_push_config_store(self) -> None: """Test get_push_notification with no push notifier configured.""" # Arrange mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -761,7 +816,7 @@ async def test_on_get_push_notification_no_push_notifier(self) -> None: mock_agent_executor, mock_task_store ) self.mock_agent_card.capabilities = AgentCapabilities( - pushNotifications=True + push_notifications=True ) handler = JSONRPCHandler(self.mock_agent_card, request_handler) @@ -772,13 +827,13 @@ async def test_on_get_push_notification_no_push_notifier(self) -> None: get_request = GetTaskPushNotificationConfigRequest( id='1', params=TaskIdParams(id=mock_task.id) ) - response = await handler.get_push_notification(get_request) + response = await handler.get_push_notification_config(get_request) # Assert self.assertIsInstance(response.root, JSONRPCErrorResponse) self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore - async def test_on_set_push_notification_no_push_notifier(self) -> None: + async def test_on_set_push_notification_no_push_config_store(self) -> None: """Test set_push_notification with no push notifier configured.""" # Arrange mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -788,7 +843,7 @@ async def test_on_set_push_notification_no_push_notifier(self) -> None: mock_agent_executor, mock_task_store ) self.mock_agent_card.capabilities = AgentCapabilities( - pushNotifications=True + push_notifications=True ) handler = JSONRPCHandler(self.mock_agent_card, request_handler) @@ -797,15 +852,15 @@ async def test_on_set_push_notification_no_push_notifier(self) -> None: # Act task_push_config = TaskPushNotificationConfig( - taskId=mock_task.id, - pushNotificationConfig=PushNotificationConfig( + task_id=mock_task.id, + push_notification_config=PushNotificationConfig( 
url='http://example.com' ), ) request = SetTaskPushNotificationConfigRequest( id='1', params=task_push_config ) - response = await handler.set_push_notification(request) + response = await handler.set_push_notification_config(request) # Assert self.assertIsInstance(response.root, JSONRPCErrorResponse) @@ -822,7 +877,7 @@ async def test_on_message_send_internal_error(self) -> None: handler = JSONRPCHandler(self.mock_agent_card, request_handler) # Make the request handler raise an Internal error without specifying an error type - async def raise_server_error(*args, **kwargs): + async def raise_server_error(*args, **kwargs) -> NoReturn: raise ServerError(InternalError(message='Internal Error')) # Patch the method to raise an error @@ -884,7 +939,8 @@ async def test_default_request_handler_with_custom_components(self) -> None: mock_agent_executor = AsyncMock(spec=AgentExecutor) mock_task_store = AsyncMock(spec=TaskStore) mock_queue_manager = AsyncMock(spec=QueueManager) - mock_push_notifier = AsyncMock(spec=PushNotifier) + mock_push_config_store = AsyncMock(spec=PushNotificationConfigStore) + mock_push_sender = AsyncMock(spec=PushNotificationSender) mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) # Act @@ -892,7 +948,8 @@ async def test_default_request_handler_with_custom_components(self) -> None: agent_executor=mock_agent_executor, task_store=mock_task_store, queue_manager=mock_queue_manager, - push_notifier=mock_push_notifier, + push_config_store=mock_push_config_store, + push_sender=mock_push_sender, request_context_builder=mock_request_context_builder, ) @@ -900,7 +957,8 @@ async def test_default_request_handler_with_custom_components(self) -> None: self.assertEqual(handler.agent_executor, mock_agent_executor) self.assertEqual(handler.task_store, mock_task_store) self.assertEqual(handler._queue_manager, mock_queue_manager) - self.assertEqual(handler._push_notifier, mock_push_notifier) + self.assertEqual(handler._push_config_store, 
mock_push_config_store) + self.assertEqual(handler._push_sender, mock_push_sender) self.assertEqual( handler._request_context_builder, mock_request_context_builder ) @@ -920,7 +978,7 @@ async def test_on_message_send_error_handling(self) -> None: mock_task_store.get.return_value = mock_task # Set up consume_and_break_on_interrupt to raise ServerError - async def consume_raises_error(*args, **kwargs): + async def consume_raises_error(*args, **kwargs) -> NoReturn: raise ServerError(error=UnsupportedOperationError()) with patch( @@ -933,8 +991,8 @@ async def consume_raises_error(*args, **kwargs): params=MessageSendParams( message=Message( **MESSAGE_PAYLOAD, - taskId=mock_task.id, - contextId=mock_task.contextId, + task_id=mock_task.id, + context_id=mock_task.context_id, ) ), ) @@ -943,7 +1001,7 @@ async def consume_raises_error(*args, **kwargs): # Assert self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, UnsupportedOperationError()) + self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore async def test_on_message_send_task_id_mismatch(self) -> None: mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -1007,3 +1065,315 @@ async def streaming_coro(): collected_events[0].root, JSONRPCErrorResponse ) self.assertIsInstance(collected_events[0].root.error, InternalError) + + async def test_on_get_push_notification(self) -> None: + """Test get_push_notification_config handling""" + mock_task_store = AsyncMock(spec=TaskStore) + + mock_task = Task(**MINIMAL_TASK) + mock_task_store.get.return_value = mock_task + + # Create request handler without a push notifier + request_handler = AsyncMock(spec=DefaultRequestHandler) + task_push_config = TaskPushNotificationConfig( + task_id=mock_task.id, + push_notification_config=PushNotificationConfig( + id='config1', url='http://example.com' + ), + ) + request_handler.on_get_task_push_notification_config.return_value = ( + task_push_config + ) + + 
self.mock_agent_card.capabilities = AgentCapabilities( + push_notifications=True + ) + handler = JSONRPCHandler(self.mock_agent_card, request_handler) + list_request = GetTaskPushNotificationConfigRequest( + id='1', + params=GetTaskPushNotificationConfigParams( + id=mock_task.id, push_notification_config_id='config1' + ), + ) + response = await handler.get_push_notification_config(list_request) + # Assert + self.assertIsInstance( + response.root, GetTaskPushNotificationConfigSuccessResponse + ) + self.assertEqual(response.root.result, task_push_config) # type: ignore + + async def test_on_list_push_notification(self) -> None: + """Test list_push_notification_config handling""" + mock_task_store = AsyncMock(spec=TaskStore) + + mock_task = Task(**MINIMAL_TASK) + mock_task_store.get.return_value = mock_task + + # Create request handler without a push notifier + request_handler = AsyncMock(spec=DefaultRequestHandler) + task_push_config = TaskPushNotificationConfig( + task_id=mock_task.id, + push_notification_config=PushNotificationConfig( + url='http://example.com' + ), + ) + request_handler.on_list_task_push_notification_config.return_value = [ + task_push_config + ] + + self.mock_agent_card.capabilities = AgentCapabilities( + push_notifications=True + ) + handler = JSONRPCHandler(self.mock_agent_card, request_handler) + list_request = ListTaskPushNotificationConfigRequest( + id='1', params=ListTaskPushNotificationConfigParams(id=mock_task.id) + ) + response = await handler.list_push_notification_config(list_request) + # Assert + self.assertIsInstance( + response.root, ListTaskPushNotificationConfigSuccessResponse + ) + self.assertEqual(response.root.result, [task_push_config]) # type: ignore + + async def test_on_list_push_notification_error(self) -> None: + """Test list_push_notification_config handling""" + mock_task_store = AsyncMock(spec=TaskStore) + + mock_task = Task(**MINIMAL_TASK) + mock_task_store.get.return_value = mock_task + + # Create request handler 
without a push notifier + request_handler = AsyncMock(spec=DefaultRequestHandler) + _ = TaskPushNotificationConfig( + task_id=mock_task.id, + push_notification_config=PushNotificationConfig( + url='http://example.com' + ), + ) + # throw server error + request_handler.on_list_task_push_notification_config.side_effect = ( + ServerError(InternalError()) + ) + + self.mock_agent_card.capabilities = AgentCapabilities( + push_notifications=True + ) + handler = JSONRPCHandler(self.mock_agent_card, request_handler) + list_request = ListTaskPushNotificationConfigRequest( + id='1', params=ListTaskPushNotificationConfigParams(id=mock_task.id) + ) + response = await handler.list_push_notification_config(list_request) + # Assert + self.assertIsInstance(response.root, JSONRPCErrorResponse) + self.assertEqual(response.root.error, InternalError()) # type: ignore + + async def test_on_delete_push_notification(self) -> None: + """Test delete_push_notification_config handling""" + + # Create request handler without a push notifier + request_handler = AsyncMock(spec=DefaultRequestHandler) + request_handler.on_delete_task_push_notification_config.return_value = ( + None + ) + + self.mock_agent_card.capabilities = AgentCapabilities( + push_notifications=True + ) + handler = JSONRPCHandler(self.mock_agent_card, request_handler) + delete_request = DeleteTaskPushNotificationConfigRequest( + id='1', + params=DeleteTaskPushNotificationConfigParams( + id='task1', push_notification_config_id='config1' + ), + ) + response = await handler.delete_push_notification_config(delete_request) + # Assert + self.assertIsInstance( + response.root, DeleteTaskPushNotificationConfigSuccessResponse + ) + self.assertEqual(response.root.result, None) # type: ignore + + async def test_on_delete_push_notification_error(self) -> None: + """Test delete_push_notification_config error handling""" + + # Create request handler without a push notifier + request_handler = AsyncMock(spec=DefaultRequestHandler) + # throw 
server error + request_handler.on_delete_task_push_notification_config.side_effect = ( + ServerError(UnsupportedOperationError()) + ) + + self.mock_agent_card.capabilities = AgentCapabilities( + push_notifications=True + ) + handler = JSONRPCHandler(self.mock_agent_card, request_handler) + delete_request = DeleteTaskPushNotificationConfigRequest( + id='1', + params=DeleteTaskPushNotificationConfigParams( + id='task1', push_notification_config_id='config1' + ), + ) + response = await handler.delete_push_notification_config(delete_request) + # Assert + self.assertIsInstance(response.root, JSONRPCErrorResponse) + self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore + + async def test_get_authenticated_extended_card_success(self) -> None: + """Test successful retrieval of the authenticated extended agent card.""" + # Arrange + mock_request_handler = AsyncMock(spec=DefaultRequestHandler) + mock_extended_card = AgentCard( + name='Extended Card', + description='More details', + url='http://agent.example.com/api', + version='1.1', + capabilities=AgentCapabilities(), + default_input_modes=['text/plain'], + default_output_modes=['application/json'], + skills=[], + ) + handler = JSONRPCHandler( + self.mock_agent_card, + mock_request_handler, + extended_agent_card=mock_extended_card, + extended_card_modifier=None, + ) + request = GetAuthenticatedExtendedCardRequest(id='ext-card-req-1') + call_context = ServerCallContext(state={'foo': 'bar'}) + + # Act + response: GetAuthenticatedExtendedCardResponse = ( + await handler.get_authenticated_extended_card(request, call_context) + ) + + # Assert + self.assertIsInstance( + response.root, GetAuthenticatedExtendedCardSuccessResponse + ) + self.assertEqual(response.root.id, 'ext-card-req-1') + self.assertEqual(response.root.result, mock_extended_card) + + async def test_get_authenticated_extended_card_not_configured(self) -> None: + """Test error when authenticated extended agent card is not configured.""" + 
# Arrange + mock_request_handler = AsyncMock(spec=DefaultRequestHandler) + self.mock_agent_card.supports_extended_card = True + handler = JSONRPCHandler( + self.mock_agent_card, + mock_request_handler, + extended_agent_card=None, + extended_card_modifier=None, + ) + request = GetAuthenticatedExtendedCardRequest(id='ext-card-req-2') + call_context = ServerCallContext(state={'foo': 'bar'}) + + # Act + response: GetAuthenticatedExtendedCardResponse = ( + await handler.get_authenticated_extended_card(request, call_context) + ) + + # Assert + # Authenticated Extended Card flag is set with no extended card, + # returns base card in this case. + self.assertIsInstance( + response.root, GetAuthenticatedExtendedCardSuccessResponse + ) + self.assertEqual(response.root.id, 'ext-card-req-2') + + async def test_get_authenticated_extended_card_with_modifier(self) -> None: + """Test successful retrieval of a dynamically modified extended agent card.""" + # Arrange + mock_request_handler = AsyncMock(spec=DefaultRequestHandler) + mock_base_card = AgentCard( + name='Base Card', + description='Base details', + url='http://agent.example.com/api', + version='1.0', + capabilities=AgentCapabilities(), + default_input_modes=['text/plain'], + default_output_modes=['application/json'], + skills=[], + ) + + async def modifier( + card: AgentCard, context: ServerCallContext + ) -> AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.name = 'Modified Card' + modified_card.description = ( + f'Modified for context: {context.state.get("foo")}' + ) + return modified_card + + handler = JSONRPCHandler( + self.mock_agent_card, + mock_request_handler, + extended_agent_card=mock_base_card, + extended_card_modifier=modifier, + ) + request = GetAuthenticatedExtendedCardRequest(id='ext-card-req-mod') + call_context = ServerCallContext(state={'foo': 'bar'}) + + # Act + response: GetAuthenticatedExtendedCardResponse = ( + await handler.get_authenticated_extended_card(request, call_context) 
+ ) + + # Assert + self.assertIsInstance( + response.root, GetAuthenticatedExtendedCardSuccessResponse + ) + self.assertEqual(response.root.id, 'ext-card-req-mod') + modified_card = response.root.result + self.assertEqual(modified_card.name, 'Modified Card') + self.assertEqual(modified_card.description, 'Modified for context: bar') + self.assertEqual(modified_card.version, '1.0') + + async def test_get_authenticated_extended_card_with_modifier_sync( + self, + ) -> None: + """Test successful retrieval of a synchronously dynamically modified extended agent card.""" + # Arrange + mock_request_handler = AsyncMock(spec=DefaultRequestHandler) + mock_base_card = AgentCard( + name='Base Card', + description='Base details', + url='http://agent.example.com/api', + version='1.0', + capabilities=AgentCapabilities(), + default_input_modes=['text/plain'], + default_output_modes=['application/json'], + skills=[], + ) + + def modifier(card: AgentCard, context: ServerCallContext) -> AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.name = 'Modified Card' + modified_card.description = ( + f'Modified for context: {context.state.get("foo")}' + ) + return modified_card + + handler = JSONRPCHandler( + self.mock_agent_card, + mock_request_handler, + extended_agent_card=mock_base_card, + extended_card_modifier=modifier, + ) + request = GetAuthenticatedExtendedCardRequest(id='ext-card-req-mod') + call_context = ServerCallContext(state={'foo': 'bar'}) + + # Act + response: GetAuthenticatedExtendedCardResponse = ( + await handler.get_authenticated_extended_card(request, call_context) + ) + + # Assert + self.assertIsInstance( + response.root, GetAuthenticatedExtendedCardSuccessResponse + ) + self.assertEqual(response.root.id, 'ext-card-req-mod') + modified_card = response.root.result + self.assertEqual(modified_card.name, 'Modified Card') + self.assertEqual(modified_card.description, 'Modified for context: bar') + self.assertEqual(modified_card.version, '1.0') diff --git 
import unittest

from unittest.mock import patch

from a2a.server.request_handlers.response_helpers import (
    build_error_response,
    prepare_response_object,
)
from a2a.types import (
    A2AError,
    GetTaskResponse,
    GetTaskSuccessResponse,
    InvalidAgentResponseError,
    InvalidParamsError,
    JSONRPCError,
    JSONRPCErrorResponse,
    Task,
    TaskNotFoundError,
    TaskState,
    TaskStatus,
)


class TestResponseHelpers(unittest.TestCase):
    """Unit tests for ``build_error_response`` and ``prepare_response_object``.

    Covers error unwrapping (A2AError vs. bare JSON-RPC errors), request-id
    propagation (str / int / None), the success path, and the fallback that
    converts an unexpected response type into an InvalidAgentResponseError.
    """

    # --- build_error_response -------------------------------------------

    def test_build_error_response_with_a2a_error(self) -> None:
        rid = 'req1'
        inner = TaskNotFoundError()
        wrapped = build_error_response(
            rid, A2AError(root=inner), GetTaskResponse
        )
        self.assertIsInstance(wrapped, GetTaskResponse)
        self.assertIsInstance(wrapped.root, JSONRPCErrorResponse)
        self.assertEqual(wrapped.root.id, rid)
        # The helper unwraps the A2AError down to the specific error model.
        self.assertEqual(wrapped.root.error, inner)

    def test_build_error_response_with_jsonrpc_error(self) -> None:
        rid = 123
        # A specific JSON-RPC error model passed directly (no A2AError wrapper).
        bare_error = InvalidParamsError(message='Custom invalid params')
        wrapped = build_error_response(rid, bare_error, GetTaskResponse)
        self.assertIsInstance(wrapped, GetTaskResponse)
        self.assertIsInstance(wrapped.root, JSONRPCErrorResponse)
        self.assertEqual(wrapped.root.id, rid)
        # No unwrapping needed: the bare error is stored as-is.
        self.assertEqual(wrapped.root.error, bare_error)

    def test_build_error_response_with_a2a_wrapping_jsonrpc_error(self) -> None:
        rid = 'req_wrap'
        inner = InvalidParamsError(message='Detail error')
        wrapped = build_error_response(
            rid, A2AError(root=inner), GetTaskResponse
        )
        self.assertIsInstance(wrapped, GetTaskResponse)
        self.assertIsInstance(wrapped.root, JSONRPCErrorResponse)
        self.assertEqual(wrapped.root.id, rid)
        self.assertEqual(wrapped.root.error, inner)

    def _build_task_error_response(self, rid) -> GetTaskResponse:
        # Shared driver for the request-id propagation tests below.
        return build_error_response(
            rid, A2AError(root=TaskNotFoundError()), GetTaskResponse
        )

    def test_build_error_response_with_request_id_string(self) -> None:
        rid = 'string_id_test'
        wrapped = self._build_task_error_response(rid)
        self.assertIsInstance(wrapped.root, JSONRPCErrorResponse)
        self.assertEqual(wrapped.root.id, rid)

    def test_build_error_response_with_request_id_int(self) -> None:
        rid = 456
        wrapped = self._build_task_error_response(rid)
        self.assertIsInstance(wrapped.root, JSONRPCErrorResponse)
        self.assertEqual(wrapped.root.id, rid)

    def test_build_error_response_with_request_id_none(self) -> None:
        wrapped = self._build_task_error_response(None)
        self.assertIsInstance(wrapped.root, JSONRPCErrorResponse)
        self.assertIsNone(wrapped.root.id)

    # --- prepare_response_object ----------------------------------------

    def _create_sample_task(
        self, task_id: str = 'task123', context_id: str = 'ctx456'
    ) -> Task:
        # Minimal Task in the 'submitted' state, used as a success payload.
        return Task(
            id=task_id,
            context_id=context_id,
            status=TaskStatus(state=TaskState.submitted),
            history=[],
        )

    def _prepare_task_success(self, rid, payload) -> GetTaskResponse:
        # Shared driver: wrap a Task payload into a GetTaskResponse.
        return prepare_response_object(
            request_id=rid,
            response=payload,
            success_response_types=(Task,),
            success_payload_type=GetTaskSuccessResponse,
            response_type=GetTaskResponse,
        )

    def test_prepare_response_object_successful_response(self) -> None:
        rid = 'req_success'
        task = self._create_sample_task()
        wrapped = self._prepare_task_success(rid, task)
        self.assertIsInstance(wrapped, GetTaskResponse)
        self.assertIsInstance(wrapped.root, GetTaskSuccessResponse)
        self.assertEqual(wrapped.root.id, rid)
        self.assertEqual(wrapped.root.result, task)

    @patch('a2a.server.request_handlers.response_helpers.build_error_response')
    def test_prepare_response_object_with_a2a_error_instance(
        self, mock_build_error
    ) -> None:
        rid = 'req_a2a_err'
        inner = TaskNotFoundError()
        a2a_error = A2AError(root=inner)

        # Stub what build_error_response would produce for this error.
        stubbed = GetTaskResponse(
            root=JSONRPCErrorResponse(id=rid, error=inner, jsonrpc='2.0')
        )
        mock_build_error.return_value = stubbed

        wrapped = self._prepare_task_success(rid, a2a_error)

        # A2AError payloads must be routed straight to build_error_response.
        mock_build_error.assert_called_once_with(rid, a2a_error, GetTaskResponse)
        self.assertEqual(wrapped, stubbed)

    @patch('a2a.server.request_handlers.response_helpers.build_error_response')
    def test_prepare_response_object_with_jsonrpcerror_base_instance(
        self, mock_build_error
    ) -> None:
        rid = 789
        # Base JSONRPCError class (not a specific subclass/model).
        base_error = JSONRPCError(code=-32000, message='Generic JSONRPC error')

        stubbed = GetTaskResponse(
            root=JSONRPCErrorResponse(id=rid, error=base_error, jsonrpc='2.0')
        )
        mock_build_error.return_value = stubbed

        wrapped = self._prepare_task_success(rid, base_error)

        # Bare JSONRPCError payloads also go through build_error_response.
        mock_build_error.assert_called_once_with(rid, base_error, GetTaskResponse)
        self.assertEqual(wrapped, stubbed)

    @patch('a2a.server.request_handlers.response_helpers.build_error_response')
    def test_prepare_response_object_specific_error_model_as_unexpected(
        self, mock_build_error
    ) -> None:
        rid = 'req_specific_unexpected'
        # A specific error model passed directly (not wrapped in A2AError) is
        # not a recognized success or error type, so prepare_response_object
        # should synthesize an InvalidAgentResponseError for it.
        direct_error = TaskNotFoundError()

        synthesized = A2AError(
            root=InvalidAgentResponseError(
                message='Agent returned invalid type response for this method'
            )
        )
        stubbed = GetTaskResponse(
            root=JSONRPCErrorResponse(
                id=rid, error=synthesized.root, jsonrpc='2.0'
            )
        )
        mock_build_error.return_value = stubbed

        wrapped = self._prepare_task_success(rid, direct_error)

        self.assertEqual(mock_build_error.call_count, 1)
        args, _ = mock_build_error.call_args
        self.assertEqual(args[0], rid)
        # The error forwarded must be the generated A2AError wrapping an
        # InvalidAgentResponseError, not the original TaskNotFoundError.
        self.assertIsInstance(args[1], A2AError)
        self.assertIsInstance(args[1].root, InvalidAgentResponseError)
        self.assertEqual(args[2], GetTaskResponse)
        self.assertEqual(wrapped, stubbed)

    def test_prepare_response_object_with_request_id_string(self) -> None:
        rid = 'string_id_prep'
        wrapped = self._prepare_task_success(rid, self._create_sample_task())
        self.assertIsInstance(wrapped.root, GetTaskSuccessResponse)
        self.assertEqual(wrapped.root.id, rid)

    def test_prepare_response_object_with_request_id_int(self) -> None:
        rid = 101112
        wrapped = self._prepare_task_success(rid, self._create_sample_task())
        self.assertIsInstance(wrapped.root, GetTaskSuccessResponse)
        self.assertEqual(wrapped.root.id, rid)

    def test_prepare_response_object_with_request_id_none(self) -> None:
        wrapped = self._prepare_task_success(None, self._create_sample_task())
        self.assertIsInstance(wrapped.root, GetTaskSuccessResponse)
        self.assertIsNone(wrapped.root.id)


if __name__ == '__main__':
    unittest.main()
a/tests/server/tasks/test_database_push_notification_config_store.py b/tests/server/tasks/test_database_push_notification_config_store.py new file mode 100644 index 000000000..0c3bd4683 --- /dev/null +++ b/tests/server/tasks/test_database_push_notification_config_store.py @@ -0,0 +1,565 @@ +import os + +from collections.abc import AsyncGenerator + +import pytest + + +# Skip entire test module if SQLAlchemy is not installed +pytest.importorskip('sqlalchemy', reason='Database tests require SQLAlchemy') +pytest.importorskip( + 'cryptography', + reason='Database tests require Cryptography. Install extra encryption', +) + +import pytest_asyncio + +from _pytest.mark.structures import ParameterSet + +# Now safe to import SQLAlchemy-dependent modules +from cryptography.fernet import Fernet +from sqlalchemy import select +from sqlalchemy.ext.asyncio import ( + async_sessionmaker, + create_async_engine, +) +from sqlalchemy.inspection import inspect + +from a2a.server.models import ( + Base, + PushNotificationConfigModel, +) # Important: To get Base.metadata +from a2a.server.tasks import DatabasePushNotificationConfigStore +from a2a.types import ( + PushNotificationConfig, + Task, + TaskState, + TaskStatus, +) + + +# DSNs for different databases +SQLITE_TEST_DSN = ( + 'sqlite+aiosqlite:///file:testdb?mode=memory&cache=shared&uri=true' +) +POSTGRES_TEST_DSN = os.environ.get( + 'POSTGRES_TEST_DSN' +) # e.g., "postgresql+asyncpg://user:pass@host:port/dbname" +MYSQL_TEST_DSN = os.environ.get( + 'MYSQL_TEST_DSN' +) # e.g., "mysql+aiomysql://user:pass@host:port/dbname" + +# Parameterization for the db_store fixture +DB_CONFIGS: list[ParameterSet | tuple[str | None, str]] = [ + pytest.param((SQLITE_TEST_DSN, 'sqlite'), id='sqlite') +] + +if POSTGRES_TEST_DSN: + DB_CONFIGS.append( + pytest.param((POSTGRES_TEST_DSN, 'postgresql'), id='postgresql') + ) +else: + DB_CONFIGS.append( + pytest.param( + (None, 'postgresql'), + marks=pytest.mark.skip(reason='POSTGRES_TEST_DSN not set'), + 
id='postgresql_skipped', + ) + ) + +if MYSQL_TEST_DSN: + DB_CONFIGS.append(pytest.param((MYSQL_TEST_DSN, 'mysql'), id='mysql')) +else: + DB_CONFIGS.append( + pytest.param( + (None, 'mysql'), + marks=pytest.mark.skip(reason='MYSQL_TEST_DSN not set'), + id='mysql_skipped', + ) + ) + + +# Minimal Task object for testing - remains the same +task_status_submitted = TaskStatus( + state=TaskState.submitted, timestamp='2023-01-01T00:00:00Z' +) +MINIMAL_TASK_OBJ = Task( + id='task-abc', + context_id='session-xyz', + status=task_status_submitted, + kind='task', + metadata={'test_key': 'test_value'}, + artifacts=[], + history=[], +) + + +@pytest_asyncio.fixture(params=DB_CONFIGS) +async def db_store_parameterized( + request, +) -> AsyncGenerator[DatabasePushNotificationConfigStore, None]: + """ + Fixture that provides a DatabaseTaskStore connected to different databases + based on parameterization (SQLite, PostgreSQL, MySQL). + """ + db_url, dialect_name = request.param + + if db_url is None: + pytest.skip(f'DSN for {dialect_name} not set in environment variables.') + + engine = create_async_engine(db_url) + store = None # Initialize store to None for the finally block + + try: + # Create tables + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + # create_table=False as we've explicitly created tables above. + store = DatabasePushNotificationConfigStore( + engine=engine, + create_table=False, + encryption_key=Fernet.generate_key(), + ) + # Initialize the store (connects, etc.). Safe to call even if tables exist. 
+ await store.initialize() + + yield store + + finally: + if engine: # If engine was created for setup/teardown + # Drop tables using the fixture's engine + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() # Dispose the engine created in the fixture + + +@pytest.mark.asyncio +async def test_initialize_creates_table( + db_store_parameterized: DatabasePushNotificationConfigStore, +) -> None: + """Test that tables are created (implicitly by fixture setup).""" + # Ensure store is initialized (already done by fixture, but good for clarity) + await db_store_parameterized._ensure_initialized() + + # Use the store's engine for inspection + async with db_store_parameterized.engine.connect() as conn: + + def has_table_sync(sync_conn): + inspector = inspect(sync_conn) + return inspector.has_table( + PushNotificationConfigModel.__tablename__ + ) + + assert await conn.run_sync(has_table_sync) + + +@pytest.mark.asyncio +async def test_initialize_is_idempotent( + db_store_parameterized: DatabasePushNotificationConfigStore, +) -> None: + """Test that tables are created (implicitly by fixture setup).""" + # Ensure store is initialized (already done by fixture, but good for clarity) + await db_store_parameterized.initialize() + # Call initialize again to check idempotency + await db_store_parameterized.initialize() + + +@pytest.mark.asyncio +async def test_set_and_get_info_single_config( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test setting and retrieving a single configuration.""" + task_id = 'task-1' + config = PushNotificationConfig(id='config-1', url='http://example.com') + + await db_store_parameterized.set_info(task_id, config) + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert len(retrieved_configs) == 1 + assert retrieved_configs[0] == config + + +@pytest.mark.asyncio +async def test_set_and_get_info_multiple_configs( + db_store_parameterized: 
DatabasePushNotificationConfigStore, +): + """Test setting and retrieving multiple configurations for a single task.""" + + task_id = 'task-1' + config1 = PushNotificationConfig(id='config-1', url='http://example.com/1') + config2 = PushNotificationConfig(id='config-2', url='http://example.com/2') + + await db_store_parameterized.set_info(task_id, config1) + await db_store_parameterized.set_info(task_id, config2) + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert len(retrieved_configs) == 2 + assert config1 in retrieved_configs + assert config2 in retrieved_configs + + +@pytest.mark.asyncio +async def test_set_info_updates_existing_config( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that setting an existing config ID updates the record.""" + task_id = 'task-1' + config_id = 'config-1' + initial_config = PushNotificationConfig( + id=config_id, url='http://initial.url' + ) + updated_config = PushNotificationConfig( + id=config_id, url='http://updated.url' + ) + + await db_store_parameterized.set_info(task_id, initial_config) + await db_store_parameterized.set_info(task_id, updated_config) + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert len(retrieved_configs) == 1 + assert retrieved_configs[0].url == 'http://updated.url' + + +@pytest.mark.asyncio +async def test_set_info_defaults_config_id_to_task_id( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that config.id defaults to task_id if not provided.""" + task_id = 'task-1' + config = PushNotificationConfig(url='http://example.com') # id is None + + await db_store_parameterized.set_info(task_id, config) + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert len(retrieved_configs) == 1 + assert retrieved_configs[0].id == task_id + + +@pytest.mark.asyncio +async def test_get_info_not_found( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test getting 
info for a task with no configs returns an empty list.""" + retrieved_configs = await db_store_parameterized.get_info( + 'non-existent-task' + ) + assert retrieved_configs == [] + + +@pytest.mark.asyncio +async def test_delete_info_specific_config( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test deleting a single, specific configuration.""" + task_id = 'task-1' + config1 = PushNotificationConfig(id='config-1', url='http://a.com') + config2 = PushNotificationConfig(id='config-2', url='http://b.com') + + await db_store_parameterized.set_info(task_id, config1) + await db_store_parameterized.set_info(task_id, config2) + + await db_store_parameterized.delete_info(task_id, 'config-1') + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert len(retrieved_configs) == 1 + assert retrieved_configs[0] == config2 + + +@pytest.mark.asyncio +async def test_delete_info_all_for_task( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test deleting all configurations for a task when config_id is None.""" + + task_id = 'task-1' + config1 = PushNotificationConfig(id='config-1', url='http://a.com') + config2 = PushNotificationConfig(id='config-2', url='http://b.com') + + await db_store_parameterized.set_info(task_id, config1) + await db_store_parameterized.set_info(task_id, config2) + + await db_store_parameterized.delete_info(task_id, None) + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert retrieved_configs == [] + + +@pytest.mark.asyncio +async def test_delete_info_not_found( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that deleting a non-existent config does not raise an error.""" + # Should not raise + await db_store_parameterized.delete_info('task-1', 'non-existent-config') + + +@pytest.mark.asyncio +async def test_data_is_encrypted_in_db( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Verify that the data stored in the 
database is actually encrypted.""" + task_id = 'encrypted-task' + config = PushNotificationConfig( + id='config-1', url='http://secret.url', token='secret-token' + ) + plain_json = config.model_dump_json() + + await db_store_parameterized.set_info(task_id, config) + + # Directly query the database to inspect the raw data + async_session = async_sessionmaker( + db_store_parameterized.engine, expire_on_commit=False + ) + async with async_session() as session: + stmt = select(PushNotificationConfigModel).where( + PushNotificationConfigModel.task_id == task_id + ) + result = await session.execute(stmt) + db_model = result.scalar_one() + + assert db_model.config_data != plain_json.encode('utf-8') + + fernet = db_store_parameterized._fernet + + decrypted_data = fernet.decrypt(db_model.config_data) # type: ignore + assert decrypted_data.decode('utf-8') == plain_json + + +@pytest.mark.asyncio +async def test_decryption_error_with_wrong_key( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that using the wrong key to decrypt raises a ValueError.""" + # 1. Store with one key + + task_id = 'wrong-key-task' + config = PushNotificationConfig(id='config-1', url='http://secret.url') + await db_store_parameterized.set_info(task_id, config) + + # 2. 
Try to read with a different key + # Directly query the database to inspect the raw data + wrong_key = Fernet.generate_key() + store2 = DatabasePushNotificationConfigStore( + db_store_parameterized.engine, encryption_key=wrong_key + ) + + retrieved_configs = await store2.get_info(task_id) + assert retrieved_configs == [] + + # _from_orm should raise a ValueError + async_session = async_sessionmaker( + db_store_parameterized.engine, expire_on_commit=False + ) + async with async_session() as session: + db_model = await session.get( + PushNotificationConfigModel, (task_id, 'config-1') + ) + + with pytest.raises(ValueError): + store2._from_orm(db_model) # type: ignore + + +@pytest.mark.asyncio +async def test_decryption_error_with_no_key( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that using the wrong key to decrypt raises a ValueError.""" + # 1. Store with one key + + task_id = 'wrong-key-task' + config = PushNotificationConfig(id='config-1', url='http://secret.url') + await db_store_parameterized.set_info(task_id, config) + + # 2. 
Try to read with no key set + # Directly query the database to inspect the raw data + store2 = DatabasePushNotificationConfigStore(db_store_parameterized.engine) + + retrieved_configs = await store2.get_info(task_id) + assert retrieved_configs == [] + + # _from_orm should raise a ValueError + async_session = async_sessionmaker( + db_store_parameterized.engine, expire_on_commit=False + ) + async with async_session() as session: + db_model = await session.get( + PushNotificationConfigModel, (task_id, 'config-1') + ) + + with pytest.raises(ValueError): + store2._from_orm(db_model) # type: ignore + + +@pytest.mark.asyncio +async def test_custom_table_name( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that the store works correctly with a custom table name.""" + table_name = 'my_custom_push_configs' + engine = db_store_parameterized.engine + custom_store = None + try: + # Use a new store with a custom table name + custom_store = DatabasePushNotificationConfigStore( + engine=engine, + create_table=True, + table_name=table_name, + encryption_key=Fernet.generate_key(), + ) + + task_id = 'custom-table-task' + config = PushNotificationConfig(id='config-1', url='http://custom.url') + + # This will create the table on first use + await custom_store.set_info(task_id, config) + retrieved_configs = await custom_store.get_info(task_id) + + assert len(retrieved_configs) == 1 + assert retrieved_configs[0] == config + + # Verify the custom table exists and has data + async with custom_store.engine.connect() as conn: + + def has_table_sync(sync_conn): + inspector = inspect(sync_conn) + return inspector.has_table(table_name) + + assert await conn.run_sync(has_table_sync) + + result = await conn.execute( + select(custom_store.config_model).where( + custom_store.config_model.task_id == task_id + ) + ) + assert result.scalar_one_or_none() is not None + finally: + if custom_store: + # Clean up the dynamically created table from the metadata + # to prevent 
errors in subsequent parameterized test runs. + Base.metadata.remove(custom_store.config_model.__table__) # type: ignore + + +@pytest.mark.asyncio +async def test_set_and_get_info_multiple_configs_no_key( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test setting and retrieving multiple configurations for a single task.""" + + store = DatabasePushNotificationConfigStore( + engine=db_store_parameterized.engine, + create_table=False, + encryption_key=None, # No encryption key + ) + await store.initialize() + + task_id = 'task-1' + config1 = PushNotificationConfig(id='config-1', url='http://example.com/1') + config2 = PushNotificationConfig(id='config-2', url='http://example.com/2') + + await store.set_info(task_id, config1) + await store.set_info(task_id, config2) + retrieved_configs = await store.get_info(task_id) + + assert len(retrieved_configs) == 2 + assert config1 in retrieved_configs + assert config2 in retrieved_configs + + +@pytest.mark.asyncio +async def test_data_is_not_encrypted_in_db_if_no_key_is_set( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test data is not encrypted when no encryption key is set.""" + + store = DatabasePushNotificationConfigStore( + engine=db_store_parameterized.engine, + create_table=False, + encryption_key=None, # No encryption key + ) + await store.initialize() + + task_id = 'task-1' + config = PushNotificationConfig(id='config-1', url='http://example.com/1') + plain_json = config.model_dump_json() + + await store.set_info(task_id, config) + + # Directly query the database to inspect the raw data + async_session = async_sessionmaker( + db_store_parameterized.engine, expire_on_commit=False + ) + async with async_session() as session: + stmt = select(PushNotificationConfigModel).where( + PushNotificationConfigModel.task_id == task_id + ) + result = await session.execute(stmt) + db_model = result.scalar_one() + + assert db_model.config_data == plain_json.encode('utf-8') + + 
+@pytest.mark.asyncio +async def test_decryption_fallback_for_unencrypted_data( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test reading unencrypted data with an encryption-enabled store.""" + # 1. Store unencrypted data using a new store instance without a key + unencrypted_store = DatabasePushNotificationConfigStore( + engine=db_store_parameterized.engine, + create_table=False, # Table already exists from fixture + encryption_key=None, + ) + await unencrypted_store.initialize() + + task_id = 'mixed-encryption-task' + config = PushNotificationConfig(id='config-1', url='http://plain.url') + await unencrypted_store.set_info(task_id, config) + + # 2. Try to read with the encryption-enabled store from the fixture + retrieved_configs = await db_store_parameterized.get_info(task_id) + + # Should fall back to parsing as plain JSON and not fail + assert len(retrieved_configs) == 1 + assert retrieved_configs[0] == config + + +@pytest.mark.asyncio +async def test_parsing_error_after_successful_decryption( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that a parsing error after successful decryption is handled.""" + + task_id = 'corrupted-data-task' + config_id = 'config-1' + + # 1. Encrypt data that is NOT valid JSON + fernet = Fernet(Fernet.generate_key()) + corrupted_payload = b'this is not valid json' + encrypted_data = fernet.encrypt(corrupted_payload) + + # 2. Manually insert this corrupted data into the DB + async_session = async_sessionmaker( + db_store_parameterized.engine, expire_on_commit=False + ) + async with async_session() as session: + db_model = PushNotificationConfigModel( + task_id=task_id, + config_id=config_id, + config_data=encrypted_data, + ) + session.add(db_model) + await session.commit() + + # 3. get_info should log an error and return an empty list + retrieved_configs = await db_store_parameterized.get_info(task_id) + assert retrieved_configs == [] + + # 4. 
_from_orm should raise a ValueError + async with async_session() as session: + db_model_retrieved = await session.get( + PushNotificationConfigModel, (task_id, config_id) + ) + + with pytest.raises(ValueError): + db_store_parameterized._from_orm(db_model_retrieved) # type: ignore diff --git a/tests/server/tasks/test_database_task_store.py b/tests/server/tasks/test_database_task_store.py new file mode 100644 index 000000000..87069be46 --- /dev/null +++ b/tests/server/tasks/test_database_task_store.py @@ -0,0 +1,400 @@ +import os + +from collections.abc import AsyncGenerator + +import pytest +import pytest_asyncio + +from _pytest.mark.structures import ParameterSet + + +# Skip entire test module if SQLAlchemy is not installed +pytest.importorskip('sqlalchemy', reason='Database tests require SQLAlchemy') + +# Now safe to import SQLAlchemy-dependent modules +from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.inspection import inspect + +from a2a.server.models import Base, TaskModel # Important: To get Base.metadata +from a2a.server.tasks.database_task_store import DatabaseTaskStore +from a2a.types import ( + Artifact, + Message, + Part, + Role, + Task, + TaskState, + TaskStatus, + TextPart, +) + + +# DSNs for different databases +SQLITE_TEST_DSN = ( + 'sqlite+aiosqlite:///file:testdb?mode=memory&cache=shared&uri=true' +) +POSTGRES_TEST_DSN = os.environ.get( + 'POSTGRES_TEST_DSN' +) # e.g., "postgresql+asyncpg://user:pass@host:port/dbname" +MYSQL_TEST_DSN = os.environ.get( + 'MYSQL_TEST_DSN' +) # e.g., "mysql+aiomysql://user:pass@host:port/dbname" + +# Parameterization for the db_store fixture +DB_CONFIGS: list[ParameterSet | tuple[str | None, str]] = [ + pytest.param((SQLITE_TEST_DSN, 'sqlite'), id='sqlite') +] + +if POSTGRES_TEST_DSN: + DB_CONFIGS.append( + pytest.param((POSTGRES_TEST_DSN, 'postgresql'), id='postgresql') + ) +else: + DB_CONFIGS.append( + pytest.param( + (None, 'postgresql'), + marks=pytest.mark.skip(reason='POSTGRES_TEST_DSN not 
set'), + id='postgresql_skipped', + ) + ) + +if MYSQL_TEST_DSN: + DB_CONFIGS.append(pytest.param((MYSQL_TEST_DSN, 'mysql'), id='mysql')) +else: + DB_CONFIGS.append( + pytest.param( + (None, 'mysql'), + marks=pytest.mark.skip(reason='MYSQL_TEST_DSN not set'), + id='mysql_skipped', + ) + ) + + +# Minimal Task object for testing - remains the same +task_status_submitted = TaskStatus( + state=TaskState.submitted, timestamp='2023-01-01T00:00:00Z' +) +MINIMAL_TASK_OBJ = Task( + id='task-abc', + context_id='session-xyz', + status=task_status_submitted, + kind='task', + metadata={'test_key': 'test_value'}, + artifacts=[], + history=[], +) + + +@pytest_asyncio.fixture(params=DB_CONFIGS) +async def db_store_parameterized( + request, +) -> AsyncGenerator[DatabaseTaskStore, None]: + """ + Fixture that provides a DatabaseTaskStore connected to different databases + based on parameterization (SQLite, PostgreSQL, MySQL). + """ + db_url, dialect_name = request.param + + if db_url is None: + pytest.skip(f'DSN for {dialect_name} not set in environment variables.') + + engine = create_async_engine(db_url) + store = None # Initialize store to None for the finally block + + try: + # Create tables + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + # create_table=False as we've explicitly created tables above. + store = DatabaseTaskStore(engine=engine, create_table=False) + # Initialize the store (connects, etc.). Safe to call even if tables exist. 
+ await store.initialize() + + yield store + + finally: + if engine: # If engine was created for setup/teardown + # Drop tables using the fixture's engine + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() # Dispose the engine created in the fixture + + +@pytest.mark.asyncio +async def test_initialize_creates_table( + db_store_parameterized: DatabaseTaskStore, +) -> None: + """Test that tables are created (implicitly by fixture setup).""" + # Ensure store is initialized (already done by fixture, but good for clarity) + await db_store_parameterized._ensure_initialized() + + # Use the store's engine for inspection + async with db_store_parameterized.engine.connect() as conn: + + def has_table_sync(sync_conn): + inspector = inspect(sync_conn) + return inspector.has_table(TaskModel.__tablename__) + + assert await conn.run_sync(has_table_sync) + + +@pytest.mark.asyncio +async def test_save_task(db_store_parameterized: DatabaseTaskStore) -> None: + """Test saving a task to the DatabaseTaskStore.""" + task_to_save = MINIMAL_TASK_OBJ.model_copy(deep=True) + # Ensure unique ID for parameterized tests if needed, or rely on table isolation + task_to_save.id = ( + f'save-task-{db_store_parameterized.engine.url.drivername}' + ) + await db_store_parameterized.save(task_to_save) + + retrieved_task = await db_store_parameterized.get(task_to_save.id) + assert retrieved_task is not None + assert retrieved_task.id == task_to_save.id + assert retrieved_task.model_dump() == task_to_save.model_dump() + await db_store_parameterized.delete(task_to_save.id) # Cleanup + + +@pytest.mark.asyncio +async def test_get_task(db_store_parameterized: DatabaseTaskStore) -> None: + """Test retrieving a task from the DatabaseTaskStore.""" + task_id = f'get-test-task-{db_store_parameterized.engine.url.drivername}' + task_to_save = MINIMAL_TASK_OBJ.model_copy(update={'id': task_id}) + await db_store_parameterized.save(task_to_save) + + 
retrieved_task = await db_store_parameterized.get(task_to_save.id) + assert retrieved_task is not None + assert retrieved_task.id == task_to_save.id + assert retrieved_task.context_id == task_to_save.context_id + assert retrieved_task.status.state == TaskState.submitted + await db_store_parameterized.delete(task_to_save.id) # Cleanup + + +@pytest.mark.asyncio +async def test_get_nonexistent_task( + db_store_parameterized: DatabaseTaskStore, +) -> None: + """Test retrieving a nonexistent task.""" + retrieved_task = await db_store_parameterized.get('nonexistent-task-id') + assert retrieved_task is None + + +@pytest.mark.asyncio +async def test_delete_task(db_store_parameterized: DatabaseTaskStore) -> None: + """Test deleting a task from the DatabaseTaskStore.""" + task_id = f'delete-test-task-{db_store_parameterized.engine.url.drivername}' + task_to_save_and_delete = MINIMAL_TASK_OBJ.model_copy( + update={'id': task_id} + ) + await db_store_parameterized.save(task_to_save_and_delete) + + assert ( + await db_store_parameterized.get(task_to_save_and_delete.id) is not None + ) + await db_store_parameterized.delete(task_to_save_and_delete.id) + assert await db_store_parameterized.get(task_to_save_and_delete.id) is None + + +@pytest.mark.asyncio +async def test_delete_nonexistent_task( + db_store_parameterized: DatabaseTaskStore, +) -> None: + """Test deleting a nonexistent task. 
Should not error.""" + await db_store_parameterized.delete('nonexistent-delete-task-id') + + +@pytest.mark.asyncio +async def test_save_and_get_detailed_task( + db_store_parameterized: DatabaseTaskStore, +) -> None: + """Test saving and retrieving a task with more fields populated.""" + task_id = f'detailed-task-{db_store_parameterized.engine.url.drivername}' + test_task = Task( + id=task_id, + context_id='test-session-1', + status=TaskStatus( + state=TaskState.working, timestamp='2023-01-01T12:00:00Z' + ), + kind='task', + metadata={'key1': 'value1', 'key2': 123}, + artifacts=[ + Artifact( + artifact_id='artifact-1', + parts=[Part(root=TextPart(text='hello'))], + ) + ], + history=[ + Message( + message_id='msg-1', + role=Role.user, + parts=[Part(root=TextPart(text='user input'))], + ) + ], + ) + + await db_store_parameterized.save(test_task) + retrieved_task = await db_store_parameterized.get(test_task.id) + + assert retrieved_task is not None + assert retrieved_task.id == test_task.id + assert retrieved_task.context_id == test_task.context_id + assert retrieved_task.status.state == TaskState.working + assert retrieved_task.status.timestamp == '2023-01-01T12:00:00Z' + assert retrieved_task.metadata == {'key1': 'value1', 'key2': 123} + + # Pydantic models handle their own serialization for comparison if model_dump is used + assert ( + retrieved_task.model_dump()['artifacts'] + == test_task.model_dump()['artifacts'] + ) + assert ( + retrieved_task.model_dump()['history'] + == test_task.model_dump()['history'] + ) + + await db_store_parameterized.delete(test_task.id) + assert await db_store_parameterized.get(test_task.id) is None + + +@pytest.mark.asyncio +async def test_update_task(db_store_parameterized: DatabaseTaskStore) -> None: + """Test updating an existing task.""" + task_id = f'update-test-task-{db_store_parameterized.engine.url.drivername}' + original_task = Task( + id=task_id, + context_id='session-update', + status=TaskStatus( + state=TaskState.submitted, 
timestamp='2023-01-02T10:00:00Z' + ), + kind='task', + metadata=None, # Explicitly None + artifacts=[], + history=[], + ) + await db_store_parameterized.save(original_task) + + retrieved_before_update = await db_store_parameterized.get(task_id) + assert retrieved_before_update is not None + assert retrieved_before_update.status.state == TaskState.submitted + assert retrieved_before_update.metadata is None + + updated_task = original_task.model_copy(deep=True) + updated_task.status.state = TaskState.completed + updated_task.status.timestamp = '2023-01-02T11:00:00Z' + updated_task.metadata = {'update_key': 'update_value'} + + await db_store_parameterized.save(updated_task) + + retrieved_after_update = await db_store_parameterized.get(task_id) + assert retrieved_after_update is not None + assert retrieved_after_update.status.state == TaskState.completed + assert retrieved_after_update.metadata == {'update_key': 'update_value'} + + await db_store_parameterized.delete(task_id) + + +@pytest.mark.asyncio +async def test_metadata_field_mapping( + db_store_parameterized: DatabaseTaskStore, +) -> None: + """Test that metadata field is correctly mapped between Pydantic and SQLAlchemy. + + This test verifies: + 1. Metadata can be None + 2. Metadata can be a simple dict + 3. Metadata can contain nested structures + 4. Metadata is correctly saved and retrieved + 5. 
The mapping between task.metadata and task_metadata column works + """ + # Test 1: Task with no metadata (None) + task_no_metadata = Task( + id='task-metadata-test-1', + context_id='session-meta-1', + status=TaskStatus(state=TaskState.submitted), + kind='task', + metadata=None, + ) + await db_store_parameterized.save(task_no_metadata) + retrieved_no_metadata = await db_store_parameterized.get( + 'task-metadata-test-1' + ) + assert retrieved_no_metadata is not None + assert retrieved_no_metadata.metadata is None + + # Test 2: Task with simple metadata + simple_metadata = {'key': 'value', 'number': 42, 'boolean': True} + task_simple_metadata = Task( + id='task-metadata-test-2', + context_id='session-meta-2', + status=TaskStatus(state=TaskState.working), + kind='task', + metadata=simple_metadata, + ) + await db_store_parameterized.save(task_simple_metadata) + retrieved_simple = await db_store_parameterized.get('task-metadata-test-2') + assert retrieved_simple is not None + assert retrieved_simple.metadata == simple_metadata + + # Test 3: Task with complex nested metadata + complex_metadata = { + 'level1': { + 'level2': { + 'level3': ['a', 'b', 'c'], + 'numeric': 3.14159, + }, + 'array': [1, 2, {'nested': 'value'}], + }, + 'special_chars': 'Hello\nWorld\t!', + 'unicode': '🚀 Unicode test 你好', + 'null_value': None, + } + task_complex_metadata = Task( + id='task-metadata-test-3', + context_id='session-meta-3', + status=TaskStatus(state=TaskState.completed), + kind='task', + metadata=complex_metadata, + ) + await db_store_parameterized.save(task_complex_metadata) + retrieved_complex = await db_store_parameterized.get('task-metadata-test-3') + assert retrieved_complex is not None + assert retrieved_complex.metadata == complex_metadata + + # Test 4: Update metadata from None to dict + task_update_metadata = Task( + id='task-metadata-test-4', + context_id='session-meta-4', + status=TaskStatus(state=TaskState.submitted), + kind='task', + metadata=None, + ) + await 
db_store_parameterized.save(task_update_metadata) + + # Update metadata + task_update_metadata.metadata = {'updated': True, 'timestamp': '2024-01-01'} + await db_store_parameterized.save(task_update_metadata) + + retrieved_updated = await db_store_parameterized.get('task-metadata-test-4') + assert retrieved_updated is not None + assert retrieved_updated.metadata == { + 'updated': True, + 'timestamp': '2024-01-01', + } + + # Test 5: Update metadata from dict to None + task_update_metadata.metadata = None + await db_store_parameterized.save(task_update_metadata) + + retrieved_none = await db_store_parameterized.get('task-metadata-test-4') + assert retrieved_none is not None + assert retrieved_none.metadata is None + + # Cleanup + await db_store_parameterized.delete('task-metadata-test-1') + await db_store_parameterized.delete('task-metadata-test-2') + await db_store_parameterized.delete('task-metadata-test-3') + await db_store_parameterized.delete('task-metadata-test-4') + + +# Ensure aiosqlite, asyncpg, and aiomysql are installed in the test environment (added to pyproject.toml). 
diff --git a/tests/server/tasks/test_id_generator.py b/tests/server/tasks/test_id_generator.py new file mode 100644 index 000000000..11bfff2b9 --- /dev/null +++ b/tests/server/tasks/test_id_generator.py @@ -0,0 +1,131 @@ +import uuid + +import pytest + +from pydantic import ValidationError + +from a2a.server.id_generator import ( + IDGenerator, + IDGeneratorContext, + UUIDGenerator, +) + + +class TestIDGeneratorContext: + """Tests for IDGeneratorContext.""" + + def test_context_creation_with_all_fields(self): + """Test creating context with all fields populated.""" + context = IDGeneratorContext( + task_id='task_123', context_id='context_456' + ) + assert context.task_id == 'task_123' + assert context.context_id == 'context_456' + + def test_context_creation_with_defaults(self): + """Test creating context with default None values.""" + context = IDGeneratorContext() + assert context.task_id is None + assert context.context_id is None + + @pytest.mark.parametrize( + 'kwargs, expected_task_id, expected_context_id', + [ + ({'task_id': 'task_123'}, 'task_123', None), + ({'context_id': 'context_456'}, None, 'context_456'), + ], + ) + def test_context_creation_with_partial_fields( + self, kwargs, expected_task_id, expected_context_id + ): + """Test creating context with only some fields populated.""" + context = IDGeneratorContext(**kwargs) + assert context.task_id == expected_task_id + assert context.context_id == expected_context_id + + def test_context_mutability(self): + """Test that context fields can be updated (Pydantic models are mutable by default).""" + context = IDGeneratorContext(task_id='task_123') + context.task_id = 'task_456' + assert context.task_id == 'task_456' + + def test_context_validation(self): + """Test that context raises validation error for invalid types.""" + with pytest.raises(ValidationError): + IDGeneratorContext(task_id={'not': 'a string'}) + + +class TestIDGenerator: + """Tests for IDGenerator abstract base class.""" + + def 
test_cannot_instantiate_abstract_class(self): + """Test that IDGenerator cannot be instantiated directly.""" + with pytest.raises(TypeError): + IDGenerator() + + def test_subclass_must_implement_generate(self): + """Test that subclasses must implement the generate method.""" + + class IncompleteGenerator(IDGenerator): + pass + + with pytest.raises(TypeError): + IncompleteGenerator() + + def test_valid_subclass_implementation(self): + """Test that a valid subclass can be instantiated.""" + + class ValidGenerator(IDGenerator): # pylint: disable=C0115,R0903 + def generate(self, context: IDGeneratorContext) -> str: + return 'test_id' + + generator = ValidGenerator() + assert generator.generate(IDGeneratorContext()) == 'test_id' + + +@pytest.fixture +def generator(): + """Returns a UUIDGenerator instance.""" + return UUIDGenerator() + + +@pytest.fixture +def context(): + """Returns a IDGeneratorContext instance.""" + return IDGeneratorContext() + + +class TestUUIDGenerator: + """Tests for UUIDGenerator implementation.""" + + def test_generate_returns_string(self, generator, context): + """Test that generate returns a valid v4 UUID string.""" + result = generator.generate(context) + assert isinstance(result, str) + parsed_uuid = uuid.UUID(result) + assert parsed_uuid.version == 4 + + def test_generate_produces_unique_ids(self, generator, context): + """Test that multiple calls produce unique IDs.""" + ids = [generator.generate(context) for _ in range(100)] + # All IDs should be unique + assert len(ids) == len(set(ids)) + + @pytest.mark.parametrize( + 'context_arg', + [ + None, + IDGeneratorContext(), + ], + ids=[ + 'none_context', + 'empty_context', + ], + ) + def test_generate_works_with_various_contexts(self, context_arg): + """Test that generate works with various context inputs.""" + generator = UUIDGenerator() + result = generator.generate(context_arg) + assert isinstance(result, str) + parsed_uuid = uuid.UUID(result) + assert parsed_uuid.version == 4 diff --git 
a/tests/server/tasks/test_inmemory_push_notifications.py b/tests/server/tasks/test_inmemory_push_notifications.py new file mode 100644 index 000000000..375ed97ca --- /dev/null +++ b/tests/server/tasks/test_inmemory_push_notifications.py @@ -0,0 +1,298 @@ +import unittest + +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx + +from a2a.server.tasks.base_push_notification_sender import ( + BasePushNotificationSender, +) +from a2a.server.tasks.inmemory_push_notification_config_store import ( + InMemoryPushNotificationConfigStore, +) +from a2a.types import PushNotificationConfig, Task, TaskState, TaskStatus + + +# Suppress logging for cleaner test output, can be enabled for debugging +# logging.disable(logging.CRITICAL) + + +def create_sample_task( + task_id: str = 'task123', status_state: TaskState = TaskState.completed +) -> Task: + return Task( + id=task_id, + context_id='ctx456', + status=TaskStatus(state=status_state), + ) + + +def create_sample_push_config( + url: str = 'http://example.com/callback', + config_id: str = 'cfg1', + token: str | None = None, +) -> PushNotificationConfig: + return PushNotificationConfig(id=config_id, url=url, token=token) + + +class TestInMemoryPushNotifier(unittest.IsolatedAsyncioTestCase): + def setUp(self) -> None: + self.mock_httpx_client = AsyncMock(spec=httpx.AsyncClient) + self.config_store = InMemoryPushNotificationConfigStore() + self.notifier = BasePushNotificationSender( + httpx_client=self.mock_httpx_client, config_store=self.config_store + ) # Corrected argument name + + def test_constructor_stores_client(self) -> None: + self.assertEqual(self.notifier._client, self.mock_httpx_client) + + async def test_set_info_adds_new_config(self) -> None: + task_id = 'task_new' + config = create_sample_push_config(url='http://new.url/callback') + + await self.config_store.set_info(task_id, config) + + self.assertIn(task_id, self.config_store._push_notification_infos) + self.assertEqual( + 
self.config_store._push_notification_infos[task_id], [config] + ) + + async def test_set_info_appends_to_existing_config(self) -> None: + task_id = 'task_update' + initial_config = create_sample_push_config( + url='http://initial.url/callback', config_id='cfg_initial' + ) + await self.config_store.set_info(task_id, initial_config) + + updated_config = create_sample_push_config( + url='http://updated.url/callback', config_id='cfg_updated' + ) + await self.config_store.set_info(task_id, updated_config) + + self.assertIn(task_id, self.config_store._push_notification_infos) + self.assertEqual( + self.config_store._push_notification_infos[task_id][0], + initial_config, + ) + self.assertEqual( + self.config_store._push_notification_infos[task_id][1], + updated_config, + ) + + async def test_set_info_without_config_id(self) -> None: + task_id = 'task1' + initial_config = PushNotificationConfig( + url='http://initial.url/callback' + ) + await self.config_store.set_info(task_id, initial_config) + + assert ( + self.config_store._push_notification_infos[task_id][0].id == task_id + ) + + updated_config = PushNotificationConfig( + url='http://initial.url/callback_new' + ) + await self.config_store.set_info(task_id, updated_config) + + self.assertIn(task_id, self.config_store._push_notification_infos) + assert len(self.config_store._push_notification_infos[task_id]) == 1 + self.assertEqual( + self.config_store._push_notification_infos[task_id][0].url, + updated_config.url, + ) + + async def test_get_info_existing_config(self) -> None: + task_id = 'task_get_exist' + config = create_sample_push_config(url='http://get.this/callback') + await self.config_store.set_info(task_id, config) + + retrieved_config = await self.config_store.get_info(task_id) + self.assertEqual(retrieved_config, [config]) + + async def test_get_info_non_existent_config(self) -> None: + task_id = 'task_get_non_exist' + retrieved_config = await self.config_store.get_info(task_id) + assert retrieved_config == [] 
+ + async def test_delete_info_existing_config(self) -> None: + task_id = 'task_delete_exist' + config = create_sample_push_config(url='http://delete.this/callback') + await self.config_store.set_info(task_id, config) + + self.assertIn(task_id, self.config_store._push_notification_infos) + await self.config_store.delete_info(task_id, config_id=config.id) + self.assertNotIn(task_id, self.config_store._push_notification_infos) + + async def test_delete_info_non_existent_config(self) -> None: + task_id = 'task_delete_non_exist' + # Ensure it doesn't raise an error + try: + await self.config_store.delete_info(task_id) + except Exception as e: + self.fail( + f'delete_info raised {e} unexpectedly for nonexistent task_id' + ) + self.assertNotIn( + task_id, self.config_store._push_notification_infos + ) # Should still not be there + + async def test_send_notification_success(self) -> None: + task_id = 'task_send_success' + task_data = create_sample_task(task_id=task_id) + config = create_sample_push_config(url='http://notify.me/here') + await self.config_store.set_info(task_id, config) + + # Mock the post call to simulate success + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + self.mock_httpx_client.post.return_value = mock_response + + await self.notifier.send_notification(task_data) # Pass only task_data + + self.mock_httpx_client.post.assert_awaited_once() + called_args, called_kwargs = self.mock_httpx_client.post.call_args + self.assertEqual(called_args[0], config.url) + self.assertEqual( + called_kwargs['json'], + task_data.model_dump(mode='json', exclude_none=True), + ) + self.assertNotIn( + 'auth', called_kwargs + ) # auth is not passed by current implementation + mock_response.raise_for_status.assert_called_once() + + async def test_send_notification_with_token_success(self) -> None: + task_id = 'task_send_success' + task_data = create_sample_task(task_id=task_id) + config = create_sample_push_config( + 
url='http://notify.me/here', token='unique_token' + ) + await self.config_store.set_info(task_id, config) + + # Mock the post call to simulate success + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + self.mock_httpx_client.post.return_value = mock_response + + await self.notifier.send_notification(task_data) # Pass only task_data + + self.mock_httpx_client.post.assert_awaited_once() + called_args, called_kwargs = self.mock_httpx_client.post.call_args + self.assertEqual(called_args[0], config.url) + self.assertEqual( + called_kwargs['json'], + task_data.model_dump(mode='json', exclude_none=True), + ) + self.assertEqual( + called_kwargs['headers'], + {'X-A2A-Notification-Token': 'unique_token'}, + ) + self.assertNotIn( + 'auth', called_kwargs + ) # auth is not passed by current implementation + mock_response.raise_for_status.assert_called_once() + + async def test_send_notification_no_config(self) -> None: + task_id = 'task_send_no_config' + task_data = create_sample_task(task_id=task_id) + + await self.notifier.send_notification(task_data) # Pass only task_data + + self.mock_httpx_client.post.assert_not_called() + + @patch('a2a.server.tasks.base_push_notification_sender.logger') + async def test_send_notification_http_status_error( + self, mock_logger: MagicMock + ) -> None: + task_id = 'task_send_http_err' + task_data = create_sample_task(task_id=task_id) + config = create_sample_push_config(url='http://notify.me/http_error') + await self.config_store.set_info(task_id, config) + + mock_response = MagicMock( + spec=httpx.Response + ) # Use MagicMock for status_code attribute + mock_response.status_code = 404 + mock_response.text = 'Not Found' + http_error = httpx.HTTPStatusError( + 'Not Found', request=MagicMock(), response=mock_response + ) + self.mock_httpx_client.post.side_effect = http_error + + # The method should catch the error and log it, not re-raise + await self.notifier.send_notification(task_data) # Pass only task_data 
+ + self.mock_httpx_client.post.assert_awaited_once() + mock_logger.exception.assert_called_once() + # Check that the error message contains the generic part and the specific exception string + self.assertIn( + 'Error sending push-notification', + mock_logger.exception.call_args[0][0], + ) + + @patch('a2a.server.tasks.base_push_notification_sender.logger') + async def test_send_notification_request_error( + self, mock_logger: MagicMock + ) -> None: + task_id = 'task_send_req_err' + task_data = create_sample_task(task_id=task_id) + config = create_sample_push_config(url='http://notify.me/req_error') + await self.config_store.set_info(task_id, config) + + request_error = httpx.RequestError('Network issue', request=MagicMock()) + self.mock_httpx_client.post.side_effect = request_error + + await self.notifier.send_notification(task_data) # Pass only task_data + + self.mock_httpx_client.post.assert_awaited_once() + mock_logger.exception.assert_called_once() + self.assertIn( + 'Error sending push-notification', + mock_logger.exception.call_args[0][0], + ) + + @patch('a2a.server.tasks.base_push_notification_sender.logger') + async def test_send_notification_with_auth( + self, mock_logger: MagicMock + ) -> None: + task_id = 'task_send_auth' + task_data = create_sample_task(task_id=task_id) + auth_info = ('user', 'pass') + config = create_sample_push_config(url='http://notify.me/auth') + config.authentication = MagicMock() # Mocking the structure for auth + config.authentication.schemes = ['basic'] # Assume basic for simplicity + config.authentication.credentials = ( + auth_info # This might need to be a specific model + ) + # For now, let's assume it's a tuple for basic auth + # The actual PushNotificationAuthenticationInfo is more complex + # For this test, we'll simplify and assume InMemoryPushNotifier + # directly uses tuple for httpx's `auth` param if basic. + # A more accurate test would construct the real auth model. 
+ # Given the current implementation of InMemoryPushNotifier, + # it only supports basic auth via tuple. + + await self.config_store.set_info(task_id, config) + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + self.mock_httpx_client.post.return_value = mock_response + + await self.notifier.send_notification(task_data) # Pass only task_data + + self.mock_httpx_client.post.assert_awaited_once() + called_args, called_kwargs = self.mock_httpx_client.post.call_args + self.assertEqual(called_args[0], config.url) + self.assertEqual( + called_kwargs['json'], + task_data.model_dump(mode='json', exclude_none=True), + ) + self.assertNotIn( + 'auth', called_kwargs + ) # auth is not passed by current implementation + mock_response.raise_for_status.assert_called_once() + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/server/tasks/test_inmemory_task_store.py b/tests/server/tasks/test_inmemory_task_store.py index f5d9df1d6..c41e3559f 100644 --- a/tests/server/tasks/test_inmemory_task_store.py +++ b/tests/server/tasks/test_inmemory_task_store.py @@ -8,7 +8,7 @@ MINIMAL_TASK: dict[str, Any] = { 'id': 'task-abc', - 'contextId': 'session-xyz', + 'context_id': 'session-xyz', 'status': {'state': 'submitted'}, 'kind': 'task', } diff --git a/tests/server/tasks/test_push_notification_sender.py b/tests/server/tasks/test_push_notification_sender.py new file mode 100644 index 000000000..a3272c2c1 --- /dev/null +++ b/tests/server/tasks/test_push_notification_sender.py @@ -0,0 +1,164 @@ +import unittest + +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx + +from a2a.server.tasks.base_push_notification_sender import ( + BasePushNotificationSender, +) +from a2a.types import ( + PushNotificationConfig, + Task, + TaskState, + TaskStatus, +) + + +def create_sample_task( + task_id: str = 'task123', status_state: TaskState = TaskState.completed +) -> Task: + return Task( + id=task_id, + context_id='ctx456', + 
status=TaskStatus(state=status_state), + ) + + +def create_sample_push_config( + url: str = 'http://example.com/callback', + config_id: str = 'cfg1', + token: str | None = None, +) -> PushNotificationConfig: + return PushNotificationConfig(id=config_id, url=url, token=token) + + +class TestBasePushNotificationSender(unittest.IsolatedAsyncioTestCase): + def setUp(self) -> None: + self.mock_httpx_client = AsyncMock(spec=httpx.AsyncClient) + self.mock_config_store = AsyncMock() + self.sender = BasePushNotificationSender( + httpx_client=self.mock_httpx_client, + config_store=self.mock_config_store, + ) + + def test_constructor_stores_client_and_config_store(self) -> None: + self.assertEqual(self.sender._client, self.mock_httpx_client) + self.assertEqual(self.sender._config_store, self.mock_config_store) + + async def test_send_notification_success(self) -> None: + task_id = 'task_send_success' + task_data = create_sample_task(task_id=task_id) + config = create_sample_push_config(url='http://notify.me/here') + self.mock_config_store.get_info.return_value = [config] + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + self.mock_httpx_client.post.return_value = mock_response + + await self.sender.send_notification(task_data) + + self.mock_config_store.get_info.assert_awaited_once_with + + # assert httpx_client post method got invoked with right parameters + self.mock_httpx_client.post.assert_awaited_once_with( + config.url, + json=task_data.model_dump(mode='json', exclude_none=True), + headers=None, + ) + mock_response.raise_for_status.assert_called_once() + + async def test_send_notification_with_token_success(self) -> None: + task_id = 'task_send_success' + task_data = create_sample_task(task_id=task_id) + config = create_sample_push_config( + url='http://notify.me/here', token='unique_token' + ) + self.mock_config_store.get_info.return_value = [config] + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 
+ self.mock_httpx_client.post.return_value = mock_response + + await self.sender.send_notification(task_data) + + self.mock_config_store.get_info.assert_awaited_once_with + + # assert httpx_client post method got invoked with right parameters + self.mock_httpx_client.post.assert_awaited_once_with( + config.url, + json=task_data.model_dump(mode='json', exclude_none=True), + headers={'X-A2A-Notification-Token': 'unique_token'}, + ) + mock_response.raise_for_status.assert_called_once() + + async def test_send_notification_no_config(self) -> None: + task_id = 'task_send_no_config' + task_data = create_sample_task(task_id=task_id) + self.mock_config_store.get_info.return_value = [] + + await self.sender.send_notification(task_data) + + self.mock_config_store.get_info.assert_awaited_once_with(task_id) + self.mock_httpx_client.post.assert_not_called() + + @patch('a2a.server.tasks.base_push_notification_sender.logger') + async def test_send_notification_http_status_error( + self, mock_logger: MagicMock + ) -> None: + task_id = 'task_send_http_err' + task_data = create_sample_task(task_id=task_id) + config = create_sample_push_config(url='http://notify.me/http_error') + self.mock_config_store.get_info.return_value = [config] + + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 404 + mock_response.text = 'Not Found' + http_error = httpx.HTTPStatusError( + 'Not Found', request=MagicMock(), response=mock_response + ) + self.mock_httpx_client.post.side_effect = http_error + + await self.sender.send_notification(task_data) + + self.mock_config_store.get_info.assert_awaited_once_with(task_id) + self.mock_httpx_client.post.assert_awaited_once_with( + config.url, + json=task_data.model_dump(mode='json', exclude_none=True), + headers=None, + ) + mock_logger.exception.assert_called_once() + + async def test_send_notification_multiple_configs(self) -> None: + task_id = 'task_multiple_configs' + task_data = create_sample_task(task_id=task_id) + config1 = 
create_sample_push_config( + url='http://notify.me/cfg1', config_id='cfg1' + ) + config2 = create_sample_push_config( + url='http://notify.me/cfg2', config_id='cfg2' + ) + self.mock_config_store.get_info.return_value = [config1, config2] + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + self.mock_httpx_client.post.return_value = mock_response + + await self.sender.send_notification(task_data) + + self.mock_config_store.get_info.assert_awaited_once_with(task_id) + self.assertEqual(self.mock_httpx_client.post.call_count, 2) + + # Check calls for config1 + self.mock_httpx_client.post.assert_any_call( + config1.url, + json=task_data.model_dump(mode='json', exclude_none=True), + headers=None, + ) + # Check calls for config2 + self.mock_httpx_client.post.assert_any_call( + config2.url, + json=task_data.model_dump(mode='json', exclude_none=True), + headers=None, + ) + mock_response.raise_for_status.call_count = 2 diff --git a/tests/server/tasks/test_result_aggregator.py b/tests/server/tasks/test_result_aggregator.py new file mode 100644 index 000000000..bc970246b --- /dev/null +++ b/tests/server/tasks/test_result_aggregator.py @@ -0,0 +1,492 @@ +import asyncio +import unittest + +from collections.abc import AsyncIterator +from unittest.mock import AsyncMock, MagicMock, patch + +from typing_extensions import override + +from a2a.server.events.event_consumer import EventConsumer +from a2a.server.tasks.result_aggregator import ResultAggregator +from a2a.server.tasks.task_manager import TaskManager +from a2a.types import ( + Message, + Part, + Role, + Task, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, + TextPart, +) + + +# Helper to create a simple message +def create_sample_message( + content: str = 'test message', msg_id: str = 'msg1', role: Role = Role.user +) -> Message: + return Message( + message_id=msg_id, + role=role, + parts=[Part(root=TextPart(text=content))], + ) + + +# Helper to create a simple task +def 
create_sample_task( + task_id: str = 'task1', + status_state: TaskState = TaskState.submitted, + context_id: str = 'ctx1', +) -> Task: + return Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=status_state), + ) + + +# Helper to create a TaskStatusUpdateEvent +def create_sample_status_update( + task_id: str = 'task1', + status_state: TaskState = TaskState.working, + context_id: str = 'ctx1', +) -> TaskStatusUpdateEvent: + return TaskStatusUpdateEvent( + task_id=task_id, + context_id=context_id, + status=TaskStatus(state=status_state), + final=False, # Typically false unless it's the very last update + ) + + +class TestResultAggregator(unittest.IsolatedAsyncioTestCase): + @override + def setUp(self) -> None: + self.mock_task_manager = AsyncMock(spec=TaskManager) + self.mock_event_consumer = AsyncMock(spec=EventConsumer) + self.aggregator = ResultAggregator( + task_manager=self.mock_task_manager + # event_consumer is not passed to constructor + ) + + def test_init_stores_task_manager(self) -> None: + self.assertEqual(self.aggregator.task_manager, self.mock_task_manager) + # event_consumer is also stored, can be tested if needed, but focus is on task_manager per req. 
+ + async def test_current_result_property_with_message_set(self) -> None: + sample_message = create_sample_message(content='hola') + self.aggregator._message = sample_message + self.assertEqual(await self.aggregator.current_result, sample_message) + self.mock_task_manager.get_task.assert_not_called() + + async def test_current_result_property_with_message_none(self) -> None: + expected_task = create_sample_task(task_id='task_from_tm') + self.mock_task_manager.get_task.return_value = expected_task + self.aggregator._message = None + + current_res = await self.aggregator.current_result + + self.assertEqual(current_res, expected_task) + self.mock_task_manager.get_task.assert_called_once() + + async def test_consume_and_emit(self) -> None: + event1 = create_sample_message(content='event one', msg_id='e1') + event2 = create_sample_task( + task_id='task_event', status_state=TaskState.working + ) + event3 = create_sample_status_update( + task_id='task_event', status_state=TaskState.completed + ) + + # Mock event_consumer.consume() to be an async generator + async def mock_consume_generator(): + yield event1 + yield event2 + yield event3 + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + + # To store yielded events + yielded_events = [] + async for event in self.aggregator.consume_and_emit( + self.mock_event_consumer + ): + yielded_events.append(event) + + # Assert that all events were yielded + self.assertEqual(len(yielded_events), 3) + self.assertIn(event1, yielded_events) + self.assertIn(event2, yielded_events) + self.assertIn(event3, yielded_events) + + # Assert that task_manager.process was called for each event + self.assertEqual(self.mock_task_manager.process.call_count, 3) + self.mock_task_manager.process.assert_any_call(event1) + self.mock_task_manager.process.assert_any_call(event2) + self.mock_task_manager.process.assert_any_call(event3) + + async def test_consume_all_only_message_event(self) -> None: + sample_message = 
create_sample_message(content='final message') + + async def mock_consume_generator(): + yield sample_message + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + + result = await self.aggregator.consume_all(self.mock_event_consumer) + + self.assertEqual(result, sample_message) + self.mock_task_manager.process.assert_not_called() # Process is not called if message is returned directly + self.mock_task_manager.get_task.assert_not_called() # Should not be called if message is returned + + async def test_consume_all_other_event_types(self) -> None: + task_event = create_sample_task(task_id='task_other_event') + status_update_event = create_sample_status_update( + task_id='task_other_event', status_state=TaskState.completed + ) + final_task_state = create_sample_task( + task_id='task_other_event', status_state=TaskState.completed + ) + + async def mock_consume_generator(): + yield task_event + yield status_update_event + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + self.mock_task_manager.get_task.return_value = final_task_state + + result = await self.aggregator.consume_all(self.mock_event_consumer) + + self.assertEqual(result, final_task_state) + self.assertEqual(self.mock_task_manager.process.call_count, 2) + self.mock_task_manager.process.assert_any_call(task_event) + self.mock_task_manager.process.assert_any_call(status_update_event) + self.mock_task_manager.get_task.assert_called_once() + + async def test_consume_all_empty_stream(self) -> None: + empty_task_state = create_sample_task(task_id='empty_stream_task') + + async def mock_consume_generator(): + if False: # Will not yield anything + yield + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + self.mock_task_manager.get_task.return_value = empty_task_state + + result = await self.aggregator.consume_all(self.mock_event_consumer) + + self.assertEqual(result, empty_task_state) + 
self.mock_task_manager.process.assert_not_called() + self.mock_task_manager.get_task.assert_called_once() + + async def test_consume_all_event_consumer_exception(self) -> None: + class TestException(Exception): + pass + + self.mock_event_consumer.consume_all = ( + AsyncMock() + ) # Re-mock to make it an async generator that raises + + async def raiser_gen(): + # Yield a non-Message event first to ensure process is called + yield create_sample_task('task_before_error_consume_all') + raise TestException('Consumer error') + + self.mock_event_consumer.consume_all = MagicMock( + return_value=raiser_gen() + ) + + with self.assertRaises(TestException): + await self.aggregator.consume_all(self.mock_event_consumer) + + # Ensure process was called for the event before the exception + self.mock_task_manager.process.assert_called_once_with( + unittest.mock.ANY # Check it was called, arg is the task + ) + self.mock_task_manager.get_task.assert_not_called() + + async def test_consume_and_break_on_message(self) -> None: + sample_message = create_sample_message(content='interrupt message') + event_after = create_sample_task('task_after_msg') + + async def mock_consume_generator(): + yield sample_message + yield event_after # This should not be processed by task_manager in this call + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + + ( + result, + interrupted, + ) = await self.aggregator.consume_and_break_on_interrupt( + self.mock_event_consumer + ) + + self.assertEqual(result, sample_message) + self.assertFalse(interrupted) + self.mock_task_manager.process.assert_not_called() # Process is not called for the Message if returned directly + # _continue_consuming should not be called if it's a message interrupt + # and no auth_required state. 
+ + @patch('asyncio.create_task') + async def test_consume_and_break_on_auth_required_task_event( + self, mock_create_task: MagicMock + ) -> None: + auth_task = create_sample_task( + task_id='auth_task', status_state=TaskState.auth_required + ) + event_after_auth = create_sample_message('after auth') + + async def mock_consume_generator(): + yield auth_task + yield event_after_auth # This event will be handled by _continue_consuming + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + self.mock_task_manager.get_task.return_value = ( + auth_task # current_result after auth_task processing + ) + + # Mock _continue_consuming to check if it's called by create_task + self.aggregator._continue_consuming = AsyncMock() + mock_create_task.side_effect = lambda coro: asyncio.ensure_future(coro) + + ( + result, + interrupted, + ) = await self.aggregator.consume_and_break_on_interrupt( + self.mock_event_consumer + ) + + self.assertEqual(result, auth_task) + self.assertTrue(interrupted) + self.mock_task_manager.process.assert_called_once_with(auth_task) + mock_create_task.assert_called_once() # Check that create_task was called + # self.aggregator._continue_consuming is an AsyncMock. + # The actual call in product code is create_task(self._continue_consuming(event_stream_arg)) + # So, we check that our mock _continue_consuming was called with an AsyncIterator arg. + self.aggregator._continue_consuming.assert_called_once() + self.assertIsInstance( + self.aggregator._continue_consuming.call_args[0][0], AsyncIterator + ) + + # Manually run the mocked _continue_consuming to check its behavior + # This requires the generator to be re-setup or passed if stateful. + # For simplicity, let's assume _continue_consuming uses the same generator instance. + # In a real scenario, the generator's state would be an issue. + # However, ResultAggregator re-assigns self.mock_event_consumer.consume() + # to self.aggregator._event_stream in the actual code. 
+ # The test setup for _continue_consuming needs to be more robust if we want to test its internal loop. + # For now, we've verified it's called. + + @patch('asyncio.create_task') + async def test_consume_and_break_on_auth_required_status_update_event( + self, mock_create_task: MagicMock + ) -> None: + auth_status_update = create_sample_status_update( + task_id='auth_status_task', status_state=TaskState.auth_required + ) + current_task_state_after_update = create_sample_task( + task_id='auth_status_task', status_state=TaskState.auth_required + ) + + async def mock_consume_generator(): + yield auth_status_update + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + # When current_result is called after processing auth_status_update + self.mock_task_manager.get_task.return_value = ( + current_task_state_after_update + ) + self.aggregator._continue_consuming = AsyncMock() + mock_create_task.side_effect = lambda coro: asyncio.ensure_future(coro) + + ( + result, + interrupted, + ) = await self.aggregator.consume_and_break_on_interrupt( + self.mock_event_consumer + ) + + self.assertEqual(result, current_task_state_after_update) + self.assertTrue(interrupted) + self.mock_task_manager.process.assert_called_once_with( + auth_status_update + ) + mock_create_task.assert_called_once() + self.aggregator._continue_consuming.assert_called_once() + self.assertIsInstance( + self.aggregator._continue_consuming.call_args[0][0], AsyncIterator + ) + + async def test_consume_and_break_completes_normally(self) -> None: + event1 = create_sample_message('event one normal', msg_id='n1') + event2 = create_sample_task('normal_task') + final_task_state = create_sample_task( + 'normal_task', status_state=TaskState.completed + ) + + async def mock_consume_generator(): + yield event1 + yield event2 + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + self.mock_task_manager.get_task.return_value = ( + final_task_state # For 
the end of stream + ) + + ( + result, + interrupted, + ) = await self.aggregator.consume_and_break_on_interrupt( + self.mock_event_consumer + ) + + # If the first event is a Message, it's returned directly. + self.assertEqual(result, event1) + self.assertFalse(interrupted) + # process() is NOT called for the Message if it's the one causing the return + self.mock_task_manager.process.assert_not_called() + self.mock_task_manager.get_task.assert_not_called() + + async def test_consume_and_break_event_consumer_exception(self) -> None: + class TestInterruptException(Exception): + pass + + self.mock_event_consumer.consume_all = AsyncMock() + + async def raiser_gen_interrupt(): + # Yield a non-Message event first + yield create_sample_task('task_before_error_interrupt') + raise TestInterruptException( + 'Consumer error during interrupt check' + ) + + self.mock_event_consumer.consume_all = MagicMock( + return_value=raiser_gen_interrupt() + ) + + with self.assertRaises(TestInterruptException): + await self.aggregator.consume_and_break_on_interrupt( + self.mock_event_consumer + ) + + self.mock_task_manager.process.assert_called_once_with( + unittest.mock.ANY # Check it was called, arg is the task + ) + self.mock_task_manager.get_task.assert_not_called() + + @patch('asyncio.create_task') + async def test_consume_and_break_non_blocking( + self, mock_create_task: MagicMock + ) -> None: + """Test that with blocking=False, the method returns after the first event.""" + first_event = create_sample_task('non_blocking_task') + event_after = create_sample_message('should be consumed later') + + async def mock_consume_generator(): + yield first_event + yield event_after + + self.mock_event_consumer.consume_all.return_value = ( + mock_consume_generator() + ) + # After processing `first_event`, the current result will be that task. 
+ self.aggregator.task_manager.get_task.return_value = first_event + + self.aggregator._continue_consuming = AsyncMock() + mock_create_task.side_effect = lambda coro: asyncio.ensure_future(coro) + + ( + result, + interrupted, + ) = await self.aggregator.consume_and_break_on_interrupt( + self.mock_event_consumer, blocking=False + ) + + self.assertEqual(result, first_event) + self.assertTrue(interrupted) + self.mock_task_manager.process.assert_called_once_with(first_event) + mock_create_task.assert_called_once() + # The background task should be created with the remaining stream + self.aggregator._continue_consuming.assert_called_once() + self.assertIsInstance( + self.aggregator._continue_consuming.call_args[0][0], AsyncIterator + ) + + @patch('asyncio.create_task') # To verify _continue_consuming is called + async def test_continue_consuming_processes_remaining_events( + self, mock_create_task: MagicMock + ) -> None: + # This test focuses on verifying that if an interrupt occurs, + # the events *after* the interrupting one are processed by _continue_consuming. + + auth_event = create_sample_task( + 'task_auth_for_continue', status_state=TaskState.auth_required + ) + event_after_auth1 = create_sample_message( + 'after auth 1', msg_id='cont1' + ) + event_after_auth2 = create_sample_task('task_after_auth_2') + + # This generator will be iterated first by consume_and_break_on_interrupt, + # then by _continue_consuming. + # We need a way to simulate this shared iterator state or provide a new one for _continue_consuming. + # The actual implementation uses self.aggregator._event_stream + + # Let's simulate the state after consume_and_break_on_interrupt has consumed auth_event + # and _event_stream is now the rest of the generator. 
+ + # Initial stream for consume_and_break_on_interrupt + async def initial_consume_generator(): + yield auth_event + # These should be consumed by _continue_consuming + yield event_after_auth1 + yield event_after_auth2 + + self.mock_event_consumer.consume_all.return_value = ( + initial_consume_generator() + ) + self.mock_task_manager.get_task.return_value = ( + auth_event # Task state at interrupt + ) + mock_create_task.side_effect = lambda coro: asyncio.ensure_future(coro) + + # Call the main method that triggers _continue_consuming via create_task + _, _ = await self.aggregator.consume_and_break_on_interrupt( + self.mock_event_consumer + ) + + mock_create_task.assert_called_once() + # Now, we need to actually execute the coroutine passed to create_task + # to test the behavior of _continue_consuming + continue_consuming_coro = mock_create_task.call_args[0][0] + + # Reset process mock to only count calls from _continue_consuming + self.mock_task_manager.process.reset_mock() + + await continue_consuming_coro + + # Verify process was called for events after the interrupt + self.assertEqual(self.mock_task_manager.process.call_count, 2) + self.mock_task_manager.process.assert_any_call(event_after_auth1) + self.mock_task_manager.process.assert_any_call(event_after_auth2) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/server/tasks/test_task_manager.py b/tests/server/tasks/test_task_manager.py index 56205fab6..8208ca780 100644 --- a/tests/server/tasks/test_task_manager.py +++ b/tests/server/tasks/test_task_manager.py @@ -6,6 +6,7 @@ from a2a.server.tasks import TaskManager from a2a.types import ( Artifact, + InvalidParamsError, Message, Part, Role, @@ -16,11 +17,12 @@ TaskStatusUpdateEvent, TextPart, ) +from a2a.utils.errors import ServerError MINIMAL_TASK: dict[str, Any] = { 'id': 'task-abc', - 'contextId': 'session-xyz', + 'context_id': 'session-xyz', 'status': {'state': 'submitted'}, 'kind': 'task', } @@ -37,12 +39,26 @@ def 
task_manager(mock_task_store: AsyncMock) -> TaskManager: """Fixture for a TaskManager with a mock TaskStore.""" return TaskManager( task_id=MINIMAL_TASK['id'], - context_id=MINIMAL_TASK['contextId'], + context_id=MINIMAL_TASK['context_id'], task_store=mock_task_store, initial_message=None, ) +@pytest.mark.parametrize('invalid_task_id', ['', 123]) +def test_task_manager_invalid_task_id( + mock_task_store: AsyncMock, invalid_task_id: Any +): + """Test that TaskManager raises ValueError for an invalid task_id.""" + with pytest.raises(ValueError, match='Task ID must be a non-empty string'): + TaskManager( + task_id=invalid_task_id, + context_id='test_context', + task_store=mock_task_store, + initial_message=None, + ) + + @pytest.mark.asyncio async def test_get_task_existing( task_manager: TaskManager, mock_task_store: AsyncMock @@ -52,7 +68,7 @@ async def test_get_task_existing( mock_task_store.get.return_value = expected_task retrieved_task = await task_manager.get_task() assert retrieved_task == expected_task - mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id']) + mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id'], None) @pytest.mark.asyncio @@ -63,7 +79,7 @@ async def test_get_task_nonexistent( mock_task_store.get.return_value = None retrieved_task = await task_manager.get_task() assert retrieved_task is None - mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id']) + mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id'], None) @pytest.mark.asyncio @@ -73,7 +89,7 @@ async def test_save_task_event_new_task( """Test saving a new task.""" task = Task(**MINIMAL_TASK) await task_manager.save_task_event(task) - mock_task_store.save.assert_called_once_with(task) + mock_task_store.save.assert_called_once_with(task, None) @pytest.mark.asyncio @@ -88,19 +104,19 @@ async def test_save_task_event_status_update( message=Message( role=Role.agent, parts=[Part(TextPart(text='content'))], - messageId='message-id', + message_id='message-id', 
), ) event = TaskStatusUpdateEvent( - taskId=MINIMAL_TASK['id'], - contextId=MINIMAL_TASK['contextId'], + task_id=MINIMAL_TASK['id'], + context_id=MINIMAL_TASK['context_id'], status=new_status, final=False, ) await task_manager.save_task_event(event) updated_task = initial_task updated_task.status = new_status - mock_task_store.save.assert_called_once_with(updated_task) + mock_task_store.save.assert_called_once_with(updated_task, None) @pytest.mark.asyncio @@ -111,19 +127,41 @@ async def test_save_task_event_artifact_update( initial_task = Task(**MINIMAL_TASK) mock_task_store.get.return_value = initial_task new_artifact = Artifact( - artifactId='artifact-id', + artifact_id='artifact-id', name='artifact1', parts=[Part(TextPart(text='content'))], ) event = TaskArtifactUpdateEvent( - taskId=MINIMAL_TASK['id'], - contextId=MINIMAL_TASK['contextId'], + task_id=MINIMAL_TASK['id'], + context_id=MINIMAL_TASK['context_id'], artifact=new_artifact, ) await task_manager.save_task_event(event) updated_task = initial_task updated_task.artifacts = [new_artifact] - mock_task_store.save.assert_called_once_with(updated_task) + mock_task_store.save.assert_called_once_with(updated_task, None) + + +@pytest.mark.asyncio +async def test_save_task_event_metadata_update( + task_manager: TaskManager, mock_task_store: AsyncMock +) -> None: + """Test saving an updated metadata for an existing task.""" + initial_task = Task(**MINIMAL_TASK) + mock_task_store.get.return_value = initial_task + new_metadata = {'meta_key_test': 'meta_value_test'} + + event = TaskStatusUpdateEvent( + task_id=MINIMAL_TASK['id'], + context_id=MINIMAL_TASK['context_id'], + metadata=new_metadata, + status=TaskStatus(state=TaskState.working), + final=False, + ) + await task_manager.save_task_event(event) + + updated_task = mock_task_store.save.call_args.args[0] + assert updated_task.metadata == new_metadata @pytest.mark.asyncio @@ -134,14 +172,14 @@ async def test_ensure_task_existing( expected_task = 
Task(**MINIMAL_TASK) mock_task_store.get.return_value = expected_task event = TaskStatusUpdateEvent( - taskId=MINIMAL_TASK['id'], - contextId=MINIMAL_TASK['contextId'], + task_id=MINIMAL_TASK['id'], + context_id=MINIMAL_TASK['context_id'], status=TaskStatus(state=TaskState.working), final=False, ) retrieved_task = await task_manager.ensure_task(event) assert retrieved_task == expected_task - mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id']) + mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id'], None) @pytest.mark.asyncio @@ -157,16 +195,16 @@ async def test_ensure_task_nonexistent( initial_message=None, ) event = TaskStatusUpdateEvent( - taskId='new-task', - contextId='some-context', + task_id='new-task', + context_id='some-context', status=TaskStatus(state=TaskState.submitted), final=False, ) new_task = await task_manager_without_id.ensure_task(event) assert new_task.id == 'new-task' - assert new_task.contextId == 'some-context' + assert new_task.context_id == 'some-context' assert new_task.status.state == TaskState.submitted - mock_task_store.save.assert_called_once_with(new_task) + mock_task_store.save.assert_called_once_with(new_task, None) assert task_manager_without_id.task_id == 'new-task' assert task_manager_without_id.context_id == 'some-context' @@ -175,7 +213,7 @@ def test_init_task_obj(task_manager: TaskManager) -> None: """Test initializing a new task object.""" new_task = task_manager._init_task_obj('new-task', 'new-context') # type: ignore assert new_task.id == 'new-task' - assert new_task.contextId == 'new-context' + assert new_task.context_id == 'new-context' assert new_task.status.state == TaskState.submitted assert new_task.history == [] @@ -187,7 +225,24 @@ async def test_save_task( """Test saving a task.""" task = Task(**MINIMAL_TASK) await task_manager._save_task(task) # type: ignore - mock_task_store.save.assert_called_once_with(task) + mock_task_store.save.assert_called_once_with(task, None) + + +@pytest.mark.asyncio 
+async def test_save_task_event_mismatched_id_raises_error( + task_manager: TaskManager, +) -> None: + """Test that save_task_event raises ServerError on task ID mismatch.""" + # The task_manager is initialized with 'task-abc' + mismatched_task = Task( + id='wrong-id', + context_id='session-xyz', + status=TaskStatus(state=TaskState.submitted), + ) + + with pytest.raises(ServerError) as exc_info: + await task_manager.save_task_event(mismatched_task) + assert isinstance(exc_info.value.error, InvalidParamsError) @pytest.mark.asyncio @@ -203,13 +258,13 @@ async def test_save_task_event_new_task_no_task_id( ) task_data: dict[str, Any] = { 'id': 'new-task-id', - 'contextId': 'some-context', + 'context_id': 'some-context', 'status': {'state': 'working'}, 'kind': 'task', } task = Task(**task_data) await task_manager_without_id.save_task_event(task) - mock_task_store.save.assert_called_once_with(task) + mock_task_store.save.assert_called_once_with(task, None) assert task_manager_without_id.task_id == 'new-task-id' assert task_manager_without_id.context_id == 'some-context' # initial submit should be updated to working @@ -245,8 +300,8 @@ async def test_save_task_event_no_task_existing( ) mock_task_store.get.return_value = None event = TaskStatusUpdateEvent( - taskId='event-task-id', - contextId='some-context', + task_id='event-task-id', + context_id='some-context', status=TaskStatus(state=TaskState.completed), final=True, ) @@ -256,7 +311,7 @@ async def test_save_task_event_no_task_existing( assert call_args is not None saved_task = call_args[0][0] assert saved_task.id == 'event-task-id' - assert saved_task.contextId == 'some-context' + assert saved_task.context_id == 'some-context' assert saved_task.status.state == TaskState.completed assert task_manager_without_id.task_id == 'event-task-id' assert task_manager_without_id.context_id == 'some-context' diff --git a/tests/server/tasks/test_task_updater.py b/tests/server/tasks/test_task_updater.py index fd2789293..891f8a10b 
100644 --- a/tests/server/tasks/test_task_updater.py +++ b/tests/server/tasks/test_task_updater.py @@ -1,10 +1,12 @@ +import asyncio import uuid -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest from a2a.server.events import EventQueue +from a2a.server.id_generator import IDGenerator from a2a.server.tasks import TaskUpdater from a2a.types import ( Message, @@ -17,218 +19,637 @@ ) -class TestTaskUpdater: - @pytest.fixture - def event_queue(self): - """Create a mock event queue for testing.""" - return Mock(spec=EventQueue) +@pytest.fixture +def event_queue() -> AsyncMock: + """Create a mock event queue for testing.""" + return AsyncMock(spec=EventQueue) + + +@pytest.fixture +def task_updater(event_queue: AsyncMock) -> TaskUpdater: + """Create a TaskUpdater instance for testing.""" + return TaskUpdater( + event_queue=event_queue, + task_id='test-task-id', + context_id='test-context-id', + ) + + +@pytest.fixture +def sample_message() -> Message: + """Create a sample message for testing.""" + return Message( + role=Role.agent, + task_id='test-task-id', + context_id='test-context-id', + message_id='test-message-id', + parts=[Part(root=TextPart(text='Test message'))], + ) + + +@pytest.fixture +def sample_parts() -> list[Part]: + """Create sample parts for testing.""" + return [Part(root=TextPart(text='Test part'))] + + +def test_init(event_queue: AsyncMock) -> None: + """Test that TaskUpdater initializes correctly.""" + task_updater = TaskUpdater( + event_queue=event_queue, + task_id='test-task-id', + context_id='test-context-id', + ) + + assert task_updater.event_queue == event_queue + assert task_updater.task_id == 'test-task-id' + assert task_updater.context_id == 'test-context-id' + + +@pytest.mark.asyncio +async def test_update_status_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test updating status without a message.""" + await task_updater.update_status(TaskState.working) 
+ + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.task_id == 'test-task-id' + assert event.context_id == 'test-context-id' + assert event.final is False + assert event.status.state == TaskState.working + assert event.status.message is None + + +@pytest.mark.asyncio +async def test_update_status_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test updating status with a message.""" + await task_updater.update_status(TaskState.working, message=sample_message) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.task_id == 'test-task-id' + assert event.context_id == 'test-context-id' + assert event.final is False + assert event.status.state == TaskState.working + assert event.status.message == sample_message + + +@pytest.mark.asyncio +async def test_update_status_final( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test updating status with final=True.""" + await task_updater.update_status(TaskState.completed, final=True) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.final is True + assert event.status.state == TaskState.completed + + +@pytest.mark.asyncio +async def test_add_artifact_with_custom_id_and_name( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_parts: list[Part] +) -> None: + """Test adding an artifact with a custom ID and name.""" + await task_updater.add_artifact( + parts=sample_parts, + artifact_id='custom-artifact-id', + name='Custom Artifact', + ) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskArtifactUpdateEvent) + 
assert event.artifact.artifact_id == 'custom-artifact-id' + assert event.artifact.name == 'Custom Artifact' + assert event.artifact.parts == sample_parts + + +@pytest.mark.asyncio +async def test_add_artifact_generates_id( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_parts: list[Part] +) -> None: + """Test add_artifact generates an ID if artifact_id is None.""" + known_uuid = uuid.UUID('12345678-1234-5678-1234-567812345678') + with patch('uuid.uuid4', return_value=known_uuid): + await task_updater.add_artifact(parts=sample_parts, artifact_id=None) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskArtifactUpdateEvent) + assert event.artifact.artifact_id == str(known_uuid) + assert event.artifact.parts == sample_parts + assert event.append is None + assert event.last_chunk is None + + +@pytest.mark.asyncio +async def test_add_artifact_generates_custom_id( + event_queue: AsyncMock, sample_parts: list[Part] +) -> None: + """Test add_artifact uses a custom ID generator when provided.""" + artifact_id_generator = Mock(spec=IDGenerator) + artifact_id_generator.generate.return_value = 'custom-artifact-id' + task_updater = TaskUpdater( + event_queue=event_queue, + task_id='test-task-id', + context_id='test-context-id', + artifact_id_generator=artifact_id_generator, + ) + + await task_updater.add_artifact(parts=sample_parts, artifact_id=None) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + assert isinstance(event, TaskArtifactUpdateEvent) + assert event.artifact.artifact_id == 'custom-artifact-id' + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'append_val, last_chunk_val', + [ + (False, False), + (True, True), + (True, False), + (False, True), + ], +) +async def test_add_artifact_with_append_last_chunk( + task_updater: TaskUpdater, + event_queue: AsyncMock, + sample_parts: list[Part], + append_val: bool, + 
last_chunk_val: bool, +) -> None: + """Test add_artifact with append and last_chunk flags.""" + await task_updater.add_artifact( + parts=sample_parts, + artifact_id='id1', + append=append_val, + last_chunk=last_chunk_val, + ) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskArtifactUpdateEvent) + assert event.artifact.artifact_id == 'id1' + assert event.artifact.parts == sample_parts + assert event.append == append_val + assert event.last_chunk == last_chunk_val + + +@pytest.mark.asyncio +async def test_complete_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as completed without a message.""" + await task_updater.complete() + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.completed + assert event.final is True + assert event.status.message is None + + +@pytest.mark.asyncio +async def test_complete_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as completed with a message.""" + await task_updater.complete(message=sample_message) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.completed + assert event.final is True + assert event.status.message == sample_message + + +@pytest.mark.asyncio +async def test_submit_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as submitted without a message.""" + await task_updater.submit() + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert 
event.status.state == TaskState.submitted + assert event.final is False + assert event.status.message is None + + +@pytest.mark.asyncio +async def test_submit_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as submitted with a message.""" + await task_updater.submit(message=sample_message) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.submitted + assert event.final is False + assert event.status.message == sample_message + + +@pytest.mark.asyncio +async def test_start_work_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as working without a message.""" + await task_updater.start_work() + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.working + assert event.final is False + assert event.status.message is None + + +@pytest.mark.asyncio +async def test_start_work_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as working with a message.""" + await task_updater.start_work(message=sample_message) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] - @pytest.fixture - def task_updater(self, event_queue): - """Create a TaskUpdater instance for testing.""" - return TaskUpdater( - event_queue=event_queue, - task_id='test-task-id', - context_id='test-context-id', - ) + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.working + assert event.final is False + assert event.status.message == sample_message + + +def test_new_agent_message( + task_updater: TaskUpdater, sample_parts: 
list[Part] +) -> None: + """Test creating a new agent message.""" + with patch( + 'uuid.uuid4', + return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), + ): + message = task_updater.new_agent_message(parts=sample_parts) - @pytest.fixture - def sample_message(self): - """Create a sample message for testing.""" - return Message( - role=Role.agent, - taskId='test-task-id', - contextId='test-context-id', - messageId='test-message-id', - parts=[Part(root=TextPart(text='Test message'))], - ) + assert message.role == Role.agent + assert message.task_id == 'test-task-id' + assert message.context_id == 'test-context-id' + assert message.message_id == '12345678-1234-5678-1234-567812345678' + assert message.parts == sample_parts + assert message.metadata is None + + +def test_new_agent_message_with_metadata( + task_updater: TaskUpdater, sample_parts: list[Part] +) -> None: + """Test creating a new agent message with metadata and final=True.""" + metadata = {'key': 'value'} - @pytest.fixture - def sample_parts(self): - """Create sample parts for testing.""" - return [Part(root=TextPart(text='Test part'))] - - def test_init(self, event_queue): - """Test that TaskUpdater initializes correctly.""" - task_updater = TaskUpdater( - event_queue=event_queue, - task_id='test-task-id', - context_id='test-context-id', + with patch( + 'uuid.uuid4', + return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), + ): + message = task_updater.new_agent_message( + parts=sample_parts, metadata=metadata ) - assert task_updater.event_queue == event_queue - assert task_updater.task_id == 'test-task-id' - assert task_updater.context_id == 'test-context-id' + assert message.role == Role.agent + assert message.task_id == 'test-task-id' + assert message.context_id == 'test-context-id' + assert message.message_id == '12345678-1234-5678-1234-567812345678' + assert message.parts == sample_parts + assert message.metadata == metadata - def test_update_status_without_message(self, 
task_updater, event_queue): - """Test updating status without a message.""" - task_updater.update_status(TaskState.working) - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] +def test_new_agent_message_with_custom_id_generator( + event_queue: AsyncMock, sample_parts: list[Part] +) -> None: + """Test creating a new agent message with a custom message ID generator.""" + message_id_generator = Mock(spec=IDGenerator) + message_id_generator.generate.return_value = 'custom-message-id' + task_updater = TaskUpdater( + event_queue=event_queue, + task_id='test-task-id', + context_id='test-context-id', + message_id_generator=message_id_generator, + ) - assert isinstance(event, TaskStatusUpdateEvent) - assert event.taskId == 'test-task-id' - assert event.contextId == 'test-context-id' - assert event.final is False - assert event.status.state == TaskState.working - assert event.status.message is None + message = task_updater.new_agent_message(parts=sample_parts) - def test_update_status_with_message( - self, task_updater, event_queue, sample_message - ): - """Test updating status with a message.""" - task_updater.update_status(TaskState.working, message=sample_message) + assert message.message_id == 'custom-message-id' - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] - assert isinstance(event, TaskStatusUpdateEvent) - assert event.taskId == 'test-task-id' - assert event.contextId == 'test-context-id' - assert event.final is False - assert event.status.state == TaskState.working - assert event.status.message == sample_message +@pytest.mark.asyncio +async def test_failed_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as failed without a message.""" + await task_updater.failed() - def test_update_status_final(self, task_updater, event_queue): - """Test updating status with final=True.""" - 
task_updater.update_status(TaskState.completed, final=True) + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.failed + assert event.final is True + assert event.status.message is None - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] - assert isinstance(event, TaskStatusUpdateEvent) - assert event.final is True - assert event.status.state == TaskState.completed +@pytest.mark.asyncio +async def test_failed_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as failed with a message.""" + await task_updater.failed(message=sample_message) - def test_add_artifact_with_custom_id_and_name( - self, task_updater, event_queue, sample_parts - ): - """Test adding an artifact with a custom ID and name.""" - task_updater.add_artifact( - parts=sample_parts, - artifact_id='custom-artifact-id', - name='Custom Artifact', - ) + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.failed + assert event.final is True + assert event.status.message == sample_message - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] - assert isinstance(event, TaskArtifactUpdateEvent) - assert event.artifact.artifactId == 'custom-artifact-id' - assert event.artifact.name == 'Custom Artifact' - assert event.artifact.parts == sample_parts +@pytest.mark.asyncio +async def test_reject_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as rejected without a message.""" + await task_updater.reject() - def test_complete_without_message(self, task_updater, event_queue): - """Test marking a task as completed 
without a message.""" - task_updater.complete() + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.rejected + assert event.final is True + assert event.status.message is None - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] - assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.completed - assert event.final is True - assert event.status.message is None +@pytest.mark.asyncio +async def test_reject_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as rejected with a message.""" + await task_updater.reject(message=sample_message) - def test_complete_with_message( - self, task_updater, event_queue, sample_message - ): - """Test marking a task as completed with a message.""" - task_updater.complete(message=sample_message) + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.rejected + assert event.final is True + assert event.status.message == sample_message - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] - assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.completed - assert event.final is True - assert event.status.message == sample_message +@pytest.mark.asyncio +async def test_requires_input_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as input required without a message.""" + await task_updater.requires_input() - def test_submit_without_message(self, task_updater, event_queue): - """Test marking a task as submitted without a message.""" - task_updater.submit() + 
event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.input_required + assert event.final is False + assert event.status.message is None - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] - assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.submitted - assert event.final is False - assert event.status.message is None +@pytest.mark.asyncio +async def test_requires_input_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as input required with a message.""" + await task_updater.requires_input(message=sample_message) - def test_submit_with_message( - self, task_updater, event_queue, sample_message - ): - """Test marking a task as submitted with a message.""" - task_updater.submit(message=sample_message) + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.input_required + assert event.final is False + assert event.status.message == sample_message - assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.submitted - assert event.final is False - assert event.status.message == sample_message - def test_start_work_without_message(self, task_updater, event_queue): - """Test marking a task as working without a message.""" - task_updater.start_work() +@pytest.mark.asyncio +async def test_requires_input_final_true( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as input required with final=True.""" + await task_updater.requires_input(final=True) - 
event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] - assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.working - assert event.final is False - assert event.status.message is None + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.input_required + assert event.final is True + assert event.status.message is None - def test_start_work_with_message( - self, task_updater, event_queue, sample_message - ): - """Test marking a task as working with a message.""" - task_updater.start_work(message=sample_message) - - event_queue.enqueue_event.assert_called_once() - event = event_queue.enqueue_event.call_args[0][0] - - assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.working - assert event.final is False - assert event.status.message == sample_message - - def test_new_agent_message(self, task_updater, sample_parts): - """Test creating a new agent message.""" - with patch( - 'uuid.uuid4', - return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), - ): - message = task_updater.new_agent_message(parts=sample_parts) - - assert message.role == Role.agent - assert message.taskId == 'test-task-id' - assert message.contextId == 'test-context-id' - assert message.messageId == '12345678-1234-5678-1234-567812345678' - assert message.parts == sample_parts - assert message.metadata is None - - def test_new_agent_message_with_metadata( - self, task_updater, sample_parts - ): - """Test creating a new agent message with metadata and final=True.""" - metadata = {'key': 'value'} - - with patch( - 'uuid.uuid4', - return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), - ): - message = task_updater.new_agent_message( - parts=sample_parts, metadata=metadata - ) - - assert message.role == Role.agent - assert 
message.taskId == 'test-task-id' - assert message.contextId == 'test-context-id' - assert message.messageId == '12345678-1234-5678-1234-567812345678' - assert message.parts == sample_parts - assert message.metadata == metadata + +@pytest.mark.asyncio +async def test_requires_input_with_message_and_final( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as input required with message and final=True.""" + await task_updater.requires_input(message=sample_message, final=True) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.input_required + assert event.final is True + assert event.status.message == sample_message + + +@pytest.mark.asyncio +async def test_requires_auth_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as auth required without a message.""" + await task_updater.requires_auth() + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.auth_required + assert event.final is False + assert event.status.message is None + + +@pytest.mark.asyncio +async def test_requires_auth_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as auth required with a message.""" + await task_updater.requires_auth(message=sample_message) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.auth_required + assert event.final is False + assert event.status.message == sample_message + + +@pytest.mark.asyncio +async def test_requires_auth_final_true( + task_updater: 
TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as auth required with final=True.""" + await task_updater.requires_auth(final=True) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.auth_required + assert event.final is True + assert event.status.message is None + + +@pytest.mark.asyncio +async def test_requires_auth_with_message_and_final( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as auth required with message and final=True.""" + await task_updater.requires_auth(message=sample_message, final=True) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.auth_required + assert event.final is True + assert event.status.message == sample_message + + +@pytest.mark.asyncio +async def test_cancel_without_message( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + """Test marking a task as cancelled without a message.""" + await task_updater.cancel() + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.canceled + assert event.final is True + assert event.status.message is None + + +@pytest.mark.asyncio +async def test_cancel_with_message( + task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message +) -> None: + """Test marking a task as cancelled with a message.""" + await task_updater.cancel(message=sample_message) + + event_queue.enqueue_event.assert_called_once() + event = event_queue.enqueue_event.call_args[0][0] + + assert isinstance(event, TaskStatusUpdateEvent) + assert event.status.state == TaskState.canceled + 
assert event.final is True + assert event.status.message == sample_message + + +@pytest.mark.asyncio +async def test_update_status_raises_error_if_terminal_state_reached( + task_updater: TaskUpdater, event_queue: AsyncMock +) -> None: + await task_updater.complete() + event_queue.reset_mock() + with pytest.raises(RuntimeError): + await task_updater.start_work() + event_queue.enqueue_event.assert_not_called() + + +@pytest.mark.asyncio +async def test_concurrent_updates_race_condition( + event_queue: AsyncMock, +) -> None: + task_updater = TaskUpdater( + event_queue=event_queue, + task_id='test-task-id', + context_id='test-context-id', + ) + tasks = [ + task_updater.complete(), + task_updater.failed(), + ] + results = await asyncio.gather(*tasks, return_exceptions=True) + successes = [r for r in results if not isinstance(r, Exception)] + failures = [r for r in results if isinstance(r, RuntimeError)] + assert len(successes) == 1 + assert len(failures) == 1 + assert event_queue.enqueue_event.call_count == 1 + + +@pytest.mark.asyncio +async def test_reject_concurrently_with_complete( + event_queue: AsyncMock, +) -> None: + """Test for race conditions when reject and complete are called concurrently.""" + task_updater = TaskUpdater( + event_queue=event_queue, + task_id='concurrent-task', + context_id='concurrent-context', + ) + + tasks = [ + task_updater.reject(), + task_updater.complete(), + ] + + results = await asyncio.gather(*tasks, return_exceptions=True) + + successes = [r for r in results if not isinstance(r, Exception)] + failures = [r for r in results if isinstance(r, RuntimeError)] + + assert len(successes) == 1 + assert len(failures) == 1 + + assert event_queue.enqueue_event.call_count == 1 + + event = event_queue.enqueue_event.call_args[0][0] + assert isinstance(event, TaskStatusUpdateEvent) + assert event.final is True + assert event.status.state in [TaskState.rejected, TaskState.completed] diff --git a/tests/server/test_integration.py 
b/tests/server/test_integration.py index c0a54e94b..8080136c1 100644 --- a/tests/server/test_integration.py +++ b/tests/server/test_integration.py @@ -1,21 +1,60 @@ import asyncio + from typing import Any from unittest import mock import pytest + +from starlette.authentication import ( + AuthCredentials, + AuthenticationBackend, + BaseUser, + SimpleUser, +) +from starlette.middleware import Middleware +from starlette.middleware.authentication import AuthenticationMiddleware +from starlette.requests import HTTPConnection from starlette.responses import JSONResponse from starlette.routing import Route from starlette.testclient import TestClient -from a2a.server.apps.starlette_app import A2AStarletteApplication -from a2a.types import (AgentCapabilities, AgentCard, Artifact, DataPart, - InternalError, InvalidRequestError, JSONParseError, - Part, PushNotificationConfig, Task, - TaskArtifactUpdateEvent, TaskPushNotificationConfig, - TaskState, TaskStatus, TextPart, - UnsupportedOperationError) +from a2a.server.apps import ( + A2AFastAPIApplication, + A2AStarletteApplication, +) +from a2a.server.context import ServerCallContext +from a2a.types import ( + AgentCapabilities, + AgentCard, + Artifact, + DataPart, + InternalError, + InvalidParamsError, + InvalidRequestError, + JSONParseError, + Message, + MethodNotFoundError, + Part, + PushNotificationConfig, + Role, + SendMessageResponse, + SendMessageSuccessResponse, + Task, + TaskArtifactUpdateEvent, + TaskPushNotificationConfig, + TaskState, + TaskStatus, + TextPart, + UnsupportedOperationError, +) +from a2a.utils import ( + AGENT_CARD_WELL_KNOWN_PATH, + EXTENDED_AGENT_CARD_PATH, + PREV_AGENT_CARD_WELL_KNOWN_PATH, +) from a2a.utils.errors import MethodNotImplementedError + # === TEST SETUP === MINIMAL_AGENT_SKILL: dict[str, Any] = { @@ -28,7 +67,7 @@ MINIMAL_AGENT_AUTH: dict[str, Any] = {'schemes': ['Bearer']} AGENT_CAPS = AgentCapabilities( - pushNotifications=True, stateTransitionHistory=False, streaming=True + 
push_notifications=True, state_transition_history=False, streaming=True ) MINIMAL_AGENT_CARD: dict[str, Any] = { @@ -64,7 +103,7 @@ MINIMAL_MESSAGE_USER: dict[str, Any] = { 'role': 'user', 'parts': [TEXT_PART_DATA], - 'messageId': 'msg-123', + 'message_id': 'msg-123', 'kind': 'message', } @@ -106,9 +145,9 @@ def app(agent_card: AgentCard, handler: mock.AsyncMock): @pytest.fixture -def client(app: A2AStarletteApplication): - """Create a test client with the app.""" - return TestClient(app.build()) +def client(app: A2AStarletteApplication, **kwargs): + """Create a test client with the Starlette app.""" + return TestClient(app.build(**kwargs)) # === BASIC FUNCTIONALITY TESTS === @@ -116,7 +155,7 @@ def client(app: A2AStarletteApplication): def test_agent_card_endpoint(client: TestClient, agent_card: AgentCard): """Test the agent card endpoint returns expected data.""" - response = client.get('/.well-known/agent.json') + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) assert response.status_code == 200 data = response.json() assert data['name'] == agent_card.name @@ -129,22 +168,69 @@ def test_authenticated_extended_agent_card_endpoint_not_supported( ): """Test extended card endpoint returns 404 if not supported by main card.""" # Ensure supportsAuthenticatedExtendedCard is False or None - agent_card.supportsAuthenticatedExtendedCard = False + agent_card.supports_authenticated_extended_card = False app_instance = A2AStarletteApplication(agent_card, handler) # The route should not even be added if supportsAuthenticatedExtendedCard is false # So, building the app and trying to hit it should result in 404 from Starlette itself client = TestClient(app_instance.build()) response = client.get('/agent/authenticatedExtendedCard') - assert response.status_code == 404 # Starlette's default for no route + assert response.status_code == 404 # Starlette's default for no route + + +def test_agent_card_default_endpoint_has_deprecated_route( + agent_card: AgentCard, handler: 
mock.AsyncMock +): + """Test agent card deprecated route is available for default route.""" + app_instance = A2AStarletteApplication(agent_card, handler) + client = TestClient(app_instance.build()) + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == agent_card.name + response = client.get(PREV_AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == agent_card.name + + +def test_agent_card_custom_endpoint_has_no_deprecated_route( + agent_card: AgentCard, handler: mock.AsyncMock +): + """Test agent card deprecated route is not available for custom route.""" + app_instance = A2AStarletteApplication(agent_card, handler) + client = TestClient(app_instance.build(agent_card_url='/my-agent')) + response = client.get('/my-agent') + assert response.status_code == 200 + data = response.json() + assert data['name'] == agent_card.name + response = client.get(PREV_AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 404 + + +def test_authenticated_extended_agent_card_endpoint_not_supported_fastapi( + agent_card: AgentCard, handler: mock.AsyncMock +): + """Test extended card endpoint returns 404 if not supported by main card.""" + # Ensure supportsAuthenticatedExtendedCard is False or None + agent_card.supports_authenticated_extended_card = False + app_instance = A2AFastAPIApplication(agent_card, handler) + # The route should not even be added if supportsAuthenticatedExtendedCard is false + # So, building the app and trying to hit it should result in 404 from FastAPI itself + client = TestClient(app_instance.build()) + response = client.get('/agent/authenticatedExtendedCard') + assert response.status_code == 404 # FastAPI's default for no route -def test_authenticated_extended_agent_card_endpoint_supported_with_specific_extended_card( +def 
test_authenticated_extended_agent_card_endpoint_supported_with_specific_extended_card_starlette( agent_card: AgentCard, extended_agent_card_fixture: AgentCard, handler: mock.AsyncMock, ): """Test extended card endpoint returns the specific extended card when provided.""" - agent_card.supportsAuthenticatedExtendedCard = True # Main card must support it + agent_card.supports_authenticated_extended_card = ( + True # Main card must support it + ) + app_instance = A2AStarletteApplication( agent_card, handler, extended_agent_card=extended_agent_card_fixture ) @@ -157,11 +243,36 @@ def test_authenticated_extended_agent_card_endpoint_supported_with_specific_exte assert data['name'] == extended_agent_card_fixture.name assert data['version'] == extended_agent_card_fixture.version assert len(data['skills']) == len(extended_agent_card_fixture.skills) - assert any( - skill['id'] == 'skill-extended' for skill in data['skills'] - ), "Extended skill not found in served card" + assert any(skill['id'] == 'skill-extended' for skill in data['skills']), ( + 'Extended skill not found in served card' + ) +def test_authenticated_extended_agent_card_endpoint_supported_with_specific_extended_card_fastapi( + agent_card: AgentCard, + extended_agent_card_fixture: AgentCard, + handler: mock.AsyncMock, +): + """Test extended card endpoint returns the specific extended card when provided.""" + agent_card.supports_authenticated_extended_card = ( + True # Main card must support it + ) + app_instance = A2AFastAPIApplication( + agent_card, handler, extended_agent_card=extended_agent_card_fixture + ) + client = TestClient(app_instance.build()) + + response = client.get('/agent/authenticatedExtendedCard') + assert response.status_code == 200 + data = response.json() + # Verify it's the extended card's data + assert data['name'] == extended_agent_card_fixture.name + assert data['version'] == extended_agent_card_fixture.version + assert len(data['skills']) == len(extended_agent_card_fixture.skills) + 
assert any(skill['id'] == 'skill-extended' for skill in data['skills']), ( + 'Extended skill not found in served card' + ) + def test_agent_card_custom_url( app: A2AStarletteApplication, agent_card: AgentCard @@ -174,15 +285,36 @@ def test_agent_card_custom_url( assert data['name'] == agent_card.name -def test_rpc_endpoint_custom_url( +def test_starlette_rpc_endpoint_custom_url( app: A2AStarletteApplication, handler: mock.AsyncMock ): """Test the RPC endpoint with a custom URL.""" # Provide a valid Task object as the return value task_status = TaskStatus(**MINIMAL_TASK_STATUS) - task = Task( - id='task1', contextId='ctx1', state='completed', status=task_status + task = Task(id='task1', context_id='ctx1', status=task_status) + handler.on_get_task.return_value = task + client = TestClient(app.build(rpc_url='/api/rpc')) + response = client.post( + '/api/rpc', + json={ + 'jsonrpc': '2.0', + 'id': '123', + 'method': 'tasks/get', + 'params': {'id': 'task1'}, + }, ) + assert response.status_code == 200 + data = response.json() + assert data['result']['id'] == 'task1' + + +def test_fastapi_rpc_endpoint_custom_url( + app: A2AFastAPIApplication, handler: mock.AsyncMock +): + """Test the RPC endpoint with a custom URL.""" + # Provide a valid Task object as the return value + task_status = TaskStatus(**MINIMAL_TASK_STATUS) + task = Task(id='task1', context_id='ctx1', status=task_status) handler.on_get_task.return_value = task client = TestClient(app.build(rpc_url='/api/rpc')) response = client.post( @@ -199,7 +331,7 @@ def test_rpc_endpoint_custom_url( assert data['result']['id'] == 'task1' -def test_build_with_extra_routes( +def test_starlette_build_with_extra_routes( app: A2AStarletteApplication, agent_card: AgentCard ): """Test building the app with additional routes.""" @@ -217,12 +349,65 @@ def custom_handler(request): assert response.json() == {'message': 'Hello'} # Ensure default routes still work - response = client.get('/.well-known/agent.json') + response = 
client.get(AGENT_CARD_WELL_KNOWN_PATH) assert response.status_code == 200 data = response.json() assert data['name'] == agent_card.name +def test_fastapi_build_with_extra_routes( + app: A2AFastAPIApplication, agent_card: AgentCard +): + """Test building the app with additional routes.""" + + def custom_handler(request): + return JSONResponse({'message': 'Hello'}) + + extra_route = Route('/hello', custom_handler, methods=['GET']) + test_app = app.build(routes=[extra_route]) + client = TestClient(test_app) + + # Test the added route + response = client.get('/hello') + assert response.status_code == 200 + assert response.json() == {'message': 'Hello'} + + # Ensure default routes still work + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == agent_card.name + + # check if deprecated agent card path route is available with default well-known path + response = client.get(PREV_AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == agent_card.name + + +def test_fastapi_build_custom_agent_card_path( + app: A2AFastAPIApplication, agent_card: AgentCard +): + """Test building the app with a custom agent card path.""" + + test_app = app.build(agent_card_url='/agent-card') + client = TestClient(test_app) + + # Ensure custom card path works + response = client.get('/agent-card') + assert response.status_code == 200 + data = response.json() + assert data['name'] == agent_card.name + + # Ensure default agent card location is not available + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 404 + + # check if deprecated agent card path route is not available + response = client.get(PREV_AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 404 + + # === REQUEST METHODS TESTS === @@ -232,8 +417,7 @@ def test_send_message(client: TestClient, handler: mock.AsyncMock): task_status = 
TaskStatus(**MINIMAL_TASK_STATUS) mock_task = Task( id='task1', - contextId='session-xyz', - state='completed', + context_id='session-xyz', status=task_status, ) handler.on_message_send.return_value = mock_task @@ -249,10 +433,10 @@ def test_send_message(client: TestClient, handler: mock.AsyncMock): 'message': { 'role': 'agent', 'parts': [{'kind': 'text', 'text': 'Hello'}], - 'messageId': '111', + 'message_id': '111', 'kind': 'message', - 'taskId': 'task1', - 'contextId': 'session-xyz', + 'task_id': 'task1', + 'context_id': 'session-xyz', } }, }, @@ -274,9 +458,7 @@ def test_cancel_task(client: TestClient, handler: mock.AsyncMock): # Setup mock response task_status = TaskStatus(**MINIMAL_TASK_STATUS) task_status.state = TaskState.canceled # 'cancelled' # - task = Task( - id='task1', contextId='ctx1', state='cancelled', status=task_status - ) + task = Task(id='task1', context_id='ctx1', status=task_status) handler.on_cancel_task.return_value = task # Send request @@ -304,9 +486,7 @@ def test_get_task(client: TestClient, handler: mock.AsyncMock): """Test getting a task.""" # Setup mock response task_status = TaskStatus(**MINIMAL_TASK_STATUS) - task = Task( - id='task1', contextId='ctx1', state='completed', status=task_status - ) + task = Task(id='task1', context_id='ctx1', status=task_status) handler.on_get_task.return_value = task # JSONRPCResponse(root=task) # Send request @@ -335,8 +515,8 @@ def test_set_push_notification_config( """Test setting push notification configuration.""" # Setup mock response task_push_config = TaskPushNotificationConfig( - taskId='t2', - pushNotificationConfig=PushNotificationConfig( + task_id='t2', + push_notification_config=PushNotificationConfig( url='https://example.com', token='secret-token' ), ) @@ -350,7 +530,7 @@ def test_set_push_notification_config( 'id': '123', 'method': 'tasks/pushNotificationConfig/set', 'params': { - 'taskId': 't2', + 'task_id': 't2', 'pushNotificationConfig': { 'url': 'https://example.com', 'token': 
'secret-token', @@ -374,8 +554,8 @@ def test_get_push_notification_config( """Test getting push notification configuration.""" # Setup mock response task_push_config = TaskPushNotificationConfig( - taskId='task1', - pushNotificationConfig=PushNotificationConfig( + task_id='task1', + push_notification_config=PushNotificationConfig( url='https://example.com', token='secret-token' ), ) @@ -402,6 +582,67 @@ def test_get_push_notification_config( handler.on_get_task_push_notification_config.assert_awaited_once() +def test_server_auth(app: A2AStarletteApplication, handler: mock.AsyncMock): + class TestAuthMiddleware(AuthenticationBackend): + async def authenticate( + self, conn: HTTPConnection + ) -> tuple[AuthCredentials, BaseUser] | None: + # For the purposes of this test, all requests are authenticated! + return (AuthCredentials(['authenticated']), SimpleUser('test_user')) + + client = TestClient( + app.build( + middleware=[ + Middleware( + AuthenticationMiddleware, backend=TestAuthMiddleware() + ) + ] + ) + ) + + # Set the output message to be the authenticated user name + handler.on_message_send.side_effect = lambda params, context: Message( + context_id='session-xyz', + message_id='112', + role=Role.agent, + parts=[ + Part(TextPart(text=context.user.user_name)), + ], + ) + + # Send request + response = client.post( + '/', + json={ + 'jsonrpc': '2.0', + 'id': '123', + 'method': 'message/send', + 'params': { + 'message': { + 'role': 'agent', + 'parts': [{'kind': 'text', 'text': 'Hello'}], + 'message_id': '111', + 'kind': 'message', + 'task_id': 'task1', + 'context_id': 'session-xyz', + } + }, + }, + ) + + # Verify response + assert response.status_code == 200 + result = SendMessageResponse.model_validate(response.json()) + assert isinstance(result.root, SendMessageSuccessResponse) + assert isinstance(result.root.result, Message) + message = result.root.result + assert isinstance(message.parts[0].root, TextPart) + assert message.parts[0].root.text == 'test_user' + + # 
Verify handler was called + handler.on_message_send.assert_awaited_once() + + # === STREAMING TESTS === @@ -417,15 +658,15 @@ async def stream_generator(): text_part = TextPart(**TEXT_PART_DATA) data_part = DataPart(**DATA_PART_DATA) artifact = Artifact( - artifactId=f'artifact-{i}', + artifact_id=f'artifact-{i}', name='result_data', parts=[Part(root=text_part), Part(root=data_part)], ) last = [False, False, True] task_artifact_update_event_data: dict[str, Any] = { 'artifact': artifact, - 'taskId': 'task_id', - 'contextId': 'session-xyz', + 'task_id': 'task_id', + 'context_id': 'session-xyz', 'append': False, 'lastChunk': last[i], 'kind': 'artifact-update', @@ -453,10 +694,10 @@ async def stream_generator(): 'message': { 'role': 'agent', 'parts': [{'kind': 'text', 'text': 'Hello'}], - 'messageId': '111', + 'message_id': '111', 'kind': 'message', - 'taskId': 'taskId', - 'contextId': 'session-xyz', + 'task_id': 'task_id', + 'context_id': 'session-xyz', } }, }, @@ -507,15 +748,15 @@ async def stream_generator(): text_part = TextPart(**TEXT_PART_DATA) data_part = DataPart(**DATA_PART_DATA) artifact = Artifact( - artifactId=f'artifact-{i}', + artifact_id=f'artifact-{i}', name='result_data', parts=[Part(root=text_part), Part(root=data_part)], ) last = [False, False, True] task_artifact_update_event_data: dict[str, Any] = { 'artifact': artifact, - 'taskId': 'task_id', - 'contextId': 'session-xyz', + 'task_id': 'task_id', + 'context_id': 'session-xyz', 'append': False, 'lastChunk': last[i], 'kind': 'artifact-update', @@ -598,8 +839,9 @@ def test_invalid_request_structure(client: TestClient): response = client.post( '/', json={ - # Missing required fields - 'id': '123' + 'jsonrpc': 'aaaa', # Missing or wrong required fields + 'id': '123', + 'method': 'foo/bar', }, ) assert response.status_code == 200 @@ -608,6 +850,193 @@ def test_invalid_request_structure(client: TestClient): assert data['error']['code'] == InvalidRequestError().code +# === DYNAMIC CARD MODIFIER TESTS === 
+ + +def test_dynamic_agent_card_modifier( + agent_card: AgentCard, handler: mock.AsyncMock +): + """Test that the card_modifier dynamically alters the public agent card.""" + + async def modifier(card: AgentCard) -> AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.name = 'Dynamically Modified Agent' + return modified_card + + app_instance = A2AStarletteApplication( + agent_card, handler, card_modifier=modifier + ) + client = TestClient(app_instance.build()) + + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == 'Dynamically Modified Agent' + assert ( + data['version'] == agent_card.version + ) # Ensure other fields are intact + + +def test_dynamic_agent_card_modifier_sync( + agent_card: AgentCard, handler: mock.AsyncMock +): + """Test that a synchronous card_modifier dynamically alters the public agent card.""" + + def modifier(card: AgentCard) -> AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.name = 'Dynamically Modified Agent' + return modified_card + + app_instance = A2AStarletteApplication( + agent_card, handler, card_modifier=modifier + ) + client = TestClient(app_instance.build()) + + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == 'Dynamically Modified Agent' + assert ( + data['version'] == agent_card.version + ) # Ensure other fields are intact + + +def test_dynamic_extended_agent_card_modifier( + agent_card: AgentCard, + extended_agent_card_fixture: AgentCard, + handler: mock.AsyncMock, +): + """Test that the extended_card_modifier dynamically alters the extended agent card.""" + agent_card.supports_authenticated_extended_card = True + + async def modifier( + card: AgentCard, context: ServerCallContext + ) -> AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.description = 'Dynamically Modified Extended Description' 
+ return modified_card + + # Test with a base extended card + app_instance = A2AStarletteApplication( + agent_card, + handler, + extended_agent_card=extended_agent_card_fixture, + extended_card_modifier=modifier, + ) + client = TestClient(app_instance.build()) + + response = client.get(EXTENDED_AGENT_CARD_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == extended_agent_card_fixture.name + assert data['description'] == 'Dynamically Modified Extended Description' + + # Test without a base extended card (modifier should receive public card) + app_instance_no_base = A2AStarletteApplication( + agent_card, + handler, + extended_agent_card=None, + extended_card_modifier=modifier, + ) + client_no_base = TestClient(app_instance_no_base.build()) + response_no_base = client_no_base.get(EXTENDED_AGENT_CARD_PATH) + assert response_no_base.status_code == 200 + data_no_base = response_no_base.json() + assert data_no_base['name'] == agent_card.name + assert ( + data_no_base['description'] + == 'Dynamically Modified Extended Description' + ) + + +def test_dynamic_extended_agent_card_modifier_sync( + agent_card: AgentCard, + extended_agent_card_fixture: AgentCard, + handler: mock.AsyncMock, +): + """Test that a synchronous extended_card_modifier dynamically alters the extended agent card.""" + agent_card.supports_authenticated_extended_card = True + + def modifier(card: AgentCard, context: ServerCallContext) -> AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.description = 'Dynamically Modified Extended Description' + return modified_card + + # Test with a base extended card + app_instance = A2AStarletteApplication( + agent_card, + handler, + extended_agent_card=extended_agent_card_fixture, + extended_card_modifier=modifier, + ) + client = TestClient(app_instance.build()) + + response = client.get(EXTENDED_AGENT_CARD_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == 
extended_agent_card_fixture.name + assert data['description'] == 'Dynamically Modified Extended Description' + + # Test without a base extended card (modifier should receive public card) + app_instance_no_base = A2AStarletteApplication( + agent_card, + handler, + extended_agent_card=None, + extended_card_modifier=modifier, + ) + client_no_base = TestClient(app_instance_no_base.build()) + response_no_base = client_no_base.get(EXTENDED_AGENT_CARD_PATH) + assert response_no_base.status_code == 200 + data_no_base = response_no_base.json() + assert data_no_base['name'] == agent_card.name + assert ( + data_no_base['description'] + == 'Dynamically Modified Extended Description' + ) + + +def test_fastapi_dynamic_agent_card_modifier( + agent_card: AgentCard, handler: mock.AsyncMock +): + """Test that the card_modifier dynamically alters the public agent card for FastAPI.""" + + async def modifier(card: AgentCard) -> AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.name = 'Dynamically Modified Agent' + return modified_card + + app_instance = A2AFastAPIApplication( + agent_card, handler, card_modifier=modifier + ) + client = TestClient(app_instance.build()) + + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert data['name'] == 'Dynamically Modified Agent' + + +def test_fastapi_dynamic_agent_card_modifier_sync( + agent_card: AgentCard, handler: mock.AsyncMock +): + """Test that a synchronous card_modifier dynamically alters the public agent card for FastAPI.""" + + def modifier(card: AgentCard) -> AgentCard: + modified_card = card.model_copy(deep=True) + modified_card.name = 'Dynamically Modified Agent' + return modified_card + + app_instance = A2AFastAPIApplication( + agent_card, handler, card_modifier=modifier + ) + client = TestClient(app_instance.build()) + + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert 
data['name'] == 'Dynamically Modified Agent' + + def test_method_not_implemented(client: TestClient, handler: mock.AsyncMock): """Test handling MethodNotImplementedError.""" handler.on_get_task.side_effect = MethodNotImplementedError() @@ -642,7 +1071,7 @@ def test_unknown_method(client: TestClient): data = response.json() assert 'error' in data # This should produce an UnsupportedOperationError error code - assert data['error']['code'] == InvalidRequestError().code + assert data['error']['code'] == MethodNotFoundError().code def test_validation_error(client: TestClient): @@ -653,7 +1082,7 @@ def test_validation_error(client: TestClient): json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'messages/send', + 'method': 'message/send', 'params': { 'message': { # Missing required fields @@ -665,7 +1094,7 @@ def test_validation_error(client: TestClient): assert response.status_code == 200 data = response.json() assert 'error' in data - assert data['error']['code'] == InvalidRequestError().code + assert data['error']['code'] == InvalidParamsError().code def test_unhandled_exception(client: TestClient, handler: mock.AsyncMock): diff --git a/tests/server/test_models.py b/tests/server/test_models.py new file mode 100644 index 000000000..64fed1008 --- /dev/null +++ b/tests/server/test_models.py @@ -0,0 +1,118 @@ +"""Tests for a2a.server.models module.""" + +from unittest.mock import MagicMock + +from sqlalchemy.orm import DeclarativeBase + +from a2a.server.models import ( + PydanticListType, + PydanticType, + create_push_notification_config_model, + create_task_model, +) +from a2a.types import Artifact, TaskState, TaskStatus, TextPart + + +class TestPydanticType: + """Tests for PydanticType SQLAlchemy type decorator.""" + + def test_process_bind_param_with_pydantic_model(self): + pydantic_type = PydanticType(TaskStatus) + status = TaskStatus(state=TaskState.working) + dialect = MagicMock() + + result = pydantic_type.process_bind_param(status, dialect) + assert result['state'] 
== 'working' + assert result['message'] is None + # TaskStatus may have other optional fields + + def test_process_bind_param_with_none(self): + pydantic_type = PydanticType(TaskStatus) + dialect = MagicMock() + + result = pydantic_type.process_bind_param(None, dialect) + assert result is None + + def test_process_result_value(self): + pydantic_type = PydanticType(TaskStatus) + dialect = MagicMock() + + result = pydantic_type.process_result_value( + {'state': 'completed', 'message': None}, dialect + ) + assert isinstance(result, TaskStatus) + assert result.state == 'completed' + + +class TestPydanticListType: + """Tests for PydanticListType SQLAlchemy type decorator.""" + + def test_process_bind_param_with_list(self): + pydantic_list_type = PydanticListType(Artifact) + artifacts = [ + Artifact( + artifact_id='1', parts=[TextPart(type='text', text='Hello')] + ), + Artifact( + artifact_id='2', parts=[TextPart(type='text', text='World')] + ), + ] + dialect = MagicMock() + + result = pydantic_list_type.process_bind_param(artifacts, dialect) + assert len(result) == 2 + assert result[0]['artifactId'] == '1' # JSON mode uses camelCase + assert result[1]['artifactId'] == '2' + + def test_process_result_value_with_list(self): + pydantic_list_type = PydanticListType(Artifact) + dialect = MagicMock() + data = [ + {'artifact_id': '1', 'parts': [{'type': 'text', 'text': 'Hello'}]}, + {'artifact_id': '2', 'parts': [{'type': 'text', 'text': 'World'}]}, + ] + + result = pydantic_list_type.process_result_value(data, dialect) + assert len(result) == 2 + assert all(isinstance(art, Artifact) for art in result) + assert result[0].artifact_id == '1' + assert result[1].artifact_id == '2' + + +def test_create_task_model(): + """Test dynamic task model creation.""" + + # Create a fresh base to avoid table conflicts + class TestBase(DeclarativeBase): + pass + + # Create with default table name + default_task_model = create_task_model('test_tasks_1', TestBase) + assert 
default_task_model.__tablename__ == 'test_tasks_1' + assert default_task_model.__name__ == 'TaskModel_test_tasks_1' + + # Create with custom table name + custom_task_model = create_task_model('test_tasks_2', TestBase) + assert custom_task_model.__tablename__ == 'test_tasks_2' + assert custom_task_model.__name__ == 'TaskModel_test_tasks_2' + + +def test_create_push_notification_config_model(): + """Test dynamic push notification config model creation.""" + + # Create a fresh base to avoid table conflicts + class TestBase(DeclarativeBase): + pass + + # Create with default table name + default_model = create_push_notification_config_model( + 'test_push_configs_1', TestBase + ) + assert default_model.__tablename__ == 'test_push_configs_1' + + # Create with custom table name + custom_model = create_push_notification_config_model( + 'test_push_configs_2', TestBase + ) + assert custom_model.__tablename__ == 'test_push_configs_2' + assert 'test_push_configs_2' in custom_model.__name__ diff --git a/tests/test_types.py b/tests/test_types.py index d57ddda0f..73e6af7bb 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -22,6 +22,10 @@ FilePart, FileWithBytes, FileWithUri, + GetAuthenticatedExtendedCardRequest, + GetAuthenticatedExtendedCardResponse, + GetAuthenticatedExtendedCardSuccessResponse, + GetTaskPushNotificationConfigParams, GetTaskPushNotificationConfigRequest, GetTaskPushNotificationConfigResponse, GetTaskPushNotificationConfigSuccessResponse, @@ -123,7 +127,7 @@ MINIMAL_MESSAGE_USER: dict[str, Any] = { 'role': 'user', 'parts': [TEXT_PART_DATA], - 'messageId': 'msg-123', + 'message_id': 'msg-123', 'kind': 'message', } @@ -131,7 +135,7 @@ 'role': 'agent', 'parts': [TEXT_PART_DATA, FILE_URI_PART_DATA], 'metadata': {'timestamp': 'now'}, - 'messageId': 'msg-456', + 'message_id': 'msg-456', } MINIMAL_TASK_STATUS: dict[str, Any] = {'state': 'submitted'} @@ -143,13 +147,13 @@ MINIMAL_TASK: dict[str, Any] = { 'id': 'task-abc', - 'contextId': 'session-xyz', + 
'context_id': 'session-xyz', 'status': MINIMAL_TASK_STATUS, 'kind': 'task', } FULL_TASK: dict[str, Any] = { 'id': 'task-abc', - 'contextId': 'session-xyz', + 'context_id': 'session-xyz', 'status': FULL_TASK_STATUS, 'history': [MINIMAL_MESSAGE_USER, AGENT_MESSAGE_WITH_FILE], 'artifacts': [ @@ -192,24 +196,25 @@ def test_security_scheme_invalid(): name='my_api_key', ) # Missing "in" # type: ignore + with pytest.raises(ValidationError): OAuth2SecurityScheme( description='OAuth2 scheme missing flows', - ) # Missing "flows" + ) # Missing "flows" # type: ignore def test_agent_capabilities(): caps = AgentCapabilities( - streaming=None, stateTransitionHistory=None, pushNotifications=None + streaming=None, state_transition_history=None, push_notifications=None ) # All optional - assert caps.pushNotifications is None - assert caps.stateTransitionHistory is None + assert caps.push_notifications is None + assert caps.state_transition_history is None assert caps.streaming is None caps_full = AgentCapabilities( - pushNotifications=True, stateTransitionHistory=False, streaming=True + push_notifications=True, state_transition_history=False, streaming=True ) - assert caps_full.pushNotifications is True - assert caps_full.stateTransitionHistory is False + assert caps_full.push_notifications is True + assert caps_full.state_transition_history is False assert caps_full.streaming is True @@ -232,7 +237,7 @@ def test_agent_skill_valid(): skill_full = AgentSkill(**FULL_AGENT_SKILL) assert skill_full.examples == ['Find me a pasta recipe'] - assert skill_full.inputModes == ['text/plain'] + assert skill_full.input_modes == ['text/plain'] def test_agent_skill_invalid(): @@ -284,12 +289,12 @@ def test_text_part(): def test_file_part_variants(): # URI variant file_uri = FileWithUri( - uri='file:///path/to/file.txt', mimeType='text/plain' + uri='file:///path/to/file.txt', mime_type='text/plain' ) part_uri = FilePart(kind='file', file=file_uri) assert isinstance(part_uri.file, FileWithUri) 
assert part_uri.file.uri == 'file:///path/to/file.txt' - assert part_uri.file.mimeType == 'text/plain' + assert part_uri.file.mime_type == 'text/plain' assert not hasattr(part_uri.file, 'bytes') # Bytes variant @@ -390,7 +395,7 @@ def test_task_status(): def test_task(): task = Task(**MINIMAL_TASK) assert task.id == 'task-abc' - assert task.contextId == 'session-xyz' + assert task.context_id == 'session-xyz' assert task.status.state == TaskState.submitted assert task.history is None assert task.artifacts is None @@ -533,7 +538,7 @@ def test_send_subscribe_request() -> None: def test_get_task_request() -> None: - params = TaskQueryParams(id='task-1', historyLength=2) + params = TaskQueryParams(id='task-1', history_length=2) req_data: dict[str, Any] = { 'jsonrpc': '2.0', 'method': 'tasks/get', @@ -544,7 +549,7 @@ def test_get_task_request() -> None: assert req.method == 'tasks/get' assert isinstance(req.params, TaskQueryParams) assert req.params.id == 'task-1' - assert req.params.historyLength == 2 + assert req.params.history_length == 2 with pytest.raises(ValidationError): # Wrong method literal GetTaskRequest.model_validate({**req_data, 'method': 'wrong/method'}) @@ -653,7 +658,7 @@ def test_send_message_streaming_status_update_response() -> None: task_status_update_event_data: dict[str, Any] = { 'status': MINIMAL_TASK_STATUS, 'taskId': '1', - 'contextId': '2', + 'context_id': '2', 'final': False, 'kind': 'status-update', } @@ -668,7 +673,7 @@ def test_send_message_streaming_status_update_response() -> None: assert isinstance(response.root, SendStreamingMessageSuccessResponse) assert isinstance(response.root.result, TaskStatusUpdateEvent) assert response.root.result.status.state == TaskState.submitted - assert response.root.result.taskId == '1' + assert response.root.result.task_id == '1' assert not response.root.result.final with pytest.raises( @@ -705,14 +710,14 @@ def test_send_message_streaming_artifact_update_response() -> None: text_part = 
TextPart(**TEXT_PART_DATA) data_part = DataPart(**DATA_PART_DATA) artifact = Artifact( - artifactId='artifact-123', + artifact_id='artifact-123', name='result_data', parts=[Part(root=text_part), Part(root=data_part)], ) task_artifact_update_event_data: dict[str, Any] = { 'artifact': artifact, 'taskId': 'task_id', - 'contextId': '2', + 'context_id': '2', 'append': False, 'lastChunk': True, 'kind': 'artifact-update', @@ -726,11 +731,11 @@ def test_send_message_streaming_artifact_update_response() -> None: assert response.root.id == 1 assert isinstance(response.root, SendStreamingMessageSuccessResponse) assert isinstance(response.root.result, TaskArtifactUpdateEvent) - assert response.root.result.artifact.artifactId == 'artifact-123' + assert response.root.result.artifact.artifact_id == 'artifact-123' assert response.root.result.artifact.name == 'result_data' - assert response.root.result.taskId == 'task_id' + assert response.root.result.task_id == 'task_id' assert not response.root.result.append - assert response.root.result.lastChunk + assert response.root.result.last_chunk assert len(response.root.result.artifact.parts) == 2 assert isinstance(response.root.result.artifact.parts[0].root, TextPart) assert isinstance(response.root.result.artifact.parts[1].root, DataPart) @@ -738,8 +743,8 @@ def test_send_message_streaming_artifact_update_response() -> None: def test_set_task_push_notification_response() -> None: task_push_config = TaskPushNotificationConfig( - taskId='t2', - pushNotificationConfig=PushNotificationConfig( + task_id='t2', + push_notification_config=PushNotificationConfig( url='https://example.com', token='token' ), ) @@ -752,16 +757,18 @@ def test_set_task_push_notification_response() -> None: assert resp.root.id == 1 assert isinstance(resp.root, SetTaskPushNotificationConfigSuccessResponse) assert isinstance(resp.root.result, TaskPushNotificationConfig) - assert resp.root.result.taskId == 't2' - assert resp.root.result.pushNotificationConfig.url == 
'https://example.com' - assert resp.root.result.pushNotificationConfig.token == 'token' - assert resp.root.result.pushNotificationConfig.authentication is None + assert resp.root.result.task_id == 't2' + assert ( + resp.root.result.push_notification_config.url == 'https://example.com' + ) + assert resp.root.result.push_notification_config.token == 'token' + assert resp.root.result.push_notification_config.authentication is None auth_info_dict: dict[str, Any] = { 'schemes': ['Bearer', 'Basic'], 'credentials': 'user:pass', } - task_push_config.pushNotificationConfig.authentication = ( + task_push_config.push_notification_config.authentication = ( PushNotificationAuthenticationInfo(**auth_info_dict) ) resp_data = { @@ -771,13 +778,13 @@ def test_set_task_push_notification_response() -> None: } resp = SetTaskPushNotificationConfigResponse.model_validate(resp_data) assert isinstance(resp.root, SetTaskPushNotificationConfigSuccessResponse) - assert resp.root.result.pushNotificationConfig.authentication is not None - assert resp.root.result.pushNotificationConfig.authentication.schemes == [ + assert resp.root.result.push_notification_config.authentication is not None + assert resp.root.result.push_notification_config.authentication.schemes == [ 'Bearer', 'Basic', ] assert ( - resp.root.result.pushNotificationConfig.authentication.credentials + resp.root.result.push_notification_config.authentication.credentials == 'user:pass' ) @@ -797,8 +804,8 @@ def test_set_task_push_notification_response() -> None: def test_get_task_push_notification_response() -> None: task_push_config = TaskPushNotificationConfig( - taskId='t2', - pushNotificationConfig=PushNotificationConfig( + task_id='t2', + push_notification_config=PushNotificationConfig( url='https://example.com', token='token' ), ) @@ -811,16 +818,18 @@ def test_get_task_push_notification_response() -> None: assert resp.root.id == 1 assert isinstance(resp.root, GetTaskPushNotificationConfigSuccessResponse) assert 
isinstance(resp.root.result, TaskPushNotificationConfig) - assert resp.root.result.taskId == 't2' - assert resp.root.result.pushNotificationConfig.url == 'https://example.com' - assert resp.root.result.pushNotificationConfig.token == 'token' - assert resp.root.result.pushNotificationConfig.authentication is None + assert resp.root.result.task_id == 't2' + assert ( + resp.root.result.push_notification_config.url == 'https://example.com' + ) + assert resp.root.result.push_notification_config.token == 'token' + assert resp.root.result.push_notification_config.authentication is None auth_info_dict: dict[str, Any] = { 'schemes': ['Bearer', 'Basic'], 'credentials': 'user:pass', } - task_push_config.pushNotificationConfig.authentication = ( + task_push_config.push_notification_config.authentication = ( PushNotificationAuthenticationInfo(**auth_info_dict) ) resp_data = { @@ -830,13 +839,13 @@ def test_get_task_push_notification_response() -> None: } resp = GetTaskPushNotificationConfigResponse.model_validate(resp_data) assert isinstance(resp.root, GetTaskPushNotificationConfigSuccessResponse) - assert resp.root.result.pushNotificationConfig.authentication is not None - assert resp.root.result.pushNotificationConfig.authentication.schemes == [ + assert resp.root.result.push_notification_config.authentication is not None + assert resp.root.result.push_notification_config.authentication.schemes == [ 'Bearer', 'Basic', ] assert ( - resp.root.result.pushNotificationConfig.authentication.credentials + resp.root.result.push_notification_config.authentication.credentials == 'user:pass' ) @@ -907,8 +916,8 @@ def test_a2a_request_root_model() -> None: # SetTaskPushNotificationConfigRequest task_push_config = TaskPushNotificationConfig( - taskId='t2', - pushNotificationConfig=PushNotificationConfig( + task_id='t2', + push_notification_config=PushNotificationConfig( url='https://example.com', token='token' ), ) @@ -917,7 +926,6 @@ def test_a2a_request_root_model() -> None: 'jsonrpc': 
'2.0', 'method': 'tasks/pushNotificationConfig/set', 'params': task_push_config.model_dump(), - 'taskId': 2, } a2a_req_set_push_req = A2ARequest.model_validate(set_push_notif_req_data) assert isinstance( @@ -937,7 +945,6 @@ def test_a2a_request_root_model() -> None: 'jsonrpc': '2.0', 'method': 'tasks/pushNotificationConfig/get', 'params': id_params.model_dump(), - 'taskId': 2, } a2a_req_get_push_req = A2ARequest.model_validate(get_push_notif_req_data) assert isinstance( @@ -964,6 +971,21 @@ def test_a2a_request_root_model() -> None: assert isinstance(a2a_req_task_resubscribe_req.root.params, TaskIdParams) assert a2a_req_task_resubscribe_req.root.method == 'tasks/resubscribe' + # GetAuthenticatedExtendedCardRequest + get_auth_card_req_data: dict[str, Any] = { + 'jsonrpc': '2.0', + 'method': 'agent/getAuthenticatedExtendedCard', + 'id': 2, + } + a2a_req_get_auth_card = A2ARequest.model_validate(get_auth_card_req_data) + assert isinstance( + a2a_req_get_auth_card.root, GetAuthenticatedExtendedCardRequest + ) + assert ( + a2a_req_get_auth_card.root.method + == 'agent/getAuthenticatedExtendedCard' + ) + # Invalid method case invalid_req_data: dict[str, Any] = { 'jsonrpc': '2.0', @@ -1017,8 +1039,8 @@ def test_a2a_request_root_model_id_validation() -> None: # SetTaskPushNotificationConfigRequest task_push_config = TaskPushNotificationConfig( - taskId='t2', - pushNotificationConfig=PushNotificationConfig( + task_id='t2', + push_notification_config=PushNotificationConfig( url='https://example.com', token='token' ), ) @@ -1026,7 +1048,7 @@ def test_a2a_request_root_model_id_validation() -> None: 'jsonrpc': '2.0', 'method': 'tasks/pushNotificationConfig/set', 'params': task_push_config.model_dump(), - 'taskId': 2, + 'task_id': 2, } with pytest.raises(ValidationError): A2ARequest.model_validate(set_push_notif_req_data) # missing id @@ -1037,7 +1059,7 @@ def test_a2a_request_root_model_id_validation() -> None: 'jsonrpc': '2.0', 'method': 'tasks/pushNotificationConfig/get', 
'params': id_params.model_dump(), - 'taskId': 2, + 'task_id': 2, } with pytest.raises(ValidationError): A2ARequest.model_validate(get_push_notif_req_data) @@ -1051,6 +1073,14 @@ def test_a2a_request_root_model_id_validation() -> None: with pytest.raises(ValidationError): A2ARequest.model_validate(task_resubscribe_req_data) + # GetAuthenticatedExtendedCardRequest + get_auth_card_req_data: dict[str, Any] = { + 'jsonrpc': '2.0', + 'method': 'agent/getAuthenticatedExtendedCard', + } + with pytest.raises(ValidationError): + A2ARequest.model_validate(get_auth_card_req_data) # missing id + def test_content_type_not_supported_error(): # Test ContentTypeNotSupportedError @@ -1300,11 +1330,11 @@ def test_task_push_notification_config() -> None: assert push_notification_config.authentication == auth_info task_push_notification_config = TaskPushNotificationConfig( - taskId='task-123', pushNotificationConfig=push_notification_config + task_id='task-123', push_notification_config=push_notification_config ) - assert task_push_notification_config.taskId == 'task-123' + assert task_push_notification_config.task_id == 'task-123' assert ( - task_push_notification_config.pushNotificationConfig + task_push_notification_config.push_notification_config == push_notification_config ) assert task_push_notification_config.model_dump(exclude_none=True) == { @@ -1356,22 +1386,22 @@ def test_file_base_valid(): """Tests successful validation of FileBase.""" # No optional fields base1 = FileBase() - assert base1.mimeType is None + assert base1.mime_type is None assert base1.name is None - # With mimeType only - base2 = FileBase(mimeType='image/png') - assert base2.mimeType == 'image/png' + # With mime_type only + base2 = FileBase(mime_type='image/png') + assert base2.mime_type == 'image/png' assert base2.name is None # With name only base3 = FileBase(name='document.pdf') - assert base3.mimeType is None + assert base3.mime_type is None assert base3.name == 'document.pdf' # With both fields - base4 
= FileBase(mimeType='application/json', name='data.json') - assert base4.mimeType == 'application/json' + base4 = FileBase(mime_type='application/json', name='data.json') + assert base4.mime_type == 'application/json' assert base4.name == 'data.json' @@ -1379,10 +1409,10 @@ def test_file_base_invalid(): """Tests validation errors for FileBase.""" FileBase(extra_field='allowed') # type: ignore - # Incorrect type for mimeType + # Incorrect type for mime_type with pytest.raises(ValidationError) as excinfo_type_mime: - FileBase(mimeType=123) # type: ignore - assert 'mimeType' in str(excinfo_type_mime.value) + FileBase(mime_type=123) # type: ignore + assert 'mime_type' in str(excinfo_type_mime.value) # Incorrect type for name with pytest.raises(ValidationError) as excinfo_type_name: @@ -1491,8 +1521,146 @@ def test_a2a_error_validation_and_serialization() -> None: def test_subclass_enums() -> None: """validate subtype enum types""" - assert "cookie" == In.cookie + assert In.cookie == 'cookie' + + assert Role.user == 'user' + + assert TaskState.working == 'working' + + +def test_get_task_push_config_params() -> None: + """Tests successful validation of GetTaskPushNotificationConfigParams.""" + # Minimal valid data + params = {'id': 'task-1234'} + TaskIdParams.model_validate(params) + GetTaskPushNotificationConfigParams.model_validate(params) - assert "user" == Role.user - assert "working" == TaskState.working +def test_use_get_task_push_notification_params_for_request() -> None: + # GetTaskPushNotificationConfigRequest + get_push_notif_req_data: dict[str, Any] = { + 'id': 1, + 'jsonrpc': '2.0', + 'method': 'tasks/pushNotificationConfig/get', + 'params': {'id': 'task-1234', 'pushNotificationConfigId': 'c1'}, + } + a2a_req_get_push_req = A2ARequest.model_validate(get_push_notif_req_data) + assert isinstance( + a2a_req_get_push_req.root, GetTaskPushNotificationConfigRequest + ) + assert isinstance( + a2a_req_get_push_req.root.params, GetTaskPushNotificationConfigParams + ) 
+ assert ( + a2a_req_get_push_req.root.method == 'tasks/pushNotificationConfig/get' + ) + + +def test_camelCase_access_raises_attribute_error() -> None: + """ + Tests that accessing or setting fields via their camelCase alias + raises an AttributeError. + """ + skill = AgentSkill( + id='hello_world', + name='Returns hello world', + description='just returns hello world', + tags=['hello world'], + examples=['hi', 'hello world'], + ) + + # Initialization with camelCase still works due to Pydantic's populate_by_name config + agent_card = AgentCard( + name='Hello World Agent', + description='Just a hello world agent', + url='http://localhost:9999/', + version='1.0.0', + defaultInputModes=['text'], # type: ignore + defaultOutputModes=['text'], # type: ignore + capabilities=AgentCapabilities(streaming=True), + skills=[skill], + supportsAuthenticatedExtendedCard=True, # type: ignore + ) + + # --- Test that using camelCase aliases raises errors --- + + # Test setting an attribute via camelCase alias raises AttributeError + with pytest.raises( + ValueError, + match='"AgentCard" object has no field "supportsAuthenticatedExtendedCard"', + ): + agent_card.supportsAuthenticatedExtendedCard = False + + # Test getting an attribute via camelCase alias raises AttributeError + with pytest.raises( + AttributeError, + match="'AgentCard' object has no attribute 'defaultInputModes'", + ): + _ = agent_card.defaultInputModes + + # --- Test that using snake_case names works correctly --- + + # The value should be unchanged because the camelCase setattr failed + assert agent_card.supports_authenticated_extended_card is True + + # Now, set it correctly using the snake_case name + agent_card.supports_authenticated_extended_card = False + assert agent_card.supports_authenticated_extended_card is False + + # Get the attribute correctly using the snake_case name + default_input_modes = agent_card.default_input_modes + assert default_input_modes == ['text'] + assert agent_card.default_input_modes 
== ['text'] + + +def test_get_authenticated_extended_card_request() -> None: + req_data: dict[str, Any] = { + 'jsonrpc': '2.0', + 'method': 'agent/getAuthenticatedExtendedCard', + 'id': 5, + } + req = GetAuthenticatedExtendedCardRequest.model_validate(req_data) + assert req.method == 'agent/getAuthenticatedExtendedCard' + assert req.id == 5 + # This request has no params, so we don't check for that. + + with pytest.raises(ValidationError): # Wrong method literal + GetAuthenticatedExtendedCardRequest.model_validate( + {**req_data, 'method': 'wrong/method'} + ) + + with pytest.raises(ValidationError): # Missing id + GetAuthenticatedExtendedCardRequest.model_validate( + {'jsonrpc': '2.0', 'method': 'agent/getAuthenticatedExtendedCard'} + ) + + +def test_get_authenticated_extended_card_response() -> None: + resp_data: dict[str, Any] = { + 'jsonrpc': '2.0', + 'result': MINIMAL_AGENT_CARD, + 'id': 'resp-1', + } + resp = GetAuthenticatedExtendedCardResponse.model_validate(resp_data) + assert resp.root.id == 'resp-1' + assert isinstance(resp.root, GetAuthenticatedExtendedCardSuccessResponse) + assert isinstance(resp.root.result, AgentCard) + assert resp.root.result.name == 'TestAgent' + + with pytest.raises(ValidationError): # Result is not an AgentCard + GetAuthenticatedExtendedCardResponse.model_validate( + {'jsonrpc': '2.0', 'result': {'wrong': 'data'}, 'id': 1} + ) + + resp_data_err: dict[str, Any] = { + 'jsonrpc': '2.0', + 'error': JSONRPCError(**TaskNotFoundError().model_dump()), + 'id': 'resp-1', + } + resp_err = GetAuthenticatedExtendedCardResponse.model_validate( + resp_data_err + ) + assert resp_err.root.id == 'resp-1' + assert isinstance(resp_err.root, JSONRPCErrorResponse) + assert resp_err.root.error is not None + assert isinstance(resp_err.root.error, JSONRPCError) diff --git a/tests/utils/test_artifact.py b/tests/utils/test_artifact.py new file mode 100644 index 000000000..489c047c4 --- /dev/null +++ b/tests/utils/test_artifact.py @@ -0,0 +1,159 @@ +import 
unittest +import uuid + +from unittest.mock import patch + +from a2a.types import ( + Artifact, + DataPart, + Part, + TextPart, +) +from a2a.utils.artifact import ( + get_artifact_text, + new_artifact, + new_data_artifact, + new_text_artifact, +) + + +class TestArtifact(unittest.TestCase): + @patch('uuid.uuid4') + def test_new_artifact_generates_id(self, mock_uuid4): + mock_uuid = uuid.UUID('abcdef12-1234-5678-1234-567812345678') + mock_uuid4.return_value = mock_uuid + artifact = new_artifact(parts=[], name='test_artifact') + self.assertEqual(artifact.artifact_id, str(mock_uuid)) + + def test_new_artifact_assigns_parts_name_description(self): + parts = [Part(root=TextPart(text='Sample text'))] + name = 'My Artifact' + description = 'This is a test artifact.' + artifact = new_artifact(parts=parts, name=name, description=description) + self.assertEqual(artifact.parts, parts) + self.assertEqual(artifact.name, name) + self.assertEqual(artifact.description, description) + + def test_new_artifact_empty_description_if_not_provided(self): + parts = [Part(root=TextPart(text='Another sample'))] + name = 'Artifact_No_Desc' + artifact = new_artifact(parts=parts, name=name) + self.assertEqual(artifact.description, None) + + def test_new_text_artifact_creates_single_text_part(self): + text = 'This is a text artifact.' + name = 'Text_Artifact' + artifact = new_text_artifact(text=text, name=name) + self.assertEqual(len(artifact.parts), 1) + self.assertIsInstance(artifact.parts[0].root, TextPart) + + def test_new_text_artifact_part_contains_provided_text(self): + text = 'Hello, world!' + name = 'Greeting_Artifact' + artifact = new_text_artifact(text=text, name=name) + self.assertEqual(artifact.parts[0].root.text, text) + + def test_new_text_artifact_assigns_name_description(self): + text = 'Some content.' + name = 'Named_Text_Artifact' + description = 'Description for text artifact.' 
+ artifact = new_text_artifact( + text=text, name=name, description=description + ) + self.assertEqual(artifact.name, name) + self.assertEqual(artifact.description, description) + + def test_new_data_artifact_creates_single_data_part(self): + sample_data = {'key': 'value', 'number': 123} + name = 'Data_Artifact' + artifact = new_data_artifact(data=sample_data, name=name) + self.assertEqual(len(artifact.parts), 1) + self.assertIsInstance(artifact.parts[0].root, DataPart) + + def test_new_data_artifact_part_contains_provided_data(self): + sample_data = {'content': 'test_data', 'is_valid': True} + name = 'Structured_Data_Artifact' + artifact = new_data_artifact(data=sample_data, name=name) + self.assertIsInstance(artifact.parts[0].root, DataPart) + # Ensure the 'data' attribute of DataPart is accessed for comparison + self.assertEqual(artifact.parts[0].root.data, sample_data) + + def test_new_data_artifact_assigns_name_description(self): + sample_data = {'info': 'some details'} + name = 'Named_Data_Artifact' + description = 'Description for data artifact.' 
+ artifact = new_data_artifact( + data=sample_data, name=name, description=description + ) + self.assertEqual(artifact.name, name) + self.assertEqual(artifact.description, description) + + +class TestGetArtifactText(unittest.TestCase): + def test_get_artifact_text_single_part(self): + # Setup + artifact = Artifact( + name='test-artifact', + parts=[Part(root=TextPart(text='Hello world'))], + artifact_id='test-artifact-id', + ) + + # Exercise + result = get_artifact_text(artifact) + + # Verify + assert result == 'Hello world' + + def test_get_artifact_text_multiple_parts(self): + # Setup + artifact = Artifact( + name='test-artifact', + parts=[ + Part(root=TextPart(text='First line')), + Part(root=TextPart(text='Second line')), + Part(root=TextPart(text='Third line')), + ], + artifact_id='test-artifact-id', + ) + + # Exercise + result = get_artifact_text(artifact) + + # Verify - default delimiter is newline + assert result == 'First line\nSecond line\nThird line' + + def test_get_artifact_text_custom_delimiter(self): + # Setup + artifact = Artifact( + name='test-artifact', + parts=[ + Part(root=TextPart(text='First part')), + Part(root=TextPart(text='Second part')), + Part(root=TextPart(text='Third part')), + ], + artifact_id='test-artifact-id', + ) + + # Exercise + result = get_artifact_text(artifact, delimiter=' | ') + + # Verify + assert result == 'First part | Second part | Third part' + + def test_get_artifact_text_empty_parts(self): + # Setup + artifact = Artifact( + name='test-artifact', + parts=[], + artifact_id='test-artifact-id', + ) + + # Exercise + result = get_artifact_text(artifact) + + # Verify + assert result == '' + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/utils/test_constants.py b/tests/utils/test_constants.py new file mode 100644 index 000000000..59e9b8366 --- /dev/null +++ b/tests/utils/test_constants.py @@ -0,0 +1,21 @@ +"""Tests for a2a.utils.constants module.""" + +from a2a.utils import constants + + +def 
test_agent_card_constants(): + """Test that agent card constants have expected values.""" + assert ( + constants.AGENT_CARD_WELL_KNOWN_PATH == '/.well-known/agent-card.json' + ) + assert ( + constants.PREV_AGENT_CARD_WELL_KNOWN_PATH == '/.well-known/agent.json' + ) + assert ( + constants.EXTENDED_AGENT_CARD_PATH == '/agent/authenticatedExtendedCard' + ) + + +def test_default_rpc_url(): + """Test default RPC URL constant.""" + assert constants.DEFAULT_RPC_URL == '/' diff --git a/tests/utils/test_error_handlers.py b/tests/utils/test_error_handlers.py new file mode 100644 index 000000000..ec41dc1f5 --- /dev/null +++ b/tests/utils/test_error_handlers.py @@ -0,0 +1,92 @@ +"""Tests for a2a.utils.error_handlers module.""" + +from unittest.mock import patch + +import pytest + +from a2a.types import ( + InternalError, + InvalidRequestError, + MethodNotFoundError, + TaskNotFoundError, +) +from a2a.utils.error_handlers import ( + A2AErrorToHttpStatus, + rest_error_handler, + rest_stream_error_handler, +) +from a2a.utils.errors import ServerError + + +class MockJSONResponse: + def __init__(self, content, status_code): + self.content = content + self.status_code = status_code + + +@pytest.mark.asyncio +async def test_rest_error_handler_server_error(): + """Test rest_error_handler with ServerError.""" + error = InvalidRequestError(message='Bad request') + + @rest_error_handler + async def failing_func(): + raise ServerError(error=error) + + with patch('a2a.utils.error_handlers.JSONResponse', MockJSONResponse): + result = await failing_func() + + assert isinstance(result, MockJSONResponse) + assert result.status_code == 400 + assert result.content == {'message': 'Bad request'} + + +@pytest.mark.asyncio +async def test_rest_error_handler_unknown_exception(): + """Test rest_error_handler with unknown exception.""" + + @rest_error_handler + async def failing_func(): + raise ValueError('Unexpected error') + + with patch('a2a.utils.error_handlers.JSONResponse', MockJSONResponse): + 
result = await failing_func() + + assert isinstance(result, MockJSONResponse) + assert result.status_code == 500 + assert result.content == {'message': 'unknown exception'} + + +@pytest.mark.asyncio +async def test_rest_stream_error_handler_server_error(): + """Test rest_stream_error_handler with ServerError.""" + error = InternalError(message='Internal server error') + + @rest_stream_error_handler + async def failing_stream(): + raise ServerError(error=error) + + with pytest.raises(ServerError) as exc_info: + await failing_stream() + + assert exc_info.value.error == error + + +@pytest.mark.asyncio +async def test_rest_stream_error_handler_reraises_exception(): + """Test rest_stream_error_handler reraises other exceptions.""" + + @rest_stream_error_handler + async def failing_stream(): + raise RuntimeError('Stream failed') + + with pytest.raises(RuntimeError, match='Stream failed'): + await failing_stream() + + +def test_a2a_error_to_http_status_mapping(): + """Test A2AErrorToHttpStatus mapping.""" + assert A2AErrorToHttpStatus[InvalidRequestError] == 400 + assert A2AErrorToHttpStatus[MethodNotFoundError] == 404 + assert A2AErrorToHttpStatus[TaskNotFoundError] == 404 + assert A2AErrorToHttpStatus[InternalError] == 500 diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py index e556b9c81..f3227d327 100644 --- a/tests/utils/test_helpers.py +++ b/tests/utils/test_helpers.py @@ -1,12 +1,20 @@ +import uuid + from typing import Any +from unittest.mock import patch import pytest from a2a.types import ( Artifact, + AgentCard, + AgentCardSignature, + AgentCapabilities, + AgentSkill, Message, MessageSendParams, Part, + Role, Task, TaskArtifactUpdateEvent, TaskState, @@ -15,9 +23,11 @@ from a2a.utils.errors import ServerError from a2a.utils.helpers import ( append_artifact_to_task, + are_modalities_compatible, build_text_artifact, create_task_obj, validate, + canonicalize_agent_card, ) @@ -27,7 +37,7 @@ MINIMAL_MESSAGE_USER: dict[str, Any] = { 'role': 'user', 
'parts': [TEXT_PART_DATA], - 'messageId': 'msg-123', + 'message_id': 'msg-123', 'type': 'message', } @@ -35,11 +45,39 @@ MINIMAL_TASK: dict[str, Any] = { 'id': 'task-abc', - 'contextId': 'session-xyz', + 'context_id': 'session-xyz', 'status': MINIMAL_TASK_STATUS, 'type': 'task', } +SAMPLE_AGENT_CARD: dict[str, Any] = { + 'name': 'Test Agent', + 'description': 'A test agent', + 'url': 'http://localhost', + 'version': '1.0.0', + 'capabilities': AgentCapabilities( + streaming=None, + push_notifications=True, + ), + 'default_input_modes': ['text/plain'], + 'default_output_modes': ['text/plain'], + 'documentation_url': None, + 'icon_url': '', + 'skills': [ + AgentSkill( + id='skill1', + name='Test Skill', + description='A test skill', + tags=['test'], + ) + ], + 'signatures': [ + AgentCardSignature( + protected='protected_header', signature='test_signature' + ) + ], +} + # Test create_task_obj def test_create_task_obj(): @@ -48,18 +86,60 @@ def test_create_task_obj(): task = create_task_obj(send_params) assert task.id is not None - assert task.contextId == message.contextId + assert task.context_id == message.context_id assert task.status.state == TaskState.submitted assert len(task.history) == 1 assert task.history[0] == message +def test_create_task_obj_generates_context_id(): + """Test that create_task_obj generates context_id if not present and uses it for the task.""" + # Message without context_id + message_no_context_id = Message( + role=Role.user, + parts=[Part(root=TextPart(text='test'))], + message_id='msg-no-ctx', + task_id='task-from-msg', # Provide a task_id to differentiate from generated task.id + ) + send_params = MessageSendParams(message=message_no_context_id) + + # Ensure message.context_id is None initially + assert send_params.message.context_id is None + + known_task_uuid = uuid.UUID('11111111-1111-1111-1111-111111111111') + known_context_uuid = uuid.UUID('22222222-2222-2222-2222-222222222222') + + # Patch uuid.uuid4 to return specific UUIDs in 
sequence + # The first call will be for message.context_id (if None), the second for task.id. + with patch( + 'a2a.utils.helpers.uuid4', + side_effect=[known_context_uuid, known_task_uuid], + ) as mock_uuid4: + task = create_task_obj(send_params) + + # Assert that uuid4 was called twice (once for context_id, once for task.id) + assert mock_uuid4.call_count == 2 + + # Assert that message.context_id was set to the first generated UUID + assert send_params.message.context_id == str(known_context_uuid) + + # Assert that task.context_id is the same generated UUID + assert task.context_id == str(known_context_uuid) + + # Assert that task.id is the second generated UUID + assert task.id == str(known_task_uuid) + + # Ensure the original message in history also has the updated context_id + assert len(task.history) == 1 + assert task.history[0].context_id == str(known_context_uuid) + + # Test append_artifact_to_task def test_append_artifact_to_task(): # Prepare base task task = Task(**MINIMAL_TASK) assert task.id == 'task-abc' - assert task.contextId == 'session-xyz' + assert task.context_id == 'session-xyz' assert task.status.state == TaskState.submitted assert task.history is None assert task.artifacts is None @@ -67,42 +147,45 @@ def test_append_artifact_to_task(): # Prepare appending artifact and event artifact_1 = Artifact( - artifactId='artifact-123', parts=[Part(root=TextPart(text='Hello'))] + artifact_id='artifact-123', parts=[Part(root=TextPart(text='Hello'))] ) append_event_1 = TaskArtifactUpdateEvent( - artifact=artifact_1, append=False, taskId='123', contextId='123' + artifact=artifact_1, append=False, task_id='123', context_id='123' ) # Test adding a new artifact (not appending) append_artifact_to_task(task, append_event_1) assert len(task.artifacts) == 1 - assert task.artifacts[0].artifactId == 'artifact-123' + assert task.artifacts[0].artifact_id == 'artifact-123' assert task.artifacts[0].name is None assert len(task.artifacts[0].parts) == 1 assert 
task.artifacts[0].parts[0].root.text == 'Hello' # Test replacing the artifact artifact_2 = Artifact( - artifactId='artifact-123', + artifact_id='artifact-123', name='updated name', parts=[Part(root=TextPart(text='Updated'))], ) append_event_2 = TaskArtifactUpdateEvent( - artifact=artifact_2, append=False, taskId='123', contextId='123' + artifact=artifact_2, append=False, task_id='123', context_id='123' ) append_artifact_to_task(task, append_event_2) assert len(task.artifacts) == 1 # Should still have one artifact - assert task.artifacts[0].artifactId == 'artifact-123' + assert task.artifacts[0].artifact_id == 'artifact-123' assert task.artifacts[0].name == 'updated name' assert len(task.artifacts[0].parts) == 1 assert task.artifacts[0].parts[0].root.text == 'Updated' # Test appending parts to an existing artifact artifact_with_parts = Artifact( - artifactId='artifact-123', parts=[Part(root=TextPart(text='Part 2'))] + artifact_id='artifact-123', parts=[Part(root=TextPart(text='Part 2'))] ) append_event_3 = TaskArtifactUpdateEvent( - artifact=artifact_with_parts, append=True, taskId='123', contextId='123' + artifact=artifact_with_parts, + append=True, + task_id='123', + context_id='123', ) append_artifact_to_task(task, append_event_3) assert len(task.artifacts[0].parts) == 2 @@ -111,31 +194,31 @@ def test_append_artifact_to_task(): # Test adding another new artifact another_artifact_with_parts = Artifact( - artifactId='new_artifact', + artifact_id='new_artifact', parts=[Part(root=TextPart(text='new artifact Part 1'))], ) append_event_4 = TaskArtifactUpdateEvent( artifact=another_artifact_with_parts, append=False, - taskId='123', - contextId='123', + task_id='123', + context_id='123', ) append_artifact_to_task(task, append_event_4) assert len(task.artifacts) == 2 - assert task.artifacts[0].artifactId == 'artifact-123' - assert task.artifacts[1].artifactId == 'new_artifact' + assert task.artifacts[0].artifact_id == 'artifact-123' + assert task.artifacts[1].artifact_id 
== 'new_artifact' assert len(task.artifacts[0].parts) == 2 assert len(task.artifacts[1].parts) == 1 # Test appending part to a task that does not have a matching artifact non_existing_artifact_with_parts = Artifact( - artifactId='artifact-456', parts=[Part(root=TextPart(text='Part 1'))] + artifact_id='artifact-456', parts=[Part(root=TextPart(text='Part 1'))] ) append_event_5 = TaskArtifactUpdateEvent( artifact=non_existing_artifact_with_parts, append=True, - taskId='123', - contextId='123', + task_id='123', + context_id='123', ) append_artifact_to_task(task, append_event_5) assert len(task.artifacts) == 2 @@ -149,7 +232,7 @@ def test_build_text_artifact(): text = 'This is a sample text' artifact = build_text_artifact(text, artifact_id) - assert artifact.artifactId == artifact_id + assert artifact.artifact_id == artifact_id assert len(artifact.parts) == 1 assert artifact.parts[0].root.text == text @@ -160,7 +243,7 @@ class TestClass: condition = True @validate(lambda self: self.condition, 'Condition not met') - def test_method(self): + def test_method(self) -> str: return 'Success' obj = TestClass() @@ -173,3 +256,127 @@ def test_method(self): with pytest.raises(ServerError) as exc_info: obj.test_method() assert 'Condition not met' in str(exc_info.value) + + +# Tests for are_modalities_compatible +def test_are_modalities_compatible_client_none(): + assert ( + are_modalities_compatible( + client_output_modes=None, server_output_modes=['text/plain'] + ) + is True + ) + + +def test_are_modalities_compatible_client_empty(): + assert ( + are_modalities_compatible( + client_output_modes=[], server_output_modes=['text/plain'] + ) + is True + ) + + +def test_are_modalities_compatible_server_none(): + assert ( + are_modalities_compatible( + server_output_modes=None, client_output_modes=['text/plain'] + ) + is True + ) + + +def test_are_modalities_compatible_server_empty(): + assert ( + are_modalities_compatible( + server_output_modes=[], client_output_modes=['text/plain'] + 
) + is True + ) + + +def test_are_modalities_compatible_common_mode(): + assert ( + are_modalities_compatible( + server_output_modes=['text/plain', 'application/json'], + client_output_modes=['application/json', 'image/png'], + ) + is True + ) + + +def test_are_modalities_compatible_no_common_modes(): + assert ( + are_modalities_compatible( + server_output_modes=['text/plain'], + client_output_modes=['application/json'], + ) + is False + ) + + +def test_are_modalities_compatible_exact_match(): + assert ( + are_modalities_compatible( + server_output_modes=['text/plain'], + client_output_modes=['text/plain'], + ) + is True + ) + + +def test_are_modalities_compatible_server_more_but_common(): + assert ( + are_modalities_compatible( + server_output_modes=['text/plain', 'image/jpeg'], + client_output_modes=['text/plain'], + ) + is True + ) + + +def test_are_modalities_compatible_client_more_but_common(): + assert ( + are_modalities_compatible( + server_output_modes=['text/plain'], + client_output_modes=['text/plain', 'image/jpeg'], + ) + is True + ) + + +def test_are_modalities_compatible_both_none(): + assert ( + are_modalities_compatible( + server_output_modes=None, client_output_modes=None + ) + is True + ) + + +def test_are_modalities_compatible_both_empty(): + assert ( + are_modalities_compatible( + server_output_modes=[], client_output_modes=[] + ) + is True + ) + + +def test_canonicalize_agent_card(): + """Test canonicalize_agent_card with defaults, optionals, and exceptions. + + - extensions is omitted as it's not set and optional. + - protocolVersion is included because it's always added by canonicalize_agent_card. + - signatures should be omitted. 
+ """ + agent_card = AgentCard(**SAMPLE_AGENT_CARD) + expected_jcs = ( + '{"capabilities":{"pushNotifications":true},' + '"defaultInputModes":["text/plain"],"defaultOutputModes":["text/plain"],' + '"description":"A test agent","name":"Test Agent",' + '"skills":[{"description":"A test skill","id":"skill1","name":"Test Skill","tags":["test"]}],' + '"url":"http://localhost","version":"1.0.0"}' + ) + result = canonicalize_agent_card(agent_card) + assert result == expected_jcs diff --git a/tests/utils/test_message.py b/tests/utils/test_message.py index 6851a3ca4..11523cbdf 100644 --- a/tests/utils/test_message.py +++ b/tests/utils/test_message.py @@ -3,12 +3,17 @@ from unittest.mock import patch from a2a.types import ( + DataPart, Message, Part, Role, TextPart, ) -from a2a.utils import get_message_text, get_text_parts, new_agent_text_message +from a2a.utils.message import ( + get_message_text, + new_agent_parts_message, + new_agent_text_message, +) class TestNewAgentTextMessage: @@ -27,9 +32,9 @@ def test_new_agent_text_message_basic(self): assert message.role == Role.agent assert len(message.parts) == 1 assert message.parts[0].root.text == text - assert message.messageId == '12345678-1234-5678-1234-567812345678' - assert message.taskId is None - assert message.contextId is None + assert message.message_id == '12345678-1234-5678-1234-567812345678' + assert message.task_id is None + assert message.context_id is None def test_new_agent_text_message_with_context_id(self): # Setup @@ -46,9 +51,9 @@ def test_new_agent_text_message_with_context_id(self): # Verify assert message.role == Role.agent assert message.parts[0].root.text == text - assert message.messageId == '12345678-1234-5678-1234-567812345678' - assert message.contextId == context_id - assert message.taskId is None + assert message.message_id == '12345678-1234-5678-1234-567812345678' + assert message.context_id == context_id + assert message.task_id is None def test_new_agent_text_message_with_task_id(self): # 
Setup @@ -65,9 +70,9 @@ def test_new_agent_text_message_with_task_id(self): # Verify assert message.role == Role.agent assert message.parts[0].root.text == text - assert message.messageId == '12345678-1234-5678-1234-567812345678' - assert message.taskId == task_id - assert message.contextId is None + assert message.message_id == '12345678-1234-5678-1234-567812345678' + assert message.task_id == task_id + assert message.context_id is None def test_new_agent_text_message_with_both_ids(self): # Setup @@ -87,9 +92,9 @@ def test_new_agent_text_message_with_both_ids(self): # Verify assert message.role == Role.agent assert message.parts[0].root.text == text - assert message.messageId == '12345678-1234-5678-1234-567812345678' - assert message.contextId == context_id - assert message.taskId == task_id + assert message.message_id == '12345678-1234-5678-1234-567812345678' + assert message.context_id == context_id + assert message.task_id == task_id def test_new_agent_text_message_empty_text(self): # Setup @@ -105,43 +110,35 @@ def test_new_agent_text_message_empty_text(self): # Verify assert message.role == Role.agent assert message.parts[0].root.text == '' - assert message.messageId == '12345678-1234-5678-1234-567812345678' + assert message.message_id == '12345678-1234-5678-1234-567812345678' -class TestGetTextParts: - def test_get_text_parts_single_text_part(self): - # Setup - parts = [Part(root=TextPart(text='Hello world'))] - - # Exercise - result = get_text_parts(parts) - - # Verify - assert result == ['Hello world'] - - def test_get_text_parts_multiple_text_parts(self): +class TestNewAgentPartsMessage: + def test_new_agent_parts_message(self): + """Test creating an agent message with multiple, mixed parts.""" # Setup parts = [ - Part(root=TextPart(text='First part')), - Part(root=TextPart(text='Second part')), - Part(root=TextPart(text='Third part')), + Part(root=TextPart(text='Here is some text.')), + Part(root=DataPart(data={'product_id': 123, 'quantity': 2})), ] + 
context_id = 'ctx-multi-part' + task_id = 'task-multi-part' # Exercise - result = get_text_parts(parts) - - # Verify - assert result == ['First part', 'Second part', 'Third part'] - - def test_get_text_parts_empty_list(self): - # Setup - parts = [] - - # Exercise - result = get_text_parts(parts) + with patch( + 'uuid.uuid4', + return_value=uuid.UUID('abcdefab-cdef-abcd-efab-cdefabcdefab'), + ): + message = new_agent_parts_message( + parts, context_id=context_id, task_id=task_id + ) # Verify - assert result == [] + assert message.role == Role.agent + assert message.parts == parts + assert message.context_id == context_id + assert message.task_id == task_id + assert message.message_id == 'abcdefab-cdef-abcd-efab-cdefabcdefab' class TestGetMessageText: @@ -150,7 +147,7 @@ def test_get_message_text_single_part(self): message = Message( role=Role.agent, parts=[Part(root=TextPart(text='Hello world'))], - messageId='test-message-id', + message_id='test-message-id', ) # Exercise @@ -168,7 +165,7 @@ def test_get_message_text_multiple_parts(self): Part(root=TextPart(text='Second line')), Part(root=TextPart(text='Third line')), ], - messageId='test-message-id', + message_id='test-message-id', ) # Exercise @@ -186,7 +183,7 @@ def test_get_message_text_custom_delimiter(self): Part(root=TextPart(text='Second part')), Part(root=TextPart(text='Third part')), ], - messageId='test-message-id', + message_id='test-message-id', ) # Exercise @@ -200,7 +197,7 @@ def test_get_message_text_empty_parts(self): message = Message( role=Role.agent, parts=[], - messageId='test-message-id', + message_id='test-message-id', ) # Exercise diff --git a/tests/utils/test_parts.py b/tests/utils/test_parts.py new file mode 100644 index 000000000..dcb027c2b --- /dev/null +++ b/tests/utils/test_parts.py @@ -0,0 +1,184 @@ +from a2a.types import ( + DataPart, + FilePart, + FileWithBytes, + FileWithUri, + Part, + TextPart, +) +from a2a.utils.parts import ( + get_data_parts, + get_file_parts, + get_text_parts, 
+) + + +class TestGetTextParts: + def test_get_text_parts_single_text_part(self): + # Setup + parts = [Part(root=TextPart(text='Hello world'))] + + # Exercise + result = get_text_parts(parts) + + # Verify + assert result == ['Hello world'] + + def test_get_text_parts_multiple_text_parts(self): + # Setup + parts = [ + Part(root=TextPart(text='First part')), + Part(root=TextPart(text='Second part')), + Part(root=TextPart(text='Third part')), + ] + + # Exercise + result = get_text_parts(parts) + + # Verify + assert result == ['First part', 'Second part', 'Third part'] + + def test_get_text_parts_empty_list(self): + # Setup + parts = [] + + # Exercise + result = get_text_parts(parts) + + # Verify + assert result == [] + + +class TestGetDataParts: + def test_get_data_parts_single_data_part(self): + # Setup + parts = [Part(root=DataPart(data={'key': 'value'}))] + + # Exercise + result = get_data_parts(parts) + + # Verify + assert result == [{'key': 'value'}] + + def test_get_data_parts_multiple_data_parts(self): + # Setup + parts = [ + Part(root=DataPart(data={'key1': 'value1'})), + Part(root=DataPart(data={'key2': 'value2'})), + ] + + # Exercise + result = get_data_parts(parts) + + # Verify + assert result == [{'key1': 'value1'}, {'key2': 'value2'}] + + def test_get_data_parts_mixed_parts(self): + # Setup + parts = [ + Part(root=TextPart(text='some text')), + Part(root=DataPart(data={'key1': 'value1'})), + Part(root=DataPart(data={'key2': 'value2'})), + ] + + # Exercise + result = get_data_parts(parts) + + # Verify + assert result == [{'key1': 'value1'}, {'key2': 'value2'}] + + def test_get_data_parts_no_data_parts(self): + # Setup + parts = [ + Part(root=TextPart(text='some text')), + ] + + # Exercise + result = get_data_parts(parts) + + # Verify + assert result == [] + + def test_get_data_parts_empty_list(self): + # Setup + parts = [] + + # Exercise + result = get_data_parts(parts) + + # Verify + assert result == [] + + +class TestGetFileParts: + def 
test_get_file_parts_single_file_part(self): + # Setup + file_with_uri = FileWithUri( + uri='file://path/to/file', mimeType='text/plain' + ) + parts = [Part(root=FilePart(file=file_with_uri))] + + # Exercise + result = get_file_parts(parts) + + # Verify + assert result == [file_with_uri] + + def test_get_file_parts_multiple_file_parts(self): + # Setup + file_with_uri1 = FileWithUri( + uri='file://path/to/file1', mime_type='text/plain' + ) + file_with_bytes = FileWithBytes( + bytes='ZmlsZSBjb250ZW50', + mime_type='application/octet-stream', # 'file content' + ) + parts = [ + Part(root=FilePart(file=file_with_uri1)), + Part(root=FilePart(file=file_with_bytes)), + ] + + # Exercise + result = get_file_parts(parts) + + # Verify + assert result == [file_with_uri1, file_with_bytes] + + def test_get_file_parts_mixed_parts(self): + # Setup + file_with_uri = FileWithUri( + uri='file://path/to/file', mime_type='text/plain' + ) + parts = [ + Part(root=TextPart(text='some text')), + Part(root=FilePart(file=file_with_uri)), + ] + + # Exercise + result = get_file_parts(parts) + + # Verify + assert result == [file_with_uri] + + def test_get_file_parts_no_file_parts(self): + # Setup + parts = [ + Part(root=TextPart(text='some text')), + Part(root=DataPart(data={'key': 'value'})), + ] + + # Exercise + result = get_file_parts(parts) + + # Verify + assert result == [] + + def test_get_file_parts_empty_list(self): + # Setup + parts = [] + + # Exercise + result = get_file_parts(parts) + + # Verify + assert result == [] diff --git a/tests/utils/test_proto_utils.py b/tests/utils/test_proto_utils.py new file mode 100644 index 000000000..7fc82aad7 --- /dev/null +++ b/tests/utils/test_proto_utils.py @@ -0,0 +1,688 @@ +from unittest import mock + +import pytest + +from a2a import types +from a2a.grpc import a2a_pb2 +from a2a.utils import proto_utils +from a2a.utils.errors import ServerError + + +# --- Test Data --- + + +@pytest.fixture +def sample_message() -> types.Message: + return 
types.Message( + message_id='msg-1', + context_id='ctx-1', + task_id='task-1', + role=types.Role.user, + parts=[ + types.Part(root=types.TextPart(text='Hello')), + types.Part( + root=types.FilePart( + file=types.FileWithUri( + uri='file:///test.txt', + name='test.txt', + mime_type='text/plain', + ), + ) + ), + types.Part(root=types.DataPart(data={'key': 'value'})), + ], + metadata={'source': 'test'}, + ) + + +@pytest.fixture +def sample_task(sample_message: types.Message) -> types.Task: + return types.Task( + id='task-1', + context_id='ctx-1', + status=types.TaskStatus( + state=types.TaskState.working, message=sample_message + ), + history=[sample_message], + artifacts=[ + types.Artifact( + artifact_id='art-1', + parts=[ + types.Part(root=types.TextPart(text='Artifact content')) + ], + ) + ], + metadata={'source': 'test'}, + ) + + +@pytest.fixture +def sample_agent_card() -> types.AgentCard: + return types.AgentCard( + name='Test Agent', + description='A test agent', + url='http://localhost', + version='1.0.0', + capabilities=types.AgentCapabilities( + streaming=True, push_notifications=True + ), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + skills=[ + types.AgentSkill( + id='skill1', + name='Test Skill', + description='A test skill', + tags=['test'], + ) + ], + provider=types.AgentProvider( + organization='Test Org', url='http://test.org' + ), + security=[{'oauth_scheme': ['read', 'write']}], + security_schemes={ + 'oauth_scheme': types.SecurityScheme( + root=types.OAuth2SecurityScheme( + flows=types.OAuthFlows( + client_credentials=types.ClientCredentialsOAuthFlow( + token_url='http://token.url', + scopes={ + 'read': 'Read access', + 'write': 'Write access', + }, + ) + ) + ) + ), + 'apiKey': types.SecurityScheme( + root=types.APIKeySecurityScheme( + name='X-API-KEY', in_=types.In.header + ) + ), + 'httpAuth': types.SecurityScheme( + root=types.HTTPAuthSecurityScheme(scheme='bearer') + ), + 'oidc': types.SecurityScheme( + 
root=types.OpenIdConnectSecurityScheme( + open_id_connect_url='http://oidc.url' + ) + ), + }, + signatures=[ + types.AgentCardSignature( + protected='protected_test', + signature='signature_test', + header={'alg': 'ES256'}, + ), + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={'alg': 'ES256', 'kid': 'unique-key-identifier-123'}, + ), + ], + ) + + +# --- Test Cases --- + + +class TestToProto: + def test_part_unsupported_type(self): + """Test that ToProto.part raises ValueError for an unsupported Part type.""" + + class FakePartType: + kind = 'fake' + + # Create a mock Part object that has a .root attribute pointing to the fake type + mock_part = mock.MagicMock(spec=types.Part) + mock_part.root = FakePartType() + + with pytest.raises(ValueError, match='Unsupported part type'): + proto_utils.ToProto.part(mock_part) + + +class TestFromProto: + def test_part_unsupported_type(self): + """Test that FromProto.part raises ValueError for an unsupported part type in proto.""" + unsupported_proto_part = ( + a2a_pb2.Part() + ) # An empty part with no oneof field set + with pytest.raises(ValueError, match='Unsupported part type'): + proto_utils.FromProto.part(unsupported_proto_part) + + def test_task_query_params_invalid_name(self): + request = a2a_pb2.GetTaskRequest(name='invalid-name-format') + with pytest.raises(ServerError) as exc_info: + proto_utils.FromProto.task_query_params(request) + assert isinstance(exc_info.value.error, types.InvalidParamsError) + + +class TestProtoUtils: + def test_roundtrip_message(self, sample_message: types.Message): + """Test conversion of Message to proto and back.""" + proto_msg = proto_utils.ToProto.message(sample_message) + assert isinstance(proto_msg, a2a_pb2.Message) + + # Test file part handling + assert proto_msg.content[1].file.file_with_uri == 'file:///test.txt' + assert proto_msg.content[1].file.mime_type == 'text/plain' + assert proto_msg.content[1].file.name == 'test.txt' + + 
roundtrip_msg = proto_utils.FromProto.message(proto_msg) + assert roundtrip_msg == sample_message + + def test_enum_conversions(self): + """Test conversions for all enum types.""" + assert ( + proto_utils.ToProto.role(types.Role.agent) + == a2a_pb2.Role.ROLE_AGENT + ) + assert ( + proto_utils.FromProto.role(a2a_pb2.Role.ROLE_USER) + == types.Role.user + ) + + for state in types.TaskState: + proto_state = proto_utils.ToProto.task_state(state) + assert proto_utils.FromProto.task_state(proto_state) == state + + # Test unknown state case + assert ( + proto_utils.FromProto.task_state( + a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED + ) + == types.TaskState.unknown + ) + assert ( + proto_utils.ToProto.task_state(types.TaskState.unknown) + == a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED + ) + + def test_oauth_flows_conversion(self): + """Test conversion of different OAuth2 flows.""" + # Test password flow + password_flow = types.OAuthFlows( + password=types.PasswordOAuthFlow( + token_url='http://token.url', scopes={'read': 'Read'} + ) + ) + proto_password_flow = proto_utils.ToProto.oauth2_flows(password_flow) + assert proto_password_flow.HasField('password') + + # Test implicit flow + implicit_flow = types.OAuthFlows( + implicit=types.ImplicitOAuthFlow( + authorization_url='http://auth.url', scopes={'read': 'Read'} + ) + ) + proto_implicit_flow = proto_utils.ToProto.oauth2_flows(implicit_flow) + assert proto_implicit_flow.HasField('implicit') + + # Test authorization code flow + auth_code_flow = types.OAuthFlows( + authorization_code=types.AuthorizationCodeOAuthFlow( + authorization_url='http://auth.url', + token_url='http://token.url', + scopes={'read': 'read'}, + ) + ) + proto_auth_code_flow = proto_utils.ToProto.oauth2_flows(auth_code_flow) + assert proto_auth_code_flow.HasField('authorization_code') + + # Test invalid flow + with pytest.raises(ValueError): + proto_utils.ToProto.oauth2_flows(types.OAuthFlows()) + + # Test FromProto + roundtrip_password = 
proto_utils.FromProto.oauth2_flows( + proto_password_flow + ) + assert roundtrip_password.password is not None + + roundtrip_implicit = proto_utils.FromProto.oauth2_flows( + proto_implicit_flow + ) + assert roundtrip_implicit.implicit is not None + + def test_task_id_params_from_proto_invalid_name(self): + request = a2a_pb2.CancelTaskRequest(name='invalid-name-format') + with pytest.raises(ServerError) as exc_info: + proto_utils.FromProto.task_id_params(request) + assert isinstance(exc_info.value.error, types.InvalidParamsError) + + def test_task_push_config_from_proto_invalid_parent(self): + request = a2a_pb2.TaskPushNotificationConfig(name='invalid-name-format') + with pytest.raises(ServerError) as exc_info: + proto_utils.FromProto.task_push_notification_config(request) + assert isinstance(exc_info.value.error, types.InvalidParamsError) + + def test_none_handling(self): + """Test that None inputs are handled gracefully.""" + assert proto_utils.ToProto.message(None) is None + assert proto_utils.ToProto.metadata(None) is None + assert proto_utils.ToProto.provider(None) is None + assert proto_utils.ToProto.security(None) is None + assert proto_utils.ToProto.security_schemes(None) is None + + def test_metadata_conversion(self): + """Test metadata conversion with various data types.""" + metadata = { + 'null_value': None, + 'bool_value': True, + 'int_value': 42, + 'float_value': 3.14, + 'string_value': 'hello', + 'dict_value': {'nested': 'dict', 'count': 10}, + 'list_value': [1, 'two', 3.0, True, None], + 'tuple_value': (1, 2, 3), + 'complex_list': [ + {'name': 'item1', 'values': [1, 2, 3]}, + {'name': 'item2', 'values': [4, 5, 6]}, + ], + } + + # Convert to proto + proto_metadata = proto_utils.ToProto.metadata(metadata) + assert proto_metadata is not None + + # Convert back to Python + roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) + + # Verify all values are preserved correctly + assert roundtrip_metadata['null_value'] is None + assert 
roundtrip_metadata['bool_value'] is True + assert roundtrip_metadata['int_value'] == 42 + assert roundtrip_metadata['float_value'] == 3.14 + assert roundtrip_metadata['string_value'] == 'hello' + assert roundtrip_metadata['dict_value']['nested'] == 'dict' + assert roundtrip_metadata['dict_value']['count'] == 10 + assert roundtrip_metadata['list_value'] == [1, 'two', 3.0, True, None] + assert roundtrip_metadata['tuple_value'] == [ + 1, + 2, + 3, + ] # tuples become lists + assert len(roundtrip_metadata['complex_list']) == 2 + assert roundtrip_metadata['complex_list'][0]['name'] == 'item1' + + def test_metadata_with_custom_objects(self): + """Test metadata conversion with custom objects using preprocessing utility.""" + + class CustomObject: + def __str__(self): + return 'custom_object_str' + + def __repr__(self): + return 'CustomObject()' + + metadata = { + 'custom_obj': CustomObject(), + 'list_with_custom': [1, CustomObject(), 'text'], + 'nested_custom': {'obj': CustomObject(), 'normal': 'value'}, + } + + # Use preprocessing utility to make it serializable + serializable_metadata = proto_utils.make_dict_serializable(metadata) + + # Convert to proto + proto_metadata = proto_utils.ToProto.metadata(serializable_metadata) + assert proto_metadata is not None + + # Convert back to Python + roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) + + # Custom objects should be converted to strings + assert roundtrip_metadata['custom_obj'] == 'custom_object_str' + assert roundtrip_metadata['list_with_custom'] == [ + 1, + 'custom_object_str', + 'text', + ] + assert roundtrip_metadata['nested_custom']['obj'] == 'custom_object_str' + assert roundtrip_metadata['nested_custom']['normal'] == 'value' + + def test_metadata_edge_cases(self): + """Test metadata conversion with edge cases.""" + metadata = { + 'empty_dict': {}, + 'empty_list': [], + 'zero': 0, + 'false': False, + 'empty_string': '', + 'unicode_string': 'string test', + 'safe_number': 9007199254740991, # 
JavaScript MAX_SAFE_INTEGER + 'negative_number': -42, + 'float_precision': 0.123456789, + 'numeric_string': '12345', + } + + # Convert to proto and back + proto_metadata = proto_utils.ToProto.metadata(metadata) + roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) + + # Verify edge cases are handled correctly + assert roundtrip_metadata['empty_dict'] == {} + assert roundtrip_metadata['empty_list'] == [] + assert roundtrip_metadata['zero'] == 0 + assert roundtrip_metadata['false'] is False + assert roundtrip_metadata['empty_string'] == '' + assert roundtrip_metadata['unicode_string'] == 'string test' + assert roundtrip_metadata['safe_number'] == 9007199254740991 + assert roundtrip_metadata['negative_number'] == -42 + assert abs(roundtrip_metadata['float_precision'] - 0.123456789) < 1e-10 + assert roundtrip_metadata['numeric_string'] == '12345' + + def test_make_dict_serializable(self): + """Test the make_dict_serializable utility function.""" + + class CustomObject: + def __str__(self): + return 'custom_str' + + test_data = { + 'string': 'hello', + 'int': 42, + 'float': 3.14, + 'bool': True, + 'none': None, + 'custom': CustomObject(), + 'list': [1, 'two', CustomObject()], + 'tuple': (1, 2, CustomObject()), + 'nested': {'inner_custom': CustomObject(), 'inner_normal': 'value'}, + } + + result = proto_utils.make_dict_serializable(test_data) + + # Basic types should be unchanged + assert result['string'] == 'hello' + assert result['int'] == 42 + assert result['float'] == 3.14 + assert result['bool'] is True + assert result['none'] is None + + # Custom objects should be converted to strings + assert result['custom'] == 'custom_str' + assert result['list'] == [1, 'two', 'custom_str'] + assert result['tuple'] == [1, 2, 'custom_str'] # tuples become lists + assert result['nested']['inner_custom'] == 'custom_str' + assert result['nested']['inner_normal'] == 'value' + + def test_normalize_large_integers_to_strings(self): + """Test the 
normalize_large_integers_to_strings utility function.""" + + test_data = { + 'small_int': 42, + 'large_int': 9999999999999999999, # > 15 digits + 'negative_large': -9999999999999999999, + 'float': 3.14, + 'string': 'hello', + 'list': [123, 9999999999999999999, 'text'], + 'nested': {'inner_large': 9999999999999999999, 'inner_small': 100}, + } + + result = proto_utils.normalize_large_integers_to_strings(test_data) + + # Small integers should remain as integers + assert result['small_int'] == 42 + assert isinstance(result['small_int'], int) + + # Large integers should be converted to strings + assert result['large_int'] == '9999999999999999999' + assert isinstance(result['large_int'], str) + assert result['negative_large'] == '-9999999999999999999' + assert isinstance(result['negative_large'], str) + + # Other types should be unchanged + assert result['float'] == 3.14 + assert result['string'] == 'hello' + + # Lists should be processed recursively + assert result['list'] == [123, '9999999999999999999', 'text'] + + # Nested dicts should be processed recursively + assert result['nested']['inner_large'] == '9999999999999999999' + assert result['nested']['inner_small'] == 100 + + def test_parse_string_integers_in_dict(self): + """Test the parse_string_integers_in_dict utility function.""" + + test_data = { + 'regular_string': 'hello', + 'numeric_string_small': '123', # small, should stay as string + 'numeric_string_large': '9999999999999999999', # > 15 digits, should become int + 'negative_large_string': '-9999999999999999999', + 'float_string': '3.14', # not all digits, should stay as string + 'mixed_string': '123abc', # not all digits, should stay as string + 'int': 42, + 'list': ['hello', '9999999999999999999', '123'], + 'nested': { + 'inner_large_string': '9999999999999999999', + 'inner_regular': 'value', + }, + } + + result = proto_utils.parse_string_integers_in_dict(test_data) + + # Regular strings should remain unchanged + assert result['regular_string'] == 'hello' 
+ assert ( + result['numeric_string_small'] == '123' + ) # too small, stays string + assert result['float_string'] == '3.14' # not all digits + assert result['mixed_string'] == '123abc' # not all digits + + # Large numeric strings should be converted to integers + assert result['numeric_string_large'] == 9999999999999999999 + assert isinstance(result['numeric_string_large'], int) + assert result['negative_large_string'] == -9999999999999999999 + assert isinstance(result['negative_large_string'], int) + + # Other types should be unchanged + assert result['int'] == 42 + + # Lists should be processed recursively + assert result['list'] == ['hello', 9999999999999999999, '123'] + + # Nested dicts should be processed recursively + assert result['nested']['inner_large_string'] == 9999999999999999999 + assert result['nested']['inner_regular'] == 'value' + + def test_large_integer_roundtrip_with_utilities(self): + """Test large integer handling with preprocessing and post-processing utilities.""" + + original_data = { + 'large_int': 9999999999999999999, + 'small_int': 42, + 'nested': {'another_large': 12345678901234567890, 'normal': 'text'}, + } + + # Step 1: Preprocess to convert large integers to strings + preprocessed = proto_utils.normalize_large_integers_to_strings( + original_data + ) + + # Step 2: Convert to proto + proto_metadata = proto_utils.ToProto.metadata(preprocessed) + assert proto_metadata is not None + + # Step 3: Convert back from proto + dict_from_proto = proto_utils.FromProto.metadata(proto_metadata) + + # Step 4: Post-process to convert large integer strings back to integers + final_result = proto_utils.parse_string_integers_in_dict( + dict_from_proto + ) + + # Verify roundtrip preserved the original data + assert final_result['large_int'] == 9999999999999999999 + assert isinstance(final_result['large_int'], int) + assert final_result['small_int'] == 42 + assert final_result['nested']['another_large'] == 12345678901234567890 + assert 
isinstance(final_result['nested']['another_large'], int) + assert final_result['nested']['normal'] == 'text' + + def test_task_conversion_roundtrip( + self, sample_task: types.Task, sample_message: types.Message + ): + """Test conversion of Task to proto and back.""" + proto_task = proto_utils.ToProto.task(sample_task) + assert isinstance(proto_task, a2a_pb2.Task) + + roundtrip_task = proto_utils.FromProto.task(proto_task) + assert roundtrip_task.id == 'task-1' + assert roundtrip_task.context_id == 'ctx-1' + assert roundtrip_task.status == types.TaskStatus( + state=types.TaskState.working, message=sample_message + ) + assert roundtrip_task.history == sample_task.history + assert roundtrip_task.artifacts == [ + types.Artifact( + artifact_id='art-1', + description='', + metadata={}, + name='', + parts=[ + types.Part(root=types.TextPart(text='Artifact content')) + ], + ) + ] + assert roundtrip_task.metadata == {'source': 'test'} + + def test_agent_card_conversion_roundtrip( + self, sample_agent_card: types.AgentCard + ): + """Test conversion of AgentCard to proto and back.""" + proto_card = proto_utils.ToProto.agent_card(sample_agent_card) + assert isinstance(proto_card, a2a_pb2.AgentCard) + + roundtrip_card = proto_utils.FromProto.agent_card(proto_card) + assert roundtrip_card.name == 'Test Agent' + assert roundtrip_card.description == 'A test agent' + assert roundtrip_card.url == 'http://localhost' + assert roundtrip_card.version == '1.0.0' + assert roundtrip_card.capabilities == types.AgentCapabilities( + extensions=[], streaming=True, push_notifications=True + ) + assert roundtrip_card.default_input_modes == ['text/plain'] + assert roundtrip_card.default_output_modes == ['text/plain'] + assert roundtrip_card.skills == [ + types.AgentSkill( + id='skill1', + name='Test Skill', + description='A test skill', + tags=['test'], + examples=[], + input_modes=[], + output_modes=[], + ) + ] + assert roundtrip_card.provider == types.AgentProvider( + organization='Test Org', 
url='http://test.org' + ) + assert roundtrip_card.security == [{'oauth_scheme': ['read', 'write']}] + + # Normalized version of security_schemes. None fields are filled with defaults. + expected_security_schemes = { + 'oauth_scheme': types.SecurityScheme( + root=types.OAuth2SecurityScheme( + description='', + flows=types.OAuthFlows( + client_credentials=types.ClientCredentialsOAuthFlow( + refresh_url='', + scopes={ + 'write': 'Write access', + 'read': 'Read access', + }, + token_url='http://token.url', + ), + ), + ) + ), + 'apiKey': types.SecurityScheme( + root=types.APIKeySecurityScheme( + description='', + in_=types.In.header, + name='X-API-KEY', + ) + ), + 'httpAuth': types.SecurityScheme( + root=types.HTTPAuthSecurityScheme( + bearer_format='', + description='', + scheme='bearer', + ) + ), + 'oidc': types.SecurityScheme( + root=types.OpenIdConnectSecurityScheme( + description='', + open_id_connect_url='http://oidc.url', + ) + ), + } + assert roundtrip_card.security_schemes == expected_security_schemes + assert roundtrip_card.signatures == [ + types.AgentCardSignature( + protected='protected_test', + signature='signature_test', + header={'alg': 'ES256'}, + ), + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={'alg': 'ES256', 'kid': 'unique-key-identifier-123'}, + ), + ] + + @pytest.mark.parametrize( + 'signature_data, expected_data', + [ + ( + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={'alg': 'ES256'}, + ), + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={'alg': 'ES256'}, + ), + ), + ( + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header=None, + ), + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={}, + ), + ), + ( + types.AgentCardSignature( + protected='', + signature='', + header={}, + ), + types.AgentCardSignature( + protected='', + 
signature='', + header={}, + ), + ), + ], + ) + def test_agent_card_signature_conversion_roundtrip( + self, signature_data, expected_data + ): + """Test conversion of AgentCardSignature to proto and back.""" + proto_signature = proto_utils.ToProto.agent_card_signature( + signature_data + ) + assert isinstance(proto_signature, a2a_pb2.AgentCardSignature) + roundtrip_signature = proto_utils.FromProto.agent_card_signature( + proto_signature + ) + assert roundtrip_signature == expected_data diff --git a/tests/utils/test_signing.py b/tests/utils/test_signing.py new file mode 100644 index 000000000..9a843d340 --- /dev/null +++ b/tests/utils/test_signing.py @@ -0,0 +1,185 @@ +from a2a.types import ( + AgentCard, + AgentCapabilities, + AgentSkill, +) +from a2a.types import ( + AgentCard, + AgentCapabilities, + AgentSkill, + AgentCardSignature, +) +from a2a.utils import signing +from typing import Any +from jwt.utils import base64url_encode + +import pytest +from cryptography.hazmat.primitives import asymmetric + + +def create_key_provider(verification_key: str | bytes | dict[str, Any]): + """Creates a key provider function for testing.""" + + def key_provider(kid: str | None, jku: str | None): + return verification_key + + return key_provider + + +# Fixture for a complete sample AgentCard +@pytest.fixture +def sample_agent_card() -> AgentCard: + return AgentCard( + name='Test Agent', + description='A test agent', + url='http://localhost', + version='1.0.0', + capabilities=AgentCapabilities( + streaming=None, + push_notifications=True, + ), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + documentation_url=None, + icon_url='', + skills=[ + AgentSkill( + id='skill1', + name='Test Skill', + description='A test skill', + tags=['test'], + ) + ], + ) + + +def test_signer_and_verifier_symmetric(sample_agent_card: AgentCard): + """Test the agent card signing and verification process with symmetric key encryption.""" + key = 'key12345' # Using a simple 
symmetric key for HS256 + wrong_key = 'wrongkey' + + agent_card_signer = signing.create_agent_card_signer( + signing_key=key, + protected_header={ + 'alg': 'HS384', + 'kid': 'key1', + 'jku': None, + 'typ': 'JOSE', + }, + ) + signed_card = agent_card_signer(sample_agent_card) + + assert signed_card.signatures is not None + assert len(signed_card.signatures) == 1 + signature = signed_card.signatures[0] + assert signature.protected is not None + assert signature.signature is not None + + # Verify the signature + verifier = signing.create_signature_verifier( + create_key_provider(key), ['HS256', 'HS384', 'ES256', 'RS256'] + ) + try: + verifier(signed_card) + except signing.InvalidSignaturesError: + pytest.fail('Signature verification failed with correct key') + + # Verify with wrong key + verifier_wrong_key = signing.create_signature_verifier( + create_key_provider(wrong_key), ['HS256', 'HS384', 'ES256', 'RS256'] + ) + with pytest.raises(signing.InvalidSignaturesError): + verifier_wrong_key(signed_card) + + +def test_signer_and_verifier_symmetric_multiple_signatures( + sample_agent_card: AgentCard, +): + """Test the agent card signing and verification process with symmetric key encryption. 
+ This test adds a signatures to the AgentCard before signing.""" + encoded_header = base64url_encode( + b'{"alg": "HS256", "kid": "old_key"}' + ).decode('utf-8') + sample_agent_card.signatures = [ + AgentCardSignature(protected=encoded_header, signature='old_signature') + ] + key = 'key12345' # Using a simple symmetric key for HS256 + wrong_key = 'wrongkey' + + agent_card_signer = signing.create_agent_card_signer( + signing_key=key, + protected_header={ + 'alg': 'HS384', + 'kid': 'key1', + 'jku': None, + 'typ': 'JOSE', + }, + ) + signed_card = agent_card_signer(sample_agent_card) + + assert signed_card.signatures is not None + assert len(signed_card.signatures) == 2 + signature = signed_card.signatures[1] + assert signature.protected is not None + assert signature.signature is not None + + # Verify the signature + verifier = signing.create_signature_verifier( + create_key_provider(key), ['HS256', 'HS384', 'ES256', 'RS256'] + ) + try: + verifier(signed_card) + except signing.InvalidSignaturesError: + pytest.fail('Signature verification failed with correct key') + + # Verify with wrong key + verifier_wrong_key = signing.create_signature_verifier( + create_key_provider(wrong_key), ['HS256', 'HS384', 'ES256', 'RS256'] + ) + with pytest.raises(signing.InvalidSignaturesError): + verifier_wrong_key(signed_card) + + +def test_signer_and_verifier_asymmetric(sample_agent_card: AgentCard): + """Test the agent card signing and verification process with an asymmetric key encryption.""" + # Generate a dummy EC private key for ES256 + private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) + public_key = private_key.public_key() + # Generate another key pair for negative test + private_key_error = asymmetric.ec.generate_private_key( + asymmetric.ec.SECP256R1() + ) + public_key_error = private_key_error.public_key() + + agent_card_signer = signing.create_agent_card_signer( + signing_key=private_key, + protected_header={ + 'alg': 'ES256', + 'kid': 'key2', + 
'jku': None, + 'typ': 'JOSE', + }, + ) + signed_card = agent_card_signer(sample_agent_card) + + assert signed_card.signatures is not None + assert len(signed_card.signatures) == 1 + signature = signed_card.signatures[0] + assert signature.protected is not None + assert signature.signature is not None + + verifier = signing.create_signature_verifier( + create_key_provider(public_key), ['HS256', 'HS384', 'ES256', 'RS256'] + ) + try: + verifier(signed_card) + except signing.InvalidSignaturesError: + pytest.fail('Signature verification failed with correct key') + + # Verify with wrong key + verifier_wrong_key = signing.create_signature_verifier( + create_key_provider(public_key_error), + ['HS256', 'HS384', 'ES256', 'RS256'], + ) + with pytest.raises(signing.InvalidSignaturesError): + verifier_wrong_key(signed_card) diff --git a/tests/utils/test_task.py b/tests/utils/test_task.py new file mode 100644 index 000000000..cb3dc3868 --- /dev/null +++ b/tests/utils/test_task.py @@ -0,0 +1,193 @@ +import unittest +import uuid + +from unittest.mock import patch + +import pytest + +from a2a.types import Artifact, Message, Part, Role, TextPart +from a2a.utils.task import completed_task, new_task + + +class TestTask(unittest.TestCase): + def test_new_task_status(self): + message = Message( + role=Role.user, + parts=[Part(root=TextPart(text='test message'))], + message_id=str(uuid.uuid4()), + ) + task = new_task(message) + self.assertEqual(task.status.state.value, 'submitted') + + @patch('uuid.uuid4') + def test_new_task_generates_ids(self, mock_uuid4): + mock_uuid = uuid.UUID('12345678-1234-5678-1234-567812345678') + mock_uuid4.return_value = mock_uuid + message = Message( + role=Role.user, + parts=[Part(root=TextPart(text='test message'))], + message_id=str(uuid.uuid4()), + ) + task = new_task(message) + self.assertEqual(task.id, str(mock_uuid)) + self.assertEqual(task.context_id, str(mock_uuid)) + + def test_new_task_uses_provided_ids(self): + task_id = str(uuid.uuid4()) + 
context_id = str(uuid.uuid4()) + message = Message( + role=Role.user, + parts=[Part(root=TextPart(text='test message'))], + message_id=str(uuid.uuid4()), + task_id=task_id, + context_id=context_id, + ) + task = new_task(message) + self.assertEqual(task.id, task_id) + self.assertEqual(task.context_id, context_id) + + def test_new_task_initial_message_in_history(self): + message = Message( + role=Role.user, + parts=[Part(root=TextPart(text='test message'))], + message_id=str(uuid.uuid4()), + ) + task = new_task(message) + self.assertEqual(len(task.history), 1) + self.assertEqual(task.history[0], message) + + def test_completed_task_status(self): + task_id = str(uuid.uuid4()) + context_id = str(uuid.uuid4()) + artifacts = [ + Artifact( + artifact_id='artifact_1', + parts=[Part(root=TextPart(text='some content'))], + ) + ] + task = completed_task( + task_id=task_id, + context_id=context_id, + artifacts=artifacts, + history=[], + ) + self.assertEqual(task.status.state.value, 'completed') + + def test_completed_task_assigns_ids_and_artifacts(self): + task_id = str(uuid.uuid4()) + context_id = str(uuid.uuid4()) + artifacts = [ + Artifact( + artifact_id='artifact_1', + parts=[Part(root=TextPart(text='some content'))], + ) + ] + task = completed_task( + task_id=task_id, + context_id=context_id, + artifacts=artifacts, + history=[], + ) + self.assertEqual(task.id, task_id) + self.assertEqual(task.context_id, context_id) + self.assertEqual(task.artifacts, artifacts) + + def test_completed_task_empty_history_if_not_provided(self): + task_id = str(uuid.uuid4()) + context_id = str(uuid.uuid4()) + artifacts = [ + Artifact( + artifact_id='artifact_1', + parts=[Part(root=TextPart(text='some content'))], + ) + ] + task = completed_task( + task_id=task_id, context_id=context_id, artifacts=artifacts + ) + self.assertEqual(task.history, []) + + def test_completed_task_uses_provided_history(self): + task_id = str(uuid.uuid4()) + context_id = str(uuid.uuid4()) + artifacts = [ + Artifact( 
+ artifact_id='artifact_1', + parts=[Part(root=TextPart(text='some content'))], + ) + ] + history = [ + Message( + role=Role.user, + parts=[Part(root=TextPart(text='Hello'))], + message_id=str(uuid.uuid4()), + ), + Message( + role=Role.agent, + parts=[Part(root=TextPart(text='Hi there'))], + message_id=str(uuid.uuid4()), + ), + ] + task = completed_task( + task_id=task_id, + context_id=context_id, + artifacts=artifacts, + history=history, + ) + self.assertEqual(task.history, history) + + def test_new_task_invalid_message_empty_parts(self): + with self.assertRaises(ValueError): + new_task( + Message( + role=Role.user, + parts=[], + message_id=str(uuid.uuid4()), + ) + ) + + def test_new_task_invalid_message_empty_content(self): + with self.assertRaises(ValueError): + new_task( + Message( + role=Role.user, + parts=[Part(root=TextPart(text=''))], + messageId=str(uuid.uuid4()), + ) + ) + + def test_new_task_invalid_message_none_role(self): + with self.assertRaises(TypeError): + msg = Message.model_construct( + role=None, + parts=[Part(root=TextPart(text='test message'))], + message_id=str(uuid.uuid4()), + ) + new_task(msg) + + def test_completed_task_empty_artifacts(self): + with pytest.raises( + ValueError, + match='artifacts must be a non-empty list of Artifact objects', + ): + completed_task( + task_id='task-123', + context_id='ctx-456', + artifacts=[], + history=[], + ) + + def test_completed_task_invalid_artifact_type(self): + with pytest.raises( + ValueError, + match='artifacts must be a non-empty list of Artifact objects', + ): + completed_task( + task_id='task-123', + context_id='ctx-456', + artifacts=['not an artifact'], + history=[], + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/utils/test_telemetry.py b/tests/utils/test_telemetry.py index 90ea17b07..a43bf1fa3 100644 --- a/tests/utils/test_telemetry.py +++ b/tests/utils/test_telemetry.py @@ -1,5 +1,9 @@ import asyncio +import importlib +import sys +from collections.abc import 
Callable, Generator +from typing import Any, NoReturn from unittest import mock import pytest @@ -8,12 +12,12 @@ @pytest.fixture -def mock_span(): +def mock_span() -> mock.MagicMock: return mock.MagicMock() @pytest.fixture -def mock_tracer(mock_span): +def mock_tracer(mock_span: mock.MagicMock) -> mock.MagicMock: tracer = mock.MagicMock() tracer.start_as_current_span.return_value.__enter__.return_value = mock_span tracer.start_as_current_span.return_value.__exit__.return_value = False @@ -21,12 +25,40 @@ def mock_tracer(mock_span): @pytest.fixture(autouse=True) -def patch_trace_get_tracer(mock_tracer): +def patch_trace_get_tracer( + mock_tracer: mock.MagicMock, +) -> Generator[None, Any, None]: with mock.patch('opentelemetry.trace.get_tracer', return_value=mock_tracer): yield -def test_trace_function_sync_success(mock_span): +@pytest.fixture +def reload_telemetry_module( + monkeypatch: pytest.MonkeyPatch, +) -> Generator[Callable[[str | None], Any], None, None]: + """Fixture to handle telemetry module reloading with env var control.""" + + def _reload(env_value: str | None = None) -> Any: + if env_value is None: + monkeypatch.delenv( + 'OTEL_INSTRUMENTATION_A2A_SDK_ENABLED', raising=False + ) + else: + monkeypatch.setenv( + 'OTEL_INSTRUMENTATION_A2A_SDK_ENABLED', env_value + ) + + sys.modules.pop('a2a.utils.telemetry', None) + module = importlib.import_module('a2a.utils.telemetry') + return module + + yield _reload + + # Cleanup to ensure other tests aren't affected by a "poisoned" sys.modules + sys.modules.pop('a2a.utils.telemetry', None) + + +def test_trace_function_sync_success(mock_span: mock.MagicMock) -> None: @trace_function def foo(x, y): return x + y @@ -38,9 +70,9 @@ def foo(x, y): mock_span.record_exception.assert_not_called() -def test_trace_function_sync_exception(mock_span): +def test_trace_function_sync_exception(mock_span: mock.MagicMock) -> None: @trace_function - def bar(): + def bar() -> NoReturn: raise ValueError('fail') with 
pytest.raises(ValueError): @@ -49,39 +81,46 @@ def bar(): mock_span.set_status.assert_any_call(mock.ANY, description='fail') -def test_trace_function_sync_attribute_extractor_called(mock_span): +def test_trace_function_sync_attribute_extractor_called( + mock_span: mock.MagicMock, +) -> None: called = {} - def attr_extractor(span, args, kwargs, result, exception): + def attr_extractor(span, args, kwargs, result, exception) -> None: called['called'] = True assert span is mock_span assert exception is None assert result == 42 @trace_function(attribute_extractor=attr_extractor) - def foo(): + def foo() -> int: return 42 foo() assert called['called'] -def test_trace_function_sync_attribute_extractor_error_logged(mock_span): +def test_trace_function_sync_attribute_extractor_error_logged( + mock_span: mock.MagicMock, +) -> None: with mock.patch('a2a.utils.telemetry.logger') as logger: - def attr_extractor(span, args, kwargs, result, exception): + def attr_extractor(span, args, kwargs, result, exception) -> NoReturn: raise RuntimeError('attr fail') @trace_function(attribute_extractor=attr_extractor) - def foo(): + def foo() -> int: return 1 foo() - logger.error.assert_any_call(mock.ANY) + logger.exception.assert_any_call( + 'attribute_extractor error in span %s', + 'test_telemetry.foo', + ) @pytest.mark.asyncio -async def test_trace_function_async_success(mock_span): +async def test_trace_function_async_success(mock_span: mock.MagicMock) -> None: @trace_function async def foo(x): await asyncio.sleep(0) @@ -94,9 +133,11 @@ async def foo(x): @pytest.mark.asyncio -async def test_trace_function_async_exception(mock_span): +async def test_trace_function_async_exception( + mock_span: mock.MagicMock, +) -> None: @trace_function - async def bar(): + async def bar() -> NoReturn: await asyncio.sleep(0) raise RuntimeError('async fail') @@ -107,41 +148,45 @@ async def bar(): @pytest.mark.asyncio -async def test_trace_function_async_attribute_extractor_called(mock_span): +async def 
test_trace_function_async_attribute_extractor_called( + mock_span: mock.MagicMock, +) -> None: called = {} - def attr_extractor(span, args, kwargs, result, exception): + def attr_extractor(span, args, kwargs, result, exception) -> None: called['called'] = True assert exception is None assert result == 99 @trace_function(attribute_extractor=attr_extractor) - async def foo(): + async def foo() -> int: return 99 await foo() assert called['called'] -def test_trace_function_with_args_and_attributes(mock_span): +def test_trace_function_with_args_and_attributes( + mock_span: mock.MagicMock, +) -> None: @trace_function(span_name='custom.span', attributes={'foo': 'bar'}) - def foo(): + def foo() -> int: return 1 foo() mock_span.set_attribute.assert_any_call('foo', 'bar') -def test_trace_class_exclude_list(mock_span): +def test_trace_class_exclude_list(mock_span: mock.MagicMock) -> None: @trace_class(exclude_list=['skip_me']) class MyClass: - def a(self): + def a(self) -> str: return 'a' - def skip_me(self): + def skip_me(self) -> str: return 'skip' - def __str__(self): + def __str__(self) -> str: return 'str' obj = MyClass() @@ -152,13 +197,13 @@ def __str__(self): assert not hasattr(obj.skip_me, '__wrapped__') -def test_trace_class_include_list(mock_span): +def test_trace_class_include_list(mock_span: mock.MagicMock) -> None: @trace_class(include_list=['only_this']) class MyClass: - def only_this(self): + def only_this(self) -> str: return 'yes' - def not_this(self): + def not_this(self) -> str: return 'no' obj = MyClass() @@ -168,16 +213,56 @@ def not_this(self): assert not hasattr(obj.not_this, '__wrapped__') -def test_trace_class_dunder_not_traced(mock_span): +def test_trace_class_dunder_not_traced(mock_span: mock.MagicMock) -> None: @trace_class() class MyClass: - def __init__(self): + def __init__(self) -> None: self.x = 1 - def foo(self): + def foo(self) -> str: return 'foo' obj = MyClass() assert obj.foo() == 'foo' assert hasattr(obj.foo, '__wrapped__') assert 
hasattr(obj, 'x') + + +@pytest.mark.xdist_group(name='telemetry_isolation') +@pytest.mark.parametrize( + 'env_value,expected_tracing', + [ + (None, True), # Default: env var not set, tracing enabled + ('true', True), # Explicitly enabled + ('True', True), # Case insensitive + ('false', False), # Disabled + ('', False), # Empty string = false + ], +) +def test_env_var_controls_instrumentation( + reload_telemetry_module: Callable[[str | None], Any], + env_value: str | None, + expected_tracing: bool, +) -> None: + """Test OTEL_INSTRUMENTATION_A2A_SDK_ENABLED controls span creation.""" + telemetry_module = reload_telemetry_module(env_value) + + is_noop = type(telemetry_module.trace).__name__ == '_NoOp' + + assert is_noop != expected_tracing + + +@pytest.mark.xdist_group(name='telemetry_isolation') +def test_env_var_disabled_logs_message( + reload_telemetry_module: Callable[[str | None], Any], + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that disabling via env var logs appropriate debug message.""" + with caplog.at_level('DEBUG', logger='a2a.utils.telemetry'): + reload_telemetry_module('false') + + assert ( + 'A2A OTEL instrumentation disabled via environment variable' + in caplog.text + ) + assert 'OTEL_INSTRUMENTATION_A2A_SDK_ENABLED' in caplog.text diff --git a/uv.lock b/uv.lock index 4bf862d33..58ccefe4f 100644 --- a/uv.lock +++ b/uv.lock @@ -1,68 +1,194 @@ version = 1 -revision = 1 +revision = 3 requires-python = ">=3.10" resolution-markers = [ "python_full_version >= '3.13'", - "python_full_version >= '3.12.4' and python_full_version < '3.13'", - "python_full_version >= '3.11' and python_full_version < '3.12.4'", - "python_full_version < '3.11'", + "python_full_version < '3.13'", ] [[package]] name = "a2a-sdk" source = { editable = "." 
} dependencies = [ + { name = "google-api-core" }, { name = "httpx" }, { name = "httpx-sse" }, + { name = "protobuf" }, + { name = "pydantic" }, +] + +[package.optional-dependencies] +all = [ + { name = "cryptography" }, + { name = "fastapi" }, + { name = "grpcio" }, + { name = "grpcio-reflection" }, + { name = "grpcio-tools" }, { name = "opentelemetry-api" }, { name = "opentelemetry-sdk" }, - { name = "pydantic" }, + { name = "pyjwt" }, + { name = "sqlalchemy", extra = ["aiomysql", "aiosqlite", "asyncio", "postgresql-asyncpg"] }, { name = "sse-starlette" }, { name = "starlette" }, ] +encryption = [ + { name = "cryptography" }, +] +grpc = [ + { name = "grpcio" }, + { name = "grpcio-reflection" }, + { name = "grpcio-tools" }, +] +http-server = [ + { name = "fastapi" }, + { name = "sse-starlette" }, + { name = "starlette" }, +] +mysql = [ + { name = "sqlalchemy", extra = ["aiomysql", "asyncio"] }, +] +postgresql = [ + { name = "sqlalchemy", extra = ["asyncio", "postgresql-asyncpg"] }, +] +signing = [ + { name = "pyjwt" }, +] +sql = [ + { name = "sqlalchemy", extra = ["aiomysql", "aiosqlite", "asyncio", "postgresql-asyncpg"] }, +] +sqlite = [ + { name = "sqlalchemy", extra = ["aiosqlite", "asyncio"] }, +] +telemetry = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, +] [package.dev-dependencies] dev = [ + { name = "a2a-sdk", extra = ["all"] }, + { name = "autoflake" }, { name = "datamodel-code-generator" }, { name = "mypy" }, + { name = "no-implicit-optional" }, + { name = "pre-commit" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, { name = "pytest-mock" }, + { name = "pytest-xdist" }, + { name = "pyupgrade" }, + { name = "respx" }, { name = "ruff" }, + { name = "trio" }, + { name = "types-protobuf" }, + { name = "types-requests" }, { name = "uv-dynamic-versioning" }, + { name = "uvicorn" }, ] [package.metadata] requires-dist = [ + { name = "cryptography", marker = "extra == 'all'", specifier = ">=43.0.0" }, + { name 
= "cryptography", marker = "extra == 'encryption'", specifier = ">=43.0.0" }, + { name = "fastapi", marker = "extra == 'all'", specifier = ">=0.115.2" }, + { name = "fastapi", marker = "extra == 'http-server'", specifier = ">=0.115.2" }, + { name = "google-api-core", specifier = ">=1.26.0" }, + { name = "grpcio", marker = "extra == 'all'", specifier = ">=1.60" }, + { name = "grpcio", marker = "extra == 'grpc'", specifier = ">=1.60" }, + { name = "grpcio-reflection", marker = "extra == 'all'", specifier = ">=1.7.0" }, + { name = "grpcio-reflection", marker = "extra == 'grpc'", specifier = ">=1.7.0" }, + { name = "grpcio-tools", marker = "extra == 'all'", specifier = ">=1.60" }, + { name = "grpcio-tools", marker = "extra == 'grpc'", specifier = ">=1.60" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "httpx-sse", specifier = ">=0.4.0" }, - { name = "opentelemetry-api", specifier = ">=1.33.0" }, - { name = "opentelemetry-sdk", specifier = ">=1.33.0" }, + { name = "opentelemetry-api", marker = "extra == 'all'", specifier = ">=1.33.0" }, + { name = "opentelemetry-api", marker = "extra == 'telemetry'", specifier = ">=1.33.0" }, + { name = "opentelemetry-sdk", marker = "extra == 'all'", specifier = ">=1.33.0" }, + { name = "opentelemetry-sdk", marker = "extra == 'telemetry'", specifier = ">=1.33.0" }, + { name = "protobuf", specifier = ">=5.29.5" }, { name = "pydantic", specifier = ">=2.11.3" }, - { name = "sse-starlette", specifier = ">=2.3.3" }, - { name = "starlette", specifier = ">=0.46.2" }, -] + { name = "pyjwt", marker = "extra == 'all'", specifier = ">=2.0.0" }, + { name = "pyjwt", marker = "extra == 'signing'", specifier = ">=2.0.0" }, + { name = "sqlalchemy", extras = ["aiomysql", "asyncio"], marker = "extra == 'all'", specifier = ">=2.0.0" }, + { name = "sqlalchemy", extras = ["aiomysql", "asyncio"], marker = "extra == 'mysql'", specifier = ">=2.0.0" }, + { name = "sqlalchemy", extras = ["aiomysql", "asyncio"], marker = "extra == 'sql'", specifier = 
">=2.0.0" }, + { name = "sqlalchemy", extras = ["aiosqlite", "asyncio"], marker = "extra == 'all'", specifier = ">=2.0.0" }, + { name = "sqlalchemy", extras = ["aiosqlite", "asyncio"], marker = "extra == 'sql'", specifier = ">=2.0.0" }, + { name = "sqlalchemy", extras = ["aiosqlite", "asyncio"], marker = "extra == 'sqlite'", specifier = ">=2.0.0" }, + { name = "sqlalchemy", extras = ["asyncio", "postgresql-asyncpg"], marker = "extra == 'all'", specifier = ">=2.0.0" }, + { name = "sqlalchemy", extras = ["asyncio", "postgresql-asyncpg"], marker = "extra == 'postgresql'", specifier = ">=2.0.0" }, + { name = "sqlalchemy", extras = ["asyncio", "postgresql-asyncpg"], marker = "extra == 'sql'", specifier = ">=2.0.0" }, + { name = "sse-starlette", marker = "extra == 'all'" }, + { name = "sse-starlette", marker = "extra == 'http-server'" }, + { name = "starlette", marker = "extra == 'all'" }, + { name = "starlette", marker = "extra == 'http-server'" }, +] +provides-extras = ["all", "encryption", "grpc", "http-server", "mysql", "postgresql", "signing", "sql", "sqlite", "telemetry"] [package.metadata.requires-dev] dev = [ + { name = "a2a-sdk", extras = ["all"], editable = "." 
}, + { name = "autoflake" }, { name = "datamodel-code-generator", specifier = ">=0.30.0" }, { name = "mypy", specifier = ">=1.15.0" }, + { name = "no-implicit-optional" }, + { name = "pre-commit" }, { name = "pytest", specifier = ">=8.3.5" }, { name = "pytest-asyncio", specifier = ">=0.26.0" }, { name = "pytest-cov", specifier = ">=6.1.1" }, { name = "pytest-mock", specifier = ">=3.14.0" }, - { name = "ruff", specifier = ">=0.11.6" }, + { name = "pytest-xdist", specifier = ">=3.6.1" }, + { name = "pyupgrade" }, + { name = "respx", specifier = ">=0.20.2" }, + { name = "ruff", specifier = ">=0.12.8" }, + { name = "trio" }, + { name = "types-protobuf" }, + { name = "types-requests" }, { name = "uv-dynamic-versioning", specifier = ">=0.8.2" }, + { name = "uvicorn", specifier = ">=0.35.0" }, +] + +[[package]] +name = "aiomysql" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pymysql" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/76/2c5b55e4406a1957ffdfd933a94c2517455291c97d2b81cec6813754791a/aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67", size = 114706, upload-time = "2023-06-11T19:57:53.608Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/87/c982ee8b333c85b8ae16306387d703a1fcdfc81a2f3f15a24820ab1a512d/aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a", size = 44215, upload-time = "2023-06-11T19:57:51.09Z" }, +] + +[[package]] +name = "aiosqlite" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454, upload-time = "2025-02-03T07:30:16.235Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792, upload-time = "2025-02-03T07:30:13.6Z" }, +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, ] [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, + { url = 
"https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] [[package]] @@ -75,18 +201,101 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, ] [[package]] name = "argcomplete" version = "3.6.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/16/0f/861e168fc813c56a78b35f3c30d91c6757d1fd185af1110f1aec784b35d0/argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf", size = 73403 } +sdist = { url = 
"https://files.pythonhosted.org/packages/16/0f/861e168fc813c56a78b35f3c30d91c6757d1fd185af1110f1aec784b35d0/argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf", size = 73403, upload-time = "2025-04-03T04:57:03.52Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/da/e42d7a9d8dd33fa775f467e4028a47936da2f01e4b0e561f9ba0d74cb0ca/argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591", size = 43708, upload-time = "2025-04-03T04:57:01.591Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "asyncpg" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/da/e42d7a9d8dd33fa775f467e4028a47936da2f01e4b0e561f9ba0d74cb0ca/argcomplete-3.6.2-py3-none-any.whl", hash = 
"sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591", size = 43708 }, + { url = "https://files.pythonhosted.org/packages/bb/07/1650a8c30e3a5c625478fa8aafd89a8dd7d85999bf7169b16f54973ebf2c/asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e", size = 673143, upload-time = "2024-10-20T00:29:08.846Z" }, + { url = "https://files.pythonhosted.org/packages/a0/9a/568ff9b590d0954553c56806766914c149609b828c426c5118d4869111d3/asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0", size = 645035, upload-time = "2024-10-20T00:29:12.02Z" }, + { url = "https://files.pythonhosted.org/packages/de/11/6f2fa6c902f341ca10403743701ea952bca896fc5b07cc1f4705d2bb0593/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f", size = 2912384, upload-time = "2024-10-20T00:29:13.644Z" }, + { url = "https://files.pythonhosted.org/packages/83/83/44bd393919c504ffe4a82d0aed8ea0e55eb1571a1dea6a4922b723f0a03b/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af", size = 2947526, upload-time = "2024-10-20T00:29:15.871Z" }, + { url = "https://files.pythonhosted.org/packages/08/85/e23dd3a2b55536eb0ded80c457b0693352262dc70426ef4d4a6fc994fa51/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75", size = 2895390, upload-time = "2024-10-20T00:29:19.346Z" }, + { url = "https://files.pythonhosted.org/packages/9b/26/fa96c8f4877d47dc6c1864fef5500b446522365da3d3d0ee89a5cce71a3f/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f", size = 3015630, upload-time = 
"2024-10-20T00:29:21.186Z" }, + { url = "https://files.pythonhosted.org/packages/34/00/814514eb9287614188a5179a8b6e588a3611ca47d41937af0f3a844b1b4b/asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf", size = 568760, upload-time = "2024-10-20T00:29:22.769Z" }, + { url = "https://files.pythonhosted.org/packages/f0/28/869a7a279400f8b06dd237266fdd7220bc5f7c975348fea5d1e6909588e9/asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50", size = 625764, upload-time = "2024-10-20T00:29:25.882Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506, upload-time = "2024-10-20T00:29:27.988Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922, upload-time = "2024-10-20T00:29:29.391Z" }, + { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565, upload-time = "2024-10-20T00:29:30.832Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962, upload-time = "2024-10-20T00:29:33.114Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791, upload-time = "2024-10-20T00:29:34.677Z" }, + { url = "https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696, upload-time = "2024-10-20T00:29:36.389Z" }, + { url = "https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358, upload-time = "2024-10-20T00:29:37.915Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375, upload-time = "2024-10-20T00:29:39.987Z" }, + { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162, upload-time = "2024-10-20T00:29:41.88Z" }, + { url = "https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025, upload-time = "2024-10-20T00:29:43.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243, upload-time = "2024-10-20T00:29:44.922Z" }, + { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059, upload-time = "2024-10-20T00:29:46.891Z" }, + { url = "https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596, upload-time = "2024-10-20T00:29:49.201Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632, upload-time = "2024-10-20T00:29:50.768Z" }, + { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186, upload-time = "2024-10-20T00:29:52.394Z" }, + { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064, upload-time = "2024-10-20T00:29:53.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373, upload-time = "2024-10-20T00:29:55.165Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745, upload-time = "2024-10-20T00:29:57.14Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103, upload-time = "2024-10-20T00:29:58.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471, upload-time = "2024-10-20T00:30:00.354Z" }, + { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253, upload-time = "2024-10-20T00:30:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720, upload-time = "2024-10-20T00:30:04.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404, upload-time = "2024-10-20T00:30:06.537Z" }, + { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623, upload-time = "2024-10-20T00:30:09.024Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "autoflake" +version = "2.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyflakes" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/cb/486f912d6171bc5748c311a2984a301f4e2d054833a1da78485866c71522/autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e", size = 27642, upload-time = "2024-03-13T03:41:28.977Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/ee/3fd29bf416eb4f1c5579cf12bf393ae954099258abd7bde03c4f9716ef6b/autoflake-2.3.1-py3-none-any.whl", hash = 
"sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840", size = 32483, upload-time = "2024-03-13T03:41:26.969Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, ] [[package]] @@ -102,34 +311,195 @@ dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449 } +sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419, upload-time = "2025-01-29T05:37:06.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080, upload-time = "2025-01-29T05:37:09.321Z" }, + { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886, upload-time = "2025-01-29T04:18:24.432Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404, upload-time = "2025-01-29T04:19:04.296Z" }, + { url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372, upload-time = "2025-01-29T05:37:11.71Z" }, + { url = "https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865, upload-time = "2025-01-29T05:37:14.309Z" }, + { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699, upload-time = "2025-01-29T04:18:17.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028, upload-time = "2025-01-29T04:18:51.711Z" }, + { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" }, + { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" }, + { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" }, + { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" }, + { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" }, + { url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" }, + { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, +] + +[[package]] +name = "cachetools" +version = "5.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/81/3747dad6b14fa2cf53fcf10548cf5aea6913e96fab41a3c198676f8948a5/cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", size = 28380, upload-time = "2025-02-20T21:01:19.524Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419 }, - { url = 
"https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080 }, - { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886 }, - { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404 }, - { url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372 }, - { url = "https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865 }, - { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699 }, - { url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028 }, - { url = 
"https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988 }, - { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985 }, - { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816 }, - { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860 }, - { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673 }, - { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190 }, - { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926 }, - { url = 
"https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613 }, - { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646 }, + { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080, upload-time = "2025-02-20T21:01:16.647Z" }, ] [[package]] name = "certifi" -version = "2025.4.26" +version = "2025.7.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705 } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618 }, + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, + { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, + { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = "https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, + { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, + { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, + { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, + { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, + { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, + { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, + { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, + { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", 
size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, + { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, ] [[package]] @@ -139,78 +509,117 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342 } +sdist = { url = 
"https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215 }, + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = 
"2022-10-25T02:36:20.889Z" }, ] [[package]] name = "coverage" -version = "7.8.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/27/b4/a707d96c2c1ce9402ce1ce7124c53b9e4e1f3e617652a5ed2fbba4c9b4be/coverage-7.8.1.tar.gz", hash = "sha256:d41d4da5f2871b1782c6b74948d2d37aac3a5b39b43a6ba31d736b97a02ae1f1", size = 812193 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/d7/8beb40ec92d6f7bd25ff84dd1a23e46d02ea0c2291cf085c59b6ad351dbc/coverage-7.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7af3990490982fbd2437156c69edbe82b7edf99bc60302cceeeaf79afb886b8", size = 211571 }, - { url = "https://files.pythonhosted.org/packages/6f/ec/977d4a7e0c03d43895555bc8b1a9230cb346622e3fd4c5389304cc517355/coverage-7.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5757a7b25fe48040fa120ba6597f5f885b01e323e0d13fe21ff95a70c0f76b7", size = 212002 }, - { url = "https://files.pythonhosted.org/packages/31/ac/8c3d0cb74a734e2dfc29ed390691f11fec269a7719425c98b8d255e0558c/coverage-7.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8f105631835fdf191c971c4da93d27e732e028d73ecaa1a88f458d497d026cf", size = 241128 }, - { url = "https://files.pythonhosted.org/packages/05/32/12159834aed6a8ed99364db284de79a782aa236a4c8187f28f25579248d4/coverage-7.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21645788c5c2afa3df2d4b607638d86207b84cb495503b71e80e16b4c6b44e80", size = 239026 }, - { url = "https://files.pythonhosted.org/packages/04/85/4b384f71c49f5fb8d753eaa128f05ed338d0421663e0545038860839c592/coverage-7.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e93f36a5c9d995f40e9c4cd9bbabd83fd78705792fa250980256c93accd07bb6", size = 240172 }, - { url = 
"https://files.pythonhosted.org/packages/31/dc/4d01e976489971edee5ccd5ae302503909d0e0adffc6ea4fba637a3f4f94/coverage-7.8.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d591f2ddad432b794f77dc1e94334a80015a3fc7fa07fd6aed8f40362083be5b", size = 240086 }, - { url = "https://files.pythonhosted.org/packages/27/74/e1543f1de992f823edf7232c6ce7488aa5807bd24e9ab1ab3c95895f32d3/coverage-7.8.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:be2b1a455b3ecfee20638289bb091a95216887d44924a41c28a601efac0916e8", size = 238791 }, - { url = "https://files.pythonhosted.org/packages/e3/a7/344dba28ab0815024a0c005e2a6c1546c00e9acdd20a9d23bf1b14f6c16c/coverage-7.8.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:061a3bf679dc38fe34d3822f10a9977d548de86b440010beb1e3b44ba93d20f7", size = 239096 }, - { url = "https://files.pythonhosted.org/packages/09/df/4c69d6fee9a91672bd96c3aa7a8b3daa204d6a754aaa1203d0797417a088/coverage-7.8.1-cp310-cp310-win32.whl", hash = "sha256:12950b6373dc9dfe1ce22a8506ec29c82bfc5b38146ced0a222f38cf5d99a56d", size = 214146 }, - { url = "https://files.pythonhosted.org/packages/5e/cc/58712d4627dc36e9028ed3a04b21c7eb421076421daa8114af7a45c4ca6a/coverage-7.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:11e5ea0acd8cc5d23030c34dfb2eb6638ad886328df18cc69f8eefab73d1ece5", size = 215045 }, - { url = "https://files.pythonhosted.org/packages/78/7e/224415a4424b610f7f05429b1099daee32eeb98cb39b3b8e8a1981431273/coverage-7.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc6bebc15c3b275174c66cf4e1c949a94c5c2a3edaa2f193a1225548c52c771", size = 211689 }, - { url = "https://files.pythonhosted.org/packages/c1/22/87ab73762926a50fb9f2eefe52951ce4f764097480370db86c1e99e075dc/coverage-7.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a6c35afd5b912101fabf42975d92d750cfce33c571508a82ff334a133c40d5", size = 212116 }, - { url = 
"https://files.pythonhosted.org/packages/96/39/cb084825f22d7d9f0064e47bb3af2b9a633172d573a8da72460c96564bd5/coverage-7.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b37729ba34c116a3b2b6fb99df5c37a4ca40e96f430070488fd7a1077ad44907", size = 244739 }, - { url = "https://files.pythonhosted.org/packages/2b/5f/fdf000ea0ec1741b4c81367a44eeec036db92ba8e18a0cc5f9e2c840d0a9/coverage-7.8.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6424c716f4c38ff8f62b602e6b94cde478dadda542a1cb3fe2fe2520cc2aae3", size = 242429 }, - { url = "https://files.pythonhosted.org/packages/ca/7f/3697436ca527d4cf69e3f251fe24cd2958137442f1fe83b297bb94a7a490/coverage-7.8.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bcfafb2809cd01be8ffe5f962e01b0fbe4cc1d74513434c52ff2dd05b86d492", size = 244218 }, - { url = "https://files.pythonhosted.org/packages/71/fa/486c4c0cbed2ab67ff840c90c40184140f54c31d507344451afa26c3bb0e/coverage-7.8.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e3f65da9701648d226b6b24ded3e2528b72075e48d7540968cd857c3bd4c5321", size = 243866 }, - { url = "https://files.pythonhosted.org/packages/cb/77/03e336b4c4fa329c9c6ec93ac7f64d2d4984ce8e0a585c195b35e9a3c2a6/coverage-7.8.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:173e16969f990688aae4b4487717c44330bc57fd8b61a6216ce8eeb827eb5c0d", size = 242038 }, - { url = "https://files.pythonhosted.org/packages/d9/fb/2ced07e129e2735b7e4102891f380b05f994e3764abac711c597ea83c60c/coverage-7.8.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3763b9a4bc128f72da5dcfd7fcc7c7d6644ed28e8f2db473ce1ef0dd37a43fa9", size = 242568 }, - { url = "https://files.pythonhosted.org/packages/59/96/47c47ab041f795979f8eed3fb2a93c8eb5dba83a8b78ee5c47535f10f015/coverage-7.8.1-cp311-cp311-win32.whl", hash = "sha256:d074380f587360d2500f3b065232c67ae248aaf739267807adbcd29b88bdf864", size = 
214197 }, - { url = "https://files.pythonhosted.org/packages/e9/14/7cf088fc11df2e20a531f13e2ce123579e0dcbcb052a76ece6fdb9f2997d/coverage-7.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:cd21de85aa0e247b79c6c41f8b5541b54285550f2da6a9448d82b53234d3611b", size = 215111 }, - { url = "https://files.pythonhosted.org/packages/aa/78/781501aa4759026dcef8024b404cacc4094348e5e199ed660c31f4650a33/coverage-7.8.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2d8f844e837374a9497e11722d9eb9dfeb33b1b5d31136786c39a4c1a3073c6d", size = 211875 }, - { url = "https://files.pythonhosted.org/packages/e6/00/a8a4548c22b73f8fd4373714f5a4cce3584827e2603847a8d90fba129807/coverage-7.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9cd54a762667c32112df5d6f059c5d61fa532ee06460948cc5bcbf60c502f5c9", size = 212129 }, - { url = "https://files.pythonhosted.org/packages/9e/41/5cdc34afdc53b7f200439eb915f50d6ba17e3b0b5cdb6bb04d0ed9662703/coverage-7.8.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:958b513e23286178b513a6b4d975fe9e7cddbcea6e5ebe8d836e4ef067577154", size = 246176 }, - { url = "https://files.pythonhosted.org/packages/f0/1f/ca8e37fdd282dd6ebc4191a9fafcb46b6bf75e05a0fd796d6907399e44ea/coverage-7.8.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b31756ea647b6ef53190f6b708ad0c4c2ea879bc17799ba5b0699eee59ecf7b", size = 243068 }, - { url = "https://files.pythonhosted.org/packages/cf/89/727503da5870fe1031ec443699beab44e02548d9873fe0a60adf6589fdd1/coverage-7.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccad4e29ac1b6f75bfeedb2cac4860fe5bd9e0a2f04c3e3218f661fa389ab101", size = 245329 }, - { url = "https://files.pythonhosted.org/packages/25/1f/6935baf26071a66f390159ceb5c5bccfc898d00a90166b6ffc61b964856a/coverage-7.8.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:452f3831c64f5f50260e18a89e613594590d6ceac5206a9b7d76ba43586b01b3", size = 245100 }, - { url = "https://files.pythonhosted.org/packages/3b/1f/0e5d68b12deb8a5c9648f61b515798e201f72fec17a0c7373a5f4903f8d8/coverage-7.8.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9296df6a33b8539cd753765eb5b47308602263a14b124a099cbcf5f770d7cf90", size = 243314 }, - { url = "https://files.pythonhosted.org/packages/21/5d/375ba28a78e96a06ef0f1572b393e3fefd9d0deecf3ef9995eff1b1cea67/coverage-7.8.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d52d79dfd3b410b153b6d65b0e3afe834eca2b969377f55ad73c67156d35af0d", size = 244487 }, - { url = "https://files.pythonhosted.org/packages/08/92/1b7fdf0924d8e6d7c2418d313c12d6e19c9a748448faedcc017082ec5b63/coverage-7.8.1-cp312-cp312-win32.whl", hash = "sha256:ebdf212e1ed85af63fa1a76d556c0a3c7b34348ffba6e145a64b15f003ad0a2b", size = 214367 }, - { url = "https://files.pythonhosted.org/packages/07/b1/632f9e128ee9e149cfa80a3130362684244668b0dc6efedd6e466baaeb48/coverage-7.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:c04a7903644ccea8fa07c3e76db43ca31c8d453f93c5c94c0f9b82efca225543", size = 215169 }, - { url = "https://files.pythonhosted.org/packages/ed/0a/696a8d6c245a72f61589e2015a633fab5aacd8c916802df41d23e387b442/coverage-7.8.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd5c305faa2e69334a53061b3168987847dadc2449bab95735242a9bde92fde8", size = 211902 }, - { url = "https://files.pythonhosted.org/packages/3b/2f/0c065dfaf497586cf1693dee2a94e7489a4be840a5bbe765a7a78735268b/coverage-7.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:af6b8cdf0857fd4e6460dd6639c37c3f82163127f6112c1942b5e6a52a477676", size = 212175 }, - { url = "https://files.pythonhosted.org/packages/ff/a1/a8a40658f67311c96c3d9073293fefee8a9485906ed531546dffe35fdd4b/coverage-7.8.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e233a56bbf99e4cb134c4f8e63b16c77714e3987daf2c5aa10c3ba8c4232d730", size = 245564 }, - { url = 
"https://files.pythonhosted.org/packages/6e/94/dc36e2256ce484f482ed5b2a103a261009c301cdad237fdefe2a9b6ddeab/coverage-7.8.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dabc70012fd7b58a8040a7bc1b5f71fd0e62e2138aefdd8367d3d24bf82c349", size = 242719 }, - { url = "https://files.pythonhosted.org/packages/73/d7/d096859c59f02d4550e6bc9180bd06c88313c32977d7458e0d4ed06ed057/coverage-7.8.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1f8e96455907496b3e4ea16f63bb578da31e17d2805278b193525e7714f17f2", size = 244634 }, - { url = "https://files.pythonhosted.org/packages/be/a0/6f4db84d1d3334ca37c2dae02a54761a1a3918aec56faec26f1590077181/coverage-7.8.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0034ceec8e91fdaf77350901cc48f47efd00f23c220a3f9fc1187774ddf307cb", size = 244824 }, - { url = "https://files.pythonhosted.org/packages/96/46/1e74016ba7d9f4242170f9d814454e6483a640332a67c0e139dab7d85762/coverage-7.8.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82db9344a07dd9106796b9fe8805425633146a7ea7fed5ed07c65a64d0bb79e1", size = 242872 }, - { url = "https://files.pythonhosted.org/packages/22/41/51df77f279b49e7dd05ee9dfe746cf8698c873ffdf7fbe57aaee9522ec67/coverage-7.8.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9772c9e266b2ca4999180c12b90c8efb4c5c9ad3e55f301d78bc579af6467ad9", size = 244179 }, - { url = "https://files.pythonhosted.org/packages/b8/83/6207522f3afb64592c47353bc79b0e3e6c3f48fde5e5221ab2b80a12e93d/coverage-7.8.1-cp313-cp313-win32.whl", hash = "sha256:6f24a1e2c373a77afae21bc512466a91e31251685c271c5309ee3e557f6e3e03", size = 214395 }, - { url = "https://files.pythonhosted.org/packages/43/b8/cd40a8fff1633112ac40edde9006aceaa55b32a84976394a42c33547ef95/coverage-7.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:76a4e1d62505a21971968be61ae17cbdc5e0c483265a37f7ddbbc050f9c0b8ec", size = 215195 }, - { url = 
"https://files.pythonhosted.org/packages/7e/f0/8fea9beb378cdce803ba838293314b21527f4edab58dcbe2e6a5553e7dc8/coverage-7.8.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:35dd5d405a1d378c39f3f30f628a25b0b99f1b8e5bdd78275df2e7b0404892d7", size = 212738 }, - { url = "https://files.pythonhosted.org/packages/0c/90/f28953cd1246ad7839874ef97e181f153d4274cc6db21857fbca18b89c97/coverage-7.8.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:87b86a87f8de2e1bd0bcd45faf1b1edf54f988c8857157300e0336efcfb8ede6", size = 212958 }, - { url = "https://files.pythonhosted.org/packages/fb/70/3f3d34ef68534afa73aee75537d1daf1e91029738cbf052ef828313aa960/coverage-7.8.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce4553a573edb363d5db12be1c044826878bec039159d6d4eafe826ef773396d", size = 257024 }, - { url = "https://files.pythonhosted.org/packages/cf/66/96ab415609b777adfcfa00f29d75d2278da139c0958de7a50dd0023811e6/coverage-7.8.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db181a1896e0bad75b3bf4916c49fd3cf6751f9cc203fe0e0ecbee1fc43590fa", size = 252867 }, - { url = "https://files.pythonhosted.org/packages/52/4f/3d48704c62fa5f72447005b8a77cc9cce5e164c2df357433442d17f2ac0a/coverage-7.8.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ce2606a171f9cf7c15a77ca61f979ffc0e0d92cd2fb18767cead58c1d19f58e", size = 255096 }, - { url = "https://files.pythonhosted.org/packages/64/1d/e8d4ac647c1967dd3dbc250fb4595b838b7067ad32602a7339ac467d9c5a/coverage-7.8.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4fc4f7cff2495d6d112353c33a439230a6de0b7cd0c2578f1e8d75326f63d783", size = 256276 }, - { url = "https://files.pythonhosted.org/packages/9c/e4/62e2f9521f3758dea07bcefc2c9c0dd34fa67d7035b0443c7c3072e6308b/coverage-7.8.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:ff619c58322d9d6df0a859dc76c3532d7bdbc125cb040f7cd642141446b4f654", size = 254478 }, - { url = "https://files.pythonhosted.org/packages/49/41/7af246f5e68272f97a31a122da5878747e941fef019430485534d1f6a44a/coverage-7.8.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c0d6290a466a6f3fadf6add2dd4ec11deba4e1a6e3db2dd284edd497aadf802f", size = 255255 }, - { url = "https://files.pythonhosted.org/packages/05/5d/5dacd7915972f82d909f36974c6415667dae08a32478d87dfdbac6788e22/coverage-7.8.1-cp313-cp313t-win32.whl", hash = "sha256:e4e893c7f7fb12271a667d5c1876710fae06d7580343afdb5f3fc4488b73209e", size = 215112 }, - { url = "https://files.pythonhosted.org/packages/8b/89/48e77e71e81e5b79fd6471083d087cd69517e5f585b548d87c92d5ae873c/coverage-7.8.1-cp313-cp313t-win_amd64.whl", hash = "sha256:41d142eefbc0bb3be160a77b2c0fbec76f345387676265052e224eb6c67b7af3", size = 216270 }, - { url = "https://files.pythonhosted.org/packages/94/aa/f2063b32526002f639ac0081f177f8f0d3a8389ac08e84a02b8cca22d2c0/coverage-7.8.1-pp39.pp310.pp311-none-any.whl", hash = "sha256:adafe9d71a940927dd3ad8d487f521f11277f133568b7da622666ebd08923191", size = 203637 }, - { url = "https://files.pythonhosted.org/packages/1b/a1/4d968d4605f3a87a809f0c8f495eed81656c93cf6c00818334498ad6ad45/coverage-7.8.1-py3-none-any.whl", hash = "sha256:e54b80885b0e61d346accc5709daf8762471a452345521cc9281604a907162c2", size = 203623 }, +version = "7.13.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/9a/3742e58fd04b233df95c012ee9f3dfe04708a5e1d32613bd2d47d4e1be0d/coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147", size = 218633, upload-time = "2025-12-28T15:40:10.165Z" }, + { url = "https://files.pythonhosted.org/packages/7e/45/7e6bdc94d89cd7c8017ce735cf50478ddfe765d4fbf0c24d71d30ea33d7a/coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d", size = 219147, upload-time = "2025-12-28T15:40:12.069Z" }, + { url = "https://files.pythonhosted.org/packages/f7/38/0d6a258625fd7f10773fe94097dc16937a5f0e3e0cdf3adef67d3ac6baef/coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0", size = 245894, upload-time = "2025-12-28T15:40:13.556Z" }, + { url = "https://files.pythonhosted.org/packages/27/58/409d15ea487986994cbd4d06376e9860e9b157cfbfd402b1236770ab8dd2/coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90", size = 247721, upload-time = "2025-12-28T15:40:15.37Z" }, + { url = "https://files.pythonhosted.org/packages/da/bf/6e8056a83fd7a96c93341f1ffe10df636dd89f26d5e7b9ca511ce3bcf0df/coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d", size = 249585, upload-time = "2025-12-28T15:40:17.226Z" }, + { url = "https://files.pythonhosted.org/packages/f4/15/e1daff723f9f5959acb63cbe35b11203a9df77ee4b95b45fffd38b318390/coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b", size = 246597, upload-time = "2025-12-28T15:40:19.028Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/a6/1efd31c5433743a6ddbc9d37ac30c196bb07c7eab3d74fbb99b924c93174/coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6", size = 247626, upload-time = "2025-12-28T15:40:20.846Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9f/1609267dd3e749f57fdd66ca6752567d1c13b58a20a809dc409b263d0b5f/coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e", size = 245629, upload-time = "2025-12-28T15:40:22.397Z" }, + { url = "https://files.pythonhosted.org/packages/e2/f6/6815a220d5ec2466383d7cc36131b9fa6ecbe95c50ec52a631ba733f306a/coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae", size = 245901, upload-time = "2025-12-28T15:40:23.836Z" }, + { url = "https://files.pythonhosted.org/packages/ac/58/40576554cd12e0872faf6d2c0eb3bc85f71d78427946ddd19ad65201e2c0/coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29", size = 246505, upload-time = "2025-12-28T15:40:25.421Z" }, + { url = "https://files.pythonhosted.org/packages/3b/77/9233a90253fba576b0eee81707b5781d0e21d97478e5377b226c5b096c0f/coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f", size = 221257, upload-time = "2025-12-28T15:40:27.217Z" }, + { url = "https://files.pythonhosted.org/packages/e0/43/e842ff30c1a0a623ec80db89befb84a3a7aad7bfe44a6ea77d5a3e61fedd/coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1", size = 222191, upload-time = "2025-12-28T15:40:28.916Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/9b/77baf488516e9ced25fc215a6f75d803493fc3f6a1a1227ac35697910c2a/coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88", size = 218755, upload-time = "2025-12-28T15:40:30.812Z" }, + { url = "https://files.pythonhosted.org/packages/d7/cd/7ab01154e6eb79ee2fab76bf4d89e94c6648116557307ee4ebbb85e5c1bf/coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3", size = 219257, upload-time = "2025-12-28T15:40:32.333Z" }, + { url = "https://files.pythonhosted.org/packages/01/d5/b11ef7863ffbbdb509da0023fad1e9eda1c0eaea61a6d2ea5b17d4ac706e/coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9", size = 249657, upload-time = "2025-12-28T15:40:34.1Z" }, + { url = "https://files.pythonhosted.org/packages/f7/7c/347280982982383621d29b8c544cf497ae07ac41e44b1ca4903024131f55/coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee", size = 251581, upload-time = "2025-12-28T15:40:36.131Z" }, + { url = "https://files.pythonhosted.org/packages/82/f6/ebcfed11036ade4c0d75fa4453a6282bdd225bc073862766eec184a4c643/coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf", size = 253691, upload-time = "2025-12-28T15:40:37.626Z" }, + { url = "https://files.pythonhosted.org/packages/02/92/af8f5582787f5d1a8b130b2dcba785fa5e9a7a8e121a0bb2220a6fdbdb8a/coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3", size = 249799, upload-time = 
"2025-12-28T15:40:39.47Z" }, + { url = "https://files.pythonhosted.org/packages/24/aa/0e39a2a3b16eebf7f193863323edbff38b6daba711abaaf807d4290cf61a/coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef", size = 251389, upload-time = "2025-12-28T15:40:40.954Z" }, + { url = "https://files.pythonhosted.org/packages/73/46/7f0c13111154dc5b978900c0ccee2e2ca239b910890e674a77f1363d483e/coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851", size = 249450, upload-time = "2025-12-28T15:40:42.489Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ca/e80da6769e8b669ec3695598c58eef7ad98b0e26e66333996aee6316db23/coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb", size = 249170, upload-time = "2025-12-28T15:40:44.279Z" }, + { url = "https://files.pythonhosted.org/packages/af/18/9e29baabdec1a8644157f572541079b4658199cfd372a578f84228e860de/coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba", size = 250081, upload-time = "2025-12-28T15:40:45.748Z" }, + { url = "https://files.pythonhosted.org/packages/00/f8/c3021625a71c3b2f516464d322e41636aea381018319050a8114105872ee/coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19", size = 221281, upload-time = "2025-12-28T15:40:47.232Z" }, + { url = "https://files.pythonhosted.org/packages/27/56/c216625f453df6e0559ed666d246fcbaaa93f3aa99eaa5080cea1229aa3d/coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a", size = 222215, upload-time = "2025-12-28T15:40:49.19Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/9a/be342e76f6e531cae6406dc46af0d350586f24d9b67fdfa6daee02df71af/coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c", size = 220886, upload-time = "2025-12-28T15:40:51.067Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" }, + { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" }, + { url = "https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" }, + { url = "https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" }, + { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" }, + { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = "2025-12-28T15:41:06.411Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" }, + { url = "https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" }, + { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" }, + { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, + { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, + { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, + { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, + { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" }, + { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" }, + { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, + { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, + { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, + { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, + { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, + { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" }, + { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, + { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, + { url 
= "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, + { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, + { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, + { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" }, + { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" }, + { url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" }, + { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" }, + { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = 
"2025-12-28T15:42:18.54Z" }, + { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" }, + { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" }, + { url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" }, + { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" }, + { url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" }, + { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = 
"2025-12-28T15:42:40.788Z" }, + { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" }, + { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" }, + { url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" }, + { url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" }, ] [package.optional-dependencies] @@ -218,9 +627,69 @@ toml = [ { name = "tomli", marker = "python_full_version <= '3.11'" }, ] +[[package]] +name = "cryptography" +version = "46.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/19/f748958276519adf6a0c1e79e7b8860b4830dda55ccdf29f2719b5fc499c/cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59", size = 749301, upload-time = "2026-01-28T00:24:37.379Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485", size = 7173686, upload-time = "2026-01-28T00:23:07.515Z" }, + { url = "https://files.pythonhosted.org/packages/87/91/874b8910903159043b5c6a123b7e79c4559ddd1896e38967567942635778/cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc", size = 4275871, upload-time = "2026-01-28T00:23:09.439Z" }, + { url = "https://files.pythonhosted.org/packages/c0/35/690e809be77896111f5b195ede56e4b4ed0435b428c2f2b6d35046fbb5e8/cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0", size = 4423124, upload-time = "2026-01-28T00:23:11.529Z" }, + 
{ url = "https://files.pythonhosted.org/packages/1a/5b/a26407d4f79d61ca4bebaa9213feafdd8806dc69d3d290ce24996d3cfe43/cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa", size = 4277090, upload-time = "2026-01-28T00:23:13.123Z" }, + { url = "https://files.pythonhosted.org/packages/0c/d8/4bb7aec442a9049827aa34cee1aa83803e528fa55da9a9d45d01d1bb933e/cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81", size = 4947652, upload-time = "2026-01-28T00:23:14.554Z" }, + { url = "https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255", size = 4455157, upload-time = "2026-01-28T00:23:16.443Z" }, + { url = "https://files.pythonhosted.org/packages/0a/05/19d849cf4096448779d2dcc9bb27d097457dac36f7273ffa875a93b5884c/cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e", size = 3981078, upload-time = "2026-01-28T00:23:17.838Z" }, + { url = "https://files.pythonhosted.org/packages/e6/89/f7bac81d66ba7cde867a743ea5b37537b32b5c633c473002b26a226f703f/cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c", size = 4276213, upload-time = "2026-01-28T00:23:19.257Z" }, + { url = "https://files.pythonhosted.org/packages/da/9f/7133e41f24edd827020ad21b068736e792bc68eecf66d93c924ad4719fb3/cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32", size = 4912190, upload-time = "2026-01-28T00:23:21.244Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/f7/6d43cbaddf6f65b24816e4af187d211f0bc536a29961f69faedc48501d8e/cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616", size = 4454641, upload-time = "2026-01-28T00:23:22.866Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4f/ebd0473ad656a0ac912a16bd07db0f5d85184924e14fc88feecae2492834/cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0", size = 4405159, upload-time = "2026-01-28T00:23:25.278Z" }, + { url = "https://files.pythonhosted.org/packages/d1/f7/7923886f32dc47e27adeff8246e976d77258fd2aa3efdd1754e4e323bf49/cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0", size = 4666059, upload-time = "2026-01-28T00:23:26.766Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a7/0fca0fd3591dffc297278a61813d7f661a14243dd60f499a7a5b48acb52a/cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5", size = 3026378, upload-time = "2026-01-28T00:23:28.317Z" }, + { url = "https://files.pythonhosted.org/packages/2d/12/652c84b6f9873f0909374864a57b003686c642ea48c84d6c7e2c515e6da5/cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b", size = 3478614, upload-time = "2026-01-28T00:23:30.275Z" }, + { url = "https://files.pythonhosted.org/packages/b9/27/542b029f293a5cce59349d799d4d8484b3b1654a7b9a0585c266e974a488/cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908", size = 7116417, upload-time = "2026-01-28T00:23:31.958Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/f5/559c25b77f40b6bf828eabaf988efb8b0e17b573545edb503368ca0a2a03/cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da", size = 4264508, upload-time = "2026-01-28T00:23:34.264Z" }, + { url = "https://files.pythonhosted.org/packages/49/a1/551fa162d33074b660dc35c9bc3616fefa21a0e8c1edd27b92559902e408/cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829", size = 4409080, upload-time = "2026-01-28T00:23:35.793Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/4d8d129a755f5d6df1bbee69ea2f35ebfa954fa1847690d1db2e8bca46a5/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2", size = 4270039, upload-time = "2026-01-28T00:23:37.263Z" }, + { url = "https://files.pythonhosted.org/packages/4c/f5/ed3fcddd0a5e39321e595e144615399e47e7c153a1fb8c4862aec3151ff9/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085", size = 4926748, upload-time = "2026-01-28T00:23:38.884Z" }, + { url = "https://files.pythonhosted.org/packages/43/ae/9f03d5f0c0c00e85ecb34f06d3b79599f20630e4db91b8a6e56e8f83d410/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b", size = 4442307, upload-time = "2026-01-28T00:23:40.56Z" }, + { url = "https://files.pythonhosted.org/packages/8b/22/e0f9f2dae8040695103369cf2283ef9ac8abe4d51f68710bec2afd232609/cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd", size = 3959253, upload-time = "2026-01-28T00:23:42.827Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/5b/6a43fcccc51dae4d101ac7d378a8724d1ba3de628a24e11bf2f4f43cba4d/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2", size = 4269372, upload-time = "2026-01-28T00:23:44.655Z" }, + { url = "https://files.pythonhosted.org/packages/17/b7/0f6b8c1dd0779df2b526e78978ff00462355e31c0a6f6cff8a3e99889c90/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e", size = 4891908, upload-time = "2026-01-28T00:23:46.48Z" }, + { url = "https://files.pythonhosted.org/packages/83/17/259409b8349aa10535358807a472c6a695cf84f106022268d31cea2b6c97/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f", size = 4441254, upload-time = "2026-01-28T00:23:48.403Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fe/e4a1b0c989b00cee5ffa0764401767e2d1cf59f45530963b894129fd5dce/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82", size = 4396520, upload-time = "2026-01-28T00:23:50.26Z" }, + { url = "https://files.pythonhosted.org/packages/b3/81/ba8fd9657d27076eb40d6a2f941b23429a3c3d2f56f5a921d6b936a27bc9/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c", size = 4651479, upload-time = "2026-01-28T00:23:51.674Z" }, + { url = "https://files.pythonhosted.org/packages/00/03/0de4ed43c71c31e4fe954edd50b9d28d658fef56555eba7641696370a8e2/cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061", size = 3001986, upload-time = "2026-01-28T00:23:53.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/70/81830b59df7682917d7a10f833c4dab2a5574cd664e86d18139f2b421329/cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7", size = 3468288, upload-time = "2026-01-28T00:23:55.09Z" }, + { url = "https://files.pythonhosted.org/packages/56/f7/f648fdbb61d0d45902d3f374217451385edc7e7768d1b03ff1d0e5ffc17b/cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab", size = 7169583, upload-time = "2026-01-28T00:23:56.558Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cc/8f3224cbb2a928de7298d6ed4790f5ebc48114e02bdc9559196bfb12435d/cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef", size = 4275419, upload-time = "2026-01-28T00:23:58.364Z" }, + { url = "https://files.pythonhosted.org/packages/17/43/4a18faa7a872d00e4264855134ba82d23546c850a70ff209e04ee200e76f/cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d", size = 4419058, upload-time = "2026-01-28T00:23:59.867Z" }, + { url = "https://files.pythonhosted.org/packages/ee/64/6651969409821d791ba12346a124f55e1b76f66a819254ae840a965d4b9c/cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973", size = 4278151, upload-time = "2026-01-28T00:24:01.731Z" }, + { url = "https://files.pythonhosted.org/packages/20/0b/a7fce65ee08c3c02f7a8310cc090a732344066b990ac63a9dfd0a655d321/cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4", size = 4939441, upload-time = "2026-01-28T00:24:03.175Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/a7/20c5701e2cd3e1dfd7a19d2290c522a5f435dd30957d431dcb531d0f1413/cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af", size = 4451617, upload-time = "2026-01-28T00:24:05.403Z" }, + { url = "https://files.pythonhosted.org/packages/00/dc/3e16030ea9aa47b63af6524c354933b4fb0e352257c792c4deeb0edae367/cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263", size = 3977774, upload-time = "2026-01-28T00:24:06.851Z" }, + { url = "https://files.pythonhosted.org/packages/42/c8/ad93f14118252717b465880368721c963975ac4b941b7ef88f3c56bf2897/cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095", size = 4277008, upload-time = "2026-01-28T00:24:08.926Z" }, + { url = "https://files.pythonhosted.org/packages/00/cf/89c99698151c00a4631fbfcfcf459d308213ac29e321b0ff44ceeeac82f1/cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b", size = 4903339, upload-time = "2026-01-28T00:24:12.009Z" }, + { url = "https://files.pythonhosted.org/packages/03/c3/c90a2cb358de4ac9309b26acf49b2a100957e1ff5cc1e98e6c4996576710/cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019", size = 4451216, upload-time = "2026-01-28T00:24:13.975Z" }, + { url = "https://files.pythonhosted.org/packages/96/2c/8d7f4171388a10208671e181ca43cdc0e596d8259ebacbbcfbd16de593da/cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4", size = 4404299, upload-time = "2026-01-28T00:24:16.169Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/23/cbb2036e450980f65c6e0a173b73a56ff3bccd8998965dea5cc9ddd424a5/cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b", size = 4664837, upload-time = "2026-01-28T00:24:17.629Z" }, + { url = "https://files.pythonhosted.org/packages/0a/21/f7433d18fe6d5845329cbdc597e30caf983229c7a245bcf54afecc555938/cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc", size = 3009779, upload-time = "2026-01-28T00:24:20.198Z" }, + { url = "https://files.pythonhosted.org/packages/3a/6a/bd2e7caa2facffedf172a45c1a02e551e6d7d4828658c9a245516a598d94/cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976", size = 3466633, upload-time = "2026-01-28T00:24:21.851Z" }, + { url = "https://files.pythonhosted.org/packages/59/e0/f9c6c53e1f2a1c2507f00f2faba00f01d2f334b35b0fbfe5286715da2184/cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b", size = 3476316, upload-time = "2026-01-28T00:24:24.144Z" }, + { url = "https://files.pythonhosted.org/packages/27/7a/f8d2d13227a9a1a9fe9c7442b057efecffa41f1e3c51d8622f26b9edbe8f/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da", size = 4216693, upload-time = "2026-01-28T00:24:25.758Z" }, + { url = "https://files.pythonhosted.org/packages/c5/de/3787054e8f7972658370198753835d9d680f6cd4a39df9f877b57f0dd69c/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80", size = 4382765, upload-time = "2026-01-28T00:24:27.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/5f/60e0afb019973ba6a0b322e86b3d61edf487a4f5597618a430a2a15f2d22/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822", size = 4216066, upload-time = "2026-01-28T00:24:29.056Z" }, + { url = "https://files.pythonhosted.org/packages/81/8e/bf4a0de294f147fee66f879d9bae6f8e8d61515558e3d12785dd90eca0be/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947", size = 4382025, upload-time = "2026-01-28T00:24:30.681Z" }, + { url = "https://files.pythonhosted.org/packages/79/f4/9ceb90cfd6a3847069b0b0b353fd3075dc69b49defc70182d8af0c4ca390/cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3", size = 3406043, upload-time = "2026-01-28T00:24:32.236Z" }, +] + [[package]] name = "datamodel-code-generator" -version = "0.30.1" +version = "0.53.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "argcomplete" }, @@ -234,33 +703,30 @@ dependencies = [ { name = "pyyaml" }, { name = "tomli", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ef/bc/627a77eafcf7101c9f5710130b2def98593709a8d29676e4a58f09cd2a23/datamodel_code_generator-0.30.1.tar.gz", hash = "sha256:d125012face4cd1eca6c9300297a1f5775a9d5ff8fc3f68d34d0944a7beea105", size = 446630 } +sdist = { url = "https://files.pythonhosted.org/packages/40/65/3802abca0291263862a16e032e984e61e4d0d30a344d9be97815721d64ff/datamodel_code_generator-0.53.0.tar.gz", hash = "sha256:af46b57ad78e6435873132c52843ef0ec7b768a591d3b9917d3409dfc1ab1c90", size = 809949, upload-time = "2026-01-12T18:14:05.459Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e1/b3/01aab190372914399bbc77f89ac3b24b439c3d97a52a6198f1cd1396ef3a/datamodel_code_generator-0.30.1-py3-none-any.whl", hash = "sha256:9601dfa3da8aa8d8d54e182059f78836b1768a807d5c26df798db12d4054c8f3", size = 118045 }, + { url = "https://files.pythonhosted.org/packages/ff/43/5dbb6fe09842e10062f94016ccb48c9613f2443253866de3d7b815713b4d/datamodel_code_generator-0.53.0-py3-none-any.whl", hash = "sha256:d1cc2abe79f99b8208c363f5f4b603c29290327ff4e3219a08c0fff45f42aff4", size = 258912, upload-time = "2026-01-12T18:14:02.737Z" }, ] [[package]] -name = "deprecated" -version = "1.2.18" +name = "distlib" +version = "0.4.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wrapt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/98/97/06afe62762c9a8a86af0cfb7bfdab22a43ad17138b07af5b1a58442690a2/deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d", size = 2928744 } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998 }, + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] [[package]] name = "dunamai" -version = "1.24.1" +version = "1.25.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name 
= "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/54/22/7f46b0146ef614cd6f80e4bcb188dabe33e90b4e0af028e16f597f5826ad/dunamai-1.24.1.tar.gz", hash = "sha256:3aa3348f77242da8628b23f11e89569343440f0f912bcef32a1fa891cf8e7215", size = 45616 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/2f/194d9a34c4d831c6563d2d990720850f0baef9ab60cb4ad8ae0eff6acd34/dunamai-1.25.0.tar.gz", hash = "sha256:a7f8360ea286d3dbaf0b6a1473f9253280ac93d619836ad4514facb70c0719d1", size = 46155, upload-time = "2025-07-04T19:25:56.082Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/d6/6ed8b439906ca2e88d65bddf002e21239678aca6001d8fb82e8e2b196245/dunamai-1.24.1-py3-none-any.whl", hash = "sha256:4370e406d8ce195fc4b066b5c326bfa9adb269c4b8719b4e4fd90b63a2144bf7", size = 26654 }, + { url = "https://files.pythonhosted.org/packages/36/41/04e2a649058b0713b00d6c9bd22da35618bb157289e05d068e51fddf8d7e/dunamai-1.25.0-py3-none-any.whl", hash = "sha256:7f9dc687dd3256e613b6cc978d9daabfd2bb5deb8adc541fc135ee423ffa98ab", size = 27022, upload-time = "2025-07-04T19:25:54.863Z" }, ] [[package]] @@ -268,29 +734,292 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = 
"2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, +] + +[[package]] +name = "fastapi" +version = "0.128.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749 } +sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674 }, + { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = 
"sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" }, +] + +[[package]] +name = "filelock" +version = "3.20.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, ] [[package]] name = "genson" version = "1.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c5/cf/2303c8ad276dcf5ee2ad6cf69c4338fd86ef0f471a5207b069adf7a393cf/genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37", size = 34919 } +sdist = { url = "https://files.pythonhosted.org/packages/c5/cf/2303c8ad276dcf5ee2ad6cf69c4338fd86ef0f471a5207b069adf7a393cf/genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37", size = 34919, upload-time = "2024-05-15T22:08:49.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/5c/e226de133afd8bb267ec27eead9ae3d784b95b39a287ed404caab39a5f50/genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7", size = 21470, upload-time = "2024-05-15T22:08:47.056Z" }, +] + +[[package]] +name = "google-api-core" +version = "2.29.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { 
name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/10/05572d33273292bac49c2d1785925f7bc3ff2fe50e3044cf1062c1dde32e/google_api_core-2.29.0.tar.gz", hash = "sha256:84181be0f8e6b04006df75ddfe728f24489f0af57c96a529ff7cf45bc28797f7", size = 177828, upload-time = "2026-01-08T22:21:39.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/b6/85c4d21067220b9a78cfb81f516f9725ea6befc1544ec9bd2c1acd97c324/google_api_core-2.29.0-py3-none-any.whl", hash = "sha256:d30bc60980daa36e314b5d5a3e5958b0200cb44ca8fa1be2b614e932b75a3ea9", size = 173906, upload-time = "2026-01-08T22:21:36.093Z" }, +] + +[[package]] +name = "google-auth" +version = "2.40.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/9b/e92ef23b84fa10a64ce4831390b7a4c2e53c0132568d99d4ae61d04c8855/google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77", size = 281029, upload-time = "2025-06-04T18:04:57.577Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/63/b19553b658a1692443c62bd07e5868adaa0ad746a0751ba62c59568cd45b/google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca", size = 216137, upload-time = "2025-06-04T18:04:55.573Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" }, +] + +[[package]] +name = "greenlet" +version = "3.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/5c/e226de133afd8bb267ec27eead9ae3d784b95b39a287ed404caab39a5f50/genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7", size = 21470 }, + { url = "https://files.pythonhosted.org/packages/92/db/b4c12cff13ebac2786f4f217f06588bccd8b53d260453404ef22b121fc3a/greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be", size = 268977, upload-time = "2025-06-05T16:10:24.001Z" }, + { url = "https://files.pythonhosted.org/packages/52/61/75b4abd8147f13f70986df2801bf93735c1bd87ea780d70e3b3ecda8c165/greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac", size = 627351, upload-time = "2025-06-05T16:38:50.685Z" }, + { url = "https://files.pythonhosted.org/packages/35/aa/6894ae299d059d26254779a5088632874b80ee8cf89a88bca00b0709d22f/greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392", size = 638599, upload-time = "2025-06-05T16:41:34.057Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/64/e01a8261d13c47f3c082519a5e9dbf9e143cc0498ed20c911d04e54d526c/greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c", size = 634482, upload-time = "2025-06-05T16:48:16.26Z" }, + { url = "https://files.pythonhosted.org/packages/47/48/ff9ca8ba9772d083a4f5221f7b4f0ebe8978131a9ae0909cf202f94cd879/greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db", size = 633284, upload-time = "2025-06-05T16:13:01.599Z" }, + { url = "https://files.pythonhosted.org/packages/e9/45/626e974948713bc15775b696adb3eb0bd708bec267d6d2d5c47bb47a6119/greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b", size = 582206, upload-time = "2025-06-05T16:12:48.51Z" }, + { url = "https://files.pythonhosted.org/packages/b1/8e/8b6f42c67d5df7db35b8c55c9a850ea045219741bb14416255616808c690/greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712", size = 1111412, upload-time = "2025-06-05T16:36:45.479Z" }, + { url = "https://files.pythonhosted.org/packages/05/46/ab58828217349500a7ebb81159d52ca357da747ff1797c29c6023d79d798/greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00", size = 1135054, upload-time = "2025-06-05T16:12:36.478Z" }, + { url = "https://files.pythonhosted.org/packages/68/7f/d1b537be5080721c0f0089a8447d4ef72839039cdb743bdd8ffd23046e9a/greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302", size = 296573, upload-time = "2025-06-05T16:34:26.521Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219, upload-time = "2025-06-05T16:10:10.414Z" }, + { url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383, upload-time = "2025-06-05T16:38:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422, upload-time = "2025-06-05T16:41:35.259Z" }, + { url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375, upload-time = "2025-06-05T16:48:18.235Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627, upload-time = "2025-06-05T16:13:02.858Z" }, + { url = "https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502, upload-time = "2025-06-05T16:12:49.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498, upload-time = "2025-06-05T16:36:46.598Z" }, + { url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977, upload-time = "2025-06-05T16:12:38.262Z" }, + { url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017, upload-time = "2025-06-05T16:25:05.225Z" }, + { url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" }, + { url = "https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701, upload-time = "2025-06-05T16:48:19.604Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747, upload-time = "2025-06-05T16:13:04.628Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461, upload-time = "2025-06-05T16:12:50.792Z" }, + { url = "https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190, upload-time = "2025-06-05T16:36:48.59Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055, upload-time = "2025-06-05T16:12:40.457Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817, upload-time = "2025-06-05T16:29:49.244Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/cf/f5c0b23309070ae93de75c90d29300751a5aacefc0a3ed1b1d8edb28f08b/greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad", size = 270732, upload-time = "2025-06-05T16:10:08.26Z" }, + { url = "https://files.pythonhosted.org/packages/48/ae/91a957ba60482d3fecf9be49bc3948f341d706b52ddb9d83a70d42abd498/greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef", size = 639033, upload-time = "2025-06-05T16:38:53.983Z" }, + { url = "https://files.pythonhosted.org/packages/6f/df/20ffa66dd5a7a7beffa6451bdb7400d66251374ab40b99981478c69a67a8/greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3", size = 652999, upload-time = "2025-06-05T16:41:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/51/b4/ebb2c8cb41e521f1d72bf0465f2f9a2fd803f674a88db228887e6847077e/greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95", size = 647368, upload-time = "2025-06-05T16:48:21.467Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6a/1e1b5aa10dced4ae876a322155705257748108b7fd2e4fae3f2a091fe81a/greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb", size = 650037, upload-time = "2025-06-05T16:13:06.402Z" }, + { url = "https://files.pythonhosted.org/packages/26/f2/ad51331a157c7015c675702e2d5230c243695c788f8f75feba1af32b3617/greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b", size = 608402, upload-time = "2025-06-05T16:12:51.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/bc/862bd2083e6b3aff23300900a956f4ea9a4059de337f5c8734346b9b34fc/greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0", size = 1119577, upload-time = "2025-06-05T16:36:49.787Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/1fc0cc068cfde885170e01de40a619b00eaa8f2916bf3541744730ffb4c3/greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36", size = 1147121, upload-time = "2025-06-05T16:12:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/27/1a/199f9587e8cb08a0658f9c30f3799244307614148ffe8b1e3aa22f324dea/greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3", size = 297603, upload-time = "2025-06-05T16:20:12.651Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ca/accd7aa5280eb92b70ed9e8f7fd79dc50a2c21d8c73b9a0856f5b564e222/greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86", size = 271479, upload-time = "2025-06-05T16:10:47.525Z" }, + { url = "https://files.pythonhosted.org/packages/55/71/01ed9895d9eb49223280ecc98a557585edfa56b3d0e965b9fa9f7f06b6d9/greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97", size = 683952, upload-time = "2025-06-05T16:38:55.125Z" }, + { url = "https://files.pythonhosted.org/packages/ea/61/638c4bdf460c3c678a0a1ef4c200f347dff80719597e53b5edb2fb27ab54/greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728", size = 696917, upload-time = "2025-06-05T16:41:38.959Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/cc/0bd1a7eb759d1f3e3cc2d1bc0f0b487ad3cc9f34d74da4b80f226fde4ec3/greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a", size = 692443, upload-time = "2025-06-05T16:48:23.113Z" }, + { url = "https://files.pythonhosted.org/packages/67/10/b2a4b63d3f08362662e89c103f7fe28894a51ae0bc890fabf37d1d780e52/greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892", size = 692995, upload-time = "2025-06-05T16:13:07.972Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c6/ad82f148a4e3ce9564056453a71529732baf5448ad53fc323e37efe34f66/greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141", size = 655320, upload-time = "2025-06-05T16:12:53.453Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" }, +] + +[[package]] +name = "grpcio" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/17/ff4795dc9a34b6aee6ec379f1b66438a3789cd1315aac0cbab60d92f74b3/grpcio-1.76.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:65a20de41e85648e00305c1bb09a3598f840422e522277641145a32d42dcefcc", size = 5840037, upload-time = "2025-10-21T16:20:25.069Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ff/35f9b96e3fa2f12e1dcd58a4513a2e2294a001d64dec81677361b7040c9a/grpcio-1.76.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:40ad3afe81676fd9ec6d9d406eda00933f218038433980aa19d401490e46ecde", size = 11836482, upload-time = "2025-10-21T16:20:30.113Z" }, + { url = "https://files.pythonhosted.org/packages/3e/1c/8374990f9545e99462caacea5413ed783014b3b66ace49e35c533f07507b/grpcio-1.76.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:035d90bc79eaa4bed83f524331d55e35820725c9fbb00ffa1904d5550ed7ede3", size = 6407178, upload-time = "2025-10-21T16:20:32.733Z" }, + { url = "https://files.pythonhosted.org/packages/1e/77/36fd7d7c75a6c12542c90a6d647a27935a1ecaad03e0ffdb7c42db6b04d2/grpcio-1.76.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4215d3a102bd95e2e11b5395c78562967959824156af11fa93d18fdd18050990", size = 7075684, upload-time = "2025-10-21T16:20:35.435Z" }, + { url = "https://files.pythonhosted.org/packages/38/f7/e3cdb252492278e004722306c5a8935eae91e64ea11f0af3437a7de2e2b7/grpcio-1.76.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:49ce47231818806067aea3324d4bf13825b658ad662d3b25fada0bdad9b8a6af", size = 6611133, upload-time = "2025-10-21T16:20:37.541Z" }, + { url = "https://files.pythonhosted.org/packages/7e/20/340db7af162ccd20a0893b5f3c4a5d676af7b71105517e62279b5b61d95a/grpcio-1.76.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8cc3309d8e08fd79089e13ed4819d0af72aa935dd8f435a195fd152796752ff2", size = 7195507, upload-time = "2025-10-21T16:20:39.643Z" }, + { url = "https://files.pythonhosted.org/packages/10/f0/b2160addc1487bd8fa4810857a27132fb4ce35c1b330c2f3ac45d697b106/grpcio-1.76.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:971fd5a1d6e62e00d945423a567e42eb1fa678ba89072832185ca836a94daaa6", size = 8160651, upload-time = "2025-10-21T16:20:42.492Z" }, + { url = "https://files.pythonhosted.org/packages/2c/2c/ac6f98aa113c6ef111b3f347854e99ebb7fb9d8f7bb3af1491d438f62af4/grpcio-1.76.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d9adda641db7207e800a7f089068f6f645959f2df27e870ee81d44701dd9db3", size = 7620568, upload-time = "2025-10-21T16:20:45.995Z" }, + { url = "https://files.pythonhosted.org/packages/90/84/7852f7e087285e3ac17a2703bc4129fafee52d77c6c82af97d905566857e/grpcio-1.76.0-cp310-cp310-win32.whl", hash = "sha256:063065249d9e7e0782d03d2bca50787f53bd0fb89a67de9a7b521c4a01f1989b", size = 3998879, upload-time = "2025-10-21T16:20:48.592Z" }, + { url = "https://files.pythonhosted.org/packages/10/30/d3d2adcbb6dd3ff59d6ac3df6ef830e02b437fb5c90990429fd180e52f30/grpcio-1.76.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6ae758eb08088d36812dd5d9af7a9859c05b1e0f714470ea243694b49278e7b", size = 4706892, upload-time = "2025-10-21T16:20:50.697Z" }, + { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" }, + { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = 
"2025-10-21T16:20:59.3Z" }, + { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" }, + { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" }, + { url = "https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" }, + { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" }, + { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" }, + { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" }, + { url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" }, + { url = "https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267, upload-time = "2025-10-21T16:21:25.995Z" }, + { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963, upload-time = "2025-10-21T16:21:28.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484, upload-time = "2025-10-21T16:21:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 8127777, upload-time = "2025-10-21T16:21:33.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014, upload-time = "2025-10-21T16:21:41.882Z" }, + { url = "https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750, upload-time = "2025-10-21T16:21:44.006Z" }, + { url = "https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003, upload-time = "2025-10-21T16:21:46.244Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ed/71467ab770effc9e8cef5f2e7388beb2be26ed642d567697bb103a790c72/grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2", size = 5807716, upload-time = "2025-10-21T16:21:48.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/85/c6ed56f9817fab03fa8a111ca91469941fb514e3e3ce6d793cb8f1e1347b/grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468", size = 11821522, upload-time = "2025-10-21T16:21:51.142Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/2b8a235ab40c39cbc141ef647f8a6eb7b0028f023015a4842933bc0d6831/grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3", size = 6362558, upload-time = "2025-10-21T16:21:54.213Z" }, + { url = "https://files.pythonhosted.org/packages/bd/64/9784eab483358e08847498ee56faf8ff6ea8e0a4592568d9f68edc97e9e9/grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb", size = 7049990, upload-time = "2025-10-21T16:21:56.476Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/8c12319a6369434e7a184b987e8e9f3b49a114c489b8315f029e24de4837/grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae", size = 6575387, upload-time = "2025-10-21T16:21:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/15/0f/f12c32b03f731f4a6242f771f63039df182c8b8e2cf8075b245b409259d4/grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77", size = 7166668, upload-time = "2025-10-21T16:22:02.049Z" }, + { url = "https://files.pythonhosted.org/packages/ff/2d/3ec9ce0c2b1d92dd59d1c3264aaec9f0f7c817d6e8ac683b97198a36ed5a/grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03", size = 8124928, upload-time = "2025-10-21T16:22:04.984Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/74/fd3317be5672f4856bcdd1a9e7b5e17554692d3db9a3b273879dc02d657d/grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42", size = 7589983, upload-time = "2025-10-21T16:22:07.881Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/ca038cf420f405971f19821c8c15bcbc875505f6ffadafe9ffd77871dc4c/grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f", size = 3984727, upload-time = "2025-10-21T16:22:10.032Z" }, + { url = "https://files.pythonhosted.org/packages/41/80/84087dc56437ced7cdd4b13d7875e7439a52a261e3ab4e06488ba6173b0a/grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8", size = 4702799, upload-time = "2025-10-21T16:22:12.709Z" }, + { url = "https://files.pythonhosted.org/packages/b4/46/39adac80de49d678e6e073b70204091e76631e03e94928b9ea4ecf0f6e0e/grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62", size = 5808417, upload-time = "2025-10-21T16:22:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/a4531f7fb8b4e2a60b94e39d5d924469b7a6988176b3422487be61fe2998/grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd", size = 11828219, upload-time = "2025-10-21T16:22:17.954Z" }, + { url = "https://files.pythonhosted.org/packages/4b/1c/de55d868ed7a8bd6acc6b1d6ddc4aa36d07a9f31d33c912c804adb1b971b/grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc", size = 6367826, upload-time = "2025-10-21T16:22:20.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/64/99e44c02b5adb0ad13ab3adc89cb33cb54bfa90c74770f2607eea629b86f/grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a", size = 7049550, upload-time = "2025-10-21T16:22:23.637Z" }, + { url = "https://files.pythonhosted.org/packages/43/28/40a5be3f9a86949b83e7d6a2ad6011d993cbe9b6bd27bea881f61c7788b6/grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba", size = 6575564, upload-time = "2025-10-21T16:22:26.016Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a9/1be18e6055b64467440208a8559afac243c66a8b904213af6f392dc2212f/grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09", size = 7176236, upload-time = "2025-10-21T16:22:28.362Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/dba05d3fcc151ce6e81327541d2cc8394f442f6b350fead67401661bf041/grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc", size = 8125795, upload-time = "2025-10-21T16:22:31.075Z" }, + { url = "https://files.pythonhosted.org/packages/4a/45/122df922d05655f63930cf42c9e3f72ba20aadb26c100ee105cad4ce4257/grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc", size = 7592214, upload-time = "2025-10-21T16:22:33.831Z" }, + { url = "https://files.pythonhosted.org/packages/4a/6e/0b899b7f6b66e5af39e377055fb4a6675c9ee28431df5708139df2e93233/grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e", size = 4062961, upload-time = "2025-10-21T16:22:36.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/41/0b430b01a2eb38ee887f88c1f07644a1df8e289353b78e82b37ef988fb64/grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e", size = 4834462, upload-time = "2025-10-21T16:22:39.772Z" }, +] + +[[package]] +name = "grpcio-reflection" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bd/10/767f9c2719c435616141efb3371f6e158f95cdde36a34876ae1d08ba7440/grpcio_reflection-1.76.0.tar.gz", hash = "sha256:e0e7e49921c2ee951e5ddff0bdbacbd1ac1a70888beb61d567f3d01b799decb1", size = 18845, upload-time = "2025-10-21T16:28:57.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/af/6168cf4ff389deed1388b1196281c67cb36dbbf44aaee40e2bfb72ac0202/grpcio_reflection-1.76.0-py3-none-any.whl", hash = "sha256:d7c43f2047a2a9c9320a5905aa7133c677977436b5f63e6a868e507864a11c73", size = 22702, upload-time = "2025-10-21T16:27:40.846Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/77/17d60d636ccd86a0db0eccc24d02967bbc3eea86b9db7324b04507ebaa40/grpcio_tools-1.76.0.tar.gz", hash = "sha256:ce80169b5e6adf3e8302f3ebb6cb0c3a9f08089133abca4b76ad67f751f5ad88", size = 5390807, upload-time = "2025-10-21T16:26:55.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/57/4b/6fceb806f6d5055793f5db0d7a1e3449ea16482c2aec3ad93b05678c325a/grpcio_tools-1.76.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:9b99086080ca394f1da9894ee20dedf7292dd614e985dcba58209a86a42de602", size = 2545596, upload-time = "2025-10-21T16:24:25.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/11/57af2f3f32016e6e2aae063a533aae2c0e6c577bc834bef97277a7fa9733/grpcio_tools-1.76.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8d95b5c2394bbbe911cbfc88d15e24c9e174958cb44dad6aa8c46fe367f6cc2a", size = 5843462, upload-time = "2025-10-21T16:24:31.046Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8b/470bedaf7fb75fb19500b4c160856659746dcf53e3d9241fcc17e3af7155/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d54e9ce2ffc5d01341f0c8898c1471d887ae93d77451884797776e0a505bd503", size = 2591938, upload-time = "2025-10-21T16:24:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/77/3e/530e848e00d6fe2db152984b2c9432bb8497a3699719fd7898d05cb7d95e/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:c83f39f64c2531336bd8d5c846a2159c9ea6635508b0f8ed3ad0d433e25b53c9", size = 2905296, upload-time = "2025-10-21T16:24:34.938Z" }, + { url = "https://files.pythonhosted.org/packages/75/b5/632229d17364eb7db5d3d793131172b2380323c4e6500f528743e477267c/grpcio_tools-1.76.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be480142fae0d986d127d6cb5cbc0357e4124ba22e96bb8b9ece32c48bc2c8ea", size = 2656266, upload-time = "2025-10-21T16:24:37.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/71/5756aa9a14d16738b04677b89af8612112d69fb098ffdbc5666020933f23/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7fefd41fc4ca11fab36f42bdf0f3812252988f8798fca8bec8eae049418deacd", size = 3105798, upload-time = "2025-10-21T16:24:40.408Z" }, + { url = "https://files.pythonhosted.org/packages/ab/de/9058021da11be399abe6c5d2a9a2abad1b00d367111018637195d107539b/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:63551f371082173e259e7f6ec24b5f1fe7d66040fadd975c966647bca605a2d3", size = 3654923, upload-time = "2025-10-21T16:24:42.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/93/29f04cc18f1023b2a4342374a45b1cd87a0e1458fc44aea74baad5431dcd/grpcio_tools-1.76.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:75a2c34584c99ff47e5bb267866e7dec68d30cd3b2158e1ee495bfd6db5ad4f0", size = 3322558, upload-time = "2025-10-21T16:24:44.356Z" }, + { url = "https://files.pythonhosted.org/packages/d9/ab/8936708d30b9a2484f6b093dfc57843c1d0380de0eba78a8ad8693535f26/grpcio_tools-1.76.0-cp310-cp310-win32.whl", hash = "sha256:908758789b0a612102c88e8055b7191eb2c4290d5d6fc50fb9cac737f8011ef1", size = 993621, upload-time = "2025-10-21T16:24:46.7Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d2/c5211feb81a532eca2c4dddd00d4971b91c10837cd083781f6ab3a6fdb5b/grpcio_tools-1.76.0-cp310-cp310-win_amd64.whl", hash = "sha256:ec6e49e7c4b2a222eb26d1e1726a07a572b6e629b2cf37e6bb784c9687904a52", size = 1158401, upload-time = "2025-10-21T16:24:48.416Z" }, + { url = "https://files.pythonhosted.org/packages/73/d1/efbeed1a864c846228c0a3b322e7a2d6545f025e35246aebf96496a36004/grpcio_tools-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6480f6af6833850a85cca1c6b435ef4ffd2ac8e88ef683b4065233827950243", size = 2545931, upload-time = "2025-10-21T16:24:50.201Z" }, + { url = "https://files.pythonhosted.org/packages/af/8e/f257c0f565d9d44658301238b01a9353bc6f3b272bb4191faacae042579d/grpcio_tools-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c7c23fe1dc09818e16a48853477806ad77dd628b33996f78c05a293065f8210c", size = 5844794, upload-time = "2025-10-21T16:24:53.312Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c0/6c1e89c67356cb20e19ed670c5099b13e40fd678cac584c778f931666a86/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fcdce7f7770ff052cd4e60161764b0b3498c909bde69138f8bd2e7b24a3ecd8f", size = 2591772, upload-time = "2025-10-21T16:24:55.729Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/10/5f33aa7bc3ddaad0cfd2f4e950ac4f1a310e8d0c7b1358622a581e8b7a2f/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b598fdcebffa931c7da5c9e90b5805fff7e9bc6cf238319358a1b85704c57d33", size = 2905140, upload-time = "2025-10-21T16:24:57.952Z" }, + { url = "https://files.pythonhosted.org/packages/f4/3e/23e3a52a77368f47188ed83c34eb53866d3ce0f73835b2f6764844ae89eb/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6a9818ff884796b12dcf8db32126e40ec1098cacf5697f27af9cfccfca1c1fae", size = 2656475, upload-time = "2025-10-21T16:25:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/51/85/a74ae87ec7dbd3d2243881f5c548215aed1148660df7945be3a125ba9a21/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:105e53435b2eed3961da543db44a2a34479d98d18ea248219856f30a0ca4646b", size = 3106158, upload-time = "2025-10-21T16:25:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/54/d5/a6ed1e5823bc5d55a1eb93e0c14ccee0b75951f914832ab51fb64d522a0f/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:454a1232c7f99410d92fa9923c7851fd4cdaf657ee194eac73ea1fe21b406d6e", size = 3654980, upload-time = "2025-10-21T16:25:05.717Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/c05d5501ba156a242079ef71d073116d2509c195b5e5e74c545f0a3a3a69/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ca9ccf667afc0268d45ab202af4556c72e57ea36ebddc93535e1a25cbd4f8aba", size = 3322658, upload-time = "2025-10-21T16:25:07.885Z" }, + { url = "https://files.pythonhosted.org/packages/02/b6/ee0317b91da19a7537d93c4161cbc2a45a165c8893209b0bbd470d830ffa/grpcio_tools-1.76.0-cp311-cp311-win32.whl", hash = "sha256:a83c87513b708228b4cad7619311daba65b40937745103cadca3db94a6472d9c", size = 993837, upload-time = "2025-10-21T16:25:10.133Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/63/9623cadf0406b264737f16d4ed273bb2d65001d87fbd803b565c45d665d1/grpcio_tools-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:2ce5e87ec71f2e4041dce4351f2a8e3b713e3bca6b54c69c3fbc6c7ad1f4c386", size = 1158634, upload-time = "2025-10-21T16:25:12.705Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ca/a931c1439cabfe305c9afd07e233150cd0565aa062c20d1ee412ed188852/grpcio_tools-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:4ad555b8647de1ebaffb25170249f89057721ffb74f7da96834a07b4855bb46a", size = 2546852, upload-time = "2025-10-21T16:25:15.024Z" }, + { url = "https://files.pythonhosted.org/packages/4c/07/935cfbb7dccd602723482a86d43fbd992f91e9867bca0056a1e9f348473e/grpcio_tools-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:243af7c8fc7ff22a40a42eb8e0f6f66963c1920b75aae2a2ec503a9c3c8b31c1", size = 5841777, upload-time = "2025-10-21T16:25:17.425Z" }, + { url = "https://files.pythonhosted.org/packages/e4/92/8fcb5acebdccb647e0fa3f002576480459f6cf81e79692d7b3c4d6e29605/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8207b890f423142cc0025d041fb058f7286318df6a049565c27869d73534228b", size = 2594004, upload-time = "2025-10-21T16:25:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ea/64838e8113b7bfd4842b15c815a7354cb63242fdce9d6648d894b5d50897/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3dafa34c2626a6691d103877e8a145f54c34cf6530975f695b396ed2fc5c98f8", size = 2905563, upload-time = "2025-10-21T16:25:21.889Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d6/53798827d821098219e58518b6db52161ce4985620850aa74ce3795da8a7/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:30f1d2dda6ece285b3d9084e94f66fa721ebdba14ae76b2bc4c581c8a166535c", size = 2656936, upload-time = "2025-10-21T16:25:24.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/a3/d9c1cefc46a790eec520fe4e70e87279abb01a58b1a3b74cf93f62b824a2/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a889af059dc6dbb82d7b417aa581601316e364fe12eb54c1b8d95311ea50916d", size = 3109811, upload-time = "2025-10-21T16:25:26.711Z" }, + { url = "https://files.pythonhosted.org/packages/50/75/5997752644b73b5d59377d333a51c8a916606df077f5a487853e37dca289/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c3f2c3c44c56eb5d479ab178f0174595d0a974c37dade442f05bb73dfec02f31", size = 3658786, upload-time = "2025-10-21T16:25:28.819Z" }, + { url = "https://files.pythonhosted.org/packages/84/47/dcf8380df4bd7931ffba32fc6adc2de635b6569ca27fdec7121733797062/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:479ce02dff684046f909a487d452a83a96b4231f7c70a3b218a075d54e951f56", size = 3325144, upload-time = "2025-10-21T16:25:30.863Z" }, + { url = "https://files.pythonhosted.org/packages/04/88/ea3e5fdb874d8c2d04488e4b9d05056537fba70915593f0c283ac77df188/grpcio_tools-1.76.0-cp312-cp312-win32.whl", hash = "sha256:9ba4bb539936642a44418b38ee6c3e8823c037699e2cb282bd8a44d76a4be833", size = 993523, upload-time = "2025-10-21T16:25:32.594Z" }, + { url = "https://files.pythonhosted.org/packages/de/b1/ce7d59d147675ec191a55816be46bc47a343b5ff07279eef5817c09cc53e/grpcio_tools-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd489016766b05f9ed8a6b6596004b62c57d323f49593eac84add032a6d43f7", size = 1158493, upload-time = "2025-10-21T16:25:34.5Z" }, + { url = "https://files.pythonhosted.org/packages/13/01/b16fe73f129df49811d886dc99d3813a33cf4d1c6e101252b81c895e929f/grpcio_tools-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ff48969f81858397ef33a36b326f2dbe2053a48b254593785707845db73c8f44", size = 2546312, upload-time = "2025-10-21T16:25:37.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/17/2594c5feb76bb0b25bfbf91ec1075b276e1b2325e4bc7ea649a7b5dbf353/grpcio_tools-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa2f030fd0ef17926026ee8e2b700e388d3439155d145c568fa6b32693277613", size = 5839627, upload-time = "2025-10-21T16:25:40.082Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c6/097b1aa26fbf72fb3cdb30138a2788529e4f10d8759de730a83f5c06726e/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bacbf3c54f88c38de8e28f8d9b97c90b76b105fb9ddef05d2c50df01b32b92af", size = 2592817, upload-time = "2025-10-21T16:25:42.301Z" }, + { url = "https://files.pythonhosted.org/packages/03/78/d1d985b48592a674509a85438c1a3d4c36304ddfc99d1b05d27233b51062/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0d4e4afe9a0e3c24fad2f1af45f98cf8700b2bfc4d790795756ba035d2ea7bdc", size = 2905186, upload-time = "2025-10-21T16:25:44.395Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0e/770afbb47f0b5f594b93a7b46a95b892abda5eebe60efb511e96cee52170/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fbbd4e1fc5af98001ceef5e780e8c10921d94941c3809238081e73818ef707f1", size = 2656188, upload-time = "2025-10-21T16:25:46.942Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2b/017c2fcf4c5d3cf00cf7d5ce21eb88521de0d89bdcf26538ad2862ec6d07/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b05efe5a59883ab8292d596657273a60e0c3e4f5a9723c32feb9fc3a06f2f3ef", size = 3109141, upload-time = "2025-10-21T16:25:49.137Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5f/2495f88e3d50c6f2c2da2752bad4fa3a30c52ece6c9d8b0c636cd8b1430b/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:be483b90e62b7892eb71fa1fc49750bee5b2ee35b5ec99dd2b32bed4bedb5d71", size = 3657892, upload-time = "2025-10-21T16:25:52.362Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/1d/c4f39d31b19d9baf35d900bf3f969ce1c842f63a8560c8003ed2e5474760/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:630cd7fd3e8a63e20703a7ad816979073c2253e591b5422583c27cae2570de73", size = 3324778, upload-time = "2025-10-21T16:25:54.629Z" }, + { url = "https://files.pythonhosted.org/packages/b4/b6/35ee3a6e4af85a93da28428f81f4b29bcb36f6986b486ad71910fcc02e25/grpcio_tools-1.76.0-cp313-cp313-win32.whl", hash = "sha256:eb2567280f9f6da5444043f0e84d8408c7a10df9ba3201026b30e40ef3814736", size = 993084, upload-time = "2025-10-21T16:25:56.52Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7a/5bd72344d86ee860e5920c9a7553cfe3bc7b1fce79f18c00ac2497f5799f/grpcio_tools-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:0071b1c0bd0f5f9d292dca4efab32c92725d418e57f9c60acdc33c0172af8b53", size = 1158151, upload-time = "2025-10-21T16:25:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c0/aa20eebe8f3553b7851643e9c88d237c3a6ca30ade646897e25dbb27be99/grpcio_tools-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:c53c5719ef2a435997755abde3826ba4087174bd432aa721d8fac781fcea79e4", size = 2546297, upload-time = "2025-10-21T16:26:01.258Z" }, + { url = "https://files.pythonhosted.org/packages/d9/98/6af702804934443c1d0d4d27d21b990d92d22ddd1b6bec6b056558cbbffa/grpcio_tools-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:e3db1300d7282264639eeee7243f5de7e6a7c0283f8bf05d66c0315b7b0f0b36", size = 5839804, upload-time = "2025-10-21T16:26:05.495Z" }, + { url = "https://files.pythonhosted.org/packages/ea/8d/7725fa7b134ef8405ffe0a37c96eeb626e5af15d70e1bdac4f8f1abf842e/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b018a4b7455a7e8c16d0fdb3655a6ba6c9536da6de6c5d4f11b6bb73378165b", size = 2593922, upload-time = "2025-10-21T16:26:07.563Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/ff/5b6b5012c79fa72f9107dc13f7226d9ce7e059ea639fd8c779e0dd284386/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ec6e4de3866e47cfde56607b1fae83ecc5aa546e06dec53de11f88063f4b5275", size = 2905327, upload-time = "2025-10-21T16:26:09.668Z" }, + { url = "https://files.pythonhosted.org/packages/24/01/2691d369ea462cd6b6c92544122885ca01f7fa5ac75dee023e975e675858/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b8da4d828883913f1852bdd67383713ae5c11842f6c70f93f31893eab530aead", size = 2656214, upload-time = "2025-10-21T16:26:11.773Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e7/3f8856e6ec3dd492336a91572993344966f237b0e3819fbe96437b19d313/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5c120c2cf4443121800e7f9bcfe2e94519fa25f3bb0b9882359dd3b252c78a7b", size = 3109889, upload-time = "2025-10-21T16:26:15.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ce5248072e47db276dc7e069e93978dcde490c959788ce7cce8081d0bfdc/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8b7df5591d699cd9076065f1f15049e9c3597e0771bea51c8c97790caf5e4197", size = 3657939, upload-time = "2025-10-21T16:26:17.34Z" }, + { url = "https://files.pythonhosted.org/packages/f6/df/81ff88af93c52135e425cd5ec9fe8b186169c7d5f9e0409bdf2bbedc3919/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a25048c5f984d33e3f5b6ad7618e98736542461213ade1bd6f2fcfe8ce804e3d", size = 3324752, upload-time = "2025-10-21T16:26:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/35/3d/f6b83044afbf6522254a3b509515a00fed16a819c87731a478dbdd1d35c1/grpcio_tools-1.76.0-cp314-cp314-win32.whl", hash = "sha256:4b77ce6b6c17869858cfe14681ad09ed3a8a80e960e96035de1fd87f78158740", size = 1015578, upload-time = "2025-10-21T16:26:22.517Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/4d/31236cddb7ffb09ba4a49f4f56d2608fec3bbb21c7a0a975d93bca7cd22e/grpcio_tools-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:2ccd2c8d041351cc29d0fc4a84529b11ee35494a700b535c1f820b642f2a72fc", size = 1190242, upload-time = "2025-10-21T16:26:25.296Z" }, ] [[package]] name = "h11" version = "0.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] [[package]] @@ -304,9 +1033,9 @@ dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "trove-classifiers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8f/8a/cc1debe3514da292094f1c3a700e4ca25442489731ef7c0814358816bb03/hatchling-1.27.0.tar.gz", hash = "sha256:971c296d9819abb3811112fc52c7a9751c8d381898f36533bb16f9791e941fd6", size = 54983 } +sdist = { url = 
"https://files.pythonhosted.org/packages/8f/8a/cc1debe3514da292094f1c3a700e4ca25442489731ef7c0814358816bb03/hatchling-1.27.0.tar.gz", hash = "sha256:971c296d9819abb3811112fc52c7a9751c8d381898f36533bb16f9791e941fd6", size = 54983, upload-time = "2024-12-15T17:08:11.894Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/e7/ae38d7a6dfba0533684e0b2136817d667588ae3ec984c1a4e5df5eb88482/hatchling-1.27.0-py3-none-any.whl", hash = "sha256:d3a2f3567c4f926ea39849cdf924c7e99e6686c9c8e288ae1037c8fa2a5d937b", size = 75794 }, + { url = "https://files.pythonhosted.org/packages/08/e7/ae38d7a6dfba0533684e0b2136817d667588ae3ec984c1a4e5df5eb88482/hatchling-1.27.0-py3-none-any.whl", hash = "sha256:d3a2f3567c4f926ea39849cdf924c7e99e6686c9c8e288ae1037c8fa2a5d937b", size = 75794, upload-time = "2024-12-15T17:08:10.364Z" }, ] [[package]] @@ -317,9 +1046,9 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = 
"2025-04-24T22:06:20.566Z" }, ] [[package]] @@ -332,39 +1061,48 @@ dependencies = [ { name = "httpcore" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] [[package]] name = "httpx-sse" -version = "0.4.0" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, +] + +[[package]] +name = "identify" +version = "2.6.12" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, + { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" }, ] [[package]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 
70442 }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] [[package]] name = "importlib-metadata" -version = "8.6.1" +version = "8.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/08/c1395a292bb23fd03bdf572a1357c5a733d3eecbab877641ceacab23db6e/importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580", size = 55767 } +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/9d/0fb148dc4d6fa4a7dd1d8378168d9b4cd8d4560a6fbf6f0121c5fc34eb68/importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e", size = 26971 }, + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, ] [[package]] @@ -375,27 +1113,27 @@ dependencies = [ { name = "more-itertools" }, { name = "typeguard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/c6/943357d44a21fd995723d07ccaddd78023eace03c1846049a2645d4324a3/inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f", size = 73751 } +sdist = { url = 
"https://files.pythonhosted.org/packages/78/c6/943357d44a21fd995723d07ccaddd78023eace03c1846049a2645d4324a3/inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f", size = 73751, upload-time = "2024-12-28T17:11:18.897Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/eb/427ed2b20a38a4ee29f24dbe4ae2dafab198674fe9a85e3d6adf9e5f5f41/inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344", size = 35197 }, + { url = "https://files.pythonhosted.org/packages/8a/eb/427ed2b20a38a4ee29f24dbe4ae2dafab198674fe9a85e3d6adf9e5f5f41/inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344", size = 35197, upload-time = "2024-12-28T17:11:15.931Z" }, ] [[package]] name = "iniconfig" version = "2.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = 
"2025-03-19T20:10:01.071Z" }, ] [[package]] name = "isort" version = "6.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955 } +sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955, upload-time = "2025-02-26T21:13:16.955Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186 }, + { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186, upload-time = "2025-02-26T21:13:14.911Z" }, ] [[package]] @@ -405,204 +1143,453 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "libcst" +version = "1.8.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml", marker = "python_full_version < '3.13'" }, + { name = "pyyaml-ft", marker = "python_full_version >= '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/aa/b52d195b167958fe1bd106a260f64cc80ec384f6ac2a9cda874d8803df06/libcst-1.8.2.tar.gz", hash = "sha256:66e82cedba95a6176194a817be4232c720312f8be6d2c8f3847f3317d95a0c7f", size = 881534, upload-time = "2025-06-13T20:56:37.915Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/2e/1d7f67d2ef6f875e9e8798c024f7cb3af3fe861e417bff485c69b655ac96/libcst-1.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:67d9720d91f507c87b3e5f070627ad640a00bc6cfdf5635f8c6ee9f2964cf71c", size = 2195106, upload-time = "2025-06-13T20:54:49.166Z" }, + { url = "https://files.pythonhosted.org/packages/82/d0/3d94fee2685f263fd8d85a83e2537fcc78b644eae450738bf2c72604f0df/libcst-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:94b7c032b72566077614a02baab1929739fd0af0cc1d46deaba4408b870faef2", size = 2080577, upload-time = "2025-06-13T20:54:51.518Z" }, + { url = "https://files.pythonhosted.org/packages/14/87/c9b49bebb9a930fdcb59bf841f1c45719d2a4a39c3eb7efacfd30a2bfb0a/libcst-1.8.2-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = 
"sha256:11ea148902e3e1688afa392087c728ac3a843e54a87d334d1464d2097d3debb7", size = 2404076, upload-time = "2025-06-13T20:54:53.303Z" }, + { url = "https://files.pythonhosted.org/packages/49/fa/9ca145aa9033f9a8362a5663ceb28dfb67082574de8118424b6b8e445e7a/libcst-1.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:22c9473a2cc53faabcc95a0ac6ca4e52d127017bf34ba9bc0f8e472e44f7b38e", size = 2219813, upload-time = "2025-06-13T20:54:55.351Z" }, + { url = "https://files.pythonhosted.org/packages/0c/25/496a025c09e96116437a57fd34abefe84c041d930f832c6e42d84d9e028c/libcst-1.8.2-cp310-cp310-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b5269b96367e65793a7714608f6d906418eb056d59eaac9bba980486aabddbed", size = 2189782, upload-time = "2025-06-13T20:54:57.013Z" }, + { url = "https://files.pythonhosted.org/packages/b3/75/826b5772192826d70480efe93bab3e4f0b4a24d31031f45547257ad5f9a8/libcst-1.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d20e932ddd9a389da57b060c26e84a24118c96ff6fc5dcc7b784da24e823b694", size = 2312403, upload-time = "2025-06-13T20:54:58.996Z" }, + { url = "https://files.pythonhosted.org/packages/93/f4/316fa14ea6c61ea8755672d60e012558f0216300b3819e72bebc7864a507/libcst-1.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a553d452004e44b841788f6faa7231a02157527ddecc89dbbe5b689b74822226", size = 2280566, upload-time = "2025-06-13T20:55:00.707Z" }, + { url = "https://files.pythonhosted.org/packages/fc/52/74b69350db379b1646739288b88ffab2981b2ad48407faf03df3768d7d2f/libcst-1.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe762c4c390039b79b818cbc725d8663586b25351dc18a2704b0e357d69b924", size = 2388508, upload-time = "2025-06-13T20:55:02.769Z" }, + { url = "https://files.pythonhosted.org/packages/bc/c6/fa92699b537ed65e93c2869144e23bdf156ec81ae7b84b4f34cbc20d6048/libcst-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:5c513e64eff0f7bf2a908e2d987a98653eb33e1062ce2afd3a84af58159a24f9", size = 2093260, upload-time = 
"2025-06-13T20:55:04.771Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ac/4ec4ae9da311f72cd97e930c325bb605e9ad0baaafcafadb0588e1dc5c4e/libcst-1.8.2-cp310-cp310-win_arm64.whl", hash = "sha256:41613fe08e647213546c7c59a5a1fc5484666e7d4cab6e80260c612acbb20e8c", size = 1985236, upload-time = "2025-06-13T20:55:06.317Z" }, + { url = "https://files.pythonhosted.org/packages/c5/73/f0a4d807bff6931e3d8c3180472cf43d63a121aa60be895425fba2ed4f3a/libcst-1.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:688a03bac4dfb9afc5078ec01d53c21556381282bdf1a804dd0dbafb5056de2a", size = 2195040, upload-time = "2025-06-13T20:55:08.117Z" }, + { url = "https://files.pythonhosted.org/packages/e5/fa/ede0cfc410e498e1279eb489603f31077d2ca112d84e1327b04b508c0cbe/libcst-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c34060ff2991707c710250463ae9f415ebb21653f2f5b013c61c9c376ff9b715", size = 2080304, upload-time = "2025-06-13T20:55:09.729Z" }, + { url = "https://files.pythonhosted.org/packages/39/8d/59f7c488dbedf96454c07038dea72ee2a38de13d52b4f796a875a1dc45a6/libcst-1.8.2-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f54f5c4176d60e7cd6b0880e18fb3fa8501ae046069151721cab457c7c538a3d", size = 2403816, upload-time = "2025-06-13T20:55:11.527Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c2/af8d6cc0c6dcd1a5d0ed5cf846be242354513139a9358e005c63252c6ab7/libcst-1.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d11992561de0ad29ec2800230fbdcbef9efaa02805d5c633a73ab3cf2ba51bf1", size = 2219415, upload-time = "2025-06-13T20:55:13.144Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b8/1638698d6c33bdb4397ee6f60e534e7504ef2cd1447b24104df65623dedb/libcst-1.8.2-cp311-cp311-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fa3b807c2d2b34397c135d19ad6abb20c47a2ddb7bf65d90455f2040f7797e1e", size = 2189568, upload-time = "2025-06-13T20:55:15.119Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/16/51c1015dada47b8464c5fa0cbf70fecc5fce0facd07d05a5cb6e7eb68b88/libcst-1.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b0110140738be1287e3724080a101e7cec6ae708008b7650c9d8a1c1788ec03a", size = 2312018, upload-time = "2025-06-13T20:55:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ea/8d24158f345ea2921d0d7ff49a6bf86fd4a08b0f05735f14a84ea9e28fa9/libcst-1.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a50618f4819a97ef897e055ac7aaf1cad5df84c206f33be35b0759d671574197", size = 2279875, upload-time = "2025-06-13T20:55:18.418Z" }, + { url = "https://files.pythonhosted.org/packages/73/fd/0441cc1bcf188300aaa41ca5d473919a00939cc7f4934b3b08b23c8740c1/libcst-1.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9bb599c175dc34a4511f0e26d5b5374fbcc91ea338871701a519e95d52f3c28", size = 2388060, upload-time = "2025-06-13T20:55:20.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fc/28f6380eefd58543f80589b77cab81eb038e7cc86f7c34a815a287dba82f/libcst-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:96e2363e1f6e44bd7256bbbf3a53140743f821b5133046e6185491e0d9183447", size = 2093117, upload-time = "2025-06-13T20:55:21.977Z" }, + { url = "https://files.pythonhosted.org/packages/ef/db/cdbd1531bca276c44bc485e40c3156e770e01020f8c1a737282bf884d69f/libcst-1.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:f5391d71bd7e9e6c73dcb3ee8d8c63b09efc14ce6e4dad31568d4838afc9aae0", size = 1985285, upload-time = "2025-06-13T20:55:24.438Z" }, + { url = "https://files.pythonhosted.org/packages/31/2d/8726bf8ea8252e8fd1e48980753eef5449622c5f6cf731102bc43dcdc2c6/libcst-1.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2e8c1dfa854e700fcf6cd79b2796aa37d55697a74646daf5ea47c7c764bac31c", size = 2185942, upload-time = "2025-06-13T20:55:26.105Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/b3/565d24db8daed66eae7653c1fc1bc97793d49d5d3bcef530450ee8da882c/libcst-1.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b5c57a3c1976c365678eb0730bcb140d40510990cb77df9a91bb5c41d587ba6", size = 2072622, upload-time = "2025-06-13T20:55:27.548Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d6/5a433e8a58eeb5c5d46635cfe958d0605f598d87977d4560484e3662d438/libcst-1.8.2-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:0f23409add2aaebbb6d8e881babab43c2d979f051b8bd8aed5fe779ea180a4e8", size = 2402738, upload-time = "2025-06-13T20:55:29.539Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/0dd752c1880b570118fa91ac127589e6cf577ddcb2eef1aaf8b81ecc3f79/libcst-1.8.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b88e9104c456590ad0ef0e82851d4fc03e9aa9d621fa8fdd4cd0907152a825ae", size = 2219932, upload-time = "2025-06-13T20:55:31.17Z" }, + { url = "https://files.pythonhosted.org/packages/42/bc/fceae243c6a329477ac6d4edb887bcaa2ae7a3686158d8d9b9abb3089c37/libcst-1.8.2-cp312-cp312-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5ba3ea570c8fb6fc44f71aa329edc7c668e2909311913123d0d7ab8c65fc357", size = 2191891, upload-time = "2025-06-13T20:55:33.066Z" }, + { url = "https://files.pythonhosted.org/packages/7d/7d/eb341bdc11f1147e7edeccffd0f2f785eff014e72134f5e46067472012b0/libcst-1.8.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:460fcf3562f078781e1504983cb11909eb27a1d46eaa99e65c4b0fafdc298298", size = 2311927, upload-time = "2025-06-13T20:55:34.614Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/78bfc7aa5a542574d2ab0768210d084901dec5fc373103ca119905408cf2/libcst-1.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c1381ddbd1066d543e05d580c15beacf671e1469a0b2adb6dba58fec311f4eed", size = 2281098, upload-time = "2025-06-13T20:55:36.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/37/a41788a72dc06ed3566606f7cf50349c9918cee846eeae45d1bac03d54c2/libcst-1.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a70e40ce7600e1b32e293bb9157e9de3b69170e2318ccb219102f1abb826c94a", size = 2387649, upload-time = "2025-06-13T20:55:37.797Z" }, + { url = "https://files.pythonhosted.org/packages/bb/df/7a49576c9fd55cdfd8bcfb725273aa4ee7dc41e87609f3451a4901d68057/libcst-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:3ece08ba778b6eeea74d9c705e9af2d1b4e915e9bc6de67ad173b962e575fcc0", size = 2094574, upload-time = "2025-06-13T20:55:39.833Z" }, + { url = "https://files.pythonhosted.org/packages/29/60/27381e194d2af08bfd0fed090c905b2732907b69da48d97d86c056d70790/libcst-1.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:5efd1bf6ee5840d1b0b82ec8e0b9c64f182fa5a7c8aad680fbd918c4fa3826e0", size = 1984568, upload-time = "2025-06-13T20:55:41.511Z" }, + { url = "https://files.pythonhosted.org/packages/11/9c/e3d4c7f1eb5c23907f905f84a4da271b60cd15b746ac794d42ea18bb105e/libcst-1.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:08e9dca4ab6f8551794ce7ec146f86def6a82da41750cbed2c07551345fa10d3", size = 2185848, upload-time = "2025-06-13T20:55:43.653Z" }, + { url = "https://files.pythonhosted.org/packages/59/e0/635cbb205d42fd296c01ab5cd1ba485b0aee92bffe061de587890c81f1bf/libcst-1.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8310521f2ccb79b5c4345750d475b88afa37bad930ab5554735f85ad5e3add30", size = 2072510, upload-time = "2025-06-13T20:55:45.287Z" }, + { url = "https://files.pythonhosted.org/packages/fe/45/8911cfe9413fd690a024a1ff2c8975f060dd721160178679d3f6a21f939e/libcst-1.8.2-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:da2d8b008aff72acd5a4a588491abdda1b446f17508e700f26df9be80d8442ae", size = 2403226, upload-time = "2025-06-13T20:55:46.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/83/819d2b1b1fd870ad34ce4f34ec68704ca69bf48ef2d7665483115f267ec4/libcst-1.8.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:be821d874ce8b26cbadd7277fa251a9b37f6d2326f8b5682b6fc8966b50a3a59", size = 2220669, upload-time = "2025-06-13T20:55:48.597Z" }, + { url = "https://files.pythonhosted.org/packages/d4/2f/2c4742bf834f88a9803095915c4f41cafefb7b04bde66ea86f74668b4b7b/libcst-1.8.2-cp313-cp313-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f74b0bc7378ad5afcf25ac9d0367b4dbba50f6f6468faa41f5dfddcf8bf9c0f8", size = 2191919, upload-time = "2025-06-13T20:55:50.092Z" }, + { url = "https://files.pythonhosted.org/packages/64/f4/107e13815f1ee5aad642d4eb4671c0273ee737f3832e3dbca9603b39f8d9/libcst-1.8.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:b68ea4a6018abfea1f68d50f74de7d399172684c264eb09809023e2c8696fc23", size = 2311965, upload-time = "2025-06-13T20:55:51.974Z" }, + { url = "https://files.pythonhosted.org/packages/03/63/2948b6e4be367ad375d273a8ad00df573029cffe5ac8f6c09398c250de5b/libcst-1.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2e264307ec49b2c72480422abafe80457f90b4e6e693b7ddf8a23d24b5c24001", size = 2281704, upload-time = "2025-06-13T20:55:54.036Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d3/590cde9c8c386d5f4f05fdef3394c437ea51060478a5141ff4a1f289e747/libcst-1.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5d5519962ce7c72d81888fb0c09e58e308ba4c376e76bcd853b48151063d6a8", size = 2387511, upload-time = "2025-06-13T20:55:55.538Z" }, + { url = "https://files.pythonhosted.org/packages/96/3d/ba5e36c663028043fc607dc33e5c390c7f73136fb15a890fb3710ee9d158/libcst-1.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:b62aa11d6b74ed5545e58ac613d3f63095e5fd0254b3e0d1168fda991b9a6b41", size = 2094526, upload-time = "2025-06-13T20:55:57.486Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/34/530ca3b972dddad562f266c81190bea29376f8ba70054ea7b45b114504cd/libcst-1.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9c2bd4ac288a9cdb7ffc3229a9ce8027a66a3fd3f2ab9e13da60f5fbfe91f3b2", size = 1984627, upload-time = "2025-06-13T20:55:59.017Z" }, + { url = "https://files.pythonhosted.org/packages/19/9f/491f7b8d9d93444cd9bf711156ee1f122c38d25b903599e363d669acc8ab/libcst-1.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:08a8c7d9922ca6eed24e2c13a3c552b3c186af8fc78e5d4820b58487d780ec19", size = 2175415, upload-time = "2025-06-13T20:56:01.157Z" }, + { url = "https://files.pythonhosted.org/packages/2e/fe/4d13437f453f92687246aa7c5138e102ee5186fe96609ee4c598bb9f9ecb/libcst-1.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bba7c2b5063e8ada5a5477f9fa0c01710645426b5a8628ec50d558542a0a292e", size = 2063719, upload-time = "2025-06-13T20:56:02.787Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/758ae142c6607f275269021362b731e0f22ff5c9aa7cc67b0ed3a6bc930f/libcst-1.8.2-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d97c9fe13aacfbefded6861f5200dcb8e837da7391a9bdeb44ccb133705990af", size = 2380624, upload-time = "2025-06-13T20:56:04.909Z" }, + { url = "https://files.pythonhosted.org/packages/ac/c5/31d214a0bcb3523243a9b5643b597ff653d6ec9e1f3326cfcc16bcbf185d/libcst-1.8.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:d2194ae959630aae4176a4b75bd320b3274c20bef2a5ca6b8d6fc96d3c608edf", size = 2208801, upload-time = "2025-06-13T20:56:06.983Z" }, + { url = "https://files.pythonhosted.org/packages/70/16/a53f852322b266c63b492836a5c4968f192ee70fb52795a79feb4924e9ed/libcst-1.8.2-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0be639f5b2e1999a4b4a82a0f4633969f97336f052d0c131627983589af52f56", size = 2179557, upload-time = "2025-06-13T20:56:09.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/49/12a5664c73107187ba3af14869d3878fca1fd4c37f6fbb9adb943cb7a791/libcst-1.8.2-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6753e50904e05c27915933da41518ecd7a8ca4dd3602112ba44920c6e353a455", size = 2302499, upload-time = "2025-06-13T20:56:10.751Z" }, + { url = "https://files.pythonhosted.org/packages/e9/46/2d62552a9346a040c045d6619b645d59bb707a586318121f099abd0cd5c4/libcst-1.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:706d07106af91c343150be86caeae1ea3851b74aa0730fcbbf8cd089e817f818", size = 2271070, upload-time = "2025-06-13T20:56:12.445Z" }, + { url = "https://files.pythonhosted.org/packages/af/67/b625fd6ae22575255aade0a24f45e1d430b7e7279729c9c51d4faac982d2/libcst-1.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd4310ea8ddc49cc8872e083737cf806299b17f93159a1f354d59aa08993e876", size = 2380767, upload-time = "2025-06-13T20:56:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/e6/84/fb88f2ffdb045ff7323a6c05dd3d243a9eb3cb3517a6269dee43fbfb9990/libcst-1.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:51bbafdd847529e8a16d1965814ed17831af61452ee31943c414cb23451de926", size = 2083403, upload-time = "2025-06-13T20:56:15.959Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8f/da755d6d517eb8ec9664afae967b00a9b8dd567bbbb350e261359c1b47fc/libcst-1.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:4f14f5045766646ed9e8826b959c6d07194788babed1e0ba08c94ea4f39517e3", size = 1974355, upload-time = "2025-06-13T20:56:18.064Z" }, +] + +[[package]] +name = "librt" +version = "0.7.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/24/5f3646ff414285e0f7708fa4e946b9bf538345a41d1c375c439467721a5e/librt-0.7.8.tar.gz", hash = "sha256:1a4ede613941d9c3470b0368be851df6bb78ab218635512d0370b27a277a0862", size = 148323, upload-time = "2026-01-14T12:56:16.876Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/44/13/57b06758a13550c5f09563893b004f98e9537ee6ec67b7df85c3571c8832/librt-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b45306a1fc5f53c9330fbee134d8b3227fe5da2ab09813b892790400aa49352d", size = 56521, upload-time = "2026-01-14T12:54:40.066Z" }, + { url = "https://files.pythonhosted.org/packages/c2/24/bbea34d1452a10612fb45ac8356f95351ba40c2517e429602160a49d1fd0/librt-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:864c4b7083eeee250ed55135d2127b260d7eb4b5e953a9e5df09c852e327961b", size = 58456, upload-time = "2026-01-14T12:54:41.471Z" }, + { url = "https://files.pythonhosted.org/packages/04/72/a168808f92253ec3a810beb1eceebc465701197dbc7e865a1c9ceb3c22c7/librt-0.7.8-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6938cc2de153bc927ed8d71c7d2f2ae01b4e96359126c602721340eb7ce1a92d", size = 164392, upload-time = "2026-01-14T12:54:42.843Z" }, + { url = "https://files.pythonhosted.org/packages/14/5c/4c0d406f1b02735c2e7af8ff1ff03a6577b1369b91aa934a9fa2cc42c7ce/librt-0.7.8-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66daa6ac5de4288a5bbfbe55b4caa7bf0cd26b3269c7a476ffe8ce45f837f87d", size = 172959, upload-time = "2026-01-14T12:54:44.602Z" }, + { url = "https://files.pythonhosted.org/packages/82/5f/3e85351c523f73ad8d938989e9a58c7f59fb9c17f761b9981b43f0025ce7/librt-0.7.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4864045f49dc9c974dadb942ac56a74cd0479a2aafa51ce272c490a82322ea3c", size = 186717, upload-time = "2026-01-14T12:54:45.986Z" }, + { url = "https://files.pythonhosted.org/packages/08/f8/18bfe092e402d00fe00d33aa1e01dda1bd583ca100b393b4373847eade6d/librt-0.7.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a36515b1328dc5b3ffce79fe204985ca8572525452eacabee2166f44bb387b2c", size = 184585, upload-time = "2026-01-14T12:54:47.139Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/fc/f43972ff56fd790a9fa55028a52ccea1875100edbb856b705bd393b601e3/librt-0.7.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b7e7f140c5169798f90b80d6e607ed2ba5059784968a004107c88ad61fb3641d", size = 180497, upload-time = "2026-01-14T12:54:48.946Z" }, + { url = "https://files.pythonhosted.org/packages/e1/3a/25e36030315a410d3ad0b7d0f19f5f188e88d1613d7d3fd8150523ea1093/librt-0.7.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff71447cb778a4f772ddc4ce360e6ba9c95527ed84a52096bd1bbf9fee2ec7c0", size = 200052, upload-time = "2026-01-14T12:54:50.382Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b8/f3a5a1931ae2a6ad92bf6893b9ef44325b88641d58723529e2c2935e8abe/librt-0.7.8-cp310-cp310-win32.whl", hash = "sha256:047164e5f68b7a8ebdf9fae91a3c2161d3192418aadd61ddd3a86a56cbe3dc85", size = 43477, upload-time = "2026-01-14T12:54:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/fe/91/c4202779366bc19f871b4ad25db10fcfa1e313c7893feb942f32668e8597/librt-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:d6f254d096d84156a46a84861183c183d30734e52383602443292644d895047c", size = 49806, upload-time = "2026-01-14T12:54:53.149Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a3/87ea9c1049f2c781177496ebee29430e4631f439b8553a4969c88747d5d8/librt-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ff3e9c11aa260c31493d4b3197d1e28dd07768594a4f92bec4506849d736248f", size = 56507, upload-time = "2026-01-14T12:54:54.156Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4a/23bcef149f37f771ad30203d561fcfd45b02bc54947b91f7a9ac34815747/librt-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb52499d0b3ed4aa88746aaf6f36a08314677d5c346234c3987ddc506404eac", size = 58455, upload-time = "2026-01-14T12:54:55.978Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/6e/46eb9b85c1b9761e0f42b6e6311e1cc544843ac897457062b9d5d0b21df4/librt-0.7.8-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e9c0afebbe6ce177ae8edba0c7c4d626f2a0fc12c33bb993d163817c41a7a05c", size = 164956, upload-time = "2026-01-14T12:54:57.311Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3f/aa7c7f6829fb83989feb7ba9aa11c662b34b4bd4bd5b262f2876ba3db58d/librt-0.7.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:631599598e2c76ded400c0a8722dec09217c89ff64dc54b060f598ed68e7d2a8", size = 174364, upload-time = "2026-01-14T12:54:59.089Z" }, + { url = "https://files.pythonhosted.org/packages/3f/2d/d57d154b40b11f2cb851c4df0d4c4456bacd9b1ccc4ecb593ddec56c1a8b/librt-0.7.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c1ba843ae20db09b9d5c80475376168feb2640ce91cd9906414f23cc267a1ff", size = 188034, upload-time = "2026-01-14T12:55:00.141Z" }, + { url = "https://files.pythonhosted.org/packages/59/f9/36c4dad00925c16cd69d744b87f7001792691857d3b79187e7a673e812fb/librt-0.7.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b5b007bb22ea4b255d3ee39dfd06d12534de2fcc3438567d9f48cdaf67ae1ae3", size = 186295, upload-time = "2026-01-14T12:55:01.303Z" }, + { url = "https://files.pythonhosted.org/packages/23/9b/8a9889d3df5efb67695a67785028ccd58e661c3018237b73ad081691d0cb/librt-0.7.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dbd79caaf77a3f590cbe32dc2447f718772d6eea59656a7dcb9311161b10fa75", size = 181470, upload-time = "2026-01-14T12:55:02.492Z" }, + { url = "https://files.pythonhosted.org/packages/43/64/54d6ef11afca01fef8af78c230726a9394759f2addfbf7afc5e3cc032a45/librt-0.7.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:87808a8d1e0bd62a01cafc41f0fd6818b5a5d0ca0d8a55326a81643cdda8f873", size = 201713, upload-time = "2026-01-14T12:55:03.919Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/29/73e7ed2991330b28919387656f54109139b49e19cd72902f466bd44415fd/librt-0.7.8-cp311-cp311-win32.whl", hash = "sha256:31724b93baa91512bd0a376e7cf0b59d8b631ee17923b1218a65456fa9bda2e7", size = 43803, upload-time = "2026-01-14T12:55:04.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/de/66766ff48ed02b4d78deea30392ae200bcbd99ae61ba2418b49fd50a4831/librt-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:978e8b5f13e52cf23a9e80f3286d7546baa70bc4ef35b51d97a709d0b28e537c", size = 50080, upload-time = "2026-01-14T12:55:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e3/33450438ff3a8c581d4ed7f798a70b07c3206d298cf0b87d3806e72e3ed8/librt-0.7.8-cp311-cp311-win_arm64.whl", hash = "sha256:20e3946863d872f7cabf7f77c6c9d370b8b3d74333d3a32471c50d3a86c0a232", size = 43383, upload-time = "2026-01-14T12:55:07.49Z" }, + { url = "https://files.pythonhosted.org/packages/56/04/79d8fcb43cae376c7adbab7b2b9f65e48432c9eced62ac96703bcc16e09b/librt-0.7.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9b6943885b2d49c48d0cff23b16be830ba46b0152d98f62de49e735c6e655a63", size = 57472, upload-time = "2026-01-14T12:55:08.528Z" }, + { url = "https://files.pythonhosted.org/packages/b4/ba/60b96e93043d3d659da91752689023a73981336446ae82078cddf706249e/librt-0.7.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46ef1f4b9b6cc364b11eea0ecc0897314447a66029ee1e55859acb3dd8757c93", size = 58986, upload-time = "2026-01-14T12:55:09.466Z" }, + { url = "https://files.pythonhosted.org/packages/7c/26/5215e4cdcc26e7be7eee21955a7e13cbf1f6d7d7311461a6014544596fac/librt-0.7.8-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:907ad09cfab21e3c86e8f1f87858f7049d1097f77196959c033612f532b4e592", size = 168422, upload-time = "2026-01-14T12:55:10.499Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/84/e8d1bc86fa0159bfc24f3d798d92cafd3897e84c7fea7fe61b3220915d76/librt-0.7.8-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2991b6c3775383752b3ca0204842743256f3ad3deeb1d0adc227d56b78a9a850", size = 177478, upload-time = "2026-01-14T12:55:11.577Z" }, + { url = "https://files.pythonhosted.org/packages/57/11/d0268c4b94717a18aa91df1100e767b010f87b7ae444dafaa5a2d80f33a6/librt-0.7.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03679b9856932b8c8f674e87aa3c55ea11c9274301f76ae8dc4d281bda55cf62", size = 192439, upload-time = "2026-01-14T12:55:12.7Z" }, + { url = "https://files.pythonhosted.org/packages/8d/56/1e8e833b95fe684f80f8894ae4d8b7d36acc9203e60478fcae599120a975/librt-0.7.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3968762fec1b2ad34ce57458b6de25dbb4142713e9ca6279a0d352fa4e9f452b", size = 191483, upload-time = "2026-01-14T12:55:13.838Z" }, + { url = "https://files.pythonhosted.org/packages/17/48/f11cf28a2cb6c31f282009e2208312aa84a5ee2732859f7856ee306176d5/librt-0.7.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bb7a7807523a31f03061288cc4ffc065d684c39db7644c676b47d89553c0d714", size = 185376, upload-time = "2026-01-14T12:55:15.017Z" }, + { url = "https://files.pythonhosted.org/packages/b8/6a/d7c116c6da561b9155b184354a60a3d5cdbf08fc7f3678d09c95679d13d9/librt-0.7.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad64a14b1e56e702e19b24aae108f18ad1bf7777f3af5fcd39f87d0c5a814449", size = 206234, upload-time = "2026-01-14T12:55:16.571Z" }, + { url = "https://files.pythonhosted.org/packages/61/de/1975200bb0285fc921c5981d9978ce6ce11ae6d797df815add94a5a848a3/librt-0.7.8-cp312-cp312-win32.whl", hash = "sha256:0241a6ed65e6666236ea78203a73d800dbed896cf12ae25d026d75dc1fcd1dac", size = 44057, upload-time = "2026-01-14T12:55:18.077Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/cd/724f2d0b3461426730d4877754b65d39f06a41ac9d0a92d5c6840f72b9ae/librt-0.7.8-cp312-cp312-win_amd64.whl", hash = "sha256:6db5faf064b5bab9675c32a873436b31e01d66ca6984c6f7f92621656033a708", size = 50293, upload-time = "2026-01-14T12:55:19.179Z" }, + { url = "https://files.pythonhosted.org/packages/bd/cf/7e899acd9ee5727ad8160fdcc9994954e79fab371c66535c60e13b968ffc/librt-0.7.8-cp312-cp312-win_arm64.whl", hash = "sha256:57175aa93f804d2c08d2edb7213e09276bd49097611aefc37e3fa38d1fb99ad0", size = 43574, upload-time = "2026-01-14T12:55:20.185Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fe/b1f9de2829cf7fc7649c1dcd202cfd873837c5cc2fc9e526b0e7f716c3d2/librt-0.7.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4c3995abbbb60b3c129490fa985dfe6cac11d88fc3c36eeb4fb1449efbbb04fc", size = 57500, upload-time = "2026-01-14T12:55:21.219Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d4/4a60fbe2e53b825f5d9a77325071d61cd8af8506255067bf0c8527530745/librt-0.7.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:44e0c2cbc9bebd074cf2cdbe472ca185e824be4e74b1c63a8e934cea674bebf2", size = 59019, upload-time = "2026-01-14T12:55:22.256Z" }, + { url = "https://files.pythonhosted.org/packages/6a/37/61ff80341ba5159afa524445f2d984c30e2821f31f7c73cf166dcafa5564/librt-0.7.8-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d2f1e492cae964b3463a03dc77a7fe8742f7855d7258c7643f0ee32b6651dd3", size = 169015, upload-time = "2026-01-14T12:55:23.24Z" }, + { url = "https://files.pythonhosted.org/packages/1c/86/13d4f2d6a93f181ebf2fc953868826653ede494559da8268023fe567fca3/librt-0.7.8-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:451e7ffcef8f785831fdb791bd69211f47e95dc4c6ddff68e589058806f044c6", size = 178161, upload-time = "2026-01-14T12:55:24.826Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/26/e24ef01305954fc4d771f1f09f3dd682f9eb610e1bec188ffb719374d26e/librt-0.7.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3469e1af9f1380e093ae06bedcbdd11e407ac0b303a56bbe9afb1d6824d4982d", size = 193015, upload-time = "2026-01-14T12:55:26.04Z" }, + { url = "https://files.pythonhosted.org/packages/88/a0/92b6bd060e720d7a31ed474d046a69bd55334ec05e9c446d228c4b806ae3/librt-0.7.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f11b300027ce19a34f6d24ebb0a25fd0e24a9d53353225a5c1e6cadbf2916b2e", size = 192038, upload-time = "2026-01-14T12:55:27.208Z" }, + { url = "https://files.pythonhosted.org/packages/06/bb/6f4c650253704279c3a214dad188101d1b5ea23be0606628bc6739456624/librt-0.7.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4adc73614f0d3c97874f02f2c7fd2a27854e7e24ad532ea6b965459c5b757eca", size = 186006, upload-time = "2026-01-14T12:55:28.594Z" }, + { url = "https://files.pythonhosted.org/packages/dc/00/1c409618248d43240cadf45f3efb866837fa77e9a12a71481912135eb481/librt-0.7.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60c299e555f87e4c01b2eca085dfccda1dde87f5a604bb45c2906b8305819a93", size = 206888, upload-time = "2026-01-14T12:55:30.214Z" }, + { url = "https://files.pythonhosted.org/packages/d9/83/b2cfe8e76ff5c1c77f8a53da3d5de62d04b5ebf7cf913e37f8bca43b5d07/librt-0.7.8-cp313-cp313-win32.whl", hash = "sha256:b09c52ed43a461994716082ee7d87618096851319bf695d57ec123f2ab708951", size = 44126, upload-time = "2026-01-14T12:55:31.44Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0b/c59d45de56a51bd2d3a401fc63449c0ac163e4ef7f523ea8b0c0dee86ec5/librt-0.7.8-cp313-cp313-win_amd64.whl", hash = "sha256:f8f4a901a3fa28969d6e4519deceab56c55a09d691ea7b12ca830e2fa3461e34", size = 50262, upload-time = "2026-01-14T12:55:33.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/b9/973455cec0a1ec592395250c474164c4a58ebf3e0651ee920fef1a2623f1/librt-0.7.8-cp313-cp313-win_arm64.whl", hash = "sha256:43d4e71b50763fcdcf64725ac680d8cfa1706c928b844794a7aa0fa9ac8e5f09", size = 43600, upload-time = "2026-01-14T12:55:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/1a/73/fa8814c6ce2d49c3827829cadaa1589b0bf4391660bd4510899393a23ebc/librt-0.7.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:be927c3c94c74b05128089a955fba86501c3b544d1d300282cc1b4bd370cb418", size = 57049, upload-time = "2026-01-14T12:55:35.056Z" }, + { url = "https://files.pythonhosted.org/packages/53/fe/f6c70956da23ea235fd2e3cc16f4f0b4ebdfd72252b02d1164dd58b4e6c3/librt-0.7.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7b0803e9008c62a7ef79058233db7ff6f37a9933b8f2573c05b07ddafa226611", size = 58689, upload-time = "2026-01-14T12:55:36.078Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4d/7a2481444ac5fba63050d9abe823e6bc16896f575bfc9c1e5068d516cdce/librt-0.7.8-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:79feb4d00b2a4e0e05c9c56df707934f41fcb5fe53fd9efb7549068d0495b758", size = 166808, upload-time = "2026-01-14T12:55:37.595Z" }, + { url = "https://files.pythonhosted.org/packages/ac/3c/10901d9e18639f8953f57c8986796cfbf4c1c514844a41c9197cf87cb707/librt-0.7.8-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9122094e3f24aa759c38f46bd8863433820654927370250f460ae75488b66ea", size = 175614, upload-time = "2026-01-14T12:55:38.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/01/5cbdde0951a5090a80e5ba44e6357d375048123c572a23eecfb9326993a7/librt-0.7.8-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e03bea66af33c95ce3addf87a9bf1fcad8d33e757bc479957ddbc0e4f7207ac", size = 189955, upload-time = "2026-01-14T12:55:39.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/b4/e80528d2f4b7eaf1d437fcbd6fc6ba4cbeb3e2a0cb9ed5a79f47c7318706/librt-0.7.8-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f1ade7f31675db00b514b98f9ab9a7698c7282dad4be7492589109471852d398", size = 189370, upload-time = "2026-01-14T12:55:41.057Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ab/938368f8ce31a9787ecd4becb1e795954782e4312095daf8fd22420227c8/librt-0.7.8-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a14229ac62adcf1b90a15992f1ab9c69ae8b99ffb23cb64a90878a6e8a2f5b81", size = 183224, upload-time = "2026-01-14T12:55:42.328Z" }, + { url = "https://files.pythonhosted.org/packages/3c/10/559c310e7a6e4014ac44867d359ef8238465fb499e7eb31b6bfe3e3f86f5/librt-0.7.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5bcaaf624fd24e6a0cb14beac37677f90793a96864c67c064a91458611446e83", size = 203541, upload-time = "2026-01-14T12:55:43.501Z" }, + { url = "https://files.pythonhosted.org/packages/f8/db/a0db7acdb6290c215f343835c6efda5b491bb05c3ddc675af558f50fdba3/librt-0.7.8-cp314-cp314-win32.whl", hash = "sha256:7aa7d5457b6c542ecaed79cec4ad98534373c9757383973e638ccced0f11f46d", size = 40657, upload-time = "2026-01-14T12:55:44.668Z" }, + { url = "https://files.pythonhosted.org/packages/72/e0/4f9bdc2a98a798511e81edcd6b54fe82767a715e05d1921115ac70717f6f/librt-0.7.8-cp314-cp314-win_amd64.whl", hash = "sha256:3d1322800771bee4a91f3b4bd4e49abc7d35e65166821086e5afd1e6c0d9be44", size = 46835, upload-time = "2026-01-14T12:55:45.655Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3d/59c6402e3dec2719655a41ad027a7371f8e2334aa794ed11533ad5f34969/librt-0.7.8-cp314-cp314-win_arm64.whl", hash = "sha256:5363427bc6a8c3b1719f8f3845ea53553d301382928a86e8fab7984426949bce", size = 39885, upload-time = "2026-01-14T12:55:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/4e/9c/2481d80950b83085fb14ba3c595db56330d21bbc7d88a19f20165f3538db/librt-0.7.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:ca916919793a77e4a98d4a1701e345d337ce53be4a16620f063191f7322ac80f", size = 59161, upload-time = "2026-01-14T12:55:48.45Z" }, + { url = "https://files.pythonhosted.org/packages/96/79/108df2cfc4e672336765d54e3ff887294c1cc36ea4335c73588875775527/librt-0.7.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:54feb7b4f2f6706bb82325e836a01be805770443e2400f706e824e91f6441dde", size = 61008, upload-time = "2026-01-14T12:55:49.527Z" }, + { url = "https://files.pythonhosted.org/packages/46/f2/30179898f9994a5637459d6e169b6abdc982012c0a4b2d4c26f50c06f911/librt-0.7.8-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:39a4c76fee41007070f872b648cc2f711f9abf9a13d0c7162478043377b52c8e", size = 187199, upload-time = "2026-01-14T12:55:50.587Z" }, + { url = "https://files.pythonhosted.org/packages/b4/da/f7563db55cebdc884f518ba3791ad033becc25ff68eb70902b1747dc0d70/librt-0.7.8-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac9c8a458245c7de80bc1b9765b177055efff5803f08e548dd4bb9ab9a8d789b", size = 198317, upload-time = "2026-01-14T12:55:51.991Z" }, + { url = "https://files.pythonhosted.org/packages/b3/6c/4289acf076ad371471fa86718c30ae353e690d3de6167f7db36f429272f1/librt-0.7.8-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b67aa7eff150f075fda09d11f6bfb26edffd300f6ab1666759547581e8f666", size = 210334, upload-time = "2026-01-14T12:55:53.682Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7f/377521ac25b78ac0a5ff44127a0360ee6d5ddd3ce7327949876a30533daa/librt-0.7.8-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:535929b6eff670c593c34ff435d5440c3096f20fa72d63444608a5aef64dd581", size = 211031, upload-time = "2026-01-14T12:55:54.827Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/e1e96c3e20b23d00cf90f4aad48f0deb4cdfec2f0ed8380d0d85acf98bbf/librt-0.7.8-cp314-cp314t-musllinux_1_2_i686.whl", hash = 
"sha256:63937bd0f4d1cb56653dc7ae900d6c52c41f0015e25aaf9902481ee79943b33a", size = 204581, upload-time = "2026-01-14T12:55:56.811Z" }, + { url = "https://files.pythonhosted.org/packages/43/71/0f5d010e92ed9747e14bef35e91b6580533510f1e36a8a09eb79ee70b2f0/librt-0.7.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf243da9e42d914036fd362ac3fa77d80a41cadcd11ad789b1b5eec4daaf67ca", size = 224731, upload-time = "2026-01-14T12:55:58.175Z" }, + { url = "https://files.pythonhosted.org/packages/22/f0/07fb6ab5c39a4ca9af3e37554f9d42f25c464829254d72e4ebbd81da351c/librt-0.7.8-cp314-cp314t-win32.whl", hash = "sha256:171ca3a0a06c643bd0a2f62a8944e1902c94aa8e5da4db1ea9a8daf872685365", size = 41173, upload-time = "2026-01-14T12:55:59.315Z" }, + { url = "https://files.pythonhosted.org/packages/24/d4/7e4be20993dc6a782639625bd2f97f3c66125c7aa80c82426956811cfccf/librt-0.7.8-cp314-cp314t-win_amd64.whl", hash = "sha256:445b7304145e24c60288a2f172b5ce2ca35c0f81605f5299f3fa567e189d2e32", size = 47668, upload-time = "2026-01-14T12:56:00.261Z" }, + { url = "https://files.pythonhosted.org/packages/fc/85/69f92b2a7b3c0f88ffe107c86b952b397004b5b8ea5a81da3d9c04c04422/librt-0.7.8-cp314-cp314t-win_arm64.whl", hash = "sha256:8766ece9de08527deabcd7cb1b4f1a967a385d26e33e536d6d8913db6ef74f06", size = 40550, upload-time = "2026-01-14T12:56:01.542Z" }, ] [[package]] name = "markupsafe" version = "3.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 }, - { url = 
"https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 }, - { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 }, - { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 }, - { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 }, - { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 }, - { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 }, - { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 }, - { url = 
"https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 }, - { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 }, - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, - { url = 
"https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, - { url = 
"https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, - { url = 
"https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, - { url = 
"https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, - { url = 
"https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, +sdist = { url = 
"https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, ] [[package]] name = "more-itertools" version = "10.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ce/a0/834b0cebabbfc7e311f30b46c8188790a37f89fc8d756660346fe5abfd09/more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3", size = 127671 } +sdist = { url = "https://files.pythonhosted.org/packages/ce/a0/834b0cebabbfc7e311f30b46c8188790a37f89fc8d756660346fe5abfd09/more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3", size = 127671, upload-time = "2025-04-22T14:17:41.838Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278 }, + { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = 
"sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, ] [[package]] name = "mypy" -version = "1.15.0" +version = "1.19.1" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, { name = "mypy-extensions" }, + { name = "pathspec" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13", size = 10738433 }, - { url = "https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559", size = 9861472 }, - { url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b", size = 11611424 }, - { url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3", size = 12365450 }, - { url = 
"https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b", size = 12551765 }, - { url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828", size = 9274701 }, - { url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f", size = 10662338 }, - { url = "https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5", size = 9787540 }, - { url = "https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e", size = 11538051 }, - { url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c", size = 12286751 }, - { url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f", size = 12421783 }, - { url = 
"https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f", size = 9265618 }, - { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 }, - { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 }, - { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 }, - { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 }, - { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 }, - { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 }, - { url = 
"https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 }, - { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 }, - { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 }, - { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 }, - { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 }, - { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 }, - { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 }, +sdist = { url = 
"https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec", size = 13101333, upload-time = "2025-12-15T05:03:03.28Z" }, + { url = "https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b", size = 12164102, upload-time = "2025-12-15T05:02:33.611Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/952928dd081bf88a83a5ccd49aaecfcd18fd0d2710c7ff07b8fb6f7032b9/mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6", size = 12765799, upload-time = "2025-12-15T05:03:28.44Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74", size = 13522149, upload-time = "2025-12-15T05:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/33a294b56aaad2b338d203e3a1d8b453637ac36cb278b45005e0901cf148/mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1", size = 13810105, upload-time = "2025-12-15T05:02:40.327Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/fd/3e82603a0cb66b67c5e7abababce6bf1a929ddf67bf445e652684af5c5a0/mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac", size = 10057200, upload-time = "2025-12-15T05:02:51.012Z" }, + { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" }, + { url = "https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" }, + { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = "2025-12-15T05:03:15.606Z" }, + { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, ] [[package]] name = "mypy-extensions" version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "no-implicit-optional" +version = "1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "libcst" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/a2/4f801596575d59bfe745eb8c22ddf8a4bfcdc725db42a4ab160e4f7606a2/no_implicit_optional-1.4.tar.gz", hash = "sha256:92d55c220c5bf8a05391e182767395f9b24e245123ce36d14719bdb1dd975185", size = 5482, upload-time = 
"2023-06-21T06:42:16.588Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/f9/c98a9e7b5f2f42fa3fb2f64f9fa4fea7cc2a226cf1cf6993da9cc32e3630/no_implicit_optional-1.4-py3-none-any.whl", hash = "sha256:452118300f26222f631dd6976a1ea9e17822df74b7da587642bdebd30b02edbe", size = 4773, upload-time = "2023-06-21T06:42:15.573Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963 }, + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] [[package]] name = "opentelemetry-api" -version = "1.33.1" +version = "1.39.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "importlib-metadata" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/8d/1f5a45fbcb9a7d87809d460f09dc3399e3fbd31d7f3e14888345e9d29951/opentelemetry_api-1.33.1.tar.gz", hash = "sha256:1c6055fc0a2d3f23a50c7e17e16ef75ad489345fd3df1f8b8af7c0bbf8a109e8", size = 65002 } +sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = 
"sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/44/4c45a34def3506122ae61ad684139f0bbc4e00c39555d4f7e20e0e001c8a/opentelemetry_api-1.33.1-py3-none-any.whl", hash = "sha256:4db83ebcf7ea93e64637ec6ee6fabee45c5cbe4abd9cf3da95c43828ddb50b83", size = 65771 }, + { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, ] [[package]] name = "opentelemetry-sdk" -version = "1.33.1" +version = "1.39.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/12/909b98a7d9b110cce4b28d49b2e311797cffdce180371f35eba13a72dd00/opentelemetry_sdk-1.33.1.tar.gz", hash = "sha256:85b9fcf7c3d23506fbc9692fd210b8b025a1920535feec50bd54ce203d57a531", size = 161885 } +sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = "sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/8e/ae2d0742041e0bd7fe0d2dcc5e7cce51dcf7d3961a26072d5b43cc8fa2a7/opentelemetry_sdk-1.33.1-py3-none-any.whl", hash = "sha256:19ea73d9a01be29cacaa5d6c8ce0adc0b7f7b4d58cc52f923e4413609f670112", size = 118950 }, + { url = "https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = 
"sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.54b1" +version = "0.60b1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" }, +] + +[[package]] +name = "outcome" +version = "1.3.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/2c/d7990fc1ffc82889d466e7cd680788ace44a26789809924813b164344393/opentelemetry_semantic_conventions-0.54b1.tar.gz", hash = "sha256:d1cecedae15d19bdaafca1e56b29a66aa286f50b5d08f036a145c7f3e9ef9cee", size = 118642 } +sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060, upload-time = "2023-10-26T04:26:04.361Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/80/08b1698c52ff76d96ba440bf15edc2f4bc0a279868778928e947c1004bdd/opentelemetry_semantic_conventions-0.54b1-py3-none-any.whl", hash = 
"sha256:29dab644a7e435b58d3a3918b58c333c92686236b30f7891d5e51f02933ca60d", size = 194938 }, + { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, upload-time = "2023-10-26T04:26:02.532Z" }, ] [[package]] name = "packaging" -version = "24.2" +version = "25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] [[package]] name = "pathspec" version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +sdist = { url = 
"https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, ] [[package]] name = "platformdirs" version = "4.3.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362 } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567 }, + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = 
"2025-05-07T22:47:40.376Z" }, ] [[package]] name = "pluggy" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, +] + +[[package]] +name = "proto-plus" +version = "1.26.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = "2025-03-10T15:54:37.335Z" }, +] + +[[package]] +name = "protobuf" +version = "6.33.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, ] [[package]] name = "pydantic" -version = "2.11.5" +version = "2.12.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -610,101 +1597,168 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/f0/86/8ce9040065e8f924d642c58e4a344e33163a07f6b57f836d0d734e0ad3fb/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", size = 787102 } +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/69/831ed22b38ff9b4b64b66569f0e5b7b97cf3638346eb95a2147fdb49ad5f/pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7", size = 444229 }, + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817 }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357 }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011 }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730 }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178 }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462 }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652 }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306 }, - { url = 
"https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720 }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915 }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884 }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496 }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019 }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584 }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071 }, - { url = 
"https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823 }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792 }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338 }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998 }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200 }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890 }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359 }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883 }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074 }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538 }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909 }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786 }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000 }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996 }, - 
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957 }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199 }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296 }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109 }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028 }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044 }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881 }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034 }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187 }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628 }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866 }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894 }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688 }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808 }, - 
{ url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580 }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859 }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810 }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498 }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611 }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924 }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196 }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389 }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223 }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473 }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269 }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921 }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982 }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412 }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749 }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527 }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225 }, - { url = 
"https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490 }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525 }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446 }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678 }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200 }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123 }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852 }, - { 
url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484 }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896 }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475 }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013 }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715 }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757 }, +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = 
"2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 
2350956, upload-time = "2025-11-04T13:39:15.889Z" }, + { url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = 
"2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = 
"2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = 
"2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" }, + { url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name 
= "pyflakes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/45/dc/fd034dc20b4b264b3d015808458391acbf9df40b1e54750ef175d39180b1/pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58", size = 64669, upload-time = "2025-06-20T18:45:27.834Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/2f/81d580a0fb83baeb066698975cb14a618bdbed7720678566f1b046a95fe8/pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f", size = 63551, upload-time = "2025-06-20T18:45:26.937Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyjwt" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5a/b46fa56bf322901eee5b0454a34343cdbdae202cd421775a8ee4e42fd519/pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623", size = 98019, upload-time = "2026-01-30T19:59:55.694Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = 
"sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" }, +] + +[[package]] +name = "pymysql" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/ce59b5e5ed4ce8512f879ff1fa5ab699d211ae2495f1adaa5fbba2a1eada/pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0", size = 47678, upload-time = "2024-05-21T11:03:43.722Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/94/e4181a1f6286f545507528c78016e00065ea913276888db2262507693ce5/PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", size = 44972, upload-time = "2024-05-21T11:03:41.216Z" }, ] [[package]] name = "pytest" -version = "8.3.5" +version = "9.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -712,328 +1766,524 @@ dependencies = [ { name = "iniconfig" }, { name = "packaging" }, { name = "pluggy" }, + { name = "pygments" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", 
size = 343634 }, + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, ] [[package]] name = "pytest-asyncio" -version = "0.26.0" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8e/c4/453c52c659521066969523e87d85d54139bbd17b78f09532fb8eb8cdb58e/pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f", size = 54156 } +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/7f/338843f449ace853647ace35870874f69a764d251872ed1b4de9f234822c/pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", size = 19694 }, + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, ] [[package]] name = "pytest-cov" -version = "6.1.1" +version = "7.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/25/69/5f1e57f6c5a39f81411b550027bf72842c4567ff5fd572bed1edc9e4b5d9/pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a", size = 66857 } +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/d0/def53b4a790cfb21483016430ed828f64830dd981ebe1089971cd10cab25/pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde", size = 23841 }, + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, ] [[package]] name = "pytest-mock" -version = "3.14.0" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "execnet" }, 
{ name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/90/a955c3ab35ccd41ad4de556596fa86685bf4fc5ffcc62d22d856cfd4e29a/pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0", size = 32814 } +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/3b/b26f90f74e2986a82df6e7ac7e319b8ea7ccece1caec9f8ab6104dc70603/pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", size = 9863 }, + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[[package]] +name = "pyupgrade" +version = "3.21.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tokenize-rt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7f/a1/dc63caaeed232b1c58eae1b7a75f262d64ab8435882f696ffa9b58c0c415/pyupgrade-3.21.2.tar.gz", hash = "sha256:1a361bea39deda78d1460f65d9dd548d3a36ff8171d2482298539b9dc11c9c06", size = 45455, upload-time = "2025-11-19T00:39:48.012Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/8c/433dac11910989a90c40b10149d07ef7224232236971a562d3976790ec53/pyupgrade-3.21.2-py2.py3-none-any.whl", hash = "sha256:2ac7b95cbd176475041e4dfe8ef81298bd4654a244f957167bd68af37d52be9f", size = 62814, upload-time = "2025-11-19T00:39:46.958Z" }, ] [[package]] name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, - { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, - { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, - { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, - { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 }, - { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, - { url = 
"https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, - { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, - { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, - { url = 
"https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, - { url = 
"https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, - { url = 
"https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, - { url = 
"https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { 
url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "pyyaml-ft" +version = "8.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/eb/5a0d575de784f9a1f94e2b1288c6886f13f34185e13117ed530f32b6f8a8/pyyaml_ft-8.0.0.tar.gz", hash = "sha256:0c947dce03954c7b5d38869ed4878b2e6ff1d44b08a0d84dc83fdad205ae39ab", size = 141057, upload-time = "2025-06-10T15:32:15.613Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/ba/a067369fe61a2e57fb38732562927d5bae088c73cb9bb5438736a9555b29/pyyaml_ft-8.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c1306282bc958bfda31237f900eb52c9bedf9b93a11f82e1aab004c9a5657a6", size = 187027, upload-time = "2025-06-10T15:31:48.722Z" }, + { url = "https://files.pythonhosted.org/packages/ad/c5/a3d2020ce5ccfc6aede0d45bcb870298652ac0cf199f67714d250e0cdf39/pyyaml_ft-8.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:30c5f1751625786c19de751e3130fc345ebcba6a86f6bddd6e1285342f4bbb69", size = 176146, upload-time = "2025-06-10T15:31:50.584Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bb/23a9739291086ca0d3189eac7cd92b4d00e9fdc77d722ab610c35f9a82ba/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fa992481155ddda2e303fcc74c79c05eddcdbc907b888d3d9ce3ff3e2adcfb0", size = 746792, upload-time = "2025-06-10T15:31:52.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/c2/e8825f4ff725b7e560d62a3609e31d735318068e1079539ebfde397ea03e/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cec6c92b4207004b62dfad1f0be321c9f04725e0f271c16247d8b39c3bf3ea42", size = 786772, upload-time = "2025-06-10T15:31:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/35/be/58a4dcae8854f2fdca9b28d9495298fd5571a50d8430b1c3033ec95d2d0e/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06237267dbcab70d4c0e9436d8f719f04a51123f0ca2694c00dd4b68c338e40b", size = 778723, upload-time = "2025-06-10T15:31:56.093Z" }, + { url = "https://files.pythonhosted.org/packages/86/ed/fed0da92b5d5d7340a082e3802d84c6dc9d5fa142954404c41a544c1cb92/pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8a7f332bc565817644cdb38ffe4739e44c3e18c55793f75dddb87630f03fc254", size = 758478, upload-time = "2025-06-10T15:31:58.314Z" }, + { url = "https://files.pythonhosted.org/packages/f0/69/ac02afe286275980ecb2dcdc0156617389b7e0c0a3fcdedf155c67be2b80/pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d10175a746be65f6feb86224df5d6bc5c049ebf52b89a88cf1cd78af5a367a8", size = 799159, upload-time = "2025-06-10T15:31:59.675Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ac/c492a9da2e39abdff4c3094ec54acac9747743f36428281fb186a03fab76/pyyaml_ft-8.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:58e1015098cf8d8aec82f360789c16283b88ca670fe4275ef6c48c5e30b22a96", size = 158779, upload-time = "2025-06-10T15:32:01.029Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9b/41998df3298960d7c67653669f37710fa2d568a5fc933ea24a6df60acaf6/pyyaml_ft-8.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5f3e2ceb790d50602b2fd4ec37abbd760a8c778e46354df647e7c5a4ebb", size = 191331, upload-time = "2025-06-10T15:32:02.602Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/16/2710c252ee04cbd74d9562ebba709e5a284faeb8ada88fcda548c9191b47/pyyaml_ft-8.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8d445bf6ea16bb93c37b42fdacfb2f94c8e92a79ba9e12768c96ecde867046d1", size = 182879, upload-time = "2025-06-10T15:32:04.466Z" }, + { url = "https://files.pythonhosted.org/packages/9a/40/ae8163519d937fa7bfa457b6f78439cc6831a7c2b170e4f612f7eda71815/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c56bb46b4fda34cbb92a9446a841da3982cdde6ea13de3fbd80db7eeeab8b49", size = 811277, upload-time = "2025-06-10T15:32:06.214Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/28d82dbff7f87b96f0eeac79b7d972a96b4980c1e445eb6a857ba91eda00/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dab0abb46eb1780da486f022dce034b952c8ae40753627b27a626d803926483b", size = 831650, upload-time = "2025-06-10T15:32:08.076Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/161c4566facac7d75a9e182295c223060373d4116dead9cc53a265de60b9/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd48d639cab5ca50ad957b6dd632c7dd3ac02a1abe0e8196a3c24a52f5db3f7a", size = 815755, upload-time = "2025-06-10T15:32:09.435Z" }, + { url = "https://files.pythonhosted.org/packages/05/10/f42c48fa5153204f42eaa945e8d1fd7c10d6296841dcb2447bf7da1be5c4/pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:052561b89d5b2a8e1289f326d060e794c21fa068aa11255fe71d65baf18a632e", size = 810403, upload-time = "2025-06-10T15:32:11.051Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d2/e369064aa51009eb9245399fd8ad2c562bd0bcd392a00be44b2a824ded7c/pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3bb4b927929b0cb162fb1605392a321e3333e48ce616cdcfa04a839271373255", size = 835581, upload-time = "2025-06-10T15:32:12.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/28/26534bed77109632a956977f60d8519049f545abc39215d086e33a61f1f2/pyyaml_ft-8.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:de04cfe9439565e32f178106c51dd6ca61afaa2907d143835d501d84703d3793", size = 171579, upload-time = "2025-06-10T15:32:14.34Z" }, +] + +[[package]] +name = "requests" +version = "2.32.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, +] + +[[package]] +name = "respx" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" }, +] + +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" 
}, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, ] [[package]] name = "ruff" -version = "0.11.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/53/ae4857030d59286924a8bdb30d213d6ff22d8f0957e738d0289990091dd8/ruff-0.11.11.tar.gz", hash = "sha256:7774173cc7c1980e6bf67569ebb7085989a78a103922fb83ef3dfe230cd0687d", size = 4186707 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/14/f2326676197bab099e2a24473158c21656fbf6a207c65f596ae15acb32b9/ruff-0.11.11-py3-none-linux_armv6l.whl", hash = "sha256:9924e5ae54125ed8958a4f7de320dab7380f6e9fa3195e3dc3b137c6842a0092", size = 10229049 }, - { url = "https://files.pythonhosted.org/packages/9a/f3/bff7c92dd66c959e711688b2e0768e486bbca46b2f35ac319bb6cce04447/ruff-0.11.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8a93276393d91e952f790148eb226658dd275cddfde96c6ca304873f11d2ae4", size = 11053601 }, - { url = "https://files.pythonhosted.org/packages/e2/38/8e1a3efd0ef9d8259346f986b77de0f62c7a5ff4a76563b6b39b68f793b9/ruff-0.11.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6e333dbe2e6ae84cdedefa943dfd6434753ad321764fd937eef9d6b62022bcd", size = 10367421 }, - { url = "https://files.pythonhosted.org/packages/b4/50/557ad9dd4fb9d0bf524ec83a090a3932d284d1a8b48b5906b13b72800e5f/ruff-0.11.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7885d9a5e4c77b24e8c88aba8c80be9255fa22ab326019dac2356cff42089fc6", size = 10581980 }, - { url = "https://files.pythonhosted.org/packages/c4/b2/e2ed82d6e2739ece94f1bdbbd1d81b712d3cdaf69f0a1d1f1a116b33f9ad/ruff-0.11.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b5ab797fcc09121ed82e9b12b6f27e34859e4227080a42d090881be888755d4", size = 10089241 }, - { url = "https://files.pythonhosted.org/packages/3d/9f/b4539f037a5302c450d7c695c82f80e98e48d0d667ecc250e6bdeb49b5c3/ruff-0.11.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e231ff3132c1119ece836487a02785f099a43992b95c2f62847d29bace3c75ac", size = 11699398 }, - { url = "https://files.pythonhosted.org/packages/61/fb/32e029d2c0b17df65e6eaa5ce7aea5fbeaed22dddd9fcfbbf5fe37c6e44e/ruff-0.11.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a97c9babe1d4081037a90289986925726b802d180cca784ac8da2bbbc335f709", size = 12427955 }, - { url = "https://files.pythonhosted.org/packages/6e/e3/160488dbb11f18c8121cfd588e38095ba779ae208292765972f7732bfd95/ruff-0.11.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8c4ddcbe8a19f59f57fd814b8b117d4fcea9bee7c0492e6cf5fdc22cfa563c8", size = 12069803 }, - { url = "https://files.pythonhosted.org/packages/ff/16/3b006a875f84b3d0bff24bef26b8b3591454903f6f754b3f0a318589dcc3/ruff-0.11.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6224076c344a7694c6fbbb70d4f2a7b730f6d47d2a9dc1e7f9d9bb583faf390b", size = 11242630 }, - { url = "https://files.pythonhosted.org/packages/65/0d/0338bb8ac0b97175c2d533e9c8cdc127166de7eb16d028a43c5ab9e75abd/ruff-0.11.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:882821fcdf7ae8db7a951df1903d9cb032bbe838852e5fc3c2b6c3ab54e39875", size = 11507310 }, - { url = "https://files.pythonhosted.org/packages/6f/bf/d7130eb26174ce9b02348b9f86d5874eafbf9f68e5152e15e8e0a392e4a3/ruff-0.11.11-py3-none-musllinux_1_2_aarch64.whl", 
hash = "sha256:dcec2d50756463d9df075a26a85a6affbc1b0148873da3997286caf1ce03cae1", size = 10441144 }, - { url = "https://files.pythonhosted.org/packages/b3/f3/4be2453b258c092ff7b1761987cf0749e70ca1340cd1bfb4def08a70e8d8/ruff-0.11.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99c28505ecbaeb6594701a74e395b187ee083ee26478c1a795d35084d53ebd81", size = 10081987 }, - { url = "https://files.pythonhosted.org/packages/6c/6e/dfa4d2030c5b5c13db158219f2ec67bf333e8a7748dccf34cfa2a6ab9ebc/ruff-0.11.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9263f9e5aa4ff1dec765e99810f1cc53f0c868c5329b69f13845f699fe74f639", size = 11073922 }, - { url = "https://files.pythonhosted.org/packages/ff/f4/f7b0b0c3d32b593a20ed8010fa2c1a01f2ce91e79dda6119fcc51d26c67b/ruff-0.11.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:64ac6f885e3ecb2fdbb71de2701d4e34526651f1e8503af8fb30d4915a3fe345", size = 11568537 }, - { url = "https://files.pythonhosted.org/packages/d2/46/0e892064d0adc18bcc81deed9aaa9942a27fd2cd9b1b7791111ce468c25f/ruff-0.11.11-py3-none-win32.whl", hash = "sha256:1adcb9a18802268aaa891ffb67b1c94cd70578f126637118e8099b8e4adcf112", size = 10536492 }, - { url = "https://files.pythonhosted.org/packages/1b/d9/232e79459850b9f327e9f1dc9c047a2a38a6f9689e1ec30024841fc4416c/ruff-0.11.11-py3-none-win_amd64.whl", hash = "sha256:748b4bb245f11e91a04a4ff0f96e386711df0a30412b9fe0c74d5bdc0e4a531f", size = 11612562 }, - { url = "https://files.pythonhosted.org/packages/ce/eb/09c132cff3cc30b2e7244191dcce69437352d6d6709c0adf374f3e6f476e/ruff-0.11.11-py3-none-win_arm64.whl", hash = "sha256:6c51f136c0364ab1b774767aa8b86331bd8e9d414e2d107db7a2189f35ea1f7b", size = 10735951 }, +version = "0.14.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/06/f71e3a86b2df0dfa2d2f72195941cd09b44f87711cb7fa5193732cb9a5fc/ruff-0.14.14.tar.gz", hash = "sha256:2d0f819c9a90205f3a867dbbd0be083bee9912e170fd7d9704cc8ae45824896b", size = 4515732, upload-time 
= "2026-01-22T22:30:17.527Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/89/20a12e97bc6b9f9f68343952da08a8099c57237aef953a56b82711d55edd/ruff-0.14.14-py3-none-linux_armv6l.whl", hash = "sha256:7cfe36b56e8489dee8fbc777c61959f60ec0f1f11817e8f2415f429552846aed", size = 10467650, upload-time = "2026-01-22T22:30:08.578Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/c5de3fd2d5a831fcae21beda5e3589c0ba67eec8202e992388e4b17a6040/ruff-0.14.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6006a0082336e7920b9573ef8a7f52eec837add1265cc74e04ea8a4368cd704c", size = 10883245, upload-time = "2026-01-22T22:30:04.155Z" }, + { url = "https://files.pythonhosted.org/packages/b8/7c/3c1db59a10e7490f8f6f8559d1db8636cbb13dccebf18686f4e3c9d7c772/ruff-0.14.14-py3-none-macosx_11_0_arm64.whl", hash = "sha256:026c1d25996818f0bf498636686199d9bd0d9d6341c9c2c3b62e2a0198b758de", size = 10231273, upload-time = "2026-01-22T22:30:34.642Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6e/5e0e0d9674be0f8581d1f5e0f0a04761203affce3232c1a1189d0e3b4dad/ruff-0.14.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f666445819d31210b71e0a6d1c01e24447a20b85458eea25a25fe8142210ae0e", size = 10585753, upload-time = "2026-01-22T22:30:31.781Z" }, + { url = "https://files.pythonhosted.org/packages/23/09/754ab09f46ff1884d422dc26d59ba18b4e5d355be147721bb2518aa2a014/ruff-0.14.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c0f18b922c6d2ff9a5e6c3ee16259adc513ca775bcf82c67ebab7cbd9da5bc8", size = 10286052, upload-time = "2026-01-22T22:30:24.827Z" }, + { url = "https://files.pythonhosted.org/packages/c8/cc/e71f88dd2a12afb5f50733851729d6b571a7c3a35bfdb16c3035132675a0/ruff-0.14.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1629e67489c2dea43e8658c3dba659edbfd87361624b4040d1df04c9740ae906", size = 11043637, upload-time = "2026-01-22T22:30:13.239Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/b2/397245026352494497dac935d7f00f1468c03a23a0c5db6ad8fc49ca3fb2/ruff-0.14.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:27493a2131ea0f899057d49d303e4292b2cae2bb57253c1ed1f256fbcd1da480", size = 12194761, upload-time = "2026-01-22T22:30:22.542Z" }, + { url = "https://files.pythonhosted.org/packages/5b/06/06ef271459f778323112c51b7587ce85230785cd64e91772034ddb88f200/ruff-0.14.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ff589aab3f5b539e35db38425da31a57521efd1e4ad1ae08fc34dbe30bd7df", size = 12005701, upload-time = "2026-01-22T22:30:20.499Z" }, + { url = "https://files.pythonhosted.org/packages/41/d6/99364514541cf811ccc5ac44362f88df66373e9fec1b9d1c4cc830593fe7/ruff-0.14.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc12d74eef0f29f51775f5b755913eb523546b88e2d733e1d701fe65144e89b", size = 11282455, upload-time = "2026-01-22T22:29:59.679Z" }, + { url = "https://files.pythonhosted.org/packages/ca/71/37daa46f89475f8582b7762ecd2722492df26421714a33e72ccc9a84d7a5/ruff-0.14.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb8481604b7a9e75eff53772496201690ce2687067e038b3cc31aaf16aa0b974", size = 11215882, upload-time = "2026-01-22T22:29:57.032Z" }, + { url = "https://files.pythonhosted.org/packages/2c/10/a31f86169ec91c0705e618443ee74ede0bdd94da0a57b28e72db68b2dbac/ruff-0.14.14-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:14649acb1cf7b5d2d283ebd2f58d56b75836ed8c6f329664fa91cdea19e76e66", size = 11180549, upload-time = "2026-01-22T22:30:27.175Z" }, + { url = "https://files.pythonhosted.org/packages/fd/1e/c723f20536b5163adf79bdd10c5f093414293cdf567eed9bdb7b83940f3f/ruff-0.14.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8058d2145566510790eab4e2fad186002e288dec5e0d343a92fe7b0bc1b3e13", size = 10543416, upload-time = "2026-01-22T22:30:01.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/34/8a84cea7e42c2d94ba5bde1d7a4fae164d6318f13f933d92da6d7c2041ff/ruff-0.14.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e651e977a79e4c758eb807f0481d673a67ffe53cfa92209781dfa3a996cf8412", size = 10285491, upload-time = "2026-01-22T22:30:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/55/ef/b7c5ea0be82518906c978e365e56a77f8de7678c8bb6651ccfbdc178c29f/ruff-0.14.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cc8b22da8d9d6fdd844a68ae937e2a0adf9b16514e9a97cc60355e2d4b219fc3", size = 10733525, upload-time = "2026-01-22T22:30:06.499Z" }, + { url = "https://files.pythonhosted.org/packages/6a/5b/aaf1dfbcc53a2811f6cc0a1759de24e4b03e02ba8762daabd9b6bd8c59e3/ruff-0.14.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:16bc890fb4cc9781bb05beb5ab4cd51be9e7cb376bf1dd3580512b24eb3fda2b", size = 11315626, upload-time = "2026-01-22T22:30:36.848Z" }, + { url = "https://files.pythonhosted.org/packages/2c/aa/9f89c719c467dfaf8ad799b9bae0df494513fb21d31a6059cb5870e57e74/ruff-0.14.14-py3-none-win32.whl", hash = "sha256:b530c191970b143375b6a68e6f743800b2b786bbcf03a7965b06c4bf04568167", size = 10502442, upload-time = "2026-01-22T22:30:38.93Z" }, + { url = "https://files.pythonhosted.org/packages/87/44/90fa543014c45560cae1fffc63ea059fb3575ee6e1cb654562197e5d16fb/ruff-0.14.14-py3-none-win_amd64.whl", hash = "sha256:3dde1435e6b6fe5b66506c1dff67a421d0b7f6488d466f651c07f4cab3bf20fd", size = 11630486, upload-time = "2026-01-22T22:30:10.852Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6a/40fee331a52339926a92e17ae748827270b288a35ef4a15c9c8f2ec54715/ruff-0.14.14-py3-none-win_arm64.whl", hash = "sha256:56e6981a98b13a32236a72a8da421d7839221fa308b223b9283312312e5ac76c", size = 10920448, upload-time = "2026-01-22T22:30:15.417Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.42" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/03/a0af991e3a43174d6b83fca4fb399745abceddd1171bdabae48ce877ff47/sqlalchemy-2.0.42.tar.gz", hash = "sha256:160bedd8a5c28765bd5be4dec2d881e109e33b34922e50a3b881a7681773ac5f", size = 9749972, upload-time = "2025-07-29T12:48:09.323Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/12/33ff43214c2c6cc87499b402fe419869d2980a08101c991daae31345e901/sqlalchemy-2.0.42-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:172b244753e034d91a826f80a9a70f4cbac690641207f2217f8404c261473efe", size = 2130469, upload-time = "2025-07-29T13:25:15.215Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/c4/4d2f2c21ddde9a2c7f7b258b202d6af0bac9fc5abfca5de367461c86d766/sqlalchemy-2.0.42-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be28f88abd74af8519a4542185ee80ca914933ca65cdfa99504d82af0e4210df", size = 2120393, upload-time = "2025-07-29T13:25:16.367Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0d/5ff2f2dfbac10e4a9ade1942f8985ffc4bd8f157926b1f8aed553dfe3b88/sqlalchemy-2.0.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98b344859d282fde388047f1710860bb23f4098f705491e06b8ab52a48aafea9", size = 3206173, upload-time = "2025-07-29T13:29:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/1f/59/71493fe74bd76a773ae8fa0c50bfc2ccac1cbf7cfa4f9843ad92897e6dcf/sqlalchemy-2.0.42-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97978d223b11f1d161390a96f28c49a13ce48fdd2fed7683167c39bdb1b8aa09", size = 3206910, upload-time = "2025-07-29T13:24:50.58Z" }, + { url = "https://files.pythonhosted.org/packages/a9/51/01b1d85bbb492a36b25df54a070a0f887052e9b190dff71263a09f48576b/sqlalchemy-2.0.42-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e35b9b000c59fcac2867ab3a79fc368a6caca8706741beab3b799d47005b3407", size = 3145479, upload-time = "2025-07-29T13:29:02.3Z" }, + { url = "https://files.pythonhosted.org/packages/fa/78/10834f010e2a3df689f6d1888ea6ea0074ff10184e6a550b8ed7f9189a89/sqlalchemy-2.0.42-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bc7347ad7a7b1c78b94177f2d57263113bb950e62c59b96ed839b131ea4234e1", size = 3169605, upload-time = "2025-07-29T13:24:52.135Z" }, + { url = "https://files.pythonhosted.org/packages/0c/75/e6fdd66d237582c8488dd1dfa90899f6502822fbd866363ab70e8ac4a2ce/sqlalchemy-2.0.42-cp310-cp310-win32.whl", hash = "sha256:739e58879b20a179156b63aa21f05ccacfd3e28e08e9c2b630ff55cd7177c4f1", size = 2098759, upload-time = "2025-07-29T13:23:55.809Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/a8/366db192641c2c2d1ea8977e7c77b65a0d16a7858907bb76ea68b9dd37af/sqlalchemy-2.0.42-cp310-cp310-win_amd64.whl", hash = "sha256:1aef304ada61b81f1955196f584b9e72b798ed525a7c0b46e09e98397393297b", size = 2122423, upload-time = "2025-07-29T13:23:56.968Z" }, + { url = "https://files.pythonhosted.org/packages/ea/3c/7bfd65f3c2046e2fb4475b21fa0b9d7995f8c08bfa0948df7a4d2d0de869/sqlalchemy-2.0.42-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c34100c0b7ea31fbc113c124bcf93a53094f8951c7bf39c45f39d327bad6d1e7", size = 2133779, upload-time = "2025-07-29T13:25:18.446Z" }, + { url = "https://files.pythonhosted.org/packages/66/17/19be542fe9dd64a766090e90e789e86bdaa608affda6b3c1e118a25a2509/sqlalchemy-2.0.42-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad59dbe4d1252448c19d171dfba14c74e7950b46dc49d015722a4a06bfdab2b0", size = 2123843, upload-time = "2025-07-29T13:25:19.749Z" }, + { url = "https://files.pythonhosted.org/packages/14/fc/83e45fc25f0acf1c26962ebff45b4c77e5570abb7c1a425a54b00bcfa9c7/sqlalchemy-2.0.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9187498c2149919753a7fd51766ea9c8eecdec7da47c1b955fa8090bc642eaa", size = 3294824, upload-time = "2025-07-29T13:29:03.879Z" }, + { url = "https://files.pythonhosted.org/packages/b9/81/421efc09837104cd1a267d68b470e5b7b6792c2963b8096ca1e060ba0975/sqlalchemy-2.0.42-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f092cf83ebcafba23a247f5e03f99f5436e3ef026d01c8213b5eca48ad6efa9", size = 3294662, upload-time = "2025-07-29T13:24:53.715Z" }, + { url = "https://files.pythonhosted.org/packages/2f/ba/55406e09d32ed5e5f9e8aaec5ef70c4f20b4ae25b9fa9784f4afaa28e7c3/sqlalchemy-2.0.42-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc6afee7e66fdba4f5a68610b487c1f754fccdc53894a9567785932dbb6a265e", size = 3229413, upload-time = "2025-07-29T13:29:05.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/c4/df596777fce27bde2d1a4a2f5a7ddea997c0c6d4b5246aafba966b421cc0/sqlalchemy-2.0.42-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:260ca1d2e5910f1f1ad3fe0113f8fab28657cee2542cb48c2f342ed90046e8ec", size = 3255563, upload-time = "2025-07-29T13:24:55.17Z" }, + { url = "https://files.pythonhosted.org/packages/16/ed/b9c4a939b314400f43f972c9eb0091da59d8466ef9c51d0fd5b449edc495/sqlalchemy-2.0.42-cp311-cp311-win32.whl", hash = "sha256:2eb539fd83185a85e5fcd6b19214e1c734ab0351d81505b0f987705ba0a1e231", size = 2098513, upload-time = "2025-07-29T13:23:58.946Z" }, + { url = "https://files.pythonhosted.org/packages/91/72/55b0c34e39feb81991aa3c974d85074c356239ac1170dfb81a474b4c23b3/sqlalchemy-2.0.42-cp311-cp311-win_amd64.whl", hash = "sha256:9193fa484bf00dcc1804aecbb4f528f1123c04bad6a08d7710c909750fa76aeb", size = 2123380, upload-time = "2025-07-29T13:24:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/61/66/ac31a9821fc70a7376321fb2c70fdd7eadbc06dadf66ee216a22a41d6058/sqlalchemy-2.0.42-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:09637a0872689d3eb71c41e249c6f422e3e18bbd05b4cd258193cfc7a9a50da2", size = 2132203, upload-time = "2025-07-29T13:29:19.291Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ba/fd943172e017f955d7a8b3a94695265b7114efe4854feaa01f057e8f5293/sqlalchemy-2.0.42-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3cb3ec67cc08bea54e06b569398ae21623534a7b1b23c258883a7c696ae10df", size = 2120373, upload-time = "2025-07-29T13:29:21.049Z" }, + { url = "https://files.pythonhosted.org/packages/ea/a2/b5f7d233d063ffadf7e9fff3898b42657ba154a5bec95a96f44cba7f818b/sqlalchemy-2.0.42-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87e6a5ef6f9d8daeb2ce5918bf5fddecc11cae6a7d7a671fcc4616c47635e01", size = 3317685, upload-time = "2025-07-29T13:26:40.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/00/fcd8daab13a9119d41f3e485a101c29f5d2085bda459154ba354c616bf4e/sqlalchemy-2.0.42-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b718011a9d66c0d2f78e1997755cd965f3414563b31867475e9bc6efdc2281d", size = 3326967, upload-time = "2025-07-29T13:22:31.009Z" }, + { url = "https://files.pythonhosted.org/packages/a3/85/e622a273d648d39d6771157961956991a6d760e323e273d15e9704c30ccc/sqlalchemy-2.0.42-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:16d9b544873fe6486dddbb859501a07d89f77c61d29060bb87d0faf7519b6a4d", size = 3255331, upload-time = "2025-07-29T13:26:42.579Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a0/2c2338b592c7b0a61feffd005378c084b4c01fabaf1ed5f655ab7bd446f0/sqlalchemy-2.0.42-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21bfdf57abf72fa89b97dd74d3187caa3172a78c125f2144764a73970810c4ee", size = 3291791, upload-time = "2025-07-29T13:22:32.454Z" }, + { url = "https://files.pythonhosted.org/packages/41/19/b8a2907972a78285fdce4c880ecaab3c5067eb726882ca6347f7a4bf64f6/sqlalchemy-2.0.42-cp312-cp312-win32.whl", hash = "sha256:78b46555b730a24901ceb4cb901c6b45c9407f8875209ed3c5d6bcd0390a6ed1", size = 2096180, upload-time = "2025-07-29T13:16:08.952Z" }, + { url = "https://files.pythonhosted.org/packages/48/1f/67a78f3dfd08a2ed1c7be820fe7775944f5126080b5027cc859084f8e223/sqlalchemy-2.0.42-cp312-cp312-win_amd64.whl", hash = "sha256:4c94447a016f36c4da80072e6c6964713b0af3c8019e9c4daadf21f61b81ab53", size = 2123533, upload-time = "2025-07-29T13:16:11.705Z" }, + { url = "https://files.pythonhosted.org/packages/e9/7e/25d8c28b86730c9fb0e09156f601d7a96d1c634043bf8ba36513eb78887b/sqlalchemy-2.0.42-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941804f55c7d507334da38133268e3f6e5b0340d584ba0f277dd884197f4ae8c", size = 2127905, upload-time = "2025-07-29T13:29:22.249Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/a1/9d8c93434d1d983880d976400fcb7895a79576bd94dca61c3b7b90b1ed0d/sqlalchemy-2.0.42-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d3d06a968a760ce2aa6a5889fefcbdd53ca935735e0768e1db046ec08cbf01", size = 2115726, upload-time = "2025-07-29T13:29:23.496Z" }, + { url = "https://files.pythonhosted.org/packages/a2/cc/d33646fcc24c87cc4e30a03556b611a4e7bcfa69a4c935bffb923e3c89f4/sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cf10396a8a700a0f38ccd220d940be529c8f64435c5d5b29375acab9267a6c9", size = 3246007, upload-time = "2025-07-29T13:26:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/67/08/4e6c533d4c7f5e7c4cbb6fe8a2c4e813202a40f05700d4009a44ec6e236d/sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cae6c2b05326d7c2c7c0519f323f90e0fb9e8afa783c6a05bb9ee92a90d0f04", size = 3250919, upload-time = "2025-07-29T13:22:33.74Z" }, + { url = "https://files.pythonhosted.org/packages/5c/82/f680e9a636d217aece1b9a8030d18ad2b59b5e216e0c94e03ad86b344af3/sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f50f7b20677b23cfb35b6afcd8372b2feb348a38e3033f6447ee0704540be894", size = 3180546, upload-time = "2025-07-29T13:26:45.648Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a2/8c8f6325f153894afa3775584c429cc936353fb1db26eddb60a549d0ff4b/sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d88a1c0d66d24e229e3938e1ef16ebdbd2bf4ced93af6eff55225f7465cf350", size = 3216683, upload-time = "2025-07-29T13:22:34.977Z" }, + { url = "https://files.pythonhosted.org/packages/39/44/3a451d7fa4482a8ffdf364e803ddc2cfcafc1c4635fb366f169ecc2c3b11/sqlalchemy-2.0.42-cp313-cp313-win32.whl", hash = "sha256:45c842c94c9ad546c72225a0c0d1ae8ef3f7c212484be3d429715a062970e87f", size = 2093990, upload-time = "2025-07-29T13:16:13.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/9e/9bce34f67aea0251c8ac104f7bdb2229d58fb2e86a4ad8807999c4bee34b/sqlalchemy-2.0.42-cp313-cp313-win_amd64.whl", hash = "sha256:eb9905f7f1e49fd57a7ed6269bc567fcbbdac9feadff20ad6bd7707266a91577", size = 2120473, upload-time = "2025-07-29T13:16:14.502Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/ba2546ab09a6adebc521bf3974440dc1d8c06ed342cceb30ed62a8858835/sqlalchemy-2.0.42-py3-none-any.whl", hash = "sha256:defcdff7e661f0043daa381832af65d616e060ddb54d3fe4476f51df7eaa1835", size = 1922072, upload-time = "2025-07-29T13:09:17.061Z" }, +] + +[package.optional-dependencies] +aiomysql = [ + { name = "aiomysql" }, + { name = "greenlet" }, +] +aiosqlite = [ + { name = "aiosqlite" }, + { name = "greenlet" }, + { name = "typing-extensions" }, +] +asyncio = [ + { name = "greenlet" }, +] +postgresql-asyncpg = [ + { name = "asyncpg" }, + { name = "greenlet" }, ] [[package]] name = "sse-starlette" -version = "2.3.5" +version = "3.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/5f/28f45b1ff14bee871bacafd0a97213f7ec70e389939a80c60c0fb72a9fc9/sse_starlette-2.3.5.tar.gz", hash = "sha256:228357b6e42dcc73a427990e2b4a03c023e2495ecee82e14f07ba15077e334b2", size = 17511 } +sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/48/3e49cf0f64961656402c0023edbc51844fe17afe53ab50e958a6dbbbd499/sse_starlette-2.3.5-py3-none-any.whl", hash = "sha256:251708539a335570f10eaaa21d1848a10c42ee6dc3a9cf37ef42266cdb1c52a8", size = 10233 }, + { url = 
"https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" }, ] [[package]] name = "starlette" -version = "0.46.2" +version = "0.50.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846 } +sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037 }, + { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, +] + +[[package]] +name = "tokenize-rt" +version = "6.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/ed/8f07e893132d5051d86a553e749d5c89b2a4776eb3a579b72ed61f8559ca/tokenize_rt-6.2.0.tar.gz", hash = "sha256:8439c042b330c553fdbe1758e4a05c0ed460dbbbb24a606f11f0dee75da4cad6", size = 5476, upload-time = 
"2025-05-23T23:48:00.035Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/f0/3fe8c6e69135a845f4106f2ff8b6805638d4e85c264e70114e8126689587/tokenize_rt-6.2.0-py2.py3-none-any.whl", hash = "sha256:a152bf4f249c847a66497a4a95f63376ed68ac6abf092a2f7cfb29d044ecff44", size = 6004, upload-time = "2025-05-23T23:47:58.812Z" }, ] [[package]] name = "tomli" version = "2.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, - { url = 
"https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, - { url = 
"https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, - { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, - { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, - { url = 
"https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, ] [[package]] name = "tomlkit" -version = "0.13.2" +version = "0.13.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885 } +sdist = { url = "https://files.pythonhosted.org/packages/cc/18/0bbf3884e9eaa38819ebe46a7bd25dcd56b67434402b66a58c4b8e552575/tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1", size = 185207, upload-time = "2025-06-05T07:13:44.947Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 }, + { url = "https://files.pythonhosted.org/packages/bd/75/8539d011f6be8e29f339c42e633aae3cb73bffa95dd0f9adec09b9c58e85/tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0", size = 38901, upload-time = "2025-06-05T07:13:43.546Z" }, +] + +[[package]] +name = "trio" +version = "0.32.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "cffi", marker = "implementation_name != 'pypy' and os_name == 'nt'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "outcome" }, + { name = "sniffio" }, + { name = "sortedcontainers" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/ce/0041ddd9160aac0031bcf5ab786c7640d795c797e67c438e15cfedf815c8/trio-0.32.0.tar.gz", hash = "sha256:150f29ec923bcd51231e1d4c71c7006e65247d68759dd1c19af4ea815a25806b", size = 605323, upload-time = "2025-10-31T07:18:17.466Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/bf/945d527ff706233636c73880b22c7c953f3faeb9d6c7e2e85bfbfd0134a0/trio-0.32.0-py3-none-any.whl", hash = "sha256:4ab65984ef8370b79a76659ec87aa3a30c5c7c83ff250b4de88c29a8ab6123c5", size = 512030, upload-time = "2025-10-31T07:18:15.885Z" }, ] [[package]] name = "trove-classifiers" version = "2025.5.9.12" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/38/04/1cd43f72c241fedcf0d9a18d0783953ee301eac9e5d9db1df0f0f089d9af/trove_classifiers-2025.5.9.12.tar.gz", hash = "sha256:7ca7c8a7a76e2cd314468c677c69d12cc2357711fcab4a60f87994c1589e5cb5", size = 16940 } +sdist = { url = "https://files.pythonhosted.org/packages/38/04/1cd43f72c241fedcf0d9a18d0783953ee301eac9e5d9db1df0f0f089d9af/trove_classifiers-2025.5.9.12.tar.gz", hash = "sha256:7ca7c8a7a76e2cd314468c677c69d12cc2357711fcab4a60f87994c1589e5cb5", size = 16940, upload-time = "2025-05-09T12:04:48.829Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/ef/c6deb083748be3bcad6f471b6ae983950c161890bf5ae1b2af80cc56c530/trove_classifiers-2025.5.9.12-py3-none-any.whl", hash = "sha256:e381c05537adac78881c8fa345fd0e9970159f4e4a04fcc42cfd3129cca640ce", size = 14119 }, + { url = "https://files.pythonhosted.org/packages/92/ef/c6deb083748be3bcad6f471b6ae983950c161890bf5ae1b2af80cc56c530/trove_classifiers-2025.5.9.12-py3-none-any.whl", hash = "sha256:e381c05537adac78881c8fa345fd0e9970159f4e4a04fcc42cfd3129cca640ce", size = 14119, upload-time = "2025-05-09T12:04:46.38Z" }, ] [[package]] name = "typeguard" -version = "4.4.2" +version = "4.4.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" 
}, ] -sdist = { url = "https://files.pythonhosted.org/packages/70/60/8cd6a3d78d00ceeb2193c02b7ed08f063d5341ccdfb24df88e61f383048e/typeguard-4.4.2.tar.gz", hash = "sha256:a6f1065813e32ef365bc3b3f503af8a96f9dd4e0033a02c28c4a4983de8c6c49", size = 75746 } +sdist = { url = "https://files.pythonhosted.org/packages/c7/68/71c1a15b5f65f40e91b65da23b8224dad41349894535a97f63a52e462196/typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74", size = 75203, upload-time = "2025-06-18T09:56:07.624Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/4b/9a77dc721aa0b7f74440a42e4ef6f9a4fae7324e17f64f88b96f4c25cc05/typeguard-4.4.2-py3-none-any.whl", hash = "sha256:77a78f11f09777aeae7fa08585f33b5f4ef0e7335af40005b0c422ed398ff48c", size = 35801 }, + { url = "https://files.pythonhosted.org/packages/1b/a9/e3aee762739c1d7528da1c3e06d518503f8b6c439c35549b53735ba52ead/typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e", size = 34874, upload-time = "2025-06-18T09:56:05.999Z" }, +] + +[[package]] +name = "types-protobuf" +version = "6.32.1.20251210" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c2/59/c743a842911887cd96d56aa8936522b0cd5f7a7f228c96e81b59fced45be/types_protobuf-6.32.1.20251210.tar.gz", hash = "sha256:c698bb3f020274b1a2798ae09dc773728ce3f75209a35187bd11916ebfde6763", size = 63900, upload-time = "2025-12-10T03:14:25.451Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/43/58e75bac4219cbafee83179505ff44cae3153ec279be0e30583a73b8f108/types_protobuf-6.32.1.20251210-py3-none-any.whl", hash = "sha256:2641f78f3696822a048cfb8d0ff42ccd85c25f12f871fbebe86da63793692140", size = 77921, upload-time = "2025-12-10T03:14:24.477Z" }, +] + +[[package]] +name = "types-requests" +version = "2.32.4.20260107" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/f3/a0663907082280664d745929205a89d41dffb29e89a50f753af7d57d0a96/types_requests-2.32.4.20260107.tar.gz", hash = "sha256:018a11ac158f801bfa84857ddec1650750e393df8a004a8a9ae2a9bec6fcb24f", size = 23165, upload-time = "2026-01-07T03:20:54.091Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/12/709ea261f2bf91ef0a26a9eed20f2623227a8ed85610c1e54c5805692ecb/types_requests-2.32.4.20260107-py3-none-any.whl", hash = "sha256:b703fe72f8ce5b31ef031264fe9395cac8f46a04661a79f7ed31a80fb308730d", size = 20676, upload-time = "2026-01-07T03:20:52.929Z" }, ] [[package]] name = "typing-extensions" -version = "4.13.2" +version = "4.14.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, + { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, ] [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726 } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552 }, + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] [[package]] name = "uv-dynamic-versioning" -version = "0.8.2" +version = "0.13.0" source = { 
registry = "https://pypi.org/simple" } dependencies = [ { name = "dunamai" }, { name = "hatchling" }, { name = "jinja2" }, - { name = "pydantic" }, { name = "tomlkit" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/9e/1cf1ddf02e5459076b6fe0e90e1315df461b94c0db6c09b07e5730a0e0fb/uv_dynamic_versioning-0.8.2.tar.gz", hash = "sha256:a9c228a46f5752d99cfead1ed83b40628385cbfb537179488d280853c786bf82", size = 41559 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/55/a6cffd78511faebf208d4ba1f119d489680668f8d36114564c6f499054b9/uv_dynamic_versioning-0.8.2-py3-none-any.whl", hash = "sha256:400ade6b4a3fc02895c3d24dd0214171e4d60106def343b39ad43143a2615e8c", size = 8851 }, -] - -[[package]] -name = "wrapt" -version = "1.17.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/d1/1daec934997e8b160040c78d7b31789f19b122110a75eca3d4e8da0049e1/wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984", size = 53307 }, - { url = "https://files.pythonhosted.org/packages/1b/7b/13369d42651b809389c1a7153baa01d9700430576c81a2f5c5e460df0ed9/wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22", size = 38486 }, - { url = "https://files.pythonhosted.org/packages/62/bf/e0105016f907c30b4bd9e377867c48c34dc9c6c0c104556c9c9126bd89ed/wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7", size = 38777 }, - { url = 
"https://files.pythonhosted.org/packages/27/70/0f6e0679845cbf8b165e027d43402a55494779295c4b08414097b258ac87/wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c", size = 83314 }, - { url = "https://files.pythonhosted.org/packages/0f/77/0576d841bf84af8579124a93d216f55d6f74374e4445264cb378a6ed33eb/wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72", size = 74947 }, - { url = "https://files.pythonhosted.org/packages/90/ec/00759565518f268ed707dcc40f7eeec38637d46b098a1f5143bff488fe97/wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061", size = 82778 }, - { url = "https://files.pythonhosted.org/packages/f8/5a/7cffd26b1c607b0b0c8a9ca9d75757ad7620c9c0a9b4a25d3f8a1480fafc/wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2", size = 81716 }, - { url = "https://files.pythonhosted.org/packages/7e/09/dccf68fa98e862df7e6a60a61d43d644b7d095a5fc36dbb591bbd4a1c7b2/wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c", size = 74548 }, - { url = "https://files.pythonhosted.org/packages/b7/8e/067021fa3c8814952c5e228d916963c1115b983e21393289de15128e867e/wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62", size = 81334 }, - { url = "https://files.pythonhosted.org/packages/4b/0d/9d4b5219ae4393f718699ca1c05f5ebc0c40d076f7e65fd48f5f693294fb/wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563", size = 36427 }, - { url = 
"https://files.pythonhosted.org/packages/72/6a/c5a83e8f61aec1e1aeef939807602fb880e5872371e95df2137142f5c58e/wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f", size = 38774 }, - { url = "https://files.pythonhosted.org/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308 }, - { url = "https://files.pythonhosted.org/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488 }, - { url = "https://files.pythonhosted.org/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776 }, - { url = "https://files.pythonhosted.org/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776 }, - { url = "https://files.pythonhosted.org/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420 }, - { url = "https://files.pythonhosted.org/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199 }, - { url = 
"https://files.pythonhosted.org/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307 }, - { url = "https://files.pythonhosted.org/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025 }, - { url = "https://files.pythonhosted.org/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879 }, - { url = "https://files.pythonhosted.org/packages/8f/fa/9fb6e594f2ce03ef03eddbdb5f4f90acb1452221a5351116c7c4708ac865/wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317", size = 36419 }, - { url = "https://files.pythonhosted.org/packages/47/f8/fb1773491a253cbc123c5d5dc15c86041f746ed30416535f2a8df1f4a392/wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3", size = 38773 }, - { url = "https://files.pythonhosted.org/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799 }, - { url = "https://files.pythonhosted.org/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821 }, - { url = 
"https://files.pythonhosted.org/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919 }, - { url = "https://files.pythonhosted.org/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721 }, - { url = "https://files.pythonhosted.org/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899 }, - { url = "https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222 }, - { url = "https://files.pythonhosted.org/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707 }, - { url = "https://files.pythonhosted.org/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685 }, - { url = "https://files.pythonhosted.org/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567 }, 
- { url = "https://files.pythonhosted.org/packages/17/27/4fe749a54e7fae6e7146f1c7d914d28ef599dacd4416566c055564080fe2/wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9", size = 36672 }, - { url = "https://files.pythonhosted.org/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991", size = 38865 }, - { url = "https://files.pythonhosted.org/packages/ce/b9/0ffd557a92f3b11d4c5d5e0c5e4ad057bd9eb8586615cdaf901409920b14/wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125", size = 53800 }, - { url = "https://files.pythonhosted.org/packages/c0/ef/8be90a0b7e73c32e550c73cfb2fa09db62234227ece47b0e80a05073b375/wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998", size = 38824 }, - { url = "https://files.pythonhosted.org/packages/36/89/0aae34c10fe524cce30fe5fc433210376bce94cf74d05b0d68344c8ba46e/wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5", size = 38920 }, - { url = "https://files.pythonhosted.org/packages/3b/24/11c4510de906d77e0cfb5197f1b1445d4fec42c9a39ea853d482698ac681/wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8", size = 88690 }, - { url = "https://files.pythonhosted.org/packages/71/d7/cfcf842291267bf455b3e266c0c29dcb675b5540ee8b50ba1699abf3af45/wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6", size = 80861 }, - { url = 
"https://files.pythonhosted.org/packages/d5/66/5d973e9f3e7370fd686fb47a9af3319418ed925c27d72ce16b791231576d/wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc", size = 89174 }, - { url = "https://files.pythonhosted.org/packages/a7/d3/8e17bb70f6ae25dabc1aaf990f86824e4fd98ee9cadf197054e068500d27/wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2", size = 86721 }, - { url = "https://files.pythonhosted.org/packages/6f/54/f170dfb278fe1c30d0ff864513cff526d624ab8de3254b20abb9cffedc24/wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b", size = 79763 }, - { url = "https://files.pythonhosted.org/packages/4a/98/de07243751f1c4a9b15c76019250210dd3486ce098c3d80d5f729cba029c/wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504", size = 87585 }, - { url = "https://files.pythonhosted.org/packages/f9/f0/13925f4bd6548013038cdeb11ee2cbd4e37c30f8bfd5db9e5a2a370d6e20/wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a", size = 36676 }, - { url = "https://files.pythonhosted.org/packages/bf/ae/743f16ef8c2e3628df3ddfd652b7d4c555d12c84b53f3d8218498f4ade9b/wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845", size = 38871 }, - { url = "https://files.pythonhosted.org/packages/3d/bc/30f903f891a82d402ffb5fda27ec1d621cc97cb74c16fea0b6141f1d4e87/wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192", size = 56312 }, - { url = 
"https://files.pythonhosted.org/packages/8a/04/c97273eb491b5f1c918857cd26f314b74fc9b29224521f5b83f872253725/wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b", size = 40062 }, - { url = "https://files.pythonhosted.org/packages/4e/ca/3b7afa1eae3a9e7fefe499db9b96813f41828b9fdb016ee836c4c379dadb/wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0", size = 40155 }, - { url = "https://files.pythonhosted.org/packages/89/be/7c1baed43290775cb9030c774bc53c860db140397047cc49aedaf0a15477/wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306", size = 113471 }, - { url = "https://files.pythonhosted.org/packages/32/98/4ed894cf012b6d6aae5f5cc974006bdeb92f0241775addad3f8cd6ab71c8/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb", size = 101208 }, - { url = "https://files.pythonhosted.org/packages/ea/fd/0c30f2301ca94e655e5e057012e83284ce8c545df7661a78d8bfca2fac7a/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681", size = 109339 }, - { url = "https://files.pythonhosted.org/packages/75/56/05d000de894c4cfcb84bcd6b1df6214297b8089a7bd324c21a4765e49b14/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6", size = 110232 }, - { url = "https://files.pythonhosted.org/packages/53/f8/c3f6b2cf9b9277fb0813418e1503e68414cd036b3b099c823379c9575e6d/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6", size 
= 100476 }, - { url = "https://files.pythonhosted.org/packages/a7/b1/0bb11e29aa5139d90b770ebbfa167267b1fc548d2302c30c8f7572851738/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f", size = 106377 }, - { url = "https://files.pythonhosted.org/packages/6a/e1/0122853035b40b3f333bbb25f1939fc1045e21dd518f7f0922b60c156f7c/wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555", size = 37986 }, - { url = "https://files.pythonhosted.org/packages/09/5e/1655cf481e079c1f22d0cabdd4e51733679932718dc23bf2db175f329b76/wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c", size = 40750 }, - { url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594 }, +sdist = { url = "https://files.pythonhosted.org/packages/24/b7/46e3106071b85016237f6de589e99f614565d10a16af17b374d003272076/uv_dynamic_versioning-0.13.0.tar.gz", hash = "sha256:3220cbf10987d862d78e9931957782a274fa438d33efb1fa26b8155353749e06", size = 38797, upload-time = "2026-01-19T09:45:33.366Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/4f/15d9ec8aaed4a78aca1b8f0368f0cdd3cca8a04a81edbf03bc9e12c1a188/uv_dynamic_versioning-0.13.0-py3-none-any.whl", hash = "sha256:86d37b89fa2b6836a515301f74ea2d56a1bc59a46a74d66a24c869d1fc8f7585", size = 11480, upload-time = "2026-01-19T09:45:32.002Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.36.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, ] [[package]] name = "zipp" -version = "3.21.0" +version = "3.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = 
"sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630 }, + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, ]