diff --git a/.env-docker b/.env-docker new file mode 100644 index 0000000..0edf277 --- /dev/null +++ b/.env-docker @@ -0,0 +1,6 @@ +OPENTDF_PLATFORM_HOST="localhost" +OPENTDF_PLATFORM_PORT=8080 +OPENTDF_PLATFORM_URL="http://localhost:8080" + +KEYCLOAK_URL="http://localhost:8888/auth" +OIDC_OP_TOKEN_ENDPOINT="http://localhost:8888/auth/realms/opentdf/protocol/openid-connect/token" diff --git a/.github/check_entitlements.sh b/.github/check_entitlements.sh new file mode 100755 index 0000000..ea98f1a --- /dev/null +++ b/.github/check_entitlements.sh @@ -0,0 +1,56 @@ +#!/bin/bash + +# Derive additional environment variables +TOKEN_URL="${OIDC_OP_TOKEN_ENDPOINT}" +OTDF_HOST_AND_PORT="${OPENTDF_PLATFORM_HOST}" +OTDF_CLIENT="${OPENTDF_CLIENT_ID}" +OTDF_CLIENT_SECRET="${OPENTDF_CLIENT_SECRET}" + +echo "🔧 Environment Configuration:" +echo " TOKEN_URL: ${TOKEN_URL}" +echo " OTDF_HOST_AND_PORT: ${OTDF_HOST_AND_PORT}" +echo " OTDF_CLIENT: ${OTDF_CLIENT}" +echo " OTDF_CLIENT_SECRET: ${OTDF_CLIENT_SECRET}" +echo "" + +get_token() { + curl -k --location "$TOKEN_URL" \ + --header "X-VirtruPubKey;" \ + --header "Content-Type: application/x-www-form-urlencoded" \ + --data-urlencode "grant_type=client_credentials" \ + --data-urlencode "client_id=$OTDF_CLIENT" \ + --data-urlencode "client_secret=$OTDF_CLIENT_SECRET" +} + +echo "🔐 Getting access token..." +BEARER=$( get_token | jq -r '.access_token' ) +# NOTE: It's always okay to print this token, because it will +# only be valid / available in dummy / dev scenarios +[[ "${DEBUG:-}" == "1" ]] && echo "Got Access Token: ${BEARER}" +echo "" + +# Array of usernames to check +USERNAMES=("opentdf" "sample-user" "sample-user-1" "cli-client" "opentdf-sdk") + +for USERNAME in "${USERNAMES[@]}"; do + echo "👤 Fetching entitlements for username: ${USERNAME}" + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + + grpcurl -plaintext \ + -H "authorization: Bearer $BEARER" \ + -d "{ + \"entities\": [ + { + \"userName\": \"$USERNAME\" + } + ] + }" \ + "$OTDF_HOST_AND_PORT" \ + authorization.AuthorizationService/GetEntitlements + + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + echo "✅ Entitlements retrieval complete for ${USERNAME}!" + echo "" +done + +echo "🎉 All entitlement checks completed!" diff --git a/.github/start_opentdf_docker.sh b/.github/start_opentdf_docker.sh new file mode 100755 index 0000000..09be1cc --- /dev/null +++ b/.github/start_opentdf_docker.sh @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if ! [ -d platform ]; then + git clone https://github.com/opentdf/platform.git +fi +cd platform +git checkout 3360befcb3e6e9791d7bfd2e89128aee0e7d2818 # Branch 'DSPX-1539-keytoolnomore' + +yq -i '.realms[0].clients[0].client.directAccessGrantsEnabled = true | .realms[0].clients[0].client.serviceAccountsEnabled = true' service/cmd/keycloak_data.yaml + +yq -i '.realms[0].clients[1].client.directAccessGrantsEnabled = true | .realms[0].clients[1].client.serviceAccountsEnabled = true' service/cmd/keycloak_data.yaml + +yq -i '.realms[0].clients[4].client.directAccessGrantsEnabled = true | .realms[0].clients[4].client.serviceAccountsEnabled = true' service/cmd/keycloak_data.yaml + + +if ! 
[ -d ./keys ]; then + go mod download + + go mod verify + + .github/scripts/init-temp-keys.sh + cp opentdf-example.yaml opentdf.yaml + + # Edit 'opentdf.yaml' for our use case + yq -i 'del(.db) | .services.entityresolution.url = "http://localhost:8888/auth" | .server.auth.issuer = "http://localhost:8888/auth/realms/opentdf"' opentdf.yaml + # The above expression can also be written as 3 separate commands: + # yq -i 'del(.db)' opentdf.yaml + # yq -i '.services.entityresolution.url = "http://localhost:8888/auth"' opentdf.yaml + # yq -i '.server.auth.issuer = "http://localhost:8888/auth/realms/opentdf"' opentdf.yaml + + yq -i ' +.server.cryptoProvider = { + "type": "standard", + "standard": { + "keys": [ + { + "kid": "r1", + "alg": "rsa:2048", + "private": "kas-private.pem", + "cert": "kas-cert.pem" + }, + { + "kid": "e1", + "alg": "ec:secp256r1", + "private": "kas-ec-private.pem", + "cert": "kas-ec-cert.pem" + } + ] + } +} +' opentdf.yaml + chmod -R 700 ./keys +fi + +docker compose up -d --wait --wait-timeout 360 + +go run ./service provision keycloak + +go run ./service provision fixtures diff --git a/.github/workflows/build-golang-macos.yaml b/.github/workflows/build-golang-macos.yaml deleted file mode 100644 index 421d685..0000000 --- a/.github/workflows/build-golang-macos.yaml +++ /dev/null @@ -1,50 +0,0 @@ ---- -name: macOS build - -on: [push] - -defaults: - run: - shell: bash - -jobs: - call-lint: - uses: ./.github/workflows/lint-on-macos.yaml - - build: - runs-on: macos-13 - permissions: - contents: write - - strategy: - fail-fast: true - matrix: - go-version: [1.24.x] - python3_version: [ "3.10", "3.11", "3.12" ] - - steps: - - uses: actions/checkout@v4 - - - name: Setup Go - uses: actions/setup-go@v4 - with: - go-version: ${{ matrix.go-version }} - cache-dependency-path: go.sum - - name: Install dependencies - run: go get . - - name: Test with Go - run: go test -timeout 40s -run ^TestHello$ gotdf_python -count=1 # go test - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python3_version }} - - # FIXME: Add more caching - - name: Configure gopy / dependencies, and build wheel - run: | - ./build-scripts/ci-build.sh - - # - uses: ./.github/workflows/platform-integration-test.yaml - # with: - # wheel: dist/otdf_python-0.2.20-py3-none-any.whl diff --git a/.github/workflows/build-golang-ubuntu.yaml b/.github/workflows/build-golang-ubuntu.yaml deleted file mode 100644 index d65826f..0000000 --- a/.github/workflows/build-golang-ubuntu.yaml +++ /dev/null @@ -1,65 +0,0 @@ ---- -name: Ubuntu build - -on: [push] - -jobs: - call-lint: - uses: ./.github/workflows/lint-on-ubuntu.yaml - - build: - runs-on: ubuntu-22.04 - permissions: - contents: write - - strategy: - fail-fast: true - matrix: - go-version: [1.24.x] - python3_version: [ "3.10", "3.11", "3.12" ] - - steps: - - uses: actions/checkout@v4 - - - name: Setup Go - uses: actions/setup-go@v4 - with: - go-version: ${{ matrix.go-version }} - cache-dependency-path: go.sum - - name: Install dependencies - run: go get . 
- - name: Test with Go - run: go test -timeout 40s -run ^TestHello$ gotdf_python -count=1 # go test - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python3_version }} - - # FIXME: Add more caching - - name: Configure gopy / dependencies, and build wheel - run: | - ./build-scripts/ci-build.sh - - - uses: actions/cache/restore@v4 - with: - path: dist/otdf_python-0.2.20-py3-none-any.whl - key: ${{ runner.os }}${{ matrix.python3_version }}-data-${{ github.sha }} - - - uses: actions/cache/save@v4 - with: - path: dist/otdf_python-0.2.20-py3-none-any.whl - key: ${{ runner.os }}${{ matrix.python3_version }}-data-${{ github.sha }} - restore-keys: | - ${{ runner.os }}${{ matrix.python3_version }}-data- - - integration-test: - strategy: - fail-fast: true - matrix: - python3_version: [ "3.10", "3.11", "3.12" ] - needs: build - uses: ./.github/workflows/platform-integration-test.yaml - with: - wheel: dist/otdf_python-0.2.20-py3-none-any.whl - python_version: ${{ matrix.python3_version }} diff --git a/.github/workflows/build-python.yaml b/.github/workflows/build-python.yaml index 9f28d46..28a4a8e 100644 --- a/.github/workflows/build-python.yaml +++ b/.github/workflows/build-python.yaml @@ -1,44 +1,42 @@ ---- -name: Build Python package(s) - +# Build otdf-python wheel using uv and output the wheel path for downstream workflows +name: "Build Python Wheel" on: - push: - branches: - - disabled + push: + branches: + - chore/rewrite + pull_request: jobs: - build: - - runs-on: ubuntu-22.04 - strategy: - matrix: - go-version: [1.24.x] + build: + runs-on: ubuntu-22.04 + outputs: + wheel: ${{ steps.find_wheel.outputs.wheel_path }} + steps: + - name: Checkout this repo + uses: actions/checkout@v4 - steps: - - uses: actions/checkout@v4 - # - name: Setup Go - # uses: actions/setup-go@v4 - # with: - # go-version: ${{ matrix.go-version }} - # cache-dependency-path: go.sum - # - name: Install dependencies - # run: go get . 
- # - name: Test with Go - # run: go test -timeout 40s -run ^TestHello$ gotdf_python -count=1 # go test + - name: Set up uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.12' - - name: Install dependencies - run: | - pip install poetry - - name: Invoke pylint with all dependencies - run: | - # Since we don't have our wheel build / install configured yet we use '--no-root' - poetry install --no-root + - name: Build otdf-python wheel using uv + run: | + uv sync --frozen + uv build + shell: bash - # poetry install + - name: Find built wheel + id: find_wheel + run: | + wheel_path=$(ls dist/*.whl | head -n1) + echo "wheel_path=$wheel_path" >> $GITHUB_OUTPUT + shell: bash - # Bring this back later - # poetry run pytest tests/ + # - name: Upload wheel as artifact + # uses: actions/upload-artifact@v4 + # with: + # name: python-wheel + # path: dist/*.whl + # overwrite: true diff --git a/.github/workflows/lint-on-macos.yaml b/.github/workflows/lint-on-macos.yaml deleted file mode 100644 index b7ccd8e..0000000 --- a/.github/workflows/lint-on-macos.yaml +++ /dev/null @@ -1,34 +0,0 @@ ---- -name: macOS -- Lint - -on: - workflow_call: - -jobs: - - lint-macos: - runs-on: macos-latest - - strategy: - fail-fast: true - matrix: - go-version: [1.24.x] - - steps: - - uses: actions/checkout@v4 - - name: Setup Go - uses: actions/setup-go@v4 - with: - go-version: ${{ matrix.go-version }} - cache-dependency-path: go.sum - - name: Install dependencies - run: go get . - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.12' - - name: Install pre-commit dependencies - run: | - go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest - - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/lint-on-ubuntu.yaml b/.github/workflows/lint-on-ubuntu.yaml deleted file mode 100644 index 90b74b8..0000000 --- a/.github/workflows/lint-on-ubuntu.yaml +++ /dev/null @@ -1,34 +0,0 @@ ---- -name: Ubuntu -- Lint - -on: - workflow_call: - -jobs: - - lint-ubuntu: - runs-on: ubuntu-22.04 - - strategy: - fail-fast: true - matrix: - go-version: [1.24.x] - - steps: - - uses: actions/checkout@v4 - - name: Setup Go - uses: actions/setup-go@v4 - with: - go-version: ${{ matrix.go-version }} - cache-dependency-path: go.sum - - name: Install dependencies - run: go get . - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.12' - - name: Install pre-commit dependencies - run: | - go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest - - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/platform-integration-test.yaml b/.github/workflows/platform-integration-test.yaml index 809a50f..f4d8420 100644 --- a/.github/workflows/platform-integration-test.yaml +++ b/.github/workflows/platform-integration-test.yaml @@ -3,17 +3,15 @@ # # Except, that this is a "Composite Action", and specifies 'shell: bash' for # each 'run:' step. 
-name: "Platform Integration testing" +name: "NEW: Platform Integration testing" on: workflow_call: inputs: wheel: - description: The Python wheel to test required: true type: string python_version: - description: The wheel's Python version (like "3.12" or "3.13") required: true type: string @@ -27,18 +25,6 @@ jobs: - name: Checkout this repo uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - - uses: actions/cache/restore@v4 - with: - path: dist/otdf_python-0.2.20-py3-none-any.whl - key: ${{ runner.os }}${{ inputs.python_version }}-data-${{ github.sha }} - - - name: Prove that the input file is available - shell: bash - run: | - ls -la - ls -la "${{ inputs.wheel }}" - du -sh "${{ inputs.wheel }}" - # - uses: bufbuild/buf-setup-action@382440cdb8ec7bc25a68d7b4711163d95f7cc3aa # with: # github_token: ${{ secrets.GITHUB_TOKEN }} @@ -69,7 +55,39 @@ jobs: shell: bash run: | .github/scripts/init-temp-keys.sh - cp opentdf-dev.yaml opentdf.yaml + # Edit Keycloak sample file for our use case + yq -i '.realms[0].clients[0].client.directAccessGrantsEnabled = true | .realms[0].clients[0].client.serviceAccountsEnabled = true' service/cmd/keycloak_data.yaml + yq -i '.realms[0].clients[1].client.directAccessGrantsEnabled = true | .realms[0].clients[1].client.serviceAccountsEnabled = true' service/cmd/keycloak_data.yaml + yq -i '.realms[0].clients[4].client.directAccessGrantsEnabled = true | .realms[0].clients[4].client.serviceAccountsEnabled = true' service/cmd/keycloak_data.yaml + + cp opentdf-example.yaml opentdf.yaml + # Edit 'opentdf.yaml' for our use case + yq -i 'del(.db) | .services.entityresolution.url = "http://localhost:8888/auth" | .server.auth.issuer = "http://localhost:8888/auth/realms/opentdf"' opentdf.yaml + # The above expression can also be written as 3 separate commands: + # yq -i 'del(.db)' opentdf.yaml + # yq -i '.services.entityresolution.url = "http://localhost:8888/auth"' opentdf.yaml + # yq -i '.server.auth.issuer = "http://localhost:8888/auth/realms/opentdf"' opentdf.yaml + yq -i ' + .server.cryptoProvider = { + "type": "standard", + "standard": { + "keys": [ + { + "kid": "r1", + "alg": "rsa:2048", + "private": "kas-private.pem", + "cert": "kas-cert.pem" + }, + { + "kid": "e1", + "alg": "ec:secp256r1", + "private": "kas-ec-private.pem", + "cert": "kas-ec-cert.pem" + } + ] + } + } + ' opentdf.yaml sudo chmod -R 777 ./keys working-directory: platform # - name: Trust the locally issued cert @@ -113,31 +131,65 @@ jobs: grpcurl -plaintext localhost:8080 list && \ grpcurl -plaintext localhost:8080 kas.AccessService/PublicKey - - name: Set up Python - uses: actions/setup-python@v4 + - name: Install otdfctl + run: go install github.com/opentdf/otdfctl@latest + shell: bash + + - name: Create creds.json for otdfctl + run: echo -n '{"clientId":"opentdf-sdk","clientSecret":"secret"}' > creds.json + shell: bash + + - name: Create a plaintext file + run: echo "integration test secret" > secret.txt + shell: bash + + - name: Encrypt file with otdfctl (no attributes) + run: | + export PATH=$PATH:$(go env GOPATH)/bin + otdfctl encrypt -o secret.txt.tdf --host http://localhost:8080 --tls-no-verify --with-client-creds-file creds.json secret.txt + shell: bash + + - name: Set up uv + uses: astral-sh/setup-uv@v6 with: - python-version: ${{ inputs.python_version }} + enable-cache: true + cache-dependency-glob: "uv.lock" - - name: Validate the Python SDK + - name: Run all tests, minus integration tests env: - OPENTDF_CLIENT_ID: "opentdf-sdk" + OPENTDF_CLIENT_ID: "opentdf" 
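+          # NOTE: dev-only client credentials; like the token printed by check_entitlements.sh, these are only valid against the local / dummy platform stack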
          OPENTDF_CLIENT_SECRET: "secret"
-          OPENTDF_HOSTNAME: "http://localhost:8080"
+          OPENTDF_HOSTNAME: "localhost:8080"
           OIDC_TOKEN_ENDPOINT: "http://localhost:8888/auth/realms/opentdf/protocol/openid-connect/token"
           OPENTDF_KAS_URL: "http://localhost:8080/kas"
           INSECURE_SKIP_VERIFY: "TRUE"
+          TEST_OPENTDF_ATTRIBUTE_1: "https://example.net/attr/attr1/value/value1"
+          TEST_OPENTDF_ATTRIBUTE_2: "https://example.com/attr/attr1/value/value1"
         run: |
-          mkdir validation
-          wheel="$(basename ${{ inputs.wheel }} )"
-          cp -v "${{ inputs.wheel }}" validation/
-          cp -v validate_otdf_python.py validation/
-          cd validation
-          python -m venv .venv
-          source .venv/bin/activate
-          pip install ./"$wheel"
-          python validate_otdf_python.py
+          uv sync
+          # Skip the tests marked "integration"
+          uv run pytest -m "not integration" --tb=short -vv tests
+        shell: bash
+      - name: Run integration tests
+        env:
+          OPENTDF_CLIENT_ID: "opentdf"
+          OPENTDF_CLIENT_SECRET: "secret"
+          OPENTDF_PLATFORM_HOST: "localhost:8080"
+          OPENTDF_PLATFORM_URL: "http://localhost:8080"
+          OIDC_OP_TOKEN_ENDPOINT: "http://localhost:8888/auth/realms/opentdf/protocol/openid-connect/token"
+          OPENTDF_KAS_URL: "http://localhost:8080/kas"
+          INSECURE_SKIP_VERIFY: "TRUE"
+          TEST_OPENTDF_ATTRIBUTE_1: "https://example.net/attr/attr1/value/value1"
+          TEST_OPENTDF_ATTRIBUTE_2: "https://example.com/attr/attr1/value/value1"
+        run: |
+          # Run check_entitlements.sh
+          ./.github/check_entitlements.sh
+          uv sync
+          # Run only the tests marked "integration"
+          uv run pytest -m "integration" --tb=short -vv tests
+        shell: bash

 # platform-xtest:
 #   permissions:
diff --git a/.github/workflows/publish-test.yaml b/.github/workflows/publish-test.yaml
deleted file mode 100644
index 7762ab8..0000000
--- a/.github/workflows/publish-test.yaml
+++ /dev/null
@@ -1,256 +0,0 @@
-name: TestPyPIBuild
-
-# Based on:
-# - https://github.com/tuananh/py-event-ruler/blob/0129d15e17d0023863a4d0e0e25e5256988b5c5b/.github/workflows/publish.yml
-# - https://github.com/adhadse/excelFormExtractor/blob/1f82a97808b3cf3cdb25dcefdc1c6a1c74c5ad45/.github/workflows/build.yaml
-
-
-on:
-  push:
-    branches:
-      - develop
-  workflow_dispatch:
-
-jobs:
-  build_macos:
-    # if: false
-    name: MacOS ${{ matrix.os_version }} Python 3${{ matrix.python3_version }} ${{ matrix.arch_cibw_go[0] }}
-    strategy:
-      fail-fast: false
-      matrix:
-        os_version: [ 13 ]
-        python3_version: [ 10, 11, 12 ]
-        arch_cibw_go:
-          - [ "x86_64", "amd64" ]
-          - [ "arm64", "arm64" ]
-    runs-on: macos-${{ matrix.os_version }}
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: set up Go
-        uses: actions/setup-go@v3
-        with:
-          go-version: "1.24.x"
-
-      - name: Setup project files
-        run: |
-          rm -rf poetry.lock pyproject.toml
-          cp setup_ci.py setup.py
-
-      - name: install gopy
-        run: go install github.com/go-python/gopy@v0.4.10
-
-      - name: install goimports
-        run: go install golang.org/x/tools/cmd/goimports@latest
-
-      - name: install python 3.${{ matrix.python3_version }}
-        run: |
-          brew install --force --overwrite python@3.${{ matrix.python3_version }}
-          ln -s /usr/local/opt/python@3.${{ matrix.python3_version }}/bin/python3.${{ matrix.python3_version }} /usr/local/bin/python_for_build
-          /usr/local/bin/python_for_build --version
-
-      - name: install cibuildwheel and pybindgen
-        run: /usr/local/bin/python_for_build -m pip install --break-system-packages cibuildwheel==2.21.3 pybindgen
-
-      - name: build wheels
-        run: /usr/local/bin/python_for_build -m cibuildwheel --output-dir wheelhouse
-        env:
-          CGO_ENABLED: 1 # build fails for arm if unset
-          CIBW_ARCHS: ${{ matrix.arch_cibw_go[0] }}
-
CIBW_REPAIR_WHEEL_COMMAND_MACOS: "" - PYTHON_BINARY_PATH: /usr/local/bin/python_for_build - CIBW_BUILD: "cp3${{ matrix.python3_version }}-*" - CIBW_SKIP: "cp36-* cp37-* cp38-* cp39-* cp313-* *-musllinux_x86_64" - CIBW_ENVIRONMENT: > - PATH=$PATH:/usr/local/go/bin - GOARCH=${{ matrix.arch_cibw_go[1] }} - - - # FIXME: Test before uploading - # - name: Test Python wheel - # run: | - # # Test wheel installation - # /usr/local/bin/python_for_build -m pip install wheelhouse/*.whl - - # # Test wheel functionality - # /usr/local/bin/python_for_build validate_otdf_python.py - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: wheels-macos${{ matrix.os_version }}-py3${{ matrix.python3_version }}-${{ matrix.arch_cibw_go[0] }} - path: ./wheelhouse/*.whl - - build_linux_x86_64: - # if: false - name: Linux Python x86_64 - runs-on: ubuntu-22.04 - - steps: - - uses: actions/checkout@v4 - - - name: Setup project files - run: | - rm -rf poetry.lock pyproject.toml - cp setup_ci.py setup.py - - - name: Build wheels - uses: pypa/cibuildwheel@v2.21.3 - env: - CIBW_BUILD: "cp3*_x86_64 cp3*_aarch64" - CIBW_SKIP: "cp36-* cp37-* cp38-* cp39-* cp313-* *-musllinux_x86_64" - CIBW_ARCHS: "native" - CIBW_ENVIRONMENT: > - PATH=$PATH:/usr/local/go/bin - CIBW_BEFORE_ALL_LINUX: | - curl -o go.tar.gz https://dl.google.com/go/go1.24.5.linux-amd64.tar.gz - tar -C /usr/local -xzf go.tar.gz - go install github.com/go-python/gopy@v0.4.10 - go install golang.org/x/tools/cmd/goimports@latest - - - # FIXME: Test before uploading - # - name: Test Python wheel - # run: | - # # Test wheel installation - # python3 -m pip install wheelhouse/*.whl - - # # Test wheel functionality - # python3 validate_otdf_python.py - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: wheels-linux-amd64 - path: ./wheelhouse/*.whl - - build_linux_arm: - name: Linux Python ARM - runs-on: ubuntu-22.04 - timeout-minutes: 60 - - steps: - - uses: actions/checkout@v4 - - - name: Setup project files - run: | - rm -rf poetry.lock pyproject.toml - cp setup_ci.py setup.py - - # QEMU is used by cibuildwheel to cross-compile wheels - # https://cibuildwheel.pypa.io/en/stable/faq/#emulation - - name: Set up QEMU - if: runner.os == 'Linux' - uses: docker/setup-qemu-action@v3 - with: - platforms: all - - - name: Build wheels - uses: pypa/cibuildwheel@v2.21.3 - env: - CIBW_BUILD: "cp3*_aarch64" - CIBW_SKIP: "cp36-* cp37-* cp38-* cp39-* cp313-* *-musllinux_x86_64" - CIBW_ARCHS: "aarch64" - CIBW_ENVIRONMENT: > - PATH=$PATH:/usr/local/go/bin - CIBW_BEFORE_ALL_LINUX: | - curl -o go.tar.gz https://dl.google.com/go/go1.24.5.linux-arm64.tar.gz - tar -C /usr/local -xzf go.tar.gz - go install github.com/go-python/gopy@v0.4.10 - go install golang.org/x/tools/cmd/goimports@latest - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: wheels-linux-arm - path: ./wheelhouse/*.whl - -# build_windows: -# if: false # not working -# name: Windows 310,311 x86_64 -# runs-on: windows-2019 - -# steps: -# - uses: actions/checkout@v4 - -# - name: Setup project files -# run: | -# rm -rf poetry.lock pyproject.toml -# cp setup_ci.py setup.py - -# - name: set up Go -# uses: actions/setup-go@v3 -# with: -# go-version: "1.24.x" - -# - name: install gopy -# run: go install github.com/go-python/gopy@v0.4.10 - -# - name: install goimports -# run: go install golang.org/x/tools/cmd/goimports@latest - -# - name: Build wheels -# uses: pypa/cibuildwheel@v2.21.3 -# env: -# # CGO_ENABLED: 1 -# CIBW_BUILD: "cp3*" -# CIBW_SKIP: "cp36-* 
cp37-* cp38-* cp39-* cp313-* *-musllinux_x86_64" -# CIBW_ARCHS: "native" -# CIBW_ENVIRONMENT: > -# GOARCH=amd64 - -# - name: Upload artifacts -# uses: actions/upload-artifact@v4 -# with: -# name: wheels-windows-amd64 -# path: ./wheelhouse/*.whl - - - release: - permissions: - contents: write - # This permission is mandatory for PyPI's trusted publishing - id-token: write - needs: [build_macos, build_linux_x86_64, build_linux_arm] - runs-on: ubuntu-22.04 - # If branch is 'develop' - if: github.ref == 'refs/heads/develop' - - steps: - - uses: actions/checkout@v4 - - uses: actions/download-artifact@v4 - - - name: Collect all wheels - run: | - # ls -R - mkdir dist - for f in $(find . -type f -name '*.whl'); do mv ${f} dist; done; - ls -R dist - - - name: Store version - run: | - pip install poetry - - PROJECT_VESION=$(poetry version -s) - echo "PROJECT_VESION=$PROJECT_VESION" >> $GITHUB_ENV - - # Publish with "trusted publisher" mechanism: - # https://docs.pypi.org/trusted-publishers/ - # - # Requires GHA token permission (above in YAML) and PyPI magement: - # https://test.pypi.org/manage/project/otdf-python/settings/publishing/ - - name: Publish package distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - repository-url: https://test.pypi.org/legacy/ - packages-dir: dist/ - - - uses: ncipollo/release-action@v1 - with: - artifacts: | - README.md, - dist/*.whl - body: otdf_python version ${{ env.PROJECT_VESION }} - makeLatest: "false" - tag: "${{ env.PROJECT_VESION }}-dev-${{ github.sha }}" - # tag: v${{ env.PROJECT_VESION }} diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml deleted file mode 100644 index 716bd6f..0000000 --- a/.github/workflows/publish.yaml +++ /dev/null @@ -1,255 +0,0 @@ -name: PyPIBuild - -# Based on: -# - https://github.com/tuananh/py-event-ruler/blob/0129d15e17d0023863a4d0e0e25e5256988b5c5b/.github/workflows/publish.yml -# - https://github.com/adhadse/excelFormExtractor/blob/1f82a97808b3cf3cdb25dcefdc1c6a1c74c5ad45/.github/workflows/build.yaml - - -on: - push: - branches: - - main - workflow_dispatch: - -jobs: - build_macos: - # if: false - name: MacOS ${{ matrix.os_version }} Python 3${{ matrix.python3_version }} ${{ matrix.arch_cibw_go[0] }} - strategy: - fail-fast: false - matrix: - os_version: [ 13 ] - python3_version: [ 10, 11, 12 ] - arch_cibw_go: - - [ "x86_64", "amd64" ] - - [ "arm64", "arm64" ] - runs-on: macos-${{ matrix.os_version }} - steps: - - uses: actions/checkout@v4 - - - name: set up Go - uses: actions/setup-go@v3 - with: - go-version: "1.24.x" - - - name: Setup project files - run: | - rm -rf poetry.lock pyproject.toml - cp setup_ci.py setup.py - - - name: install gopy - run: go install github.com/go-python/gopy@v0.4.10 - - - name: install goimports - run: go install golang.org/x/tools/cmd/goimports@latest - - - name: install python 3.${{ matrix.python3_version }} - run: | - brew install --force --overwrite python@3.${{ matrix.python3_version }} - ln -s /usr/local/opt/python@3.${{ matrix.python3_version }}/bin/python3.${{ matrix.python3_version }} /usr/local/bin/python_for_build - /usr/local/bin/python_for_build --version - - - name: install cibuildwheel and pybindgen - run: /usr/local/bin/python_for_build -m pip install --break-system-packages cibuildwheel==2.21.3 pybindgen - - - name: build wheels - run: /usr/local/bin/python_for_build -m cibuildwheel --output-dir wheelhouse - env: - CGO_ENABLED: 1 # build fails for arm if unset - CIBW_ARCHS: ${{ matrix.arch_cibw_go[0] }} - 
CIBW_REPAIR_WHEEL_COMMAND_MACOS: "" - PYTHON_BINARY_PATH: /usr/local/bin/python_for_build - CIBW_BUILD: "cp3${{ matrix.python3_version }}-*" - CIBW_SKIP: "cp36-* cp37-* cp38-* cp39-* cp313-* *-musllinux_x86_64" - CIBW_ENVIRONMENT: > - PATH=$PATH:/usr/local/go/bin - GOARCH=${{ matrix.arch_cibw_go[1] }} - - - # FIXME: Test before uploading - # - name: Test Python wheel - # run: | - # # Test wheel installation - # /usr/local/bin/python_for_build -m pip install wheelhouse/*.whl - - # # Test wheel functionality - # /usr/local/bin/python_for_build validate_otdf_python.py - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: wheels-macos${{ matrix.os_version }}-py3${{ matrix.python3_version }}-${{ matrix.arch_cibw_go[0] }} - path: ./wheelhouse/*.whl - - build_linux_x86_64: - # if: false - name: Linux Python x86_64 - runs-on: ubuntu-22.04 - - steps: - - uses: actions/checkout@v4 - - - name: Setup project files - run: | - rm -rf poetry.lock pyproject.toml - cp setup_ci.py setup.py - - - name: Build wheels - uses: pypa/cibuildwheel@v2.21.3 - env: - CIBW_BUILD: "cp3*_x86_64 cp3*_aarch64" - CIBW_SKIP: "cp36-* cp37-* cp38-* cp39-* cp313-* *-musllinux_x86_64" - CIBW_ARCHS: "native" - CIBW_ENVIRONMENT: > - PATH=$PATH:/usr/local/go/bin - CIBW_BEFORE_ALL_LINUX: | - curl -o go.tar.gz https://dl.google.com/go/go1.24.5.linux-amd64.tar.gz - tar -C /usr/local -xzf go.tar.gz - go install github.com/go-python/gopy@v0.4.10 - go install golang.org/x/tools/cmd/goimports@latest - - - # FIXME: Test before uploading - # - name: Test Python wheel - # run: | - # # Test wheel installation - # python3 -m pip install wheelhouse/*.whl - - # # Test wheel functionality - # python3 validate_otdf_python.py - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: wheels-linux-amd64 - path: ./wheelhouse/*.whl - - build_linux_arm: - name: Linux Python ARM - runs-on: ubuntu-22.04 - timeout-minutes: 60 - - steps: - - uses: actions/checkout@v4 - - - name: Setup project files - run: | - rm -rf poetry.lock pyproject.toml - cp setup_ci.py setup.py - - # QEMU is used by cibuildwheel to cross-compile wheels - # https://cibuildwheel.pypa.io/en/stable/faq/#emulation - - name: Set up QEMU - if: runner.os == 'Linux' - uses: docker/setup-qemu-action@v3 - with: - platforms: all - - - name: Build wheels - uses: pypa/cibuildwheel@v2.21.3 - env: - CIBW_BUILD: "cp3*_aarch64" - CIBW_SKIP: "cp36-* cp37-* cp38-* cp39-* cp313-* *-musllinux_x86_64" - CIBW_ARCHS: "aarch64" - CIBW_ENVIRONMENT: > - PATH=$PATH:/usr/local/go/bin - CIBW_BEFORE_ALL_LINUX: | - curl -o go.tar.gz https://dl.google.com/go/go1.24.5.linux-arm64.tar.gz - tar -C /usr/local -xzf go.tar.gz - go install github.com/go-python/gopy@v0.4.10 - go install golang.org/x/tools/cmd/goimports@latest - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: wheels-linux-arm - path: ./wheelhouse/*.whl - -# build_windows: -# if: false # not working -# name: Windows 310,311 x86_64 -# runs-on: windows-2019 - -# steps: -# - uses: actions/checkout@v4 - -# - name: Setup project files -# run: | -# rm -rf poetry.lock pyproject.toml -# cp setup_ci.py setup.py - -# - name: set up Go -# uses: actions/setup-go@v3 -# with: -# go-version: "1.24.x" - -# - name: install gopy -# run: go install github.com/go-python/gopy@v0.4.10 - -# - name: install goimports -# run: go install golang.org/x/tools/cmd/goimports@latest - -# - name: Build wheels -# uses: pypa/cibuildwheel@v2.21.3 -# env: -# # CGO_ENABLED: 1 -# CIBW_BUILD: "cp3*" -# CIBW_SKIP: "cp36-* 
cp37-* cp38-* cp39-* cp313-* *-musllinux_x86_64" -# CIBW_ARCHS: "native" -# CIBW_ENVIRONMENT: > -# GOARCH=amd64 - -# - name: Upload artifacts -# uses: actions/upload-artifact@v4 -# with: -# name: wheels-windows-amd64 -# path: ./wheelhouse/*.whl - - - release: - permissions: - contents: write - # This permission is mandatory for PyPI's trusted publishing - id-token: write - needs: [build_macos, build_linux_x86_64, build_linux_arm] - runs-on: ubuntu-22.04 - # If branch is 'main' - if: github.ref == 'refs/heads/main' - - steps: - - uses: actions/checkout@v4 - - uses: actions/download-artifact@v4 - - - name: Collect all wheels - run: | - # ls -R - mkdir dist - for f in $(find . -type f -name '*.whl'); do mv ${f} dist; done; - ls -R dist - - - name: Store version - run: | - pip install poetry - - PROJECT_VESION=$(poetry version -s) - echo "PROJECT_VESION=$PROJECT_VESION" >> $GITHUB_ENV - - # Publish with "trusted publisher" mechanism: - # https://docs.pypi.org/trusted-publishers/ - # - # Requires GHA token permission (above in YAML) and PyPI magement: - # https://pypi.org/manage/project/otdf-python/settings/publishing/ - - name: Publish package distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - # repository-url: https://pypi.org/legacy/ - packages-dir: dist/ - - - uses: ncipollo/release-action@v1 - with: - artifacts: | - README.md, - dist/*.whl - body: otdf_python version ${{ env.PROJECT_VESION }} - # tag: "dev-${{ github.job }}-${{ env.PROJECT_VESION }}" - tag: v${{ env.PROJECT_VESION }} diff --git a/.github/workflows/release-please.yaml b/.github/workflows/release-please.yaml new file mode 100644 index 0000000..136d67e --- /dev/null +++ b/.github/workflows/release-please.yaml @@ -0,0 +1,139 @@ +name: Release Please + +on: + push: + branches: + - main + - develop + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + +jobs: + # Run full test suite before any release operations + test-suite: + uses: ./.github/workflows/test-suite.yaml + + release-please: + runs-on: ubuntu-latest + needs: test-suite + if: needs.test-suite.outputs.tests_passed == 'true' + outputs: + releases_created: ${{ steps.release-develop.outputs.releases_created || steps.release-main.outputs.releases_created }} + paths_released: ${{ steps.release-develop.outputs.paths_released || steps.release-main.outputs.paths_released }} + steps: + - uses: actions/checkout@v4 + + # Release-please for develop branch (creates alpha prereleases) + - uses: googleapis/release-please-action@v4 + if: github.ref == 'refs/heads/develop' + id: release-develop + with: + config-file: .release-please-config-develop.json + manifest-file: .release-please-manifest-develop.json + target-branch: develop + token: ${{ secrets.GITHUB_TOKEN }} + + # Release-please for main branch (creates stable releases) + - uses: googleapis/release-please-action@v4 + if: github.ref == 'refs/heads/main' + id: release-main + with: + config-file: .release-please-config.json + manifest-file: .release-please-manifest.json + target-branch: main + token: ${{ secrets.GITHUB_TOKEN }} + + # Trigger appropriate publish workflows based on release type + trigger-publish: + permissions: + contents: write + # This permission is mandatory for PyPI's trusted publishing + id-token: write + needs: release-please + if: ${{ needs.release-please.outputs.releases_created }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + version: "latest" + + - name: Build package + 
shell: bash + run: | + uv build + + - name: Test import + shell: bash + run: | + uv run python -c 'import otdf_python; print("Package imported successfully")' + + # While we improve the release process, prevent publishing to TestPyPI for versions <= 0.3.2 + - name: Store version and determine if should publish to TestPyPI + id: check_version + shell: bash + run: | + PROJECT_VERSION=$(uv version --short) + echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV + + if [[ "$PROJECT_VERSION" =~ [0-9]+\.[0-9]+\.[0-9]+a[0-9]+ ]]; then + echo "is_alpha=true" >> $GITHUB_OUTPUT + echo "Alpha version detected: $PROJECT_VERSION" + else + echo "is_alpha=false" >> $GITHUB_OUTPUT + echo "Stable version detected: $PROJECT_VERSION" + fi + + # Remove any alpha/beta/rc suffixes for comparison + CLEAN_VERSION=$(echo "$PROJECT_VERSION" | sed 's/[a-zA-Z].*//') + echo "clean_version=$CLEAN_VERSION" >> $GITHUB_OUTPUT + + # Convert versions to comparable format (e.g., "0.3.2" -> "000300020000") + version_to_number() { + echo "$1" | awk -F. '{ printf("%04d%04d%04d\n", $1,$2,$3); }' + } + + CURRENT_NUM=$(version_to_number "$CLEAN_VERSION") + THRESHOLD_NUM=$(version_to_number "0.3.2") + + if [ "$CURRENT_NUM" -gt "$THRESHOLD_NUM" ]; then + echo "should_publish=true" >> $GITHUB_OUTPUT + echo "Version $PROJECT_VERSION (clean: $CLEAN_VERSION) is > 0.3.2, will publish to TestPyPI" + else + echo "should_publish=false" >> $GITHUB_OUTPUT + echo "Version $PROJECT_VERSION (clean: $CLEAN_VERSION) is <= 0.3.2, skipping TestPyPI publish" + fi + + # For develop branch: trigger TestPyPI build (alpha prereleases go to TestPyPI from develop) + + # Publish with "trusted publisher" mechanism: + # https://docs.pypi.org/trusted-publishers/ + # + # Requires GHA token permission (above in YAML) and PyPI management: + # https://test.pypi.org/manage/project/otdf-python/settings/publishing/ + - name: Publish package distributions to TestPyPI + if: github.ref == 'refs/heads/develop' && steps.check_version.outputs.should_publish == 'true' + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + verbose: true + packages-dir: dist/ + + # For main branch: trigger PyPI build (stable releases go to PyPI from main) + # Publish with "trusted publisher" mechanism: + # https://docs.pypi.org/trusted-publishers/ + # + # Requires GHA token permission (above in YAML) and PyPI management: + # https://pypi.org/manage/project/otdf-python/settings/publishing/ + - name: Publish package distributions to PyPI + if: github.ref == 'refs/heads/main' + uses: pypa/gh-action-pypi-publish@release/v1 + with: + # repository-url: https://pypi.org/legacy/ + packages-dir: dist/ + verbose: true diff --git a/.github/workflows/test-suite.yaml b/.github/workflows/test-suite.yaml new file mode 100644 index 0000000..d9af290 --- /dev/null +++ b/.github/workflows/test-suite.yaml @@ -0,0 +1,121 @@ +name: Test Suite + +on: + push: + branches: + - main + - develop + pull_request: + workflow_call: + outputs: + tests_passed: + description: "Whether all tests passed" + value: ${{ jobs.report.outputs.success }} + workflow_dispatch: + +jobs: + # Step 1: Fast lint and format checks (fail fast on code style) + lint-check: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + + - name: Set up uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Run linting (fail fast) + run: | + uv sync --frozen + uv run ruff check + uv run ruff format --check + + # Step 2: Build (only after 
linting passes) + build: + runs-on: ubuntu-22.04 + needs: lint-check + outputs: + wheel: ${{ steps.find_wheel.outputs.wheel_path }} + steps: + - name: Checkout this repo + uses: actions/checkout@v4 + + - name: Set up uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Build otdf-python wheel using uv + run: | + uv sync --frozen + uv build + shell: bash + + - name: Find built wheel + id: find_wheel + run: | + wheel_path=$(ls dist/*.whl | head -n1) + echo "wheel_path=$wheel_path" >> $GITHUB_OUTPUT + shell: bash + + - name: Upload wheel as artifact + uses: actions/upload-artifact@v4 + with: + name: python-wheel + path: dist/*.whl + + # Step 3: Unit tests (only after build succeeds) + unit-tests: + runs-on: ubuntu-22.04 + needs: build + steps: + - uses: actions/checkout@v4 + + - name: Set up uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Run unit tests + run: | + uv sync --frozen + uv run pytest -m "not integration" --tb=short -v tests/ + + # Step 4: Integration tests (only after unit tests pass) + integration-tests: + strategy: + fail-fast: true + matrix: + python3_version: ["3.10", "3.11", "3.12", "3.13"] + needs: [build, unit-tests] + uses: ./.github/workflows/platform-integration-test.yaml + with: + wheel: ${{ needs.build.outputs.wheel }} + python_version: ${{ matrix.python3_version }} + + report: + runs-on: ubuntu-22.04 + needs: [lint-check, build, unit-tests, integration-tests] + if: always() + outputs: + success: ${{ steps.check.outputs.success }} + steps: + - name: Check all jobs succeeded + id: check + run: | + if [[ "${{ needs.lint-check.result }}" == "success" && "${{ needs.build.result }}" == "success" && "${{ needs.unit-tests.result }}" == "success" && "${{ needs.integration-tests.result }}" == "success" ]]; then + echo "success=true" >> $GITHUB_OUTPUT + echo "✅ All tests passed!" 
+ else + echo "success=false" >> $GITHUB_OUTPUT + echo "❌ Some tests failed:" + echo " Lint Check: ${{ needs.lint-check.result }}" + echo " Build: ${{ needs.build.result }}" + echo " Unit Tests: ${{ needs.unit-tests.result }}" + echo " Integration Tests: ${{ needs.integration-tests.result }}" + exit 1 + fi diff --git a/.gitignore b/.gitignore index b8e2ea4..993764d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,9 @@ -#### Start project specific git exclusions -otdf_python/ - -#### End project specific git exclusions - # Created by https://www.toptal.com/developers/gitignore/api/python # Edit at https://www.toptal.com/developers/gitignore?templates=python +platform/ +tests/integration/test_data/v4.2.2/*tdf +tests/integration/test_data/v4.3.1/*tdf ### Python ### # Byte-compiled / optimized / DLL files __pycache__/ @@ -129,7 +127,7 @@ celerybeat.pid *.sage.py # Environments -.env +.env* .venv env/ venv/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e321b89..620ed7d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,55 +4,45 @@ default_install_hook_types: - commit-msg - post-rewrite exclude: | - (?x)^( - otdf_python/.* - )$ + (?x)^( + otdf_python/.* + | otdf-python-proto/proto-files/.* + | otdf-python-proto/generated/.* + | otdf-python-proto/src/otdf_python_proto/.* + | otdf-python-proto/buf.yaml + | otdf-python-proto/buf.gen.yaml + )$ # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks# repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v6.0.0 - hooks: - - id: check-yaml - - id: end-of-file-fixer - - id: trailing-whitespace + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v6.0.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/codespell-project/codespell + rev: v2.4.1 + hooks: + - id: codespell + args: + [ + "--ignore-words-list", + "b-long, otdf_python", + "--skip=uv.lock,otdf-python-proto/uv.lock", + ] - # - repo: https://github.com/tekwizely/pre-commit-golang - # rev: master - # hooks: - # - id: go-lint - - - repo: https://github.com/dnephin/pre-commit-golang - rev: v0.5.1 - hooks: - - id: go-fmt - - - id: go-vet - # - id: golangci-lint - # timeout is needed for CI - # args: [-E, gosec, -E, goconst, -E, govet, --timeout, 300s] - # - id: go-imports - # - id: go-cyclo - # args: [-over=15] - # - id: validate-toml - - id: no-go-testing - - repo: https://github.com/codespell-project/codespell - rev: v2.4.1 - hooks: - - id: codespell - args: ["--ignore-words-list", "b-long, otdf_python", "--skip=go.sum,otdf_python/"] - - - repo: https://github.com/astral-sh/ruff-pre-commit - # Ruff version. - rev: v0.12.12 - hooks: - # Run the linter. - - id: ruff - # Run the formatter. - - id: ruff-format - - repo: https://github.com/compilerla/conventional-pre-commit - rev: v4.2.0 - hooks: - - id: conventional-pre-commit - stages: [commit-msg,post-rewrite] - args: [--verbose,--scopes=feat,fix,docs,style,test,chore,ci] + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.12.12 + hooks: + # Run the linter. + - id: ruff-check + # Run the formatter. 
+ - id: ruff-format + - repo: https://github.com/compilerla/conventional-pre-commit + rev: v4.2.0 + hooks: + - id: conventional-pre-commit + stages: [commit-msg] + args: [--verbose, --scopes="feat, fix, docs, style, test, chore, ci"] diff --git a/.python-version b/.python-version deleted file mode 100644 index e4fba21..0000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.12 diff --git a/.release-please-config-develop.json b/.release-please-config-develop.json new file mode 100644 index 0000000..bd65fb3 --- /dev/null +++ b/.release-please-config-develop.json @@ -0,0 +1,44 @@ +{ + "release-type": "python", + "include-v-in-tag": true, + "bootstrap-sha": "5ed358b3e2e0fd12dc336133f701b4ba5d8a298c", + "packages": { + ".": { + "release-type": "python", + "package-name": "otdf-python", + "prerelease": true, + "prerelease-type": "alpha", + "extra-files": [ + "pyproject.toml", + { + "path": "uv.lock", + "type": "toml", + "jsonpath": "$.package[?(@.name.value=='otdf-python')].version" + }, + { + "path": "otdf-python-proto/pyproject.toml", + "type": "toml", + "jsonpath": "$.project.version" + }, + { + "path": "otdf-python-proto/uv.lock", + "type": "toml", + "jsonpath": "$.package[?(@.name.value=='otdf-python-proto')].version" + } + ] + } + }, + "changelog-sections": [ + { "type": "feat", "section": "Features", "hidden": false }, + { "type": "fix", "section": "Bug Fixes", "hidden": false }, + { "type": "perf", "section": "Performance Improvements", "hidden": false }, + { "type": "revert", "section": "Reverts", "hidden": false }, + { "type": "docs", "section": "Documentation", "hidden": false }, + { "type": "style", "section": "Styles", "hidden": true }, + { "type": "chore", "section": "Miscellaneous Chores", "hidden": true }, + { "type": "refactor", "section": "Code Refactoring", "hidden": false }, + { "type": "test", "section": "Tests", "hidden": true }, + { "type": "build", "section": "Build System", "hidden": false }, + { "type": "ci", "section": "Continuous Integration", "hidden": true } + ] +} diff --git a/.release-please-config.json b/.release-please-config.json index 115cd6e..801e498 100644 --- a/.release-please-config.json +++ b/.release-please-config.json @@ -1,7 +1,42 @@ { + "release-type": "python", + "include-v-in-tag": true, + "bootstrap-sha": "5ed358b3e2e0fd12dc336133f701b4ba5d8a298c", "packages": { ".": { - "release-type": "python" + "release-type": "python", + "package-name": "otdf-python", + "extra-files": [ + "pyproject.toml", + { + "path": "uv.lock", + "type": "toml", + "jsonpath": "$.package[?(@.name.value=='otdf-python')].version" + }, + { + "path": "otdf-python-proto/pyproject.toml", + "type": "toml", + "jsonpath": "$.project.version" + }, + { + "path": "otdf-python-proto/uv.lock", + "type": "toml", + "jsonpath": "$.package[?(@.name.value=='otdf-python-proto')].version" + } + ] } - } + }, + "changelog-sections": [ + { "type": "feat", "section": "Features", "hidden": false }, + { "type": "fix", "section": "Bug Fixes", "hidden": false }, + { "type": "perf", "section": "Performance Improvements", "hidden": false }, + { "type": "revert", "section": "Reverts", "hidden": false }, + { "type": "docs", "section": "Documentation", "hidden": false }, + { "type": "style", "section": "Styles", "hidden": true }, + { "type": "chore", "section": "Miscellaneous Chores", "hidden": true }, + { "type": "refactor", "section": "Code Refactoring", "hidden": false }, + { "type": "test", "section": "Tests", "hidden": true }, + { "type": "build", "section": "Build System", "hidden": false }, + { 
"type": "ci", "section": "Continuous Integration", "hidden": true } + ] } diff --git a/.release-please-manifest-develop.json b/.release-please-manifest-develop.json new file mode 100644 index 0000000..beea3f7 --- /dev/null +++ b/.release-please-manifest-develop.json @@ -0,0 +1,3 @@ +{ + ".": "0.3.1" +} diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 04e204f..8d7a673 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.2.20" + ".": "0.3.0" } diff --git a/.vscode/settings.json b/.vscode/settings.json index c377654..c159337 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,18 +1,8 @@ { - "go.testTimeout": "40s", - - // Without the '-count=1' flag, tests will not truly run all - // the time. - // - // As a result, integration tests have no value. For example, - // the service being tested might be down, and yet tests will - // give the impression that all tests have passed. - // - // Based on: - // https://stackoverflow.com/a/73490461 - "go.testFlags": ["-count=1"], - - - "go.testEnvFile": "${workspaceFolder}/env/.local_opentdf_2.0_env" - + "workbench.colorTheme": "Default Dark Modern", + "python.testing.pytestArgs": [ + "tests", + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true } diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..25b5369 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,28 @@ +# Changelog + +## [0.3.1](https://github.com/b-long/opentdf-python-sdk/compare/otdf-python-v0.3.0...otdf-python-v0.3.1) (2025-09-11) + + +### Bug Fixes + +* add develop-specific release-please files and update workflow ([63ef99a](https://github.com/b-long/opentdf-python-sdk/commit/63ef99a1439348ab829bec9d993bd6db245d0995)) +* address pre-commit (lint) issues ([f61b020](https://github.com/b-long/opentdf-python-sdk/commit/f61b02033f4a132ee1e552be61917d18d5a0bf4c)) +* fix .release-please-config.json file ([#97](https://github.com/b-long/opentdf-python-sdk/issues/97)) ([085f056](https://github.com/b-long/opentdf-python-sdk/commit/085f0564a44e4828c4bddfd67f6ecedbe72a6395)) +* fix publishing ([de00583](https://github.com/b-long/opentdf-python-sdk/commit/de005832c2a961f732f564b6a0380b6007a59b6b)) +* fix publishing ([dbc9f80](https://github.com/b-long/opentdf-python-sdk/commit/dbc9f8002d04ae08dc0403dea61507bd2eb19cd2)) +* guarantee target-version decrypt support ([#84](https://github.com/b-long/opentdf-python-sdk/issues/84)) ([238715f](https://github.com/b-long/opentdf-python-sdk/commit/238715f8ef761bcb39c66c13cf017fb891ad6d40)) +* omit README from Github releases ([cca9e3e](https://github.com/b-long/opentdf-python-sdk/commit/cca9e3efb9ead3693d401261e3e74debd985653e)) +* release configuration ([#99](https://github.com/b-long/opentdf-python-sdk/issues/99)) ([11497fb](https://github.com/b-long/opentdf-python-sdk/commit/11497fbd28e3c92b52e75cf417162ae12843e097)) +* release-please configuration ([#95](https://github.com/b-long/opentdf-python-sdk/issues/95)) ([fe1ee2d](https://github.com/b-long/opentdf-python-sdk/commit/fe1ee2d9a8e6763db27582f1d0d4a6625ec3716e)) +* remove unnecessary 'ncipollo/release-action' ([6a1d57b](https://github.com/b-long/opentdf-python-sdk/commit/6a1d57b562b6b6f506db9c29fa11858bf28ef702)) +* update prerelease config for develop branch ([6cfaee6](https://github.com/b-long/opentdf-python-sdk/commit/6cfaee6fd918f50185859edbab06a2dbf7d7123f)) +* use correct 'extra-files' for uv.lock 
([0e09171](https://github.com/b-long/opentdf-python-sdk/commit/0e091714cad2a5f462bf0a5444e92d8e71639626)) + + +### Miscellaneous Chores + +* release 0.3.0a10 ([3bb4283](https://github.com/b-long/opentdf-python-sdk/commit/3bb42837fa9547273023859db48c4e4f69325273)) +* release 0.3.0a11 ([ce8a520](https://github.com/b-long/opentdf-python-sdk/commit/ce8a52064969aa5e27876c4454d2e32b42a36f5b)) +* release 0.3.1 ([c1395b1](https://github.com/b-long/opentdf-python-sdk/commit/c1395b1ff6bf02ffb24f20e7d45ca0869ae033f5)) + +## Changelog diff --git a/README.md b/README.md index 1d0b438..f99c32a 100644 --- a/README.md +++ b/README.md @@ -1,182 +1,212 @@ -# opentdf-python-sdk +# OpenTDF Python SDK Unofficial OpenTDF SDK for Python -[![Tests](https://github.com/b-long/opentdf-python-sdk/workflows/PyPIBuild/badge.svg)](https://github.com/b-long/opentdf-python-sdk/actions?query=workflow%3APyPIBuild) -This project is powered by gopy, which generates (and compiles) a CPython extension module from a go package. The `gopy` -tool unlocks performance, flexibility, and excellent Developer Experience to Python end-users. Read more about -[`gopy` on Github](https://github.com/go-python/gopy). +## Features -## Adding features +- **TDF Encryption/Decryption**: Create and decrypt TDF files with policy-based access control +- **Flexible Configuration**: Support for various authentication methods and platform endpoints +- **Comprehensive Testing**: Full test suite with unit and integration tests -If you wish to expand the functionality of `otdf-python`: +## Legacy Version -1. Create a fork/branch -1. Add new capabilities (e.g. in `main.go`) -1. Add a test (e.g. in `otdf_python_test.go`) -1. Commit your changes, push, and open a Pull Request via -the Github project: https://github.com/b-long/opentdf-python-sdk +A legacy version (0.2.x) of this project is available for users who need the previous implementation. For more information, see [LEGACY_VERSION.md](docs/LEGACY_VERSION.md) or visit the [legacy branch on GitHub](https://github.com/b-long/opentdf-python-sdk/tree/0.2.x). -## Installation +## Prerequisites -Install from the [Python Package Index (PyPI)](https://pypi.org): +This project uses [uv](https://docs.astral.sh/uv/) for dependency management and task running. -```bash -# Install the latest from pypi.org -pip install otdf_python +### Installing uv -# Install a pinned version -pip install otdf-python==0.2.20 +Install `uv` using one of the following methods: -# Install a pinned version, from test.pypi.org -pip install -i https://test.pypi.org/simple/ otdf-python==0.2.20 +**macOS/Linux:** +```bash +curl -LsSf https://astral.sh/uv/install.sh | sh ``` -## Usage +**Windows:** +```powershell +powershell -c "irm https://astral.sh/uv/install.ps1 | iex" +``` -Simple usage examples are given below. In addition, we recommend you also: +**Using Homebrew (macOS):** +```bash +brew install uv +``` -1. See the contents of [`main.go` on Github](https://github.com/b-long/opentdf-python-sdk/blob/main/main.go). ✨ Note that all Upper-case functions are available in Python. -1. See the contents of [`validate_otdf_python.py` on Github](https://github.com/b-long/opentdf-python-sdk/blob/main/validate_otdf_python.py). +For more installation options, see the [uv installation guide](https://docs.astral.sh/uv/getting-started/installation/). -### Example: Configuration +## Development Setup -Creating a helper function may simplify the usage of `otdf-python`. +1. 
Clone the repository:
+```bash
+git clone https://github.com/b-long/opentdf-python-sdk.git
+cd opentdf-python-sdk
+```
-For example:
+2. Install dependencies:
+```bash
+uv sync
+```
-```python
-def _get_configuration() -> OpentdfConfig:
-    """
-    The config returned is used for both encryption and decryption.
-    """
-    print("Preparing 'OpentdfConfig' object")
-    from otdf_python.gotdf_python import OpentdfConfig
+## Running Tests
-    platformEndpoint = "platform.opentdf.local"
-    keycloakEndpoint = "keycloak.opentdf.local/auth
+Run the full test suite:
+```bash
+uv run pytest tests/
+```
-    # Create config
-    config: OpentdfConfig = OpentdfConfig(
-        ClientId="opentdf-sdk",
-        ClientSecret="secret",
-        PlatformEndpoint=platformEndpoint,
-        TokenEndpoint=f"http://{keycloakEndpoint}/realms/opentdf/protocol/openid-connect/token",
-        KasUrl=f"http://{platformEndpoint}/kas",
-    )
+Run specific test files:
+```bash
+uv run pytest tests/test_sdk.py
+```
-    # NOTE: Structs from golang can be printed, like below
-    # print(config)
-    print("Returning 'OpentdfConfig'")
+Run tests with verbose output:
+```bash
+uv run pytest tests/ -v
+```
-    return config
+Run integration tests only:
+```bash
+uv run pytest tests/ -m integration
 ```
+## Installation
-### Example: Encrypt a string
+Install from PyPI:
+```bash
+pip install otdf-python
+```
-```python
-from otdf_python.gotdf_python import EncryptString
-from otdf_python.go import Slice_string
-# Depends on the '_get_opentdf_config()' given
-# in the README above
-config: OpentdfConfig = _get_opentdf_config()
+## Protobuf & Connect RPC Generation
-# da = Slice_string(
-#     [
-#         "https://example.com/attr/attr1/value/value1",
-#         "https://example.com/attr/attr1/value/value2",
-#     ]
-# )
-da = Slice_string([])
+This project uses a dedicated submodule, `otdf-python-proto/`, for generating Python protobuf files and Connect RPC clients from OpenTDF platform proto definitions.
-tdf_manifest_json = EncryptString(
-    inputText="Hello from Python",
-    config=config,
-    dataAttributes=da,
-    authScopes=Slice_string(["email"]),
-)
+### Regenerating Protobuf & Connect RPC Files
+
+From the submodule:
+```bash
+cd otdf-python-proto
+uv run python scripts/generate_connect_proto.py
+```
-### Example: Encrypt a file
+See [`otdf-python-proto/README.md`](otdf-python-proto/README.md) and [`PROTOBUF_SETUP.md`](PROTOBUF_SETUP.md) for details.
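+As a local sanity check after regenerating (a minimal sketch that simply mirrors the commands CI runs in `.github/workflows/test-suite.yaml`), rebuild the wheel and re-run the unit tests from the repository root:
+
+```bash
+# Rebuild the wheel; the artifact lands in dist/*.whl
+uv sync --frozen
+uv build
+
+# Run the unit tests only (tests marked "integration" need a running platform)
+uv run pytest -m "not integration" --tb=short -v tests/
+```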
+ +## Quick Start + +### Basic Configuration ```python -from otdf_python.gotdf_python import EncryptFile -from otdf_python.go import Slice_string +from otdf_python.sdk_builder import SDKBuilder -# Depends on the '_get_opentdf_config()' given -# in the README above -config: OpentdfConfig = _get_opentdf_config() +# Create and configure SDK using builder pattern +builder = SDKBuilder() +builder.set_platform_endpoint("https://platform.example.com") +builder.client_secret("your-client-id", "your-client-secret") -with tempfile.TemporaryDirectory() as tmpDir: - print("Created temporary directory", tmpDir) +# Build the SDK instance +sdk = builder.build() +``` + +### Advanced Configuration + +```python +from otdf_python.sdk_builder import SDKBuilder - config: OpentdfConfig = _get_configuration() +# Create SDK with additional configuration options +builder = SDKBuilder() +builder.set_platform_endpoint("https://platform.example.com") +builder.set_issuer_endpoint("https://auth.example.com") +builder.client_secret("your-client-id", "your-client-secret") - SOME_ENCRYPTED_FILE = Path(tmpDir) / "some-file.tdf" +# Examples, for local development - if SOME_ENCRYPTED_FILE.exists(): - SOME_ENCRYPTED_FILE.unlink() +# Use HTTP instead of HTTPS +builder.use_insecure_plaintext_connection(True) - if SOME_ENCRYPTED_FILE.exists(): - raise ValueError( - "The output path should not exist before calling 'EncryptFile()'." - ) +# Or +# Skip TLS verification +builder.use_insecure_skip_verify(True) - SOME_PLAINTEXT_FILE = Path(tmpDir) / "new-file.txt" - SOME_PLAINTEXT_FILE.write_text("Hello world") +# Build the SDK instance +sdk = builder.build() +``` + +### Encrypt Data - from otdf_python.go import Slice_string +```python +from io import BytesIO - # da = Slice_string( - # [ - # "https://example.com/attr/attr1/value/value1", - # "https://example.com/attr/attr1/value/value2", - # ] - # ) - da = Slice_string([]) - outputFilePath = EncryptFile( - inputFilePath=str(SOME_PLAINTEXT_FILE), - outputFilePath=str(SOME_ENCRYPTED_FILE), - config=config, - dataAttributes=da, - authScopes=Slice_string(["email"]), - ) +# Create TDF configuration with attributes +config = sdk.new_tdf_config(attributes=["https://example.com/attr/classification/value/public"]) - print(f"The output file was written to destination path: {outputFilePath}") +# Encrypt data to TDF format +input_data = b"Hello, World!" +output_stream = BytesIO() +manifest, size, _ = sdk.create_tdf(BytesIO(input_data), config, output_stream) +encrypted_data = output_stream.getvalue() +# Save encrypted data to file +with open("encrypted.tdf", "wb") as f: + f.write(encrypted_data) ``` -### Example: Decrypt a file +### Decrypt Data ```python -from otdf_python.gotdf_python import EncryptFile -from otdf_python.go import Slice_string +from otdf_python.tdf import TDFReaderConfig -# Depends on the '_get_opentdf_config()' given -# in the README above -config: OpentdfConfig = _get_opentdf_config() +# Read encrypted TDF file +with open("encrypted.tdf", "rb") as f: + encrypted_data = f.read() -def decrypt_file(input_file_path: Path, output_file_path: Path) -> Path: - if output_file_path.exists(): - output_file_path.unlink() +# Decrypt TDF +reader_config = TDFReaderConfig() +tdf_reader = sdk.load_tdf(encrypted_data, reader_config) +decrypted_data = tdf_reader.payload - if output_file_path.exists(): - raise ValueError( - "The output path should not exist before calling 'DecryptFile()'." 
-        )
+# Save decrypted data
+with open("decrypted.txt", "wb") as f:
+    f.write(decrypted_data)
-    outputFilePath = DecryptFile(
-        inputFilePath=str(input_file_path),
-        outputFilePath=str(output_file_path),
-        config=config,
-    )
+# Don't forget to close the SDK when done
+sdk.close()
+```
-    output = Path(outputFilePath)
-    if not output.exists():
-        raise ValueError("DecryptFile() did not create the output file")
+## Project Structure
-    return output
 ```
+src/otdf_python/
+├── sdk.py              # Main SDK interface
+├── config.py           # Configuration management
+├── tdf.py              # TDF format handling
+├── nanotdf.py          # NanoTDF format handling
+├── crypto_utils.py     # Cryptographic utilities
+├── kas_client.py       # Key Access Service client
+└── ...                 # Additional modules
+tests/
+└── ...                 # Various tests
+```
+
+## Contributing
+
+1. Fork the repository
+2. Create a feature branch: `git checkout -b feature-name`
+3. Make your changes
+4. Run tests: `uv run pytest tests/`
+5. Commit your changes: `git commit -am 'Add feature'`
+6. Push to the branch: `git push origin feature-name`
+7. Submit a pull request
+
+### Release Process
+
+For maintainers and contributors working on releases:
+- See [RELEASES.md](RELEASES.md) for comprehensive release documentation
+- Feature branch alpha releases are available for testing changes before merge
+- Automated releases via Release Please on the main branch
+
+## License
+
+This project is licensed under the MIT License - see the LICENSE file for details.
diff --git a/build-scripts/ci-build.sh b/build-scripts/ci-build.sh
deleted file mode 100755
index 0af4461..0000000
--- a/build-scripts/ci-build.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-
-set -eou pipefail
-
-# Based on: https://stackoverflow.com/a/246128
-SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-BUILD_ROOT="${SCRIPT_DIR}/.."
-cd "${BUILD_ROOT}" || { echo "Unable to change to build root directory" ; exit 1; }
-
-printf """
-
-✨✨✨ Configure gopy / dependencies, and build wheel ✨✨✨
-
-"""
-
-echo "✨✨✨ Display Python version"
-echo "python -VV"
-python -VV
-
-echo "✨✨✨ Display Python executable path"
-echo 'python -c "import sys; print(sys.executable)"'
-python -c "import sys; print(sys.executable)"
-
-echo "✨✨✨ Display pip version"
-echo 'pip -V'
-pip -V
-
-echo "✨✨✨ Install poetry"
-echo 'pip install poetry'
-pip install poetry
-
-echo "✨✨✨ List home directory contents"
-# Look for go/bin (skip, we know it exists)
-echo '$HOME/'
-ls -la "$HOME/"
-
-echo "✨✨✨ List Go directory contents"
-echo '$HOME/go/'
-ls -la "$HOME/go/"
-
-echo "✨✨✨ Display Go version"
-go version
-
-echo "✨✨✨ Add Go bin directory to PATH"
-# Add Go bin directory to PATH
-echo "export PATH=$PATH:~/.local/go/bin" >> $GITHUB_ENV
-
-echo "✨✨✨ Install dependencies with poetry"
-# Since we don't have our wheel build / install configured yet we use '--no-root'
-poetry install --no-root
-
-echo "✨✨✨ Activate poetry environment"
-source $(poetry env info --path)/bin/activate
-
-echo "✨✨✨ Add Go bin directory to PATH again"
-# Add Go bin directory to PATH
-echo "export PATH=$PATH:~/.local/go/bin" >> $GITHUB_ENV
-
-echo "✨✨✨ Install goimports"
-go install golang.org/x/tools/cmd/goimports@latest
-
-echo "✨✨✨ Install gopy"
-go install github.com/go-python/gopy@latest
-
-echo "✨✨✨ Upgrade setuptools and wheel"
-poetry run pip install --upgrade setuptools wheel
-
-echo "✨✨✨ Build gopy"
-gopy build --output=otdf_python -vm=python3 .
- -echo "✨✨✨ Build wheel" -poetry run python3 setup.py bdist_wheel - -echo "✨✨✨ Install wheel" -pip install dist/otdf_python-0.2.20-py3-none-any.whl diff --git a/build-scripts/make_and_validate_script.sh b/build-scripts/make_and_validate_script.sh deleted file mode 100755 index 1d67b47..0000000 --- a/build-scripts/make_and_validate_script.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash - -set -x -set -eou pipefail - -# Ensure we aren't in a virtual environment -deactivate || { echo "Not currently in a virtual environment" ; } - -# Based on: https://stackoverflow.com/a/246128 -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -BUILD_ROOT="${SCRIPT_DIR}/.." -cd "${BUILD_ROOT}" || { echo "Unable to change to build root directory" ; exit 1; } - -SKIP_TESTS="${1:-NO}" - -# Cleanup -rm -rf .venv/ -rm -rf dist/ - -# Install python deps -poetry config virtualenvs.create true --local -poetry config virtualenvs.in-project true --local -poetry install --no-root - -# Activate virtual environment with 'pybindgen' etc. -# -# NOTE: Using 'poetry shell' does not work, and we -# can't assume that the virtual environment is ./.venv/ -if ! [ -d "$( poetry env info --path )" ]; then - echo "Unable to locate virtual environment directory" - exit 1 -fi - -# shellcheck disable=SC1091 -source "$( poetry env info --path )/bin/activate" - -python3 -m pip install pybindgen -go install golang.org/x/tools/cmd/goimports@latest -go install github.com/go-python/gopy@v0.4.10 - -# For every step below, 'which python' should return '.venv/bin/python' -PATH="$PATH:$HOME/go/bin" gopy build --output=otdf_python -vm=python3 . - -python3 -m pip install --upgrade setuptools wheel - -# Build the 'dist/' folder (wheel) -python3 setup.py bdist_wheel - -# Prove that the wheel can be installed -pip install dist/otdf_python-0.2.20-py3-none-any.whl - -if [[ "$SKIP_TESTS" == "-s" || "$SKIP_TESTS" == "--skip-tests" ]]; then - echo "Build is complete, skipping tests." -else - # Validate functionality - echo "Build is complete, running tests." - python3 validate_otdf_python.py -fi diff --git a/build-scripts/uv_make_and_validate_script.sh b/build-scripts/uv_make_and_validate_script.sh deleted file mode 100755 index 666f55c..0000000 --- a/build-scripts/uv_make_and_validate_script.sh +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/bash - -set -x -set -eou pipefail - -loud_print(){ - printf """ - - ======================================== - $1 - - - ======================================== - - """ -} - -# Based on: https://stackoverflow.com/a/246128 -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -BUILD_ROOT="${SCRIPT_DIR}/.." -cd "${BUILD_ROOT}" || { echo "Unable to change to build root directory" ; exit 1; } - -SKIP_TESTS="${1:-NO}" - -# Cleanup -rm -rf .venv-wheel/ -rm -rf .venv/ -rm -rf dist/ - -# PY_TYPE="--python-preference=only-system" -PY_TYPE="--python-preference=only-managed" - -loud_print "Creating virtual environment" -# Install python deps -uv venv .venv --python 3.12 "$PY_TYPE" -source "${BUILD_ROOT}/.venv/bin/activate" - -loud_print "Installing dependencies" -uv pip install wheel pybindgen - -if ! 
[ -d ".venv" ]; then - echo "Unable to locate virtual environment directory" - exit 1 -fi - -loud_print "Activating virtual environment" -source "${BUILD_ROOT}/.venv/bin/activate" - -loud_print "Installing goimports" -go install golang.org/x/tools/cmd/goimports@latest -loud_print "Installing gopy" -go install github.com/go-python/gopy@v0.4.10 - -# For every step below, 'which python' should return '.venv/bin/python' -loud_print "Executing gopy" -PATH="$PATH:$HOME/go/bin" gopy build --output=otdf_python -vm=python3 . - -loud_print "Installing setuptools" -uv pip install --upgrade setuptools - -# Build the 'dist/' folder (wheel) -loud_print "Running 'setup.py bdist_wheel'" -python setup.py bdist_wheel - -deactivate - -# Prove that the wheel can be installed -loud_print "Installing wheel" - -uv venv .venv-wheel --python 3.12 "$PY_TYPE" -source "${BUILD_ROOT}/.venv-wheel/bin/activate" -pip install pybindgen -pip install dist/otdf_python-0.2.20-py3-none-any.whl - -if [[ "$SKIP_TESTS" == "-s" || "$SKIP_TESTS" == "--skip-tests" ]]; then - echo "Build is complete, skipping tests." -else - # Validate functionality - echo "Build is complete, running tests." - python validate_otdf_python.py -fi diff --git a/conftest.py b/conftest.py new file mode 100644 index 0000000..b75e5b8 --- /dev/null +++ b/conftest.py @@ -0,0 +1,68 @@ +""" +Pytest configuration and fixtures for the OpenTDF Python SDK tests. + +This module contains pytest hooks and fixtures that will be automatically +loaded by pytest when running tests. +""" + +from pathlib import Path + +import pytest + +from tests.server_logs import log_server_logs_on_failure + + +@pytest.fixture(scope="session") +def project_root(request) -> Path: + return request.config.rootpath # Project root + + +@pytest.hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_makereport(item, call): + """ + Hook that runs after each test phase (setup, call, teardown). + + This hook automatically collects server logs when a test fails. + """ + # Execute the test and get the report + outcome = yield + rep = outcome.get_result() + + # Only collect logs on test failure during the 'call' phase + # (not during setup or teardown failures) + if rep.when == "call" and rep.failed: + # Get the test name from the item + test_name = item.nodeid + + # Check if this is an integration test that might need server logs + if hasattr(item, "pytestmark"): + markers = [mark.name for mark in item.pytestmark] + if "integration" in markers: + log_server_logs_on_failure(test_name) + else: + # For tests without explicit markers, check if it's likely an integration test + # by looking at the test name or if it involves network operations + if ( + "integration" in test_name.lower() + or "encrypt" in test_name.lower() + or "decrypt" in test_name.lower() + or "cli" in test_name.lower() + ): + log_server_logs_on_failure(test_name) + + +@pytest.fixture +def collect_server_logs(): + """ + Fixture that provides a function to manually collect server logs. + + Usage: + def test_something(collect_server_logs): + # ... test code ... 
+ if some_condition: + logs = collect_server_logs() + print(logs) + """ + from tests.server_logs import collect_server_logs + + return collect_server_logs diff --git a/docs/CONNECT_RPC_MIGRATION.md b/docs/CONNECT_RPC_MIGRATION.md new file mode 100644 index 0000000..92ef6fc --- /dev/null +++ b/docs/CONNECT_RPC_MIGRATION.md @@ -0,0 +1,283 @@ +# Connect RPC Migration Guide + +This document explains how to migrate from traditional gRPC clients to Connect RPC clients in the OpenTDF Python SDK. + +## What is Connect RPC? + +Connect RPC is a modern, HTTP-friendly alternative to gRPC that provides: + +- **HTTP/1.1 compatibility** - Works with all HTTP infrastructure +- **Human-readable debugging** - JSON payloads can be inspected with standard tools +- **Browser compatibility** - Can be called directly from web browsers +- **Simplified deployment** - No special gRPC infrastructure required +- **Better observability** - Standard HTTP status codes and headers + +For more information, see the [Connect RPC Protocol Documentation](https://connectrpc.com/docs/protocol/). + +## Dependencies + +The project now includes both Connect RPC and legacy gRPC dependencies: + +```toml +dependencies = [ + "connect-python>=0.4.2", # Connect RPC client + "grpcio>=1.74.0", # Legacy gRPC (backward compatibility) + "grpcio-tools>=1.74.0", # Legacy gRPC tools + # ... other dependencies +] +``` + +## Code Generation + +### Connect RPC Generation (Recommended) + +Use the new Connect RPC generation script: + +```bash +cd proto-gen +uv run python scripts/generate_connect_proto.py +``` + +This generates: +- `*_connect.py` - Connect RPC clients (preferred) +- `*_pb2.py` - Standard protobuf classes +- `*_pb2.pyi` - Type stubs +- `legacy_grpc/*_pb2_grpc.py` - Legacy gRPC clients (backward compatibility) + +### Legacy gRPC Generation + +The old script still works for backward compatibility: + +```bash +cd proto-gen +uv run python scripts/generate_proto.py +``` + +## Client Usage Examples + +### Connect RPC Client (Recommended) + +```python +import urllib3 +from otdf_python_proto.policy_pb2 import GetPolicyRequest +from otdf_python_proto.policy_connect import PolicyServiceClient + +# Create HTTP client +http_client = urllib3.PoolManager() + +# Create Connect RPC client +policy_client = PolicyServiceClient( + base_url="https://platform.opentdf.io", + http_client=http_client +) + +# Make unary RPC call +request = GetPolicyRequest(id="policy-123") +response = policy_client.get_policy(request) +print(f"Policy: {response}") + +# With extra headers and timeout +response = policy_client.get_policy( + request, + extra_headers={"Authorization": "Bearer your-token"}, + timeout_seconds=30.0 +) +``` + +### Async Connect RPC Client + +```python +import aiohttp +from otdf_python_proto.policy_pb2 import ListPoliciesRequest +from otdf_python_proto.policy_connect import AsyncPolicyServiceClient + +async def main(): + async with aiohttp.ClientSession() as http_client: + policy_client = AsyncPolicyServiceClient( + base_url="https://platform.opentdf.io", + http_client=http_client + ) + + # Make async RPC call + request = ListPoliciesRequest() + response = await policy_client.list_policies(request) + print(f"Policies: {response}") + + # Server streaming example + async for policy in policy_client.stream_policies(request): + print(f"Streaming policy: {policy}") +``` + +### Legacy gRPC Client (Backward Compatibility) + +```python +import grpc +from otdf_python_proto.policy_pb2 import GetPolicyRequest +from otdf_python_proto.legacy_grpc.policy_pb2_grpc 
import PolicyServiceStub + +# Create gRPC channel +channel = grpc.insecure_channel("platform.opentdf.io:443") +policy_client = PolicyServiceStub(channel) + +# Make RPC call +request = GetPolicyRequest(id="policy-123") +response = policy_client.GetPolicy(request) +print(f"Policy: {response}") +``` + +## Error Handling + +### Connect RPC Error Handling + +```python +from connectrpc.errors import ConnectError + +try: + response = policy_client.get_policy(request) +except ConnectError as e: + print(f"Connect error: {e.code} - {e.message}") + # e.code can be: "not_found", "permission_denied", etc. + # Full list: https://connectrpc.com/docs/protocol/#error-codes +``` + +### gRPC Error Handling + +```python +import grpc + +try: + response = policy_client.GetPolicy(request) +except grpc.RpcError as e: + print(f"gRPC error: {e.code()} - {e.details()}") +``` + +## Protocol Differences + +| Feature | Connect RPC | gRPC | +|---------|-------------|------| +| Transport | HTTP/1.1, HTTP/2 | HTTP/2 only | +| Payload | JSON or Binary | Binary only | +| Status Codes | HTTP status codes | gRPC status codes | +| Headers | Standard HTTP headers | Custom gRPC headers | +| Browser Support | ✅ Yes | ❌ No (requires gRPC-Web) | +| Debugging | ✅ Human-readable | ❌ Binary format | +| Infrastructure | ✅ Standard HTTP | ❌ Requires gRPC support | + +## Migration Checklist + +- [ ] Update dependencies to include `connect-python` +- [ ] Regenerate proto files with Connect RPC support +- [ ] Update client code to use Connect RPC clients +- [ ] Update error handling for Connect error types +- [ ] Test with your authentication/authorization setup +- [ ] Update deployment configuration (if needed) +- [ ] Remove legacy gRPC dependencies (optional) + +## Advanced Usage + +### Custom HTTP Configuration + +```python +import urllib3 + +# Configure HTTP client with custom settings +http_client = urllib3.PoolManager( + timeout=urllib3.Timeout(connect=10.0, read=30.0), + retries=urllib3.Retry(total=3, backoff_factor=0.3), + headers={"User-Agent": "MyApp/1.0"} +) + +policy_client = PolicyServiceClient( + base_url="https://platform.opentdf.io", + http_client=http_client +) +``` + +### Low-level API Access + +```python +# Access response metadata +output = policy_client.call_get_policy(request) +response = output.message() +headers = output.response_headers() +trailers = output.response_trailers() + +if output.error(): + raise output.error() +``` + +### Server Streaming + +```python +# Server streaming RPC +request = StreamPoliciesRequest() +for policy in policy_client.stream_policies(request): + print(f"Received policy: {policy.id}") + +# With error handling +try: + for policy in policy_client.stream_policies(request): + process_policy(policy) +except ConnectError as e: + print(f"Stream error: {e.code} - {e.message}") +``` + +## Troubleshooting + +### Common Issues + +1. **"buf command not found"** + ```bash + # Install buf + brew install bufbuild/buf/buf + # Or + go install github.com/bufbuild/buf/cmd/buf@latest + ``` + +2. **"protoc-gen-connect_python not found"** + ```bash + # Install with compiler support + uv add connect-python[compiler] + ``` + +3. **Import errors after generation** + ```bash + # Ensure __init__.py files exist + find proto-gen/generated -type d -exec touch {}/__init__.py \; + ``` + +4. 
**HTTP/2 server issues**
+   - Connect RPC works with HTTP/1.1, so this is rarely an issue
+   - If using streaming, ensure your server supports the Connect protocol
+
+### Debug HTTP Traffic
+
+```python
+import logging
+
+import urllib3
+
+# Enable HTTP debug logging
+logging.basicConfig(level=logging.DEBUG)
+urllib3.disable_warnings()
+
+# You can now see all HTTP requests/responses
+```
+
+## Performance Considerations
+
+- **HTTP/1.1**: Good for most use cases, supports connection pooling
+- **JSON vs Binary**: Binary protobuf is more efficient, JSON is more debuggable
+- **Connection Reuse**: Reuse `urllib3.PoolManager` instances across calls
+- **Async**: Use async clients for high-concurrency applications
+
+## Next Steps
+
+1. **Start with unary RPCs**: Easiest to migrate and test
+2. **Test authentication**: Ensure your auth tokens work with HTTP headers
+3. **Migrate streaming RPCs**: More complex but follow similar patterns
+4. **Remove gRPC dependencies**: Once fully migrated, clean up dependencies
+5. **Update documentation**: Update your team's documentation and examples
+
+For more information, see:
+- [Connect RPC Documentation](https://connectrpc.com/docs/)
+- [Connect Python Repository](https://github.com/connectrpc/connect-python)
+- [OpenTDF Platform](https://github.com/opentdf/platform)
diff --git a/docs/DEVELOPING.md b/docs/DEVELOPING.md
new file mode 100644
index 0000000..2affa49
--- /dev/null
+++ b/docs/DEVELOPING.md
@@ -0,0 +1,18 @@
+# Developing the OpenTDF Python SDK
+
+## Enabling Direct Access Grants
+
+To use token exchange with direct access grants, you will need to enable the `Direct Access Grants` option in your IdP client settings (e.g. in Keycloak).
+
+## Setting Up Your Development Environment
+
+A convenience script is provided to help set up your development environment with an OpenTDF platform running in Docker.
+
+You can run the following command in your terminal:
+
+```bash
+.github/start_opentdf_docker.sh
+```
+
+Using this script will automatically enable direct access grants in Keycloak for you.
diff --git a/docs/LEGACY_VERSION.md b/docs/LEGACY_VERSION.md
new file mode 100644
index 0000000..3da78a2
--- /dev/null
+++ b/docs/LEGACY_VERSION.md
@@ -0,0 +1,20 @@
+# Legacy Version
+
+The legacy version of this project is available on the GitHub branch `0.2.x`.
+
+You can access it directly via the following link:
+https://github.com/b-long/opentdf-python-sdk/tree/0.2.x
+
+If you must use this legacy version, refer to the information
+available in that branch. However, it is highly recommended to use the latest version of the project for better performance, security, and support.
+
+## About the legacy version
+
+The legacy version was built using [gopy](https://github.com/go-python/gopy) to create an initial
+release of the OpenTDF Python SDK. It provided basic functionality for interacting with the OpenTDF platform, including data encryption and decryption.
+
+## PyPI
+
+The most recent release of the legacy version is available on PyPI and can
+be found at the following link:
+https://pypi.org/project/otdf-python/0.2.20/
diff --git a/docs/PROTOBUF_SETUP.md b/docs/PROTOBUF_SETUP.md
new file mode 100644
index 0000000..053d061
--- /dev/null
+++ b/docs/PROTOBUF_SETUP.md
@@ -0,0 +1,135 @@
+# OpenTDF Python SDK - Protobuf Generation Sub-Module
+
+This document explains the protobuf generation sub-module that was created for the OpenTDF Python SDK project.
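+
+At a glance, the end product is a set of importable protobuf message classes and gRPC stubs. Here is a minimal sketch using the import path and names verified in the "Verified Working Features" section below; the endpoint is only a placeholder:
+
+```python
+import grpc
+
+from otdf_python.proto import kas_pb2, kas_pb2_grpc
+
+# A message class and a service stub are the two kinds of generated artifacts.
+request = kas_pb2.RewrapRequest()
+channel = grpc.insecure_channel("localhost:8080")  # placeholder endpoint
+stub = kas_pb2_grpc.AccessServiceStub(channel)
+```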
+ +## Overview + +A dedicated sub-module (`proto-gen/`) has been created to handle downloading and generating protobuf files from the OpenTDF platform. This provides a clean separation of concerns and makes it easy to update protobuf definitions. + +## Structure + +``` +opentdf-python-sdk.rewrite/ +├── proto-gen/ # Protobuf generation sub-module +│ ├── pyproject.toml # Sub-module dependencies +│ ├── README.md # Sub-module documentation +│ ├── proto-files/ # Raw .proto files +│ │ ├── kas.proto # Downloaded from OpenTDF platform +│ │ └── kas_simplified.proto # Simplified version (auto-generated) +│ ├── generated/ # Generated Python files +│ │ ├── __init__.py +│ │ ├── kas_pb2.py # Generated protobuf classes +│ │ └── kas_pb2_grpc.py # Generated gRPC service stubs +│ └── scripts/ +│ ├── generate_proto.py # Python script to generate protobuf files +│ └── build_proto.sh # Shell script wrapper +├── scripts/ +│ └── update-proto.sh # Convenience script to regenerate and sync +├── uv.toml # UV workspace configuration +└── pyproject.toml # Main project (includes proto dependency) +``` + +## What Was Accomplished + +### 1. Downloaded Proto File ✅ +- Downloaded the latest `kas.proto` file from: `https://raw.githubusercontent.com/opentdf/platform/refs/tags/service/v0.8.0/service/kas/kas.proto` +- Stored in `proto-gen/proto-files/kas.proto` + +### 2. Built Usable Library ✅ +- Created a robust protobuf generation system using `uv run python -m grpc_tools.protoc` +- Handles dependency issues gracefully with fallback generation +- Generates both protobuf classes (`kas_pb2.py`) and gRPC service stubs (`kas_pb2_grpc.py`) + +## Key Features + +### Smart Dependency Handling +- Automatically detects and uses `googleapis-common-protos` when available +- Falls back to a simplified proto definition when external dependencies are missing +- Handles import issues gracefully + +### Multiple Ways to Generate +1. **Python script**: `uv run python scripts/generate_proto.py` +2. **Shell script**: `./scripts/build_proto.sh` +3. **Convenience script**: `./scripts/update-proto.sh` (from main project) + +### Workspace Integration +- Uses UV workspace configuration to link the sub-module +- Main project depends on `otdf-python-proto` for the generated files +- Automatic syncing of generated files to the main project's proto directory + +## Usage + +### Regenerate Protobuf Files + +From the main project root: +```bash +./scripts/update-proto.sh +``` + +From the proto-gen sub-module: +```bash +cd proto-gen +uv run python scripts/generate_proto.py +``` + +### Update Proto Definition + +1. Download the latest proto file: +```bash +curl -o proto-gen/proto-files/kas.proto https://raw.githubusercontent.com/opentdf/platform/refs/tags/service/v0.8.0/service/kas/kas.proto +``` + +2. Regenerate the Python files: +```bash +./scripts/update-proto.sh +``` + +## Dependencies + +The otdf-python-proto sub-module includes these dependencies: +- `grpcio>=1.74.0` - gRPC runtime +- `grpcio-tools>=1.74.0` - Protocol buffer compiler +- `protobuf>=6.31.1` - Protocol buffer runtime +- `googleapis-common-protos>=1.66.0` - Google API common proto definitions + +## Final Status ✅ + +The protobuf sub-module has been successfully implemented and tested: + +### ✅ Completed Tasks +1. **Downloaded proto file** from OpenTDF platform (service/v0.8.0) +2. **Built usable library** with `uv run python -m grpc_tools.protoc` +3. **Generated working Python files** (`kas_pb2.py`, `kas_pb2_grpc.py`) +4. 
**Verified imports and functionality** - all message types are accessible +5. **Created automated build scripts** for easy regeneration +6. **Integrated with main project** via file syncing + +### ✅ Verified Working Features +- ✅ Import: `from otdf_python.proto import kas_pb2, kas_pb2_grpc` +- ✅ Message creation: `req = kas_pb2.RewrapRequest()` +- ✅ gRPC service stubs: `kas_pb2_grpc.AccessServiceStub` +- ✅ All core message types: `RewrapRequest`, `RewrapResponse`, `InfoRequest`, etc. + +### Test Results +```bash +Successfully imported protobuf files +Found 35 symbols in kas_pb2 +Found 19 symbols in kas_pb2_grpc +Available message types: +- RewrapRequest: True +- RewrapResponse: True +- AccessService: True +Created request with token: test +``` + +## Generated Files + +The generated Python files include: +- **`kas_pb2.py`** - Protocol buffer message classes +- **`kas_pb2_grpc.py`** - gRPC service client and server classes + +These files are automatically synced to `otdf-python-proto/generated/` and used by the main project in `src/otdf_python/`. + +## Fallback Strategy + +When the original proto file has missing dependencies (like Google API annotations), the system automatically creates a simplified version that includes all the core message types and services but removes problematic imports. This ensures the build always succeeds and provides usable protobuf classes. diff --git a/docs/RELEASES.md b/docs/RELEASES.md new file mode 100644 index 0000000..64143fd --- /dev/null +++ b/docs/RELEASES.md @@ -0,0 +1,205 @@ +# Release Process for OpenTDF Python SDK + +This document describes the automated release process for the OpenTDF Python SDK using Release Please and GitHub Actions. + +## Overview + +The OpenTDF Python SDK uses a **dual-branch release strategy** with automated publishing: + +- **`develop` branch**: Creates alpha prereleases (e.g., `v1.0.0-alpha.1`) → Published to TestPyPI +- **`main` branch**: Creates stable releases (e.g., `v1.0.0`) → Published to PyPI + +This ensures that alpha and stable releases have distinct version numbers and publishing destinations, preventing conflicts between development and production releases. + +## Branch Strategy + +### Develop Branch (Alpha Releases) +- **Purpose**: Development and testing +- **Release Type**: Alpha prereleases (`v1.0.0-alpha.1`, `v1.0.0-alpha.2`, etc.) +- **GitHub Status**: Marked as "pre-release" +- **Publishing Target**: TestPyPI (test.pypi.org) +- **Trigger**: Push to `develop` branch with conventional commits + +### Main Branch (Stable Releases) +- **Purpose**: Production releases +- **Release Type**: Stable releases (`v1.0.0`, `v1.0.1`, etc.) +- **GitHub Status**: Marked as stable release +- **Publishing Target**: PyPI (pypi.org) +- **Trigger**: Push to `main` branch with conventional commits + +## Automated Release Process + +### Prerequisites + +✅ **All tests must pass** before any release: +- Unit tests via GitHub Actions test suite +- Integration tests +- Code quality checks (linting, formatting) + +### For Alpha Releases (Develop Branch) + +1. **Commit with Conventional Commit Messages** to `develop` branch: + ```bash + git checkout develop + git commit -m "feat: add new encryption algorithm support" + git commit -m "fix: resolve TDF decryption issue with large files" + git push origin develop + ``` + +2. 
**Automated Process**: + - Release Please creates a PR with alpha version bump and changelog + - Once PR is merged, GitHub Actions automatically: + - Runs full test suite + - Builds the package + - Creates GitHub release marked as "pre-release" + - Publishes to TestPyPI (if version > 0.3.2) + +**Note**: The develop branch uses separate configuration files (`.release-please-config-develop.json` and `.release-please-manifest-develop.json`) to ensure proper alpha version tracking independent of the main branch. + +### For Stable Releases (Main Branch) + +1. **Merge from develop** (or commit directly): + ```bash + git checkout main + git merge develop + # OR make direct commits with conventional commit messages + git commit -m "feat: stable feature ready for production" + git push origin main + ``` + +2. **Automated Process**: + - Release Please creates a PR with stable version bump and changelog + - Once PR is merged, GitHub Actions automatically: + - Runs full test suite + - Builds the package + - Creates GitHub release marked as stable + - Publishes to PyPI + +## Version Numbering + +### How Version Tracking Works + +Release Please uses manifest files to track the "last released version" for each branch: + +- **`.release-please-manifest.json`**: Tracks the last stable release from main branch +- **`.release-please-manifest-develop.json`**: Tracks the last alpha release from develop branch + +When Release Please runs, it: +1. Reads the manifest to find the last released version +2. Analyzes conventional commits since that version +3. Calculates the next version based on commit types (feat, fix, etc.) +4. For develop branch: Applies alpha suffix due to prerelease configuration + +### Alpha Versions (from develop) +- Format: `vX.Y.Z-alpha.N` (e.g., `v0.3.1-alpha.1`, `v0.3.1-alpha.2`) +- Automatically incremented by Release Please using separate configuration files +- Marked as pre-release on GitHub +- Published to TestPyPI +- Tracked independently from main branch versions + +### Stable Versions (from main) +- Format: `vX.Y.Z` (e.g., `v0.3.1`, `v0.3.2`) +- Follow semantic versioning +- Marked as stable release on GitHub +- Published to PyPI +- Use main branch configuration files + +## Manual Release Triggers + +You can manually trigger releases via GitHub Actions: +- Go to **Actions** → **"Release Please"** → **"Run workflow"** +- Select the appropriate branch (`develop` for alpha, `main` for stable) + +## Conventional Commit Messages + +Release Please determines version bumps based on commit message types: + +- `feat:` → Minor version bump (new features) +- `fix:` → Patch version bump (bug fixes) +- `BREAKING CHANGE:` → Major version bump (breaking changes) +- `docs:`, `chore:`, `style:` → No version bump + +Examples: +```bash +git commit -m "feat: add support for new TDF format" # Minor bump +git commit -m "fix: resolve memory leak in encryption" # Patch bump +git commit -m "feat!: redesign SDK API (BREAKING CHANGE)" # Major bump +``` + +## Testing Process + +### Testing Alpha Releases +```bash +# Install from TestPyPI (alpha versions use the format X.Y.Z-alphaX) +pip install --index-url https://test.pypi.org/simple/ otdf-python==0.3.1a1 + +# Test functionality +python -c "import otdf_python; print('Alpha version works!')" +``` + +### Testing Stable Releases +```bash +# Install from PyPI +pip install otdf-python==0.3.1 + +# Test functionality +python -c "import otdf_python; print('Stable version works!')" +``` + +## Multi-Package Releases + +This repository manages two packages: +- 
`otdf-python` (main SDK) +- `otdf-python-proto` (protobuf submodule) + +Release Please automatically updates version references in both packages using the `extra-files` configuration. + +## Troubleshooting + +### No Release Created +- Verify commits use conventional commit format +- Check that tests pass in GitHub Actions +- Ensure commits were pushed to the correct branch + +### Failed Publishing +- Check GitHub Actions logs for detailed error messages +- Verify PyPI trusted publisher configuration +- Ensure version doesn't already exist on the target repository + +### Release Please Configuration Errors +- **Error: "Missing required manifest versions"**: Ensure both `.release-please-config-develop.json` and `.release-please-manifest-develop.json` are committed to the repository +- **Dynamic file creation errors**: The develop-specific configuration files must exist in the repository, not generated at runtime +- **Wrong branch configuration**: Verify the workflow uses the correct config and manifest files for each branch + +### Version Conflicts +- Alpha and stable releases use separate configuration and manifest files to prevent conflicts +- Develop branch uses `.release-please-config-develop.json` and `.release-please-manifest-develop.json` +- Main branch uses `.release-please-config.json` and `.release-please-manifest.json` +- If conflicts occur, check the appropriate Release Please configuration files for the target branch + +## Emergency Procedures + +### Hotfix for Stable Release +```bash +# Create hotfix directly on main +git checkout main +git commit -m "fix: critical security vulnerability" +git push origin main +# Release Please will create a patch release +``` + +## Configuration Files + +- `.release-please-config.json`: Main branch release configuration (stable releases) +- `.release-please-manifest.json`: Main branch version tracking +- `.release-please-config-develop.json`: Develop branch release configuration (alpha releases) +- `.release-please-manifest-develop.json`: Develop branch version tracking +- `.github/workflows/release-please.yaml`: GitHub Actions workflow + +## Support + +For release issues: +1. Check GitHub Actions logs in the "Release Please" workflow +2. Review the Release Please documentation +3. Create a GitHub issue with workflow logs +4. 
Contact repository maintainers diff --git a/go.mod b/go.mod deleted file mode 100644 index f324fa9..0000000 --- a/go.mod +++ /dev/null @@ -1,38 +0,0 @@ -module gotdf_python - -go 1.24.7 - -require github.com/opentdf/platform/sdk v0.7.0 - -require ( - buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.8-20250717185734-6c6e0d3c608e.1 // indirect - connectrpc.com/connect v1.18.1 // indirect - github.com/Masterminds/semver/v3 v3.4.0 // indirect - github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect - github.com/goccy/go-json v0.10.5 // indirect - github.com/google/uuid v1.6.0 // indirect - github.com/gowebpki/jcs v1.0.1 // indirect - github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect - github.com/lestrrat-go/blackmagic v1.0.4 // indirect - github.com/lestrrat-go/httpcc v1.0.1 // indirect - github.com/lestrrat-go/httprc v1.0.6 // indirect - github.com/lestrrat-go/iter v1.0.2 // indirect - github.com/lestrrat-go/jwx/v2 v2.1.6 // indirect - github.com/lestrrat-go/option v1.0.1 // indirect - github.com/opentdf/platform/lib/ocrypto v0.5.0 // indirect - github.com/opentdf/platform/protocol/go v0.8.0 // indirect - github.com/segmentio/asm v1.2.0 // indirect - github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect - github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect - github.com/xeipuuv/gojsonschema v1.2.0 // indirect - golang.org/x/crypto v0.41.0 // indirect - golang.org/x/net v0.43.0 // indirect - golang.org/x/oauth2 v0.30.0 // indirect - golang.org/x/sys v0.35.0 // indirect - golang.org/x/text v0.28.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250826171959-ef028d996bc1 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1 // indirect - google.golang.org/grpc v1.75.0 // indirect - google.golang.org/protobuf v1.36.8 // indirect -) diff --git a/go.sum b/go.sum deleted file mode 100644 index e28a562..0000000 --- a/go.sum +++ /dev/null @@ -1,259 +0,0 @@ -buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.6-20250425153114-8976f5be98c1.1 h1:YhMSc48s25kr7kv31Z8vf7sPUIq5YJva9z1mn/hAt0M= -buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.6-20250425153114-8976f5be98c1.1/go.mod h1:avRlCjnFzl98VPaeCtJ24RrV/wwHFzB8sWXhj26+n/U= -buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.6-20250717185734-6c6e0d3c608e.1 h1:Lg6klmCi3v7VvpqeeLEER9/m5S8y9e9DjhqQnSCNy4k= -buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.6-20250717185734-6c6e0d3c608e.1/go.mod h1:avRlCjnFzl98VPaeCtJ24RrV/wwHFzB8sWXhj26+n/U= -buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.8-20250717185734-6c6e0d3c608e.1 h1:sjY1k5uszbIZfv11HO2keV4SLhNA47SabPO886v7Rvo= -buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.8-20250717185734-6c6e0d3c608e.1/go.mod h1:8EQ5GzyGJQ5tEIwMSxCl8RKJYsjCpAwkdcENoioXT6g= -connectrpc.com/connect v1.18.1 h1:PAg7CjSAGvscaf6YZKUefjoih5Z/qYkyaTrBW8xvYPw= -connectrpc.com/connect v1.18.1/go.mod h1:0292hj1rnx8oFrStN7cB4jjVBeqs+Yx5yDIC2prWDO8= -dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= -dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= -github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= -github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= 
-github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= -github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4= -github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= -github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= -github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= -github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= -github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= -github.com/Nerzal/gocloak/v13 v13.9.0 h1:YWsJsdM5b0yhM2Ba3MLydiOlujkBry4TtdzfIzSVZhw= -github.com/Nerzal/gocloak/v13 v13.9.0/go.mod h1:YYuDcXZ7K2zKECyVP7pPqjKxx2AzYSpKDj8d6GuyM10= -github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= -github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= -github.com/containerd/containerd v1.7.27 h1:yFyEyojddO3MIGVER2xJLWoCIn+Up4GaHFquP7hsFII= -github.com/containerd/containerd v1.7.27/go.mod h1:xZmPnl75Vc+BLGt4MIfu6bp+fy03gdHAn9bz+FreFR0= -github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= -github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= -github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A= -github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw= -github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= -github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= -github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40= -github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= -github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= -github.com/docker/docker v27.1.1+incompatible h1:hO/M4MtV36kzKldqnA37IWhebRA+LnqqcqDja6kVaKY= -github.com/docker/docker v27.1.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= -github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= -github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= -github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= -github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= -github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-ole/go-ole 
v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= -github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-resty/resty/v2 v2.12.0 h1:rsVL8P90LFvkUYq/V5BTVe203WfRIU4gvcf+yfzJzGA= -github.com/go-resty/resty/v2 v2.12.0/go.mod h1:o0yGPrkS3lOe1+eFajk6kBW8ScXzwU3hD69/gt2yB/0= -github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= -github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= -github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= -github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= -github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= -github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/gowebpki/jcs v1.0.1 h1:Qjzg8EOkrOTuWP7DqQ1FbYtcpEbeTzUoTN9bptp8FOU= -github.com/gowebpki/jcs v1.0.1/go.mod h1:CID1cNZ+sHp1CCpAR8mPf6QRtagFBgPJE0FCUQ6+BrI= -github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.1 h1:KcFzXwzM/kGhIRHvc8jdixfIJjVzuUJdnv+5xsPutog= -github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.1/go.mod h1:qOchhhIlmRcqk/O9uCo/puJlyo07YINaIqdZfZG3Jkc= -github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 h1:sGm2vDRFUrQJO/Veii4h4zG2vvqG6uWNkBHSTqXOZk0= -github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2/go.mod h1:wd1YpapPLivG6nQgbf7ZkG1hhSOXDhhn4MLTknx2aAc= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 h1:X5VWvz21y3gzm9Nw/kaUeku/1+uBhcekkmy4IkffJww= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1/go.mod h1:Zanoh4+gvIgluNqcfMVTJueD4wSS5hT7zTt4Mrutd90= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs= -github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= -github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= -github.com/lestrrat-go/blackmagic v1.0.3 h1:94HXkVLxkZO9vJI/w2u1T0DAoprShFd13xtnSINtDWs= -github.com/lestrrat-go/blackmagic v1.0.3/go.mod h1:6AWFyKNNj0zEXQYfTMPfZrAXUWUfTIZ5ECEUEJaijtw= -github.com/lestrrat-go/blackmagic v1.0.4 h1:IwQibdnf8l2KoO+qC3uT4OaTWsW7tuRQXy9TRN9QanA= -github.com/lestrrat-go/blackmagic v1.0.4/go.mod h1:6AWFyKNNj0zEXQYfTMPfZrAXUWUfTIZ5ECEUEJaijtw= -github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE= -github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E= -github.com/lestrrat-go/httprc v1.0.6 h1:qgmgIRhpvBqexMJjA/PmwSvhNk679oqD1RbovdCGW8k= -github.com/lestrrat-go/httprc v1.0.6/go.mod h1:mwwz3JMTPBjHUkkDv/IGJ39aALInZLrhBp0X7KGUZlo= -github.com/lestrrat-go/iter v1.0.2 h1:gMXo1q4c2pHmC3dn8LzRhJfP1ceCbgSiT9lUydIzltI= 
-github.com/lestrrat-go/iter v1.0.2/go.mod h1:Momfcq3AnRlRjI5b5O8/G5/BvpzrhoFTZcn06fEOPt4= -github.com/lestrrat-go/jwx/v2 v2.1.6 h1:hxM1gfDILk/l5ylers6BX/Eq1m/pnxe9NBwW6lVfecA= -github.com/lestrrat-go/jwx/v2 v2.1.6/go.mod h1:Y722kU5r/8mV7fYDifjug0r8FK8mZdw0K0GpJw/l8pU= -github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU= -github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= -github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= -github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 h1:PpXWgLPs+Fqr325bN2FD2ISlRRztXibcX6e8f5FR5Dc= -github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= -github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= -github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= -github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= -github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= -github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= -github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= -github.com/moby/sys/user v0.3.0 h1:9ni5DlcW5an3SvRSx4MouotOygvzaXbaSrc/wGDFWPo= -github.com/moby/sys/user v0.3.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= -github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g= -github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= -github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= -github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= -github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= -github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= -github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= -github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= -github.com/opentdf/platform/lib/fixtures v0.2.10 h1:R688b98ctsEiDRlQSvLxmAWT7bXvCTb+nJCuiU2WsWs= -github.com/opentdf/platform/lib/fixtures v0.2.10/go.mod h1:wGhclxDeDXf8bp5VAWztT1nY2gWVNGQLd8rWs5wtXV0= -github.com/opentdf/platform/lib/ocrypto v0.1.9 h1:GvgPB7CoK7JmWvsSvJ0hc+RC0wezgcuRpy3q2oYKjdA= -github.com/opentdf/platform/lib/ocrypto v0.1.9/go.mod h1:UTtqh8mvhAYA+sEnaMxpr/406e84L5Q1sAxtKGIXfu4= -github.com/opentdf/platform/lib/ocrypto v0.3.0 h1:/nHlIj6kqZ9XT9M45vAbzoMV8USeCj7GRuhFR6JH+RA= -github.com/opentdf/platform/lib/ocrypto v0.3.0/go.mod h1:VuVHTye/smLiRZ5Ls4sZ14R+PtN9Egwj8D1Hv5X9iP0= -github.com/opentdf/platform/lib/ocrypto v0.5.0 h1:zG6ZsQ/6e9wP9dKiVNJE4VZTkjF6EeerU28sMxKoSlQ= -github.com/opentdf/platform/lib/ocrypto v0.5.0/go.mod h1:sYhoBL1bQYgQVSSNpxU13RsrE5JAk8BABT1hfr9L3j8= -github.com/opentdf/platform/protocol/go v0.3.2 h1:WugeSl7RSRM7e7c5jJumZOIW2jr+sMqwDzpGUGyeC5k= -github.com/opentdf/platform/protocol/go v0.3.2/go.mod 
h1:nErYkgt32GW22CNqSyLO+JE49C3JndI1TsVdF+CUYd4= -github.com/opentdf/platform/protocol/go v0.6.2 h1:seLTEP4xBRF2BG1vbuWzQqNo58g3wtkzCV+Z4ExRXnM= -github.com/opentdf/platform/protocol/go v0.6.2/go.mod h1:FwoNd0HJaxGCZf74de/yFpVP4HEjkUMoF6Br79W0TBk= -github.com/opentdf/platform/protocol/go v0.8.0 h1:fIskNUGX8jXtPG8c/DuGo4FoCH6OOShGPZFu63DkMK8= -github.com/opentdf/platform/protocol/go v0.8.0/go.mod h1:GRycoDGDxaz91sOvGZFWVEKJLluZFg2wM3NJmhucDHo= -github.com/opentdf/platform/sdk v0.4.4 h1:jBJPXZBOodmanla9aS1aaPQgcg7zqOEbBTLF0c0BULM= -github.com/opentdf/platform/sdk v0.4.4/go.mod h1:xPjymAKCbFzo+z+PvFVa10NOT+9i5ljxmJaGJ9tkPrw= -github.com/opentdf/platform/sdk v0.6.1 h1:eDrLjcjl069Yy2kiZgtkl6j0Z/lcOdI17NxcCFmpRMA= -github.com/opentdf/platform/sdk v0.6.1/go.mod h1:hGQ+RLkiytMZkxhjtab2plIV71fMzmLiN0QFPqeBgTg= -github.com/opentdf/platform/sdk v0.7.0 h1:8hczDycXGY1ucdIXSrP17oW/Eyu3vsb4LEX4hc7tvVY= -github.com/opentdf/platform/sdk v0.7.0/go.mod h1:CTJR1NXeYe896M1/VN0h+1Ff54SdBtxv4z18BGTi8yk= -github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= -github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= -github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= -github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= -github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys= -github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs= -github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c= -github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE= -github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= -github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= -github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= -github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/testcontainers/testcontainers-go v0.34.0 h1:5fbgF0vIN5u+nD3IWabQwRybuB4GY8G2HHgCkbMzMHo= 
-github.com/testcontainers/testcontainers-go v0.34.0/go.mod h1:6P/kMkQe8yqPHfPWNulFGdFHTD8HB2vLq/231xY2iPQ= -github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= -github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= -github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= -github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= -github.com/tklauser/numcpus v0.10.0 h1:18njr6LDBk1zuna922MgdjQuJFjrdppsZG60sHGfjso= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= -github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= -github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= -github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= -github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= -go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= -go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 h1:4K4tsIXefpVJtvA/8srF4V4y0akAoPHkIslgAkjixJA= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0/go.mod h1:jjdQuTGVsXV4vSs+CJ2qYDeDPf9yIJV23qlIzBm73Vg= -go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY= -go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI= -go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ= -go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE= -go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A= -go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU= -go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk= -go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w= -go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k= -go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE= -golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= -golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= -golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= -golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= -golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= -golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= -golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= -golang.org/x/net v0.39.0 
h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= -golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= -golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= -golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= -golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= -golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= -golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= -golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98= -golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= -golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= -golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= -golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= -golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= -golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= -golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= -golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= -golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= -golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= -golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= -golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= -golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= -golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= -google.golang.org/genproto/googleapis/api v0.0.0-20250428153025-10db94c68c34 h1:0PeQib/pH3nB/5pEmFeVQJotzGohV0dq4Vcp09H5yhE= -google.golang.org/genproto/googleapis/api v0.0.0-20250428153025-10db94c68c34/go.mod h1:0awUlEkap+Pb1UMeJwJQQAdJQrt3moU7J2moTy69irI= -google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 h1:vPV0tzlsK6EzEDHNNH5sa7Hs9bd7iXR7B1tSiPepkV0= -google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:pKLAc5OolXC3ViWGI62vvC0n10CpwAtRcTNCFwTKBEw= -google.golang.org/genproto/googleapis/api v0.0.0-20250728155136-f173205681a0 h1:0UOBWO4dC+e51ui0NFKSPbkHHiQ4TmrEfEZMLDyRmY8= -google.golang.org/genproto/googleapis/api v0.0.0-20250728155136-f173205681a0/go.mod h1:8ytArBbtOy2xfht+y2fqKd5DRDJRUQhqbyEnQ4bDChs= -google.golang.org/genproto/googleapis/api v0.0.0-20250826171959-ef028d996bc1 h1:APHvLLYBhtZvsbnpkfknDZ7NyH4z5+ub/I0u8L3Oz6g= -google.golang.org/genproto/googleapis/api v0.0.0-20250826171959-ef028d996bc1/go.mod h1:xUjFWUnWDpZ/C0Gu0qloASKFb6f8/QXiiXhSPFsD668= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250428153025-10db94c68c34 h1:h6p3mQqrmT1XkHVTfzLdNz1u7IhINeZkz67/xTbOuWs= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250428153025-10db94c68c34/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2 
h1:IqsN8hx+lWLqlN+Sc3DoMy/watjofWiU8sRFgQ8fhKM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1 h1:pmJpJEvT846VzausCQ5d7KreSROcDqmO388w5YbnltA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1/go.mod h1:GmFNa4BdJZ2a8G+wCe9Bg3wwThLrJun751XstdJt5Og= -google.golang.org/grpc v1.72.0 h1:S7UkcVa60b5AAQTaO6ZKamFp1zMZSU0fGDK2WZLbBnM= -google.golang.org/grpc v1.72.0/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= -google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= -google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= -google.golang.org/grpc v1.75.0 h1:+TW+dqTd2Biwe6KKfhE5JpiYIBWq865PhKGSXiivqt4= -google.golang.org/grpc v1.75.0/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ= -google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= -google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= -google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc= -google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/main.go b/main.go deleted file mode 100644 index 8e96a3b..0000000 --- a/main.go +++ /dev/null @@ -1,843 +0,0 @@ -package gotdf_python - -/* -All public (upper-case) functions here should be available to Python. -* E.g. imported & tested via 'validate_otdf_python.py' - -TODO: Consider testing against attributes that are returned by some listing. 
-* See: https://github.com/orgs/opentdf/discussions/947 - -TODO: Consider exposing an sdkClient that can be returned to the caller -* Note, previously this failed in a 'gopy' compiled context - -*/ -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "log" - "os" - "path" - "path/filepath" - "strings" - "sync" - - "github.com/opentdf/platform/sdk" -) - -type TokenAuth struct { - AccessToken string - NpeClientId string -} - -type OpentdfConfig struct { - ClientId string - ClientSecret string - PlatformEndpoint string - TokenEndpoint string - KasUrl string - InsecureSkipVerify bool -} - -func getEnv(key, defaultValue string) string { - if value, ok := os.LookupEnv(key); ok { - return value - } - return defaultValue -} - -func newSdkClient(config OpentdfConfig, authScopes []string) (*sdk.SDK, error) { - // NOTE: The 'platformEndpoint' is sometimes referenced as 'host' - if strings.Count(config.TokenEndpoint, "http://") == 1 { - return sdk.New(config.PlatformEndpoint, - sdk.WithClientCredentials(config.ClientId, config.ClientSecret, authScopes), - sdk.WithTokenEndpoint(config.TokenEndpoint), - sdk.WithInsecurePlaintextConn(), - ) - } else if strings.Count(config.TokenEndpoint, "https://") == 1 { - opts := []sdk.Option{ - sdk.WithClientCredentials(config.ClientId, config.ClientSecret, authScopes), - sdk.WithTokenEndpoint(config.TokenEndpoint), - } - - if config.InsecureSkipVerify { - opts = append(opts, sdk.WithInsecureSkipVerifyConn()) - } - - return sdk.New(config.PlatformEndpoint, opts...) - } else { - return nil, errors.New("invalid TokenEndpoint given") - } -} - -/* -NOTE: When the environment variable 'INSECURE_SKIP_VERIFY' is set to 'TRUE', -this option for the OpenTDF SDK will be set. -*/ -func peSdkClient(config OpentdfConfig, authScopes []string, token TokenAuth) (*sdk.SDK, error) { - // NOTE: The 'platformEndpoint' is sometimes referenced as 'host' - if strings.Count(config.TokenEndpoint, "http://") == 1 { - return sdk.New(config.PlatformEndpoint, - sdk.WithClientCredentials(config.ClientId, config.ClientSecret, authScopes), - sdk.WithTokenEndpoint(config.TokenEndpoint), - sdk.WithTokenExchange(token.AccessToken, []string{token.NpeClientId}), - sdk.WithInsecurePlaintextConn(), - ) - } else if strings.Count(config.TokenEndpoint, "https://") == 1 { - opts := []sdk.Option{ - sdk.WithClientCredentials(config.ClientId, config.ClientSecret, authScopes), - sdk.WithTokenEndpoint(config.TokenEndpoint), - sdk.WithTokenExchange(token.AccessToken, []string{token.NpeClientId}), - } - - if config.InsecureSkipVerify { - opts = append(opts, sdk.WithInsecureSkipVerifyConn()) - } - - return sdk.New(config.PlatformEndpoint, opts...) 
- } else { - return nil, errors.New("invalid TokenEndpoint given") - } -} - -func EncryptString(inputText string, config OpentdfConfig, dataAttributes []string, authScopes []string) (string, error) { - strReader := strings.NewReader(inputText) - sdkClient, err := newSdkClient(config, authScopes) - - if err != nil { - return "", err - } - - tdfFile, err := os.Create("sensitive.txt.tdf") - if err != nil { - return "", err - } - defer tdfFile.Close() - - if strings.Count(config.KasUrl, "http") != 1 { - return "", errors.New("invalid KAS Url, should contain single protocol") - } - - tdf, err := sdkClient.CreateTDF( - tdfFile, - strReader, - // sdk.WithDataAttributes("https://example.com/attributes/1", "https://example.com/attributes/2"), - // sdk.WithDataAttributes("https://example.com/attr/attr1/value/value1"), - sdk.WithDataAttributes(dataAttributes...), - sdk.WithKasInformation( - sdk.KASInfo{ - // examples assume insecure http - URL: config.KasUrl, - PublicKey: "", - }), - ) - - if err != nil { - return "", err - } - - manifestJSON, err := json.MarshalIndent(tdf.Manifest(), "", " ") - if err != nil { - return "", err - } - - // IF DEBUG: ... Print Manifest - // fmt.Println(string(manifestJSON)) - return string(manifestJSON), nil -} - -/* -Encrypts a string as a PE (Person Entity), returning a TDF manifest and the cipher text. -*/ -func EncryptStringPE(inputText string, config OpentdfConfig, token TokenAuth, dataAttributes []string, authScopes []string) (string, string, error) { - sdkClient, err := peSdkClient(config, authScopes, token) - - if err != nil { - return "", "", err - } - - // tdfFile, err := os.Create("sensitive.txt.tdf") - // if err != nil { - // return "", err - // } - // defer tdfFile.Close() - - if strings.Count(config.KasUrl, "http") != 1 { - return "", "", errors.New("invalid KAS Url, should contain single protocol") - } - - plaintext := strings.NewReader(inputText) - ciphertext := new(bytes.Buffer) - - tdf, err := sdkClient.CreateTDF( - // tdfFile, - ciphertext, - plaintext, - sdk.WithDataAttributes(dataAttributes...), - sdk.WithKasInformation( - sdk.KASInfo{ - // examples assume insecure http - URL: config.KasUrl, - PublicKey: "", - }), - ) - - if err != nil { - return "", "", err - } - - manifestJSON, err := json.MarshalIndent(tdf.Manifest(), "", " ") - if err != nil { - return "", "", err - } - - // Print Manifest (maybe useful in debugging) - // fmt.Println(string(manifestJSON)) - return string(manifestJSON), ciphertext.String(), nil -} - -func DecryptStringPE(inputText string, config OpentdfConfig, token TokenAuth, authScopes []string) (string, error) { - decrypted, err := decryptBytesPE([]byte(inputText), authScopes, config, token) - if err != nil { - return "", err - } - - return decrypted.String(), nil -} - -func readBytesFromFile(filePath string) ([]byte, error) { - if filePath == "" { - return nil, errors.New("invalid input file path given") - } - fileToEncrypt, err := os.Open(filePath) - if err != nil { - return nil, fmt.Errorf("failed to open file at path: %s", filePath) - } - defer fileToEncrypt.Close() - - bytes, err := io.ReadAll(fileToEncrypt) - if err != nil { - return nil, fmt.Errorf("failed to read bytes from file at path: %s", filePath) - } - return bytes, err -} - -/* -The encryptBytesNPE function below is based on the 'EncryptBytes()' function -provided by otdfctl. - -NOTE: the original 'EncryptBytes()' function has a parameter named -'scopes', we've changed that variable name to 'authScopes' for more -clarity. 
- -One noticeable difference is that rather than having state kept -in the CLI, we provide our own input parameter OpentdfConfig. - -See: - - https://github.com/opentdf/otdfctl/blob/46cfca1ba32c57f7264c320db27394c00412ca49/pkg/handlers/tdf.go#L10-L27 -*/ -func encryptBytesNPE(b []byte, authScopes []string, config OpentdfConfig, dataAttributes []string) (*bytes.Buffer, error) { - sdkClient, err := newSdkClient(config, authScopes) - - if err != nil { - return nil, err - } - - var encrypted []byte - enc := bytes.NewBuffer(encrypted) - - // TODO: validate values are FQNs or return an error [https://github.com/opentdf/platform/issues/515] - _, err = sdkClient.CreateTDF(enc, bytes.NewReader(b), - sdk.WithDataAttributes(dataAttributes...), - sdk.WithKasInformation(sdk.KASInfo{ - URL: config.KasUrl, - PublicKey: "", - }, - ), - ) - if err != nil { - return nil, err - } - return enc, nil -} - -func encryptBytesPE(b []byte, authScopes []string, config OpentdfConfig, token TokenAuth, dataAttributes []string) (*bytes.Buffer, error) { - sdkClient, err := peSdkClient(config, authScopes, token) - - if err != nil { - return nil, err - } - - var encrypted []byte - enc := bytes.NewBuffer(encrypted) - - // TODO: validate values are FQNs or return an error [https://github.com/opentdf/platform/issues/515] - _, err = sdkClient.CreateTDF(enc, bytes.NewReader(b), - sdk.WithDataAttributes(dataAttributes...), - sdk.WithKasInformation(sdk.KASInfo{ - URL: config.KasUrl, - PublicKey: "", - }, - ), - ) - if err != nil { - return nil, err - } - return enc, nil -} - -func EncryptFile(inputFilePath string, outputFilePath string, config OpentdfConfig, dataAttributes []string, authScopes []string) (string, error) { - if outputFilePath == "" { - return "", errors.New("invalid output file path given") - } - - bytes, err := readBytesFromFile(inputFilePath) - - if err != nil { - return "", err - } - - // If necessary, bytes can be printed for debugging - // fmt.Print(bytes) - - // Do the encryption - encrypted, err := encryptBytesNPE(bytes, authScopes, config, dataAttributes) - if err != nil { - return "", fmt.Errorf("failed to encrypt: %w", err) - } - - // Find the destination as the output flag filename or stdout - var dest *os.File - - // make sure output ends in .tdf extension - if !strings.HasSuffix(outputFilePath, ".tdf") { - outputFilePath += ".tdf" - } - tdfFile, err := os.Create(outputFilePath) - if err != nil { - return "", fmt.Errorf("failed to write encrypted file %s", outputFilePath) - } - defer tdfFile.Close() - dest = tdfFile - - _, e := io.Copy(dest, encrypted) - if e != nil { - return "", errors.New("failed to write encrypted data to destination") - } - - return outputFilePath, nil -} - -/* - EncryptFilesInDirNPE encrypts all files in the specified directory - -Work is performed as an NPE (Non-Person Entity). Encrypted files are placed -in the same directory as the input files, with a .tdf extension added to the file name. 
-*/ -func EncryptFilesInDirNPE(dirPath string, config OpentdfConfig, dataAttributes []string, authScopes []string) ([]string, error) { - files, err := os.ReadDir(dirPath) - if err != nil { - return nil, err - } - errChan := make(chan error, len(files)) - - var outputPaths []string - var mu sync.Mutex - var wg sync.WaitGroup - - for _, file := range files { - if !file.IsDir() { - wg.Add(1) - go func(file os.DirEntry) { - defer wg.Done() - sdkClient, err := newSdkClient(config, authScopes) - if err != nil { - errChan <- fmt.Errorf("failed to create SDK client: %v", err) - return - } - inputFilePath := path.Join(dirPath, file.Name()) - outputFilePath := inputFilePath + ".tdf" - got, err := encryptFileWithClient(inputFilePath, outputFilePath, sdkClient, config, dataAttributes) - if err != nil { - errChan <- fmt.Errorf("failed to encrypt file %s: %v", inputFilePath, err) - return - } - mu.Lock() - outputPaths = append(outputPaths, got) - mu.Unlock() - }(file) - } - } - - wg.Wait() - close(errChan) - - var errors []error - for err := range errChan { - errors = append(errors, err) - } - - logOutputPaths(outputPaths, errors) - - if len(errors) > 0 { - return outputPaths, fmt.Errorf("encountered errors during encryption: %v", errors) - } - return outputPaths, nil -} - -/* - EncryptFilesWithExtensionsNPE encrypts all files in 'dirPath' with given file 'extensions'. - -Work is performed as an NPE (Non-Person Entity). Encrypted files are placed -in the same directory as the input files, with a .tdf extension added to the file name. -*/ -func EncryptFilesWithExtensionsNPE(dirPath string, extensions []string, config OpentdfConfig, dataAttributes []string, authScopes []string) ([]string, error) { - sdkClient, err := newSdkClient(config, authScopes) - if err != nil { - return nil, err - } - - files, err := findFiles(dirPath, extensions) - if err != nil { - return nil, err - } - - var outputPaths = make([]string, 0, len(files)) - var errors = make([]error, 0, len(files)) - for _, file := range files { - inputFilePath := file - outputFilePath := inputFilePath + ".tdf" - got, err := encryptFileWithClient(inputFilePath, outputFilePath, sdkClient, config, dataAttributes) - if err != nil { - errors = append(errors, fmt.Errorf("failed to encrypt file %s: %v", inputFilePath, err)) - continue - } - outputPaths = append(outputPaths, got) - } - - logOutputPaths(outputPaths, errors) - - if len(errors) > 0 { - return outputPaths, fmt.Errorf("encountered errors during encryption: %v", errors) - } - return outputPaths, nil -} - -/* -Encrypts a file as a PE (Person Entity), returning a TDF manifest and the cipher text. 
-*/ -func EncryptFilePE(inputFilePath string, outputFilePath string, config OpentdfConfig, token TokenAuth, dataAttributes []string, authScopes []string) (string, error) { - if outputFilePath == "" { - return "", errors.New("invalid output file path given") - } - - bytes, err := readBytesFromFile(inputFilePath) - - if err != nil { - return "", err - } - - // If necessary, bytes can be printed for debugging - // fmt.Print(bytes) - - // Do the encryption - encrypted, err := encryptBytesPE(bytes, authScopes, config, token, dataAttributes) - if err != nil { - return "", fmt.Errorf("failed to encrypt: %w", err) - } - - // Find the destination as the output flag filename or stdout - var dest *os.File - - // make sure output ends in .tdf extension - if !strings.HasSuffix(outputFilePath, ".tdf") { - return "", fmt.Errorf("output file path '%s' should have .tdf extension", outputFilePath) - } - tdfFile, err := os.Create(outputFilePath) - if err != nil { - return "", fmt.Errorf("failed to write encrypted file %s", outputFilePath) - } - defer tdfFile.Close() - dest = tdfFile - - _, e := io.Copy(dest, encrypted) - if e != nil { - return "", errors.New("failed to write encrypted data to destination") - } - - return outputFilePath, nil -} - -/* -A non-Public decrypt function, based on: -- https://github.com/opentdf/otdfctl/blob/46cfca1ba32c57f7264c320db27394c00412ca49/pkg/handlers/tdf.go#L29-L41 -*/ -func decryptBytes(toDecrypt []byte, authScopes []string, config OpentdfConfig) (*bytes.Buffer, error) { - sdkClient, err := newSdkClient(config, authScopes) - - if err != nil { - return nil, err - } - - tdfreader, err := sdkClient.LoadTDF(bytes.NewReader(toDecrypt)) - if err != nil { - return nil, err - } - - buf := new(bytes.Buffer) - _, err = io.Copy(buf, tdfreader) - if err != nil && err != io.EOF { - return nil, err - } - return buf, nil -} - -func decryptBytesPE(toDecrypt []byte, authScopes []string, config OpentdfConfig, token TokenAuth) (*bytes.Buffer, error) { - - sdkClient, err := peSdkClient(config, authScopes, token) - - if err != nil { - return nil, err - } - - reader, err := sdkClient.LoadTDF(bytes.NewReader(toDecrypt)) - if err != nil { - return nil, err - } - - buf := new(bytes.Buffer) - _, err = io.Copy(buf, reader) - if err != nil && err != io.EOF { - return nil, err - } - return buf, nil -} - -func DecryptFile(inputFilePath string, outputFilePath string, config OpentdfConfig) (string, error) { - bytes, err := readBytesFromFile(inputFilePath) - if err != nil { - return "", err - } - - decrypted, err := decryptBytes(bytes, nil, config) - if err != nil { - return "", err - } - - tdfFile, err := os.Create(outputFilePath) - if err != nil { - return "", fmt.Errorf("failed to write decrypted file %s", outputFilePath) - } - defer tdfFile.Close() - - _, e := io.Copy(tdfFile, decrypted) - if e != nil { - return "", errors.New("failed to write decrypted data to destination") - } - - return outputFilePath, nil -} - -/* -DecryptFilesInDirNPE decrypts all files in the specified directory -Work is performed as an NPE (Non-Person Entity). Decrypted files are placed -in the same directory as the input files, with the .tdf extension removed from the file name. 
-*/ -func DecryptFilesInDirNPE(dirPath string, config OpentdfConfig, authScopes []string) ([]string, error) { - files, err := os.ReadDir(dirPath) - if err != nil { - return nil, err - } - - var wg sync.WaitGroup - outputPathsChan := make(chan string, len(files)) - errChan := make(chan error, len(files)) - - for _, file := range files { - if !file.IsDir() && strings.HasSuffix(file.Name(), ".tdf") { - wg.Add(1) - go func(file os.DirEntry) { - defer wg.Done() - sdkClient, err := newSdkClient(config, authScopes) - if err != nil { - errChan <- fmt.Errorf("failed to create SDK client: %v", err) - return - } - - fileInfo, err := file.Info() - if err != nil { - errChan <- fmt.Errorf("failed to get file info for %s: %v", file.Name(), err) - return - } - inputFilePath := path.Join(dirPath, fileInfo.Name()) - outputFilePath := strings.TrimSuffix(inputFilePath, ".tdf") - - bytes, err := readBytesFromFile(inputFilePath) - if err != nil { - errChan <- fmt.Errorf("failed to read file %s: %v", inputFilePath, err) - return - } - - decrypted, err := decryptBytesWithClient(bytes, sdkClient) - if err != nil { - errChan <- fmt.Errorf("failed to decrypt file %s: %v", inputFilePath, err) - return - } - - tdfFile, err := os.Create(outputFilePath) - if err != nil { - errChan <- fmt.Errorf("failed to write decrypted file %s: %v", outputFilePath, err) - return - } - defer tdfFile.Close() - - _, e := io.Copy(tdfFile, decrypted) - if e != nil { - errChan <- fmt.Errorf("failed to write decrypted data to destination %s: %v", outputFilePath, err) - return - } - - outputPathsChan <- outputFilePath - }(file) - } - } - - wg.Wait() - close(outputPathsChan) - close(errChan) - - var outputPaths []string - for path := range outputPathsChan { - outputPaths = append(outputPaths, path) - } - - var errors []error - for err := range errChan { - errors = append(errors, err) - } - - logOutputPaths(outputPaths, errors) - - if len(errors) > 0 { - return nil, fmt.Errorf("encountered errors during decryption: %v", errors) - } - - return outputPaths, nil -} - -/* -DecryptFilesWithExtensionsNPE decrypts all files matching the file 'extensions' in 'dirPath'. -Work is performed as an NPE (Non-Person Entity). Decrypted files are placed -in the same directory as the input files, with the .tdf extension removed from the file name. 
-*/ -func DecryptFilesWithExtensionsNPE(dirPath string, extensions []string, config OpentdfConfig, authScopes []string) ([]string, error) { - files, err := os.ReadDir(dirPath) - if err != nil { - return nil, err - } - - outputPathsChan := make(chan string, len(files)) - errChan := make(chan error, len(files)) - - var wg sync.WaitGroup - - for _, file := range files { - if !file.IsDir() { - for _, ext := range extensions { - if strings.HasSuffix(file.Name(), ext) { - wg.Add(1) - go func(file os.DirEntry, ext string) { - defer wg.Done() - sdkClient, err := newSdkClient(config, authScopes) - if err != nil { - errChan <- fmt.Errorf("failed to create SDK client: %v", err) - return - } - - inputFilePath := filepath.Join(dirPath, file.Name()) - outputFilePath := strings.TrimSuffix(inputFilePath, ext) - - bytes, err := readBytesFromFile(inputFilePath) - if err != nil { - errChan <- fmt.Errorf("failed to read file %s: %v", inputFilePath, err) - return - } - - decrypted, err := decryptBytesWithClient(bytes, sdkClient) - if err != nil { - errChan <- fmt.Errorf("failed to decrypt file %s: %v", inputFilePath, err) - return - } - - tdfFile, err := os.Create(outputFilePath) - if err != nil { - errChan <- fmt.Errorf("failed to write decrypted file %s: %v", outputFilePath, err) - return - } - defer tdfFile.Close() - - _, e := io.Copy(tdfFile, decrypted) - if e != nil { - errChan <- fmt.Errorf("failed to write decrypted data to destination %s: %v", outputFilePath, err) - return - } - - outputPathsChan <- outputFilePath - }(file, ext) - } - } - } - } - - wg.Wait() - close(outputPathsChan) - close(errChan) - - var outputPaths []string - for path := range outputPathsChan { - outputPaths = append(outputPaths, path) - } - - var errors []error - for err := range errChan { - errors = append(errors, err) - } - - logOutputPaths(outputPaths, errors) - - if len(outputPaths) == 0 { - if len(errors) == 0 { - return nil, fmt.Errorf("no files with extensions %v found in directory %s", extensions, dirPath) - } - return nil, fmt.Errorf("encountered errors during decryption of files in directory %s: %v", dirPath, errors) - } - return outputPaths, nil -} - -func decryptBytesWithClient(toDecrypt []byte, sdkClient *sdk.SDK) (*bytes.Buffer, error) { - tdfreader, err := sdkClient.LoadTDF(bytes.NewReader(toDecrypt)) - if err != nil { - return nil, err - } - - buf := new(bytes.Buffer) - _, err = io.Copy(buf, tdfreader) - if err != nil && err != io.EOF { - return nil, err - } - return buf, nil -} - -func DecryptFilePE(inputFilePath string, outputFilePath string, config OpentdfConfig, token TokenAuth, authScopes []string) (string, error) { - bytes, err := readBytesFromFile(inputFilePath) - if err != nil { - return "", err - } - decrypted, err := decryptBytesPE(bytes, authScopes, config, token) - if err != nil { - return "", err - } - - tdfFile, err := os.Create(outputFilePath) - if err != nil { - return "", fmt.Errorf("failed to write decrypted file %s", outputFilePath) - } - defer tdfFile.Close() - - _, e := io.Copy(tdfFile, decrypted) - if e != nil { - return "", errors.New("failed to write decrypted data to destination") - } - - return outputFilePath, nil -} - -func encryptFileWithClient(inputFilePath string, outputFilePath string, sdkClient *sdk.SDK, config OpentdfConfig, dataAttributes []string) (string, error) { - bytes, err := readBytesFromFile(inputFilePath) - if err != nil { - return "", err - } - - encrypted, err := encryptBytesWithClient(bytes, sdkClient, config, dataAttributes) - if err != nil { - return "", 
fmt.Errorf("failed to encrypt: %w", err) - } - - var dest *os.File - if !strings.HasSuffix(outputFilePath, ".tdf") { - outputFilePath += ".tdf" - } - tdfFile, err := os.Create(outputFilePath) - if err != nil { - return "", fmt.Errorf("failed to write encrypted file %s", outputFilePath) - } - defer tdfFile.Close() - dest = tdfFile - - _, e := io.Copy(dest, encrypted) - if e != nil { - return "", errors.New("failed to write encrypted data to destination") - } - - return outputFilePath, nil -} - -func encryptBytesWithClient(b []byte, sdkClient *sdk.SDK, config OpentdfConfig, dataAttributes []string) (*bytes.Buffer, error) { - var encrypted []byte - enc := bytes.NewBuffer(encrypted) - - _, err := sdkClient.CreateTDF(enc, bytes.NewReader(b), - sdk.WithDataAttributes(dataAttributes...), - sdk.WithKasInformation(sdk.KASInfo{ - URL: config.KasUrl, - PublicKey: "", - }), - ) - if err != nil { - return nil, err - } - return enc, nil -} - -// Function to find all files recursively in a directory matching the given extensions -func findFiles(dir string, extensions []string) ([]string, error) { - var files []string - - // Use filepath.Walk to walk through the directory recursively - err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { - if err != nil { - // If there's an error reading the file, skip it - return err - } - - // Check if the file extension matches 'extensions' parameter - if !info.IsDir() && strings.Contains(strings.Join(extensions, ","), filepath.Ext(path)) { - files = append(files, path) // Add the file to the list - } - - return nil - }) - - if err != nil { - return nil, err - } - - return files, nil -} - -// logOutputPaths logs the output paths and any errors that occurred during processing -func logOutputPaths(outputPaths []string, errors []error) { - if len(errors) > 0 { - log.Println("Errors occurred during processing:") - for _, err := range errors { - log.Printf("\t%s\n", err) - } - } - log.Println("Output Paths:") - for _, path := range outputPaths { - log.Printf("\t%s\n", path) - } -} diff --git a/otdf-python-proto/README.md b/otdf-python-proto/README.md new file mode 100644 index 0000000..7327a9a --- /dev/null +++ b/otdf-python-proto/README.md @@ -0,0 +1,186 @@ +# OpenTDF Python Proto Generator + +This sub-module is responsible for generating Python protobuf files and Connect RPC clients from the OpenTDF platform proto definitions. + +## What's New: Connect RPC Support + +This project now supports **Connect RPC**, a modern HTTP-friendly alternative to gRPC that provides: + +- 🌐 **HTTP/1.1 compatibility** - Works with all HTTP infrastructure +- 🔍 **Human-readable debugging** - JSON payloads can be inspected with standard tools +- 🌍 **Browser compatibility** - Can be called directly from web browsers +- 🚀 **Simplified deployment** - No special gRPC infrastructure required +- 📊 **Better observability** - Standard HTTP status codes and headers + +See [CONNECT_RPC_MIGRATION.md](../CONNECT_RPC_MIGRATION.md) for migration guide and examples. 
+ +## Structure + +- `proto-files/`: Contains the raw .proto files downloaded from the OpenTDF platform +- `generated/`: Contains the generated Python protobuf and Connect RPC client files +- `scripts/`: Contains build scripts for generating protobuf and Connect RPC files +- `buf.yaml`: Buf configuration for proto validation and management +- `buf.gen.yaml`: Buf generation configuration for Connect RPC and protobuf + +## Usage + +### Connect RPC Generation (Recommended) + +To generate Connect RPC clients and protobuf files: + +```bash +cd otdf-python-proto +uv run python scripts/generate_connect_proto.py +``` + +Or use the convenience script: + +```bash +./scripts/build_connect_proto.sh +``` + +This generates: +- `generated/*_connect.py` - Connect RPC clients (preferred) +- `generated/*_pb2.py` - Standard protobuf classes +- `generated/*_pb2.pyi` - Type stubs for better IDE support +- `generated/legacy_grpc/*_pb2_grpc.py` - Legacy gRPC clients (backward compatibility) + +### Legacy gRPC Generation + +To generate traditional gRPC clients (backward compatibility): + +```bash +cd otdf-python-proto +uv run python scripts/generate_proto.py +``` + +Or use the legacy script: + +```bash +./scripts/build_proto.sh +``` + +### Download Fresh Proto Files + +To download the latest proto files from OpenTDF platform: + +```bash +cd otdf-python-proto +uv run python scripts/generate_connect_proto.py --download +``` + +## Dependencies + +The generated files depend on: + +### Connect RPC (Recommended) +- `connect-python[compiler]>=0.4.2` - Connect RPC client and code generator +- `protobuf>=6.31.1` - Protocol Buffers +- `googleapis-common-protos>=1.66.0` - Google API annotations +- `urllib3` or `aiohttp` - HTTP client (for Connect RPC) + +### Legacy gRPC (Backward Compatibility) +- `grpcio>=1.74.0` - gRPC runtime +- `grpcio-tools>=1.74.0` - gRPC code generation tools +- `protobuf>=6.31.1` - Protocol Buffers +- `googleapis-common-protos>=1.66.0` - Google API annotations + +## Examples + +### Connect RPC Client Usage + +```python +import urllib3 +from otdf_python_proto.policy_pb2 import GetPolicyRequest +from otdf_python_proto.policy_connect import PolicyServiceClient + +# Create HTTP client +http_client = urllib3.PoolManager() + +# Create Connect RPC client +client = PolicyServiceClient( + base_url="https://platform.opentdf.io", + http_client=http_client +) + +# Make RPC call +request = GetPolicyRequest(id="policy-123") +response = client.get_policy( + request, + extra_headers={"Authorization": "Bearer your-token"}, + timeout_seconds=30.0 +) +``` + +### Async Connect RPC Client + +```python +import aiohttp +from otdf_python_proto.policy_connect import AsyncPolicyServiceClient + +async with aiohttp.ClientSession() as session: + client = AsyncPolicyServiceClient( + base_url="https://platform.opentdf.io", + http_client=session + ) + + response = await client.get_policy(request) +``` + +### Legacy gRPC Client + +```python +import grpc +from otdf_python_proto.legacy_grpc.policy_pb2_grpc import PolicyServiceStub + +channel = grpc.insecure_channel("platform.opentdf.io:443") +client = PolicyServiceStub(channel) +response = client.GetPolicy(request) +``` + +## Tool Requirements + +- **buf** - Protocol buffer management and generation + ```bash + # macOS + brew install bufbuild/buf/buf + + # Or with Go + go install github.com/bufbuild/buf/cmd/buf@latest + ``` + +- **uv** - Python package management + ```bash + curl -LsSf https://astral.sh/uv/install.sh | sh + ``` + +## Migration from gRPC + +If you're migrating from 
traditional gRPC clients to Connect RPC: + +1. Read the [Connect RPC Migration Guide](../CONNECT_RPC_MIGRATION.md) +2. Run the Connect RPC generation: `./scripts/build_connect_proto.sh` (or from the submodule: `cd otdf-python-proto && uv run python scripts/generate_connect_proto.py`) +3. Update your client code to use `*_connect.py` modules +4. Test with your authentication and deployment setup +5. Optionally remove legacy gRPC dependencies + +## Troubleshooting + +### "buf command not found" +Install buf: `brew install bufbuild/buf/buf` + +### "protoc-gen-connect_python not found" +Install with compiler support: `uv add connect-python[compiler]` + +### Import errors after generation +Ensure `__init__.py` files exist in otdf_python_proto directories + +### Protocol version mismatches +Regenerate with latest proto files: `uv run python scripts/generate_connect_proto.py --download` + +## Learn More + +- [Connect RPC Documentation](https://connectrpc.com/docs/) +- [Connect Python Repository](https://github.com/connectrpc/connect-python) +- [OpenTDF Platform](https://github.com/opentdf/platform) +- [Buf Documentation](https://buf.build/docs/) diff --git a/otdf-python-proto/buf.gen.yaml b/otdf-python-proto/buf.gen.yaml new file mode 100644 index 0000000..75c7b67 --- /dev/null +++ b/otdf-python-proto/buf.gen.yaml @@ -0,0 +1,28 @@ +version: v2 +managed: + enabled: true + disable: + - file_option: go_package + module: buf.build/bufbuild/protovalidate + - file_option: go_package + module: buf.build/googleapis/googleapis + - file_option: go_package + module: buf.build/grpc-ecosystem/grpc-gateway +plugins: + # Standard Python protobuf generation + - remote: buf.build/protocolbuffers/python:v31.1 + out: src/otdf_python_proto + + # Python type stubs generation + - remote: buf.build/protocolbuffers/pyi:v31.1 + out: src/otdf_python_proto + + # Connect Python client generation (preferred) + - local: ../.venv/bin/protoc-gen-connect_python + out: src/otdf_python_proto + opt: + - paths=source_relative + + # Legacy gRPC Python generation (for backward compatibility) + - remote: buf.build/grpc/python:v1.74.0 + out: src/otdf_python_proto/legacy_grpc diff --git a/otdf-python-proto/buf.lock b/otdf-python-proto/buf.lock new file mode 100644 index 0000000..22c5f3b --- /dev/null +++ b/otdf-python-proto/buf.lock @@ -0,0 +1,12 @@ +# Generated by buf. DO NOT EDIT. 
+version: v2
+deps:
+  - name: buf.build/bufbuild/protovalidate
+    commit: 6c6e0d3c608e4549802254a2eee81bc8
+    digest: b5:a7ca081f38656fc0f5aaa685cc111d3342876723851b47ca6b80cbb810cbb2380f8c444115c495ada58fa1f85eff44e68dc54a445761c195acdb5e8d9af675b6
+  - name: buf.build/googleapis/googleapis
+    commit: 61b203b9a9164be9a834f58c37be6f62
+    digest: b5:7811a98b35bd2e4ae5c3ac73c8b3d9ae429f3a790da15de188dc98fc2b77d6bb10e45711f14903af9553fa9821dff256054f2e4b7795789265bc476bec2f088c
+  - name: buf.build/grpc-ecosystem/grpc-gateway
+    commit: 4c5ba75caaf84e928b7137ae5c18c26a
+    digest: b5:c113e62fb3b29289af785866cae062b55ec8ae19ab3f08f3004098928fbca657730a06810b2012951294326b95669547194fa84476b9e9b688d4f8bf77a0691d
diff --git a/otdf-python-proto/buf.yaml b/otdf-python-proto/buf.yaml
new file mode 100644
index 0000000..5cf58f7
--- /dev/null
+++ b/otdf-python-proto/buf.yaml
@@ -0,0 +1,24 @@
+version: v2
+modules:
+  - path: proto-files
+deps:
+  - buf.build/bufbuild/protovalidate
+  - buf.build/googleapis/googleapis
+  - buf.build/grpc-ecosystem/grpc-gateway
+lint:
+  use:
+    - STANDARD
+  except:
+    - FIELD_NOT_REQUIRED
+    - PACKAGE_NO_IMPORT_CYCLE
+    - PACKAGE_VERSION_SUFFIX
+breaking:
+  use:
+    - FILE
+    - PACKAGE
+    - WIRE
+    - WIRE_JSON
+  except:
+    - EXTENSION_NO_DELETE
+    - FIELD_SAME_DEFAULT
+    - PACKAGE_EXTENSION_NO_DELETE
\ No newline at end of file
diff --git a/otdf-python-proto/proto-files/authorization/authorization.proto b/otdf-python-proto/proto-files/authorization/authorization.proto
new file mode 100644
index 0000000..058f1f1
--- /dev/null
+++ b/otdf-python-proto/proto-files/authorization/authorization.proto
@@ -0,0 +1,305 @@
+syntax = "proto3";
+
+package authorization;
+
+import "google/api/annotations.proto";
+import "google/protobuf/any.proto";
+
+import "policy/objects.proto";
+
+
+message Token {
+  string id = 1; // ephemeral id for tracking between request and response
+  string jwt = 2; // the token
+}
+
+// PE (Person Entity) or NPE (Non-Person Entity)
+message Entity {
+  string id = 1; // ephemeral id for tracking between request and response
+  // Standard entity types supported by the platform
+  oneof entity_type {
+    // one of the entity options must be set
+    string email_address = 2;
+    string user_name = 3;
+    string remote_claims_url = 4;
+    string uuid = 5;
+    google.protobuf.Any claims = 6;
+    EntityCustom custom = 7;
+    string client_id = 8;
+  }
+  enum Category {
+    CATEGORY_UNSPECIFIED = 0;
+    CATEGORY_SUBJECT = 1;
+    CATEGORY_ENVIRONMENT = 2;
+  }
+  Category category = 9;
+}
+
+// Entity type for custom entities beyond the standard types
+message EntityCustom {
+  google.protobuf.Any extension = 1;
+}
+
+// A set of related PE and NPE
+message EntityChain {
+  string id = 1; // ephemeral id for tracking between request and response
+  repeated Entity entities = 2;
+}
+
+/*
+  Example GetDecisions request to answer the question: do Bob (represented by entity chain ec1)
+  and Alice (represented by entity chain ec2) have TRANSMIT authorization for
+  2 resources: resource1 (attr-set-1), defined by attribute foo:bar, and resource2 (attr-set-2), defined by attributes foo:bar and color:red?
+
+  {
+    "actions": [
+      {
+        "standard": "STANDARD_ACTION_TRANSMIT"
+      }
+    ],
+    "entityChains": [
+      {
+        "id": "ec1",
+        "entities": [
+          {
+            "emailAddress": "bob@example.org"
+          }
+        ]
+      },
+      {
+        "id": "ec2",
+        "entities": [
+          {
+            "userName": "alice@example.org"
+          }
+        ]
+      }
+    ],
+    "resourceAttributes": [
+      {
+        "resourceAttributesId": "attr-set-1",
+        "attributeFqns": [
+          "https://www.example.org/attr/foo/value/value1"
+        ]
+      },
+      {
+        "resourceAttributesId": "attr-set-2",
+        "attributeFqns": [
+          "https://example.net/attr/attr1/value/value1",
+          "https://example.net/attr/attr1/value/value2"
+        ]
+      }
+    ]
+  }
+
+*/
+message DecisionRequest {
+  repeated policy.Action actions = 1;
+  repeated EntityChain entity_chains = 2;
+  repeated ResourceAttribute resource_attributes = 3;
+}
+
+/*
+
+  Example response for a Decision Request: do Bob (represented by entity chain ec1)
+  and Alice (represented by entity chain ec2) have TRANSMIT authorization for
+  2 resources: resource1 (attr-set-1), defined by attribute foo:bar, and resource2 (attr-set-2), defined by attributes foo:bar and color:red?
+
+  Results:
+  - bob has permitted authorization to transmit for a resource defined by attr-set-1 attributes and has a watermark obligation
+  - bob has denied authorization to transmit for a resource defined by attr-set-2 attributes
+  - alice has permitted authorization to transmit for a resource defined by attr-set-1 attributes
+  - alice has denied authorization to transmit for a resource defined by attr-set-2 attributes
+
+  {
+    "entityChainId": "ec1",
+    "resourceAttributesId": "attr-set-1",
+    "decision": "DECISION_PERMIT",
+    "obligations": [
+      "http://www.example.org/obligation/watermark"
+    ]
+  },
+  {
+    "entityChainId": "ec1",
+    "resourceAttributesId": "attr-set-2",
+    "decision": "DECISION_DENY"
+  },
+  {
+    "entityChainId": "ec2",
+    "resourceAttributesId": "attr-set-1",
+    "decision": "DECISION_PERMIT"
+  },
+  {
+    "entityChainId": "ec2",
+    "resourceAttributesId": "attr-set-2",
+    "decision": "DECISION_DENY"
+  }
+
+
+*/
+message DecisionResponse {
+  enum Decision {
+    DECISION_UNSPECIFIED = 0;
+    DECISION_DENY = 1;
+    DECISION_PERMIT = 2;
+  }
+  string entity_chain_id = 1; // ephemeral entity chain id from the request
+  string resource_attributes_id = 2; // ephemeral resource attributes id from the request
+  policy.Action action = 3; // Action of the decision response
+  Decision decision = 4; // The decision response
+  repeated string obligations = 5; // optional list of obligations represented in URI format
+}
+
+message GetDecisionsRequest {
+  repeated DecisionRequest decision_requests = 1;
+}
+
+message GetDecisionsResponse {
+  repeated DecisionResponse decision_responses = 1;
+}
+
+/*
+  Request to get entitlements for one or more entities for an optional attribute scope
+
+  Example: Get entitlements for bob and alice (both represented using an email address)
+
+  {
+    "entities": [
+      {
+        "id": "e1",
+        "emailAddress": "bob@example.org"
+      },
+      {
+        "id": "e2",
+        "emailAddress": "alice@example.org"
+      }
+    ],
+    "scope": {
+      "attributeFqns": [
+        "https://example.net/attr/attr1/value/value1",
+        "https://example.net/attr/attr1/value/value2"
+      ]
+    }
+  }
+
+*/
+message GetEntitlementsRequest {
+  // list of requested entities
+  repeated Entity entities = 1;
+  // optional attribute fqn as a scope
+  optional ResourceAttribute scope = 2;
+  // optional parameter to return a full list of entitlements - returns lower hierarchy attributes
+  optional bool with_comprehensive_hierarchy = 3;
+}
+
+message EntityEntitlements {
+  string entity_id = 1;
+  repeated string attribute_value_fqns = 2;
+}
+
+// A logical bucket of attributes belonging to a "Resource"
+message ResourceAttribute {
+  string resource_attributes_id = 1;
+  repeated string attribute_value_fqns = 2;
+}
+
+/*
+
+  Example response for the request: Get entitlements for bob and alice (both represented using an email address)
+
+  {
+    "entitlements": [
+      {
+        "entityId": "e1",
+        "attributeValueReferences": [
+          {
+            "attributeFqn": "http://www.example.org/attr/foo/value/bar"
+          }
+        ]
+      },
+      {
+        "entityId": "e2",
+        "attributeValueReferences": [
+          {
+            "attributeFqn": "http://www.example.org/attr/color/value/red"
+          }
+        ]
+      }
+    ]
+  }
+
+
+*/
+message GetEntitlementsResponse {
+  repeated EntityEntitlements entitlements = 1;
+}
+
+
+/*
+  Example GetDecisionsByToken request to answer the question: do Bob and client1 (represented by token tok1)
+  and Alice and client2 (represented by token tok2) have TRANSMIT authorization for
+  2 resources: resource1 (attr-set-1), defined by attribute foo:bar, and resource2 (attr-set-2), defined by attributes foo:bar and color:red?
+
+  {
+    "actions": [
+      {
+        "standard": "STANDARD_ACTION_TRANSMIT"
+      }
+    ],
+    "tokens": [
+      {
+        "id": "tok1",
+        "jwt": ....
+      },
+      {
+        "id": "tok2",
+        "jwt": .....
+      }
+    ],
+    "resourceAttributes": [
+      {
+        "attributeFqns": [
+          "https://www.example.org/attr/foo/value/value1"
+        ]
+      },
+      {
+        "attributeFqns": [
+          "https://example.net/attr/attr1/value/value1",
+          "https://example.net/attr/attr1/value/value2"
+        ]
+      }
+    ]
+  }
+
+*/
+message TokenDecisionRequest {
+  repeated policy.Action actions = 1;
+  repeated Token tokens = 2;
+  repeated ResourceAttribute resource_attributes = 3;
+}
+
+message GetDecisionsByTokenRequest {
+  repeated TokenDecisionRequest decision_requests = 1;
+}
+
+message GetDecisionsByTokenResponse {
+  repeated DecisionResponse decision_responses = 1;
+}
+
+service AuthorizationService {
+  rpc GetDecisions(GetDecisionsRequest) returns (GetDecisionsResponse) {
+    option (google.api.http) = {
+      post: "/v1/authorization"
+      body: "*"
+    };
+  }
+  rpc GetDecisionsByToken(GetDecisionsByTokenRequest) returns (GetDecisionsByTokenResponse) {
+    option (google.api.http) = {post: "/v1/token/authorization"};
+  }
+  rpc GetEntitlements(GetEntitlementsRequest) returns (GetEntitlementsResponse) {
+    option (google.api.http) = {
+      post: "/v1/entitlements"
+      body: "*"
+    };
+  }
+}
diff --git a/otdf-python-proto/proto-files/authorization/v2/authorization.proto b/otdf-python-proto/proto-files/authorization/v2/authorization.proto
new file mode 100644
index 0000000..7345334
--- /dev/null
+++ b/otdf-python-proto/proto-files/authorization/v2/authorization.proto
@@ -0,0 +1,171 @@
+syntax = "proto3";
+
+package authorization.v2;
+
+import "buf/validate/validate.proto";
+import "entity/entity.proto";
+import "google/protobuf/wrappers.proto";
+import "policy/objects.proto";
+
+enum Decision {
+  DECISION_UNSPECIFIED = 0;
+  DECISION_DENY = 1;
+  DECISION_PERMIT = 2;
+  // DECISION_OBLIGATED = 3;
+}
+
+// The EntityIdentifier specifies the actor in an entitlement or decision request - the PE, NPE, or PE+NPE being authorized.
+// The abstraction houses the distinct entity types, PE and/or NPE combinations, or a registered resource value
+// being treated as an entity in entitlement/authorization decisioning.
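+// As a purely illustrative example (mirroring the JSON samples in the v1 proto above, not normative
+// platform documentation), the three identifier shapes look roughly like:
+//   {"entityChain": {"ephemeralId": "ec1", "entities": [{"userName": "alice"}]}}
+//   {"registeredResourceValueFqn": "https://example.com/reg_res/resource1/value/value1"}
+//   {"token": {"ephemeralId": "tok1", "jwt": "..."}}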
+message EntityIdentifier {
+  oneof identifier {
+    option (buf.validate.oneof).required = true;
+
+    // chain of one or more entities
+    entity.EntityChain entity_chain = 1 [(buf.validate.field).cel = {
+      id: "entity_chain_required"
+      message: "entities must be provided"
+      expression: "has(this.entities) && this.entities.size() > 0"
+    }];
+
+    // fully qualified name of the registered resource value stored in platform policy, where in
+    // this case the resource acts as and represents a single entity for authorization/entitlement decisioning
+    string registered_resource_value_fqn = 2 [(buf.validate.field).string = {
+      min_len: 1
+      uri: true
+    }];
+
+    // access token (JWT), which is used to create an entity chain (comprising one or more entities)
+    entity.Token token = 3 [(buf.validate.field).cel = {
+      id: "token_required"
+      message: "token must be provided"
+      expression: "has(this.jwt) && this.jwt.size() > 0"
+    }];
+  }
+}
+
+// Entitlements for a given entity, mapping each attribute value FQN to any entitled actions[]
+message EntityEntitlements {
+  message ActionsList {
+    repeated policy.Action actions = 1;
+  }
+
+  // ephemeral id for tracking between request and response
+  string ephemeral_id = 1;
+  map<string, ActionsList> actions_per_attribute_value_fqn = 2;
+}
+
+// Either a set of attribute values (such as those on a TDF) or a registered resource value
+message Resource {
+  // ephemeral id for tracking between request and response
+  string ephemeral_id = 1;
+
+  message AttributeValues {
+    repeated string fqns = 1;
+  }
+
+  oneof resource {
+    AttributeValues attribute_values = 2 [(buf.validate.field).cel = {
+      id: "attribute_values_required"
+      message: "if provided, resource.attribute_values must not be empty"
+      expression: "this.fqns.size() > 0 && this.fqns.all(item, item.isUri())"
+    }];
+    string registered_resource_value_fqn = 3 [(buf.validate.field).string = {
+      min_len: 1
+      uri: true
+    }];
+  }
+}
+
+message ResourceDecision {
+  // ephemeral id for tracking between request and response
+  string ephemeral_resource_id = 1;
+  // decision result
+  Decision decision = 2;
+}
+
+// Can the identified entity/entities access?
+// 1. one entity reference (actor)
+// 2. one action
+// 3. one resource
+message GetDecisionRequest {
+  // an entity must be identified for authorization decisioning
+  EntityIdentifier entity_identifier = 1 [(buf.validate.field).required = true];
+  // name on action is required
+  policy.Action action = 2 [(buf.validate.field).required = true];
+  Resource resource = 3 [(buf.validate.field).required = true];
+
+  option (buf.validate.message).cel = {
+    id: "get_decision_request.action_name_required"
+    message: "action.name must be provided"
+    expression: "has(this.action.name)"
+  };
+}
+message GetDecisionResponse {
+  // decision on the resource
+  ResourceDecision decision = 1;
+  // optional list of obligations represented in URI format
+  // repeated string obligations = 2;
+}
+
+// Can the identified entity/entities access?
+// 1. one entity reference (actor)
+// 2. one action
+// 3. multiple resources
+// Note: this is a more performant bulk request for multiple resource decisions
+message GetDecisionMultiResourceRequest {
+  // an entity must be identified for authorization decisioning
+  EntityIdentifier entity_identifier = 1 [(buf.validate.field).required = true];
+  // name on action is required
+  policy.Action action = 2 [(buf.validate.field).required = true];
+  repeated Resource resources = 3 [
+    (buf.validate.field).required = true,
+    (buf.validate.field).repeated = {min_items: 1}
+  ];
+
+  option (buf.validate.message).cel = {
+    id: "get_decision_multi_request.action_name_required"
+    message: "action.name must be provided"
+    expression: "has(this.action.name)"
+  };
+}
+message GetDecisionMultiResourceResponse {
+  // convenience flag indicating global resource decisions result (permit/deny)
+  google.protobuf.BoolValue all_permitted = 1;
+  // individual resource decisions
+  repeated ResourceDecision resource_decisions = 2;
+}
+
+// Is access allowed?
+// 1. Multiplexing of a Decision request
+// This is a more performant bulk request for complex decisioning (i.e. multiple entity chains or actions on
+// multiple resources)
+message GetDecisionBulkRequest {
+  repeated GetDecisionMultiResourceRequest decision_requests = 1;
+}
+message GetDecisionBulkResponse {
+  repeated GetDecisionMultiResourceResponse decision_responses = 1;
+}
+
+// What is permitted to the identified entity/entities (actor), broken down as permitted actions on attribute value FQNs?
+//
+// Note: the v1 API parameter 'scope' has been dropped, and it is recommended to use
+// GetDecision if the resource is known
+message GetEntitlementsRequest {
+  // an entity must be identified for entitlement decisioning
+  EntityIdentifier entity_identifier = 1 [(buf.validate.field).required = true];
+  // optional parameter to return all entitled values for attribute definitions with hierarchy rules, propagating
+  // down the hierarchical values instead of returning solely the value that is directly entitled
+  optional bool with_comprehensive_hierarchy = 2;
+}
+message GetEntitlementsResponse {
+  repeated EntityEntitlements entitlements = 1;
+}
+
+service AuthorizationService {
+  rpc GetDecision(GetDecisionRequest) returns (GetDecisionResponse) {}
+  rpc GetDecisionMultiResource(GetDecisionMultiResourceRequest) returns (GetDecisionMultiResourceResponse) {}
+  rpc GetDecisionBulk(GetDecisionBulkRequest) returns (GetDecisionBulkResponse) {}
+
+  rpc GetEntitlements(GetEntitlementsRequest) returns (GetEntitlementsResponse) {}
+}
diff --git a/otdf-python-proto/proto-files/common/common.proto b/otdf-python-proto/proto-files/common/common.proto
new file mode 100644
index 0000000..645235d
--- /dev/null
+++ b/otdf-python-proto/proto-files/common/common.proto
@@ -0,0 +1,41 @@
+syntax = "proto3";
+
+package common;
+
+import "google/protobuf/timestamp.proto";
+
+// Struct to uniquely identify a resource with optional additional metadata
+message Metadata {
+  // created_at set by server (the entity who created it will be recorded in an audit event)
+  google.protobuf.Timestamp created_at = 1;
+  // updated_at set by server (the entity who updated it will be recorded in an audit event)
+  google.protobuf.Timestamp updated_at = 2;
+  // optional short description
+  map<string, string> labels = 3;
+}
+
+message MetadataMutable {
+  // optional labels
+  map<string, string> labels = 3;
+}
+
+enum MetadataUpdateEnum {
+  // unspecified update type
+  METADATA_UPDATE_ENUM_UNSPECIFIED = 0;
+  // only update the fields that are provided
+  METADATA_UPDATE_ENUM_EXTEND = 1;
+  // replace the entire
metadata with the provided metadata
+  METADATA_UPDATE_ENUM_REPLACE = 2;
+}
+
+// buflint ENUM_VALUE_PREFIX: to make sure that C++ scoping rules aren't violated when users add new enum values to an enum in a given package
+enum ActiveStateEnum {
+  ACTIVE_STATE_ENUM_UNSPECIFIED = 0;
+  ACTIVE_STATE_ENUM_ACTIVE = 1;
+  ACTIVE_STATE_ENUM_INACTIVE = 2;
+  ACTIVE_STATE_ENUM_ANY = 3;
+}
+
+// message ListOptions {
+//   TODO: limit/offset [https://github.com/opentdf/platform/issues/55]
+// }
diff --git a/otdf-python-proto/proto-files/entity/entity.proto b/otdf-python-proto/proto-files/entity/entity.proto
new file mode 100644
index 0000000..b807bef
--- /dev/null
+++ b/otdf-python-proto/proto-files/entity/entity.proto
@@ -0,0 +1,41 @@
+syntax = "proto3";
+
+package entity;
+
+import "buf/validate/validate.proto";
+import "google/protobuf/any.proto";
+
+message Token {
+  // ephemeral id for tracking between request and response
+  string ephemeral_id = 1;
+  // the token
+  string jwt = 2 [(buf.validate.field).required = true];
+}
+
+// PE (Person Entity) or NPE (Non-Person Entity)
+message Entity {
+  // ephemeral id for tracking between request and response
+  string ephemeral_id = 1;
+
+  // Standard entity types supported by the platform, one of which must be set
+  oneof entity_type {
+    string email_address = 2;
+    string user_name = 3;
+    // used by ERS claims mode
+    google.protobuf.Any claims = 4;
+    string client_id = 5;
+  }
+  enum Category {
+    CATEGORY_UNSPECIFIED = 0;
+    CATEGORY_SUBJECT = 1;
+    CATEGORY_ENVIRONMENT = 2;
+  }
+  Category category = 11;
+}
+
+// A set of related PE and NPE
+message EntityChain {
+  // ephemeral id for tracking between request and response
+  string ephemeral_id = 1;
+  repeated Entity entities = 2;
+}
diff --git a/otdf-python-proto/proto-files/entityresolution/entity_resolution.proto b/otdf-python-proto/proto-files/entityresolution/entity_resolution.proto
new file mode 100644
index 0000000..4b93ab2
--- /dev/null
+++ b/otdf-python-proto/proto-files/entityresolution/entity_resolution.proto
@@ -0,0 +1,124 @@
+syntax = "proto3";
+
+package entityresolution;
+
+import "authorization/authorization.proto";
+import "google/protobuf/struct.proto";
+import "google/protobuf/any.proto";
+import "google/api/annotations.proto";
+
+/*
+  Example: Get idp attributes for bob and alice (both represented using an email address)
+  {
+    "entities": [
+      {
+        "id": "e1",
+        "emailAddress": "bob@example.org"
+      },
+      {
+        "id": "e2",
+        "emailAddress": "alice@example.org"
+      }
+    ]
+  }
+
+*/
+message ResolveEntitiesRequest {
+  repeated authorization.Entity entities = 1;
+}
+
+
+message EntityRepresentation {
+  repeated google.protobuf.Struct additional_props = 1;
+  string original_id = 2; // ephemeral entity id from the request
+}
+
+
+/*
+  Example: Get idp attributes for bob and alice
+  {
+    "entity_representations": [
+      {
+        "idp_entity_id": "e1",
+        "additional_props": {"someAttr1":"someValue1"}
+      },
+      {
+        "idp_entity_id": "e2",
+        "additional_props": {"someAttr2":"someValue2"}
+      }
+    ]
+  }
+
+*/
+message ResolveEntitiesResponse {
+  repeated EntityRepresentation entity_representations = 1;
+}
+
+message EntityNotFoundError {
+  int32 code = 1;
+  string message = 2;
+  repeated google.protobuf.Any details = 3;
+  string entity = 4;
+}
+
+
+/*
+  Example: Get Entity chains for tokens aaaaaa and bbbbbb
+  {
+    "tokens": [
+      "aaaaaaa",
+      "bbbbbbbb"
+    ]
+  }
+
+*/
+message CreateEntityChainFromJwtRequest {
+  repeated authorization.Token tokens = 1;
+}
+
+/*
+  Example: Return the entity chains from the provided tokens
+  {
"entity_chains": [ + { + "id": "tok1", + "entities": [ + { + "clientId": "client1" + } + ] + }, + { + "id": "tok2", + "entities": [ + { + "userName": "alice", + "clientId": "client2" + } + ] + } + ] + } + +*/ +message CreateEntityChainFromJwtResponse { + repeated authorization.EntityChain entity_chains = 1; +} + + +service EntityResolutionService { + // Deprecated: use v2 ResolveEntities instead + rpc ResolveEntities(ResolveEntitiesRequest) returns (ResolveEntitiesResponse) { + option (google.api.http) = { + post: "/entityresolution/resolve" + body: "*"; + }; + } + // Deprecated: use v2 CreateEntityChainsFromTokens instead + rpc CreateEntityChainFromJwt(CreateEntityChainFromJwtRequest) returns (CreateEntityChainFromJwtResponse) { + option (google.api.http) = { + post: "/entityresolution/entitychain" + body: "*"; + }; + } +} diff --git a/otdf-python-proto/proto-files/entityresolution/v2/entity_resolution.proto b/otdf-python-proto/proto-files/entityresolution/v2/entity_resolution.proto new file mode 100644 index 0000000..7dffbf3 --- /dev/null +++ b/otdf-python-proto/proto-files/entityresolution/v2/entity_resolution.proto @@ -0,0 +1,46 @@ +syntax = "proto3"; + +package entityresolution.v2; + +import "buf/validate/validate.proto"; +import "entity/entity.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; + +message EntityRepresentation { + // ephemeral entity id from the request + string original_id = 1; + repeated google.protobuf.Struct additional_props = 2; +} + +// Resolve a set of entities to their representations. +message ResolveEntitiesRequest { + repeated entity.Entity entities = 1 [ + (buf.validate.field).required = true, + (buf.validate.field).repeated = {min_items: 1} + ]; +} +message ResolveEntitiesResponse { + repeated EntityRepresentation entity_representations = 1; +} + +message EntityNotFoundError { + int32 code = 1; + string message = 2; + repeated google.protobuf.Any details = 3; + string entity = 4; +} + +// Create an entity chain for each token (JWT) in the request. +message CreateEntityChainsFromTokensRequest { + repeated entity.Token tokens = 1; +} + +message CreateEntityChainsFromTokensResponse { + repeated entity.EntityChain entity_chains = 1; +} + +service EntityResolutionService { + rpc ResolveEntities(ResolveEntitiesRequest) returns (ResolveEntitiesResponse) {} + rpc CreateEntityChainsFromTokens(CreateEntityChainsFromTokensRequest) returns (CreateEntityChainsFromTokensResponse) {} +} diff --git a/otdf-python-proto/proto-files/kas/kas.proto b/otdf-python-proto/proto-files/kas/kas.proto new file mode 100644 index 0000000..a19bc84 --- /dev/null +++ b/otdf-python-proto/proto-files/kas/kas.proto @@ -0,0 +1,160 @@ +syntax = "proto3"; + +package kas; + +import "google/api/annotations.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/wrappers.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + info: { + title: "OpenTDF Key Access Service"; + version: "1.5.0"; + license: { + name: "BSD 3-Clause Clear"; + url: "https://github.com/opentdf/backend/blob/master/LICENSE"; + }; + }; +}; + +message InfoRequest { + // Intentionally empty. May include features later. 
+}
+
+// Service application level metadata
+message InfoResponse {
+  string version = 1;
+}
+
+message LegacyPublicKeyRequest {
+  string algorithm = 1;
+}
+
+message PolicyBinding {
+  string algorithm = 1 [json_name = "alg"];
+  string hash = 2;
+}
+
+message KeyAccess {
+  string encrypted_metadata = 1;
+  PolicyBinding policy_binding = 2;
+  string protocol = 3;
+  string key_type = 4 [json_name = "type"];
+  string kas_url = 5 [json_name = "url"];
+  string kid = 6;
+  string split_id = 7 [json_name = "sid"];
+  bytes wrapped_key = 8;
+  // header is only used for NanoTDFs
+  bytes header = 9;
+
+  // For wrapping with an ECDH derived key, when type=ec-wrapped.
+  // Should be a PEM-encoded PKCS#8 (asn.1) value.
+  string ephemeral_public_key = 10;
+}
+
+message UnsignedRewrapRequest {
+  message WithPolicy {
+    string id = 1;
+    string body = 2;
+  }
+  message WithKeyAccessObject {
+    string key_access_object_id = 1;
+    KeyAccess key_access_object = 2;
+  }
+
+  message WithPolicyRequest {
+    repeated WithKeyAccessObject key_access_objects = 1;
+    WithPolicy policy = 2;
+    string algorithm = 3;
+  }
+
+  string client_public_key = 1;
+  repeated WithPolicyRequest requests = 2;
+
+  // Used for legacy non-bulk requests
+  KeyAccess key_access = 3 [deprecated = true];
+  // Used for legacy non-bulk requests
+  string policy = 4 [deprecated = true];
+  // Used for legacy non-bulk requests
+  string algorithm = 5 [deprecated = true];
+}
+message PublicKeyRequest {
+  string algorithm = 1 [(grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = {description: "algorithm type rsa: or ec:"}];
+  string fmt = 2 [(grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = {description: "response format"}];
+  string v = 3 [(grpc.gateway.protoc_gen_openapiv2.options.openapiv2_field) = {description: "request version"}];
+}
+
+message PublicKeyResponse {
+  string public_key = 1;
+  string kid = 2;
+}
+
+message RewrapRequest {
+  reserved 2;
+  reserved "bearer";
+  string signed_request_token = 1;
+}
+
+
+message KeyAccessRewrapResult {
+  map<string, google.protobuf.Value> metadata = 1;
+  string key_access_object_id = 2;
+  string status = 3;
+  oneof result {
+    bytes kas_wrapped_key = 4;
+    string error = 5;
+  }
+}
+
+message PolicyRewrapResult {
+  string policy_id = 1;
+  repeated KeyAccessRewrapResult results = 2;
+}
+
+message RewrapResponse {
+  map<string, google.protobuf.Value> metadata = 1 [deprecated = true];
+  bytes entity_wrapped_key = 2 [deprecated = true];
+  string session_public_key = 3;
+  string schema_version = 4 [deprecated = true];
+  // New Rewrap API changes
+  repeated PolicyRewrapResult responses = 5;
+}
+
+// Get app info from the root path
+service AccessService {
+  rpc PublicKey(PublicKeyRequest) returns (PublicKeyResponse) {
+    option (google.api.http) = {get: "/kas/v2/kas_public_key"};
+
+    option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = {
+      responses: {key: "200"}
+    };
+    option idempotency_level = NO_SIDE_EFFECTS;
+  }
+
+  // Endpoint intended for gRPC Gateway's REST endpoint to provide v1 compatibility with older TDF clients
+  //
+  // This endpoint is not recommended for use in new applications, prefer the v2 endpoint ('PublicKey') instead.
+ // + // buf:lint:ignore RPC_RESPONSE_STANDARD_NAME + rpc LegacyPublicKey(LegacyPublicKeyRequest) returns (google.protobuf.StringValue) { + option (google.api.http) = {get: "/kas/kas_public_key"}; + + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + responses: {key: "200"} + }; + option idempotency_level = NO_SIDE_EFFECTS; + option deprecated = true; + } + + rpc Rewrap(RewrapRequest) returns (RewrapResponse) { + option (google.api.http) = { + post: "/kas/v2/rewrap" + body: "*"; + }; + + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + responses: {key: "200"} + }; + } +} diff --git a/otdf-python-proto/proto-files/logger/audit/test.proto b/otdf-python-proto/proto-files/logger/audit/test.proto new file mode 100644 index 0000000..de938f4 --- /dev/null +++ b/otdf-python-proto/proto-files/logger/audit/test.proto @@ -0,0 +1,40 @@ +syntax = "proto3"; + +package service.logger.audit; + +import "common/common.proto"; +import "google/protobuf/wrappers.proto"; + +// This is a test proto message to test the functionality of the audit logger +// and is excluded from normal proto generation. See the service/buf.yaml file +// for details on how to make changes to this proto file (which should be rare). +message TestPolicyObject { + string id = 1; + + google.protobuf.BoolValue active = 2; + + TestPolicyObjectVersionEnum version = 3; + + // add array field + repeated string tags = 5; + + // add oneOf simulated field that has either a scalar or struct value + oneof policy_user { + string username = 6; + User user = 7; + } + + common.Metadata metadata = 4; +} + +message User { + string id = 1; + string name = 2; +} + +// buflint ENUM_VALUE_PREFIX: to make sure that C++ scoping rules aren't violated when users add new enum values to an enum in a given package +enum TestPolicyObjectVersionEnum { + TEST_POLICY_OBJECT_VERSION_ENUM_UNSPECIFIED = 0; + TEST_POLICY_OBJECT_VERSION_ENUM_OLD = 1; + TEST_POLICY_OBJECT_VERSION_ENUM_NEW = 2; +} diff --git a/otdf-python-proto/proto-files/policy/actions/actions.proto b/otdf-python-proto/proto-files/policy/actions/actions.proto new file mode 100644 index 0000000..8dc5e42 --- /dev/null +++ b/otdf-python-proto/proto-files/policy/actions/actions.proto @@ -0,0 +1,108 @@ +syntax = "proto3"; + +package policy.actions; + +import "buf/validate/validate.proto"; +import "common/common.proto"; +import "policy/objects.proto"; +import "policy/selectors.proto"; + +/* + Actions CRUD Operations +*/ + +message GetActionRequest { + // Required + oneof identifier { + option (buf.validate.oneof).required = true; + string id = 1 [(buf.validate.field).string.uuid = true]; + string name = 2 [ + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "action_name_format" + message: "Action name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case." 
+ expression: "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + } +} +message GetActionResponse { + policy.Action action = 1; + + // Subject Mappings driving entitlement to the action + repeated policy.SubjectMapping subject_mappings = 2; +} + +message ListActionsRequest { + // Optional + policy.PageRequest pagination = 10; +} +message ListActionsResponse { + repeated policy.Action actions_standard = 1; + repeated policy.Action actions_custom = 2; + + policy.PageResponse pagination = 10; +} + +// Create a new Custom action name with optional metadata. +// Creation of Standard actions is not supported. +message CreateActionRequest { + // Required + string name = 1 [ + (buf.validate.field).required = true, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "action_name_format" + message: "Action name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case." + expression: "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + + // Optional + common.MetadataMutable metadata = 100; +} +message CreateActionResponse { + policy.Action action = 1; +} + +// Metadata may be updated for either Custom or Standard actions. +// Names may only be updated for Custom actions. +message UpdateActionRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + + // Optional + // Custom actions only: replaces the existing action name + string name = 2 [ + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "action_name_format" + message: "Action name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case." + expression: "size(this) == 0 || this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateActionResponse { + policy.Action action = 1; +} + +// Custom only: deletion of Standard actions is not supported. 
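+// For example, platform-defined actions such as 'read' or 'create' cannot be deleted, while a custom action (say, a user-defined 'watermark') can be.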
+message DeleteActionRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message DeleteActionResponse { + policy.Action action = 1; +} + +service ActionService { + rpc GetAction(GetActionRequest) returns (GetActionResponse) {} + rpc ListActions(ListActionsRequest) returns (ListActionsResponse) {} + rpc CreateAction(CreateActionRequest) returns (CreateActionResponse) {} + rpc UpdateAction(UpdateActionRequest) returns (UpdateActionResponse) {} + rpc DeleteAction(DeleteActionRequest) returns (DeleteActionResponse) {} +} diff --git a/otdf-python-proto/proto-files/policy/attributes/attributes.proto b/otdf-python-proto/proto-files/policy/attributes/attributes.proto new file mode 100644 index 0000000..695feb2 --- /dev/null +++ b/otdf-python-proto/proto-files/policy/attributes/attributes.proto @@ -0,0 +1,464 @@ +syntax = "proto3"; + +package policy.attributes; + +import "buf/validate/validate.proto"; +import "common/common.proto"; +import "google/api/annotations.proto"; +import "policy/objects.proto"; +import "policy/selectors.proto"; + +/* + Key Access Server Grants +*/ +// Deprecated +message AttributeKeyAccessServer { + option deprecated = true; + // Required + string attribute_id = 1 [(buf.validate.field).string.uuid = true]; + // Required + string key_access_server_id = 2 [(buf.validate.field).string.uuid = true]; +} + +/* + Key Maps +*/ + +message ValueKeyAccessServer { + option deprecated = true; + // Required + string value_id = 1 [(buf.validate.field).string.uuid = true]; + // Required + string key_access_server_id = 2 [(buf.validate.field).string.uuid = true]; +} + +message AttributeKey { + // Required + string attribute_id = 1 [ + (buf.validate.field).string.uuid = true, + (buf.validate.field).required = true + ]; + // Required + string key_id = 2 [ + (buf.validate.field).string.uuid = true, + (buf.validate.field).required = true + ]; +} + +message ValueKey { + // Required + string value_id = 1 [ + (buf.validate.field).string.uuid = true, + (buf.validate.field).required = true + ]; + // Required (The id listed in the AsymmetricKeys object) + string key_id = 2 [ + (buf.validate.field).string.uuid = true, + (buf.validate.field).required = true + ]; +} + +/* + Attribute Service Definitions +*/ + +message ListAttributesRequest { + // Optional + // ACTIVE by default when not specified + common.ActiveStateEnum state = 1; + // Optional + // Namespace ID or name + string namespace = 2; + + // Optional + policy.PageRequest pagination = 10; +} +message ListAttributesResponse { + repeated policy.Attribute attributes = 1; + + policy.PageResponse pagination = 10; +} + +message GetAttributeRequest { + // Temporary message level validation until we remove the deprecated id field + option (buf.validate.message).cel = { + id: "exclusive_fields" + expression: "!(has(this.id) && (has(this.attribute_id) || has(this.fqn)))" + message: "Either use deprecated 'id' field or one of 'attribute_id' or 'fqn', but not both" + }; + + option (buf.validate.message).cel = { + id: "required_fields" + expression: "has(this.id) || has(this.attribute_id) || has(this.fqn)" + message: "Either id or one of attribute_id or fqn must be set" + }; + + // Deprecated + string id = 1 [ + deprecated = true, + (buf.validate.field).ignore = IGNORE_IF_ZERO_VALUE, + (buf.validate.field).string.uuid = true + ]; + + oneof identifier { + //option (buf.validate.oneof).required = true; // TODO: enable this when we remove the deprecated field + string attribute_id = 2 [(buf.validate.field).string.uuid = 
true]; + string fqn = 3 [(buf.validate.field).string = { + min_len: 1 + uri: true + }]; + } +} +message GetAttributeResponse { + policy.Attribute attribute = 1; +} + +message CreateAttributeRequest { + // Required + string namespace_id = 1 [(buf.validate.field).string.uuid = true]; + // Required + string name = 2 [ + (buf.validate.field).required = true, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "attribute_name_format" + message: "Attribute name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored attribute name will be normalized to lower case." + expression: "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + // Required + AttributeRuleTypeEnum rule = 3 [ + (buf.validate.field).enum.defined_only = true, + (buf.validate.field).required = true + ]; + // Optional + // Attribute values (when provided) must be alphanumeric strings, allowing hyphens and underscores but not as the first or last character. + // The stored attribute value will be normalized to lower case. + repeated string values = 4 [(buf.validate.field).repeated = { + min_items: 0 + unique: true + items: { + string: { + max_len: 253 + pattern: "^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$" + } + } + }]; + + // Optional + common.MetadataMutable metadata = 100; +} +message CreateAttributeResponse { + policy.Attribute attribute = 1; +} + +message UpdateAttributeRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + + // Optional + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateAttributeResponse { + policy.Attribute attribute = 1; +} + +message DeactivateAttributeRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message DeactivateAttributeResponse { + policy.Attribute attribute = 1; +} + +/// +/// Value RPC messages +/// +message GetAttributeValueRequest { + // Temporary message level validation until we remove the deprecated id field + option (buf.validate.message).cel = { + id: "exclusive_fields" + expression: "!(has(this.id) && (has(this.value_id) || has(this.fqn)))" + message: "Either use deprecated 'id' field or one of 'value_id' or 'fqn', but not both" + }; + + option (buf.validate.message).cel = { + id: "required_fields" + expression: "has(this.id) || has(this.value_id) || has(this.fqn)" + message: "Either id or one of value_id or fqn must be set" + }; + + // Deprecated + string id = 1 [ + deprecated = true, + (buf.validate.field).ignore = IGNORE_IF_ZERO_VALUE, + (buf.validate.field).string.uuid = true + ]; + + oneof identifier { + //option (buf.validate.oneof).required = true; // TODO: enable this when we remove the deprecated field + string value_id = 2 [(buf.validate.field).string.uuid = true]; + string fqn = 3 [(buf.validate.field).string = { + min_len: 1 + uri: true + }]; + } +} +message GetAttributeValueResponse { + policy.Value value = 1; +} + +message ListAttributeValuesRequest { + // Required + string attribute_id = 1 [(buf.validate.field).string.uuid = true]; + // Optional + // ACTIVE by default when not specified + common.ActiveStateEnum state = 2; + + // Optional + policy.PageRequest pagination = 10; +} +message ListAttributeValuesResponse { + repeated policy.Value values = 1; + + policy.PageResponse pagination = 10; +} + +message CreateAttributeValueRequest { + // Required + string attribute_id = 1 [(buf.validate.field).string.uuid = true]; + // Required + 
string value = 2 [ + (buf.validate.field).required = true, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "attribute_value_format" + message: "Attribute value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored attribute value will be normalized to lower case." + expression: "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + + // deprecated members on values + reserved "members"; + reserved 3; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; +} +message CreateAttributeValueResponse { + policy.Value value = 1; +} + +message UpdateAttributeValueRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + + // Deprecated + reserved "members"; + reserved 4; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateAttributeValueResponse { + policy.Value value = 1; +} + +message DeactivateAttributeValueRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message DeactivateAttributeValueResponse { + policy.Value value = 1; +} + +message GetAttributeValuesByFqnsRequest { + // Required + // Fully Qualified Names of attribute values (i.e. https://<namespace>/attr/<attr_name>/value/<value_name>), normalized to lower case. + repeated string fqns = 1 [(buf.validate.field).repeated = { + min_items: 1 + max_items: 250 + }]; + + // Optional + // This attribute value selector is not used currently, but left here for future use. + policy.AttributeValueSelector with_value = 2; +} +message GetAttributeValuesByFqnsResponse { + message AttributeAndValue { + policy.Attribute attribute = 1; + policy.Value value = 2; + } + // map of FQNs to complete attributes and the one selected value + map<string, AttributeAndValue> fqn_attribute_values = 1; +} + +/* + Assign Key Access Server (KAS Grant) to Attribute and Value +*/ + +message AssignKeyAccessServerToAttributeRequest { + option deprecated = true; + + // Required + AttributeKeyAccessServer attribute_key_access_server = 1; +} + +message AssignKeyAccessServerToAttributeResponse { + option deprecated = true; + AttributeKeyAccessServer attribute_key_access_server = 1; +} + +message RemoveKeyAccessServerFromAttributeRequest { + option deprecated = true; + // Required + AttributeKeyAccessServer attribute_key_access_server = 1; +} + +message RemoveKeyAccessServerFromAttributeResponse { + option deprecated = true; + AttributeKeyAccessServer attribute_key_access_server = 1; +} + +message AssignKeyAccessServerToValueRequest { + option deprecated = true; + // Required + ValueKeyAccessServer value_key_access_server = 1; +} + +message AssignKeyAccessServerToValueResponse { + option deprecated = true; + ValueKeyAccessServer value_key_access_server = 1; +} + +message RemoveKeyAccessServerFromValueRequest { + option deprecated = true; + // Required + ValueKeyAccessServer value_key_access_server = 1; +} + +message RemoveKeyAccessServerFromValueResponse { + option deprecated = true; + ValueKeyAccessServer value_key_access_server = 1; +} + +/* + Assign Key to Attribute and Value +*/ + +message AssignPublicKeyToAttributeRequest { + // Required + AttributeKey attribute_key = 1 [(buf.validate.field).required = true]; +} + +message AssignPublicKeyToAttributeResponse { + // Required + AttributeKey attribute_key = 1; +} + +message RemovePublicKeyFromAttributeRequest { + // Required + AttributeKey attribute_key = 1 [(buf.validate.field).required =
true]; +} + +message RemovePublicKeyFromAttributeResponse { + // Required + AttributeKey attribute_key = 1; +} + +message AssignPublicKeyToValueRequest { + // Required + ValueKey value_key = 1 [(buf.validate.field).required = true]; +} + +message AssignPublicKeyToValueResponse { + // Required + ValueKey value_key = 1; +} + +message RemovePublicKeyFromValueRequest { + // Required + ValueKey value_key = 1 [(buf.validate.field).required = true]; +} + +message RemovePublicKeyFromValueResponse { + // Required + ValueKey value_key = 1; +} + +/// +/// Attribute Service +/// +service AttributesService { + /*--------------------------------------* + * Attribute RPCs + *---------------------------------------*/ + rpc ListAttributes(ListAttributesRequest) returns (ListAttributesResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + rpc ListAttributeValues(ListAttributeValuesRequest) returns (ListAttributeValuesResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc GetAttribute(GetAttributeRequest) returns (GetAttributeResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + rpc GetAttributeValuesByFqns(GetAttributeValuesByFqnsRequest) returns (GetAttributeValuesByFqnsResponse) { + option (google.api.http) = {get: "/attributes/*/fqn"}; + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc CreateAttribute(CreateAttributeRequest) returns (CreateAttributeResponse) {} + + rpc UpdateAttribute(UpdateAttributeRequest) returns (UpdateAttributeResponse) {} + + rpc DeactivateAttribute(DeactivateAttributeRequest) returns (DeactivateAttributeResponse) {} + + /*--------------------------------------* + * Value RPCs + *---------------------------------------*/ + rpc GetAttributeValue(GetAttributeValueRequest) returns (GetAttributeValueResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc CreateAttributeValue(CreateAttributeValueRequest) returns (CreateAttributeValueResponse) {} + + rpc UpdateAttributeValue(UpdateAttributeValueRequest) returns (UpdateAttributeValueResponse) {} + + rpc DeactivateAttributeValue(DeactivateAttributeValueRequest) returns (DeactivateAttributeValueResponse) {} + + /*--------------------------------------* + * Attribute <> Key Access Server RPCs + *---------------------------------------*/ + rpc AssignKeyAccessServerToAttribute(AssignKeyAccessServerToAttributeRequest) returns (AssignKeyAccessServerToAttributeResponse) { + option deprecated = true; + } + + rpc RemoveKeyAccessServerFromAttribute(RemoveKeyAccessServerFromAttributeRequest) returns (RemoveKeyAccessServerFromAttributeResponse) { + option deprecated = true; + } + + rpc AssignKeyAccessServerToValue(AssignKeyAccessServerToValueRequest) returns (AssignKeyAccessServerToValueResponse) { + option deprecated = true; + } + + rpc RemoveKeyAccessServerFromValue(RemoveKeyAccessServerFromValueRequest) returns (RemoveKeyAccessServerFromValueResponse) { + option deprecated = true; + } + + /*--------------------------------------* + * Attribute <> Key RPCs + *---------------------------------------*/ + + rpc AssignPublicKeyToAttribute(AssignPublicKeyToAttributeRequest) returns (AssignPublicKeyToAttributeResponse) {} + + rpc RemovePublicKeyFromAttribute(RemovePublicKeyFromAttributeRequest) returns (RemovePublicKeyFromAttributeResponse) {} + + rpc AssignPublicKeyToValue(AssignPublicKeyToValueRequest) returns (AssignPublicKeyToValueResponse) {} + + rpc RemovePublicKeyFromValue(RemovePublicKeyFromValueRequest) returns (RemovePublicKeyFromValueResponse) {} +} diff --git 
a/otdf-python-proto/proto-files/policy/kasregistry/key_access_server_registry.proto b/otdf-python-proto/proto-files/policy/kasregistry/key_access_server_registry.proto new file mode 100644 index 0000000..badee52 --- /dev/null +++ b/otdf-python-proto/proto-files/policy/kasregistry/key_access_server_registry.proto @@ -0,0 +1,663 @@ +syntax = "proto3"; + +package policy.kasregistry; + +import "buf/validate/validate.proto"; +import "common/common.proto"; +import "google/api/annotations.proto"; +import "policy/objects.proto"; +import "policy/selectors.proto"; + +message GetKeyAccessServerRequest { + // Temporary message level validation until we remove the deprecated id field + option (buf.validate.message).cel = { + id: "exclusive_fields" + expression: "!(has(this.id) && (has(this.kas_id) || has(this.uri) || has(this.name)))" + message: "Either use the deprecated 'id' field or one of 'kas_id', 'name', or 'uri', but not both" + }; + + option (buf.validate.message).cel = { + id: "required_fields" + expression: "has(this.id) || has(this.kas_id) || has(this.uri) || has(this.name)" + message: "Either id or one of kas_id, name, or uri must be set" + }; + + // Deprecated + string id = 1 [ + deprecated = true, + (buf.validate.field).ignore = IGNORE_IF_ZERO_VALUE, + (buf.validate.field).string.uuid = true + ]; + + oneof identifier { + // option (buf.validate.oneof).required = true; // TODO: enable this when we remove the deprecated field + string kas_id = 2 [(buf.validate.field).string.uuid = true]; + string name = 3 [(buf.validate.field).string.min_len = 1]; + string uri = 4 [(buf.validate.field).string = { + min_len: 1 + uri: true + }]; + } +} +message GetKeyAccessServerResponse { + KeyAccessServer key_access_server = 1; +} + +message ListKeyAccessServersRequest { + // Optional + policy.PageRequest pagination = 10; +} +message ListKeyAccessServersResponse { + repeated KeyAccessServer key_access_servers = 1; + + policy.PageResponse pagination = 10; +} + +// TODO: optional validation below should be through a custom validator, which +// is too bleeding edge at present without full plugin support + +message CreateKeyAccessServerRequest { + // Required + string uri = 1 [(buf.validate.field).cel = { + id: "uri_format" + message: + "URI must be a valid URL (e.g., 'https://demo.com/') followed by " + "additional segments. Each segment must start and end with an " + "alphanumeric character, can contain hyphens, alphanumeric " + "characters, and slashes." + expression: "this.isUri()" + }]; + // Deprecated + PublicKey public_key = 2; + + // Optional + SourceType source_type = 3 [ + (buf.validate.field).required = false, + (buf.validate.field).enum = {defined_only: true} + ]; + + // Optional + string name = 20 [ + (buf.validate.field).required = false, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "kas_name_format" + message: + "Registered KAS name must be an alphanumeric string, allowing " + "hyphens, and underscores but not as the first or last " + "character. The stored KAS name will be normalized to lower " + "case." + expression: + "size(this) > 0 ? " + "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + " : true" + } + ]; + + // Common metadata + common.MetadataMutable metadata = 100; +} +message CreateKeyAccessServerResponse { + KeyAccessServer key_access_server = 1; +} + +message UpdateKeyAccessServerRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + // Optional + string uri = 2 [(buf.validate.field).cel = { + id: "optional_uri_format" + message: + "Optional URI must be a valid URL (e.g., 'https://demo.com/') " + "followed by additional segments. Each segment must start and " + "end with an alphanumeric character, can contain hyphens, " + "alphanumeric characters, and slashes." + expression: "size(this) == 0 || this.isUri()" + }]; + // Deprecated + // Optional + PublicKey public_key = 3; + + // Optional + // Using UNSPECIFIED will result in a successful update, + // but will not actually update the underlying source. + // You should not update a KAS from INTERNAL/EXTERNAL + // to unspecified. + SourceType source_type = 4 [ + (buf.validate.field).required = false, + (buf.validate.field).enum = {defined_only: true} + ]; + + // Optional + string name = 20 [ + (buf.validate.field).required = false, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "kas_name_format" + message: + "Registered KAS name must be an alphanumeric string, allowing " + "hyphens, and underscores but not as the first or last " + "character. The stored KAS name will be normalized to lower " + "case." + expression: + "size(this) == 0 || " + "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateKeyAccessServerResponse { + KeyAccessServer key_access_server = 1; +} + +message DeleteKeyAccessServerRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message DeleteKeyAccessServerResponse { + KeyAccessServer key_access_server = 1; +} + +// Can be namespace, attribute definition, or value +message GrantedPolicyObject { + string id = 1; + string fqn = 2; +} + +// Deprecated +message KeyAccessServerGrants { + KeyAccessServer key_access_server = 1; + repeated GrantedPolicyObject namespace_grants = 2; + repeated GrantedPolicyObject attribute_grants = 3; + repeated GrantedPolicyObject value_grants = 4; +} + +/* + KEY MANAGEMENT +*/ + +message CreatePublicKeyRequest { + // Required + string kas_id = 1 [(buf.validate.field).string.uuid = true]; + + // Required + KasPublicKey key = 2 [(buf.validate.field).required = true]; + + // Common metadata + common.MetadataMutable metadata = 100; +} + +message CreatePublicKeyResponse { + Key key = 1; +} + +message GetPublicKeyRequest { + oneof identifier { + string id = 1 [(buf.validate.field).string.uuid = true]; + } +} + +message GetPublicKeyResponse { + Key key = 1; +} + +message ListPublicKeysRequest { + oneof kas_filter { + // Optional + string kas_id = 1 [(buf.validate.field).string.uuid = true]; + // Optional + string kas_name = 2 [(buf.validate.field).string.min_len = 1]; + // Optional + string kas_uri = 3 [(buf.validate.field).string = { + min_len: 1 + uri: true + }]; + } + + // Optional + policy.PageRequest pagination = 10; +} + +message ListPublicKeysResponse { + repeated Key keys = 1; + + policy.PageResponse pagination = 10; +} + +message ListPublicKeyMappingRequest { + oneof kas_filter { + // Optional + string kas_id = 1 [(buf.validate.field).string.uuid = true]; + // Optional + string kas_name = 2 [(buf.validate.field).string.min_len = 1]; + // Optional + string kas_uri = 3 [(buf.validate.field).string = { + min_len: 1 + uri: true + }]; + } + + // Optional Public Key ID + string public_key_id = 4 [ + (buf.validate.field).string.uuid = true, + (buf.validate.field).ignore = IGNORE_IF_ZERO_VALUE + ]; + + // Optional + policy.PageRequest pagination = 10; +} + +message ListPublicKeyMappingResponse { + message PublicKeyMapping { + string kas_id = 2; + string kas_name = 3; + string kas_uri = 4; + repeated PublicKey public_keys = 5; + } + message PublicKey { + policy.Key key = 1; + repeated Association values = 6; + repeated Association definitions = 7; + repeated Association namespaces = 8; + } + message Association { + string id = 1; + string fqn = 2; + } + + repeated PublicKeyMapping public_key_mappings = 1; + + policy.PageResponse pagination = 10; +} + +message UpdatePublicKeyRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} + +message UpdatePublicKeyResponse { + Key key = 1; +} + +message DeactivatePublicKeyRequest { + string id = 1 [(buf.validate.field).string.uuid = true]; +} + +message DeactivatePublicKeyResponse { + Key key = 1; +} + +message ActivatePublicKeyRequest { + string id = 1 [(buf.validate.field).string.uuid = true]; +} + +message ActivatePublicKeyResponse { + Key key = 1; +} + +// Deprecated in favor of ListPublicKeyMapping +// TODO: optional validation below should be through a custom validator, which +// is too bleeding edge at present without full plugin support + +// LIST of KAS Grants returns flat response of grants to all policy objects. It +// does not employ selectors for grants to specific policy objects or build the +// attribute tree relation. If grants to a known namespace, attribute, or value +// are needed, use the respective GET request to the specific policy object. +message ListKeyAccessServerGrantsRequest { + option deprecated = true; + // Optional + // Filter LIST by ID of a registered Key Access Server. + // If none is provided, grants from all registered KASs to policy attribute + // objects are returned. + string kas_id = 1 [(buf.validate.field).cel = { + id: "optional_uuid_format" + message: "Optional field must be a valid UUID" + expression: + "size(this) == 0 || " + "this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[" + "0-9a-fA-F]{4}-[0-9a-fA-F]{12}')" + }]; + // Optional + // Filter LIST by URI of a registered Key Access Server. + // If none is provided, grants from all registered KASs to policy attribute + // objects are returned. + string kas_uri = 2 [(buf.validate.field).cel = { + id: "optional_uri_format" + message: + "Optional URI must be a valid URL (e.g., 'https://demo.com/') " + "followed by additional segments. Each segment must start and " + "end with an alphanumeric character, can contain hyphens, " + "alphanumeric characters, and slashes." + expression: "size(this) == 0 || this.isUri()" + }]; + // Optional + // Filter LIST by name of a registered Key Access Server. + // If none is provided, grants from all registered KASs to policy attribute + // objects are returned.
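+ // For example, kas_name: 'main-kas' (an illustrative registered name) would scope results to grants from that single KAS.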
+ string kas_name = 3 [ + (buf.validate.field).required = false, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "kas_name_format" + message: + "Registered KAS name must be an alphanumeric string, allowing " + "hyphens, and underscores but not as the first or last " + "character. The stored KAS name will be normalized to lower " + "case." + expression: + "size(this) == 0 || " + "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + + // Optional + policy.PageRequest pagination = 10; +} + +// Deprecated +message ListKeyAccessServerGrantsResponse { + option deprecated = true; + + repeated KeyAccessServerGrants grants = 1 [deprecated = true]; + + policy.PageResponse pagination = 10; +} + +/* + KAS Key Management Requests and Response Messages +*/ +// Create a new asymmetric key for the specified Key Access Server (KAS) +message CreateKeyRequest { + option (buf.validate.message).cel = { + id: "private_key_ctx_optionally_required" + message: + "The wrapped_key is required if key_mode is KEY_MODE_CONFIG_ROOT_KEY or KEY_MODE_PROVIDER_ROOT_KEY. " + "The wrapped_key must be empty if key_mode is KEY_MODE_REMOTE or KEY_MODE_PUBLIC_KEY_ONLY." + expression: "((this.key_mode == 1 || this.key_mode == 2) && this.private_key_ctx.wrapped_key != '') || ((this.key_mode == 3 || this.key_mode == 4) && this.private_key_ctx.wrapped_key == '')" + }; + option (buf.validate.message).cel = { + id: "provider_config_id_optionally_required" + message: "Provider config id is required if key_mode is KEY_MODE_PROVIDER_ROOT_KEY or KEY_MODE_REMOTE. It must be empty for KEY_MODE_CONFIG_ROOT_KEY and KEY_MODE_PUBLIC_KEY_ONLY." + expression: "((this.key_mode == 1 || this.key_mode == 4) && this.provider_config_id == '') || ((this.key_mode == 2 || this.key_mode == 3) && this.provider_config_id != '')" + }; + option (buf.validate.message).cel = { + id: "private_key_ctx_for_public_key_only" + message: "private_key_ctx must not be set if key_mode is KEY_MODE_PUBLIC_KEY_ONLY." + expression: "!(this.key_mode == 4 && has(this.private_key_ctx))" + }; + + // Required + string kas_id = 1 [(buf.validate.field).string.uuid = true]; // The unique identifier of the Key Access Server + // Required + string key_id = 2 [(buf.validate.field).string.min_len = 1]; // A user-defined identifier for the key + // Required + Algorithm key_algorithm = 3 [(buf.validate.field).cel = { + id: "key_algorithm_defined" + message: "The key_algorithm must be one of the defined values." + expression: "this in [1, 2, 3, 4]" // Allow ALGORITHM_EC_P256, ALGORITHM_RSA_2048, ALGORITHM_X25519, ALGORITHM_AES_256_GCM + }]; // The algorithm to be used for the key + // Required + KeyMode key_mode = 4 [(buf.validate.field).cel = { + id: "key_mode_defined" + message: "The key_mode must be one of the defined values (1-4)." 
+ expression: "this >= 1 && this <= 4" // Allow CONFIG_ROOT_KEY, PROVIDER_ROOT_KEY, REMOTE, PUBLIC_KEY_ONLY + }]; // The mode of the key (e.g., local or external) + // Required + PublicKeyCtx public_key_ctx = 5 [(buf.validate.field).required = true]; // Context or additional data specific to the public key, based on the key provider implementation + // Conditionally Required + PrivateKeyCtx private_key_ctx = 6; // Context or additional data specific to the private key, based on the key provider implementation + // Optional + string provider_config_id = 7; // Configuration ID for the key provider, if applicable + // Common metadata + common.MetadataMutable metadata = 100; // Mutable metadata for the key +} + +// Response to a CreateKeyRequest, containing the created asymmetric key +message CreateKeyResponse { + KasKey kas_key = 1; // The created asymmetric key for a KAS. +} + +// Retrieve an existing asymmetric key from the Key Management System +message GetKeyRequest { + oneof identifier { + option (buf.validate.oneof).required = true; + string id = 2 [(buf.validate.field).string.uuid = true]; // The unique identifier of the key to retrieve + KasKeyIdentifier key = 3; + } +} + +// Response to a GetKeyRequest, containing the requested asymmetric key +message GetKeyResponse { + KasKey kas_key = 1; // The requested asymmetric key for a KAS. +} + +// List all asymmetric keys managed by a specific Key Access Server or with a given algorithm +message ListKeysRequest { + Algorithm key_algorithm = 1 [(buf.validate.field).cel = { + id: "key_algorithm_defined" + message: "The key_algorithm must be one of the defined values." + expression: "this in [0, 1, 2, 3, 4]" // Allow ALGORITHM_EC_P256, ALGORITHM_RSA_2048, ALGORITHM_X25519, ALGORITHM_AES_256_GCM + }]; // Filter keys by algorithm + + oneof kas_filter { + string kas_id = 2 [(buf.validate.field).string.uuid = true]; // Filter keys by the KAS ID + string kas_name = 3 [(buf.validate.field).string.min_len = 1]; // Filter keys by the KAS name + string kas_uri = 4 [(buf.validate.field).string = { + min_len: 1 + uri: true + }]; // Filter keys by the KAS URI + } + + // Optional + policy.PageRequest pagination = 10; // Pagination request for the list of keys +} + +// Response to a ListKeysRequest, containing the list of asymmetric keys and pagination information +message ListKeysResponse { + repeated KasKey kas_keys = 1; // The list of kas keys + + policy.PageResponse pagination = 10; // Pagination response for the list of keys +} + +// Update an existing asymmetric key in the Key Management System +message UpdateKeyRequest { + option (buf.validate.message).cel = { + id: "metadata_update_behavior" + message: "Metadata update behavior must be either APPEND or REPLACE, when updating metadata." 
+ expression: "((!has(this.metadata)) || (has(this.metadata) && this.metadata_update_behavior != 0))" + }; + + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; // The unique identifier of the key to update + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; // Mutable metadata for the key + common.MetadataUpdateEnum metadata_update_behavior = 101; // The behavior for updating the metadata +} + +// Response to an UpdateKeyRequest, containing the updated asymmetric key +message UpdateKeyResponse { + KasKey kas_key = 1; // The updated kas key +} + +// Nested message for specifying the active key using KAS ID and Key ID +message KasKeyIdentifier { + // Required UUID of the Key Access Server + oneof identifier { + option (buf.validate.oneof).required = true; + string kas_id = 2 [(buf.validate.field).string.uuid = true]; + string name = 3 [(buf.validate.field).string.min_len = 1]; + string uri = 4 [(buf.validate.field).string = { + min_len: 1 + uri: true + }]; + } + // Required Key ID of the key in question + string kid = 5 [(buf.validate.field).string.min_len = 1]; +} + +message RotateKeyRequest { + option (buf.validate.message).cel = { + id: "private_key_ctx_optionally_required" + message: + "For the new key, the wrapped_key is required if key_mode is KEY_MODE_CONFIG_ROOT_KEY or KEY_MODE_PROVIDER_ROOT_KEY. " + "The wrapped_key must be empty if key_mode is KEY_MODE_REMOTE or KEY_MODE_PUBLIC_KEY_ONLY." + expression: "((this.new_key.key_mode == 1 || this.new_key.key_mode == 2) && this.new_key.private_key_ctx.wrapped_key != '') || ((this.new_key.key_mode == 3 || this.new_key.key_mode == 4) && this.new_key.private_key_ctx.wrapped_key == '')" + }; + option (buf.validate.message).cel = { + id: "provider_config_id_optionally_required" + message: "For the new key, provider config id is required if key_mode is KEY_MODE_PROVIDER_ROOT_KEY or KEY_MODE_REMOTE. It must be empty for KEY_MODE_CONFIG_ROOT_KEY and KEY_MODE_PUBLIC_KEY_ONLY." + expression: "((this.new_key.key_mode == 1 || this.new_key.key_mode == 4) && this.new_key.provider_config_id == '') || ((this.new_key.key_mode == 2 || this.new_key.key_mode == 3) && this.new_key.provider_config_id != '')" + }; + option (buf.validate.message).cel = { + id: "private_key_ctx_for_public_key_only" + message: "private_key_ctx must not be set if key_mode is KEY_MODE_PUBLIC_KEY_ONLY." + expression: "!(this.new_key.key_mode == 4 && has(this.new_key.private_key_ctx))" + }; + + // Required (Current Active Key ID) + oneof active_key { + option (buf.validate.oneof).required = true; + // Current Active Key UUID + string id = 1 [(buf.validate.field).string.uuid = true]; + // Alternative way to specify the active key using KAS ID and Key ID + KasKeyIdentifier key = 2; + } + + // Information about the new key to be rotated in + NewKey new_key = 3; + + // Nested message for specifying the new key details + message NewKey { + // Required + string key_id = 1 [(buf.validate.field).string.min_len = 1]; + // Required + Algorithm algorithm = 2 [(buf.validate.field).cel = { + id: "key_algorithm_defined" + message: "The key_algorithm must be one of the defined values." + expression: "this in [1, 2, 3, 4]" // Allow ALGORITHM_EC_P256, ALGORITHM_RSA_2048, ALGORITHM_X25519, ALGORITHM_AES_256_GCM + }]; + // Required + KeyMode key_mode = 3 [ + (buf.validate.field).enum.defined_only = true, + (buf.validate.field).cel = { + id: "new_key_mode_defined" + message: "The new key_mode must be one of the defined values (1-4)." 
+ expression: "this in [1, 2, 3, 4]" // Allow all defined modes + } + ]; + // Required + PublicKeyCtx public_key_ctx = 4 [(buf.validate.field).required = true]; + // Conditionally Required + PrivateKeyCtx private_key_ctx = 5; + // Conditionally Required. + string provider_config_id = 6; // Validation handled by message-level CEL + + // Common metadata fields + common.MetadataMutable metadata = 100; + } +} + +/** + * Simplified information about the resources that were rotated as part of the key rotation process. + */ +message ChangeMappings { + string id = 1; + string fqn = 2; +} + +/* + * All resources that were rotated as part of the key rotation process + */ +message RotatedResources { + KasKey rotated_out_key = 1; // The old key that was rotated out + repeated ChangeMappings attribute_definition_mappings = 2; + repeated ChangeMappings attribute_value_mappings = 3; + repeated ChangeMappings namespace_mappings = 4; +} + +// Response message for the RotateKey request +message RotateKeyResponse { + // The newly rotated Kas Key + KasKey kas_key = 1; + // All resources that were rotated as part of the key rotation process + RotatedResources rotated_resources = 2; +} + +// Sets the specified key as the base key for the Key Access Server +// Note: The key must be active. +message SetBaseKeyRequest { + // Required + oneof active_key { + option (buf.validate.oneof).required = true; + // Current Key UUID to be set as default + string id = 1 [(buf.validate.field).string.uuid = true]; + // Alternative way to specify the key using KAS ID and Key ID + KasKeyIdentifier key = 2; + } +} + +message GetBaseKeyRequest {} +message GetBaseKeyResponse { + SimpleKasKey base_key = 1; // The current base key +} + +message SetBaseKeyResponse { + SimpleKasKey new_base_key = 1; // The key that was set as base + SimpleKasKey previous_base_key = 2; // The previous base key, if any +} + +service KeyAccessServerRegistryService { + rpc ListKeyAccessServers(ListKeyAccessServersRequest) returns (ListKeyAccessServersResponse) { + option (google.api.http) = {get: "/key-access-servers"}; + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc GetKeyAccessServer(GetKeyAccessServerRequest) returns (GetKeyAccessServerResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc CreateKeyAccessServer(CreateKeyAccessServerRequest) returns (CreateKeyAccessServerResponse) {} + + rpc UpdateKeyAccessServer(UpdateKeyAccessServerRequest) returns (UpdateKeyAccessServerResponse) {} + + rpc DeleteKeyAccessServer(DeleteKeyAccessServerRequest) returns (DeleteKeyAccessServerResponse) {} + + // Deprecated + rpc ListKeyAccessServerGrants(ListKeyAccessServerGrantsRequest) returns (ListKeyAccessServerGrantsResponse) { + option deprecated = true; + option idempotency_level = NO_SIDE_EFFECTS; + } + + // KAS Key Management + // Request to create a new key in the Key Access Service. + rpc CreateKey(CreateKeyRequest) returns (CreateKeyResponse) {} + + // Request to retrieve a key from the Key Access Service. + rpc GetKey(GetKeyRequest) returns (GetKeyResponse) {} + + // Request to list keys in the Key Access Service. + rpc ListKeys(ListKeysRequest) returns (ListKeysResponse) {} + + // Request to update a key in the Key Access Service. + rpc UpdateKey(UpdateKeyRequest) returns (UpdateKeyResponse) {} + + // Request to rotate a key in the Key Access Service. + rpc RotateKey(RotateKeyRequest) returns (RotateKeyResponse) {} + + // Request to set the base (default) KAS key.
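+ // For example, after a successful RotateKey a caller might promote the new key with SetBaseKey({ key: { kas_id: "<kas uuid>", kid: "<new key id>" } }); the referenced key must be active (illustrative request shape based on the messages above).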
+ rpc SetBaseKey(SetBaseKeyRequest) returns (SetBaseKeyResponse) {} + + // Request to get the current base (default) KAS key. + rpc GetBaseKey(GetBaseKeyRequest) returns (GetBaseKeyResponse) {} +} diff --git a/otdf-python-proto/proto-files/policy/keymanagement/key_management.proto b/otdf-python-proto/proto-files/policy/keymanagement/key_management.proto new file mode 100644 index 0000000..56c2f54 --- /dev/null +++ b/otdf-python-proto/proto-files/policy/keymanagement/key_management.proto @@ -0,0 +1,84 @@ +syntax = "proto3"; + +package policy.keymanagement; + +import "buf/validate/validate.proto"; +import "common/common.proto"; +import "policy/objects.proto"; +import "policy/selectors.proto"; + +/* + Provider Configuration Requests and Response Messages +*/ +message CreateProviderConfigRequest { + // Required + // The name of the key provider. (e.g. "AWS KMS", "Google Cloud KMS", "Azure Key Vault") + string name = 1 [(buf.validate.field).required = true]; + // Required + // JSON configuration for the key provider. This is unique to individual key providers. + bytes config_json = 2 [(buf.validate.field).required = true]; + + // Common metadata + common.MetadataMutable metadata = 100; +} +message CreateProviderConfigResponse { + KeyProviderConfig provider_config = 1; +} + +message GetProviderConfigRequest { + // Required + oneof identifier { + option (buf.validate.oneof).required = true; + string id = 2 [(buf.validate.field).string.uuid = true]; + string name = 3 [(buf.validate.field).string.min_len = 1]; + } +} +message GetProviderConfigResponse { + KeyProviderConfig provider_config = 1; +} + +message ListProviderConfigsRequest { + // Optional + policy.PageRequest pagination = 10; +} +message ListProviderConfigsResponse { + repeated KeyProviderConfig provider_configs = 1; + + policy.PageResponse pagination = 10; +} + +message UpdateProviderConfigRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + // Optional + string name = 2 [(buf.validate.field).required = false]; + // Optional + bytes config_json = 3 [(buf.validate.field).required = false]; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateProviderConfigResponse { + KeyProviderConfig provider_config = 1; +} + +// To delete a provider configuration, you must first delete all keys associated with the provider.
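+// For example, if keys were created with this config's provider_config_id, a DeleteProviderConfig call should be expected to fail until those keys are removed.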
+message DeleteProviderConfigRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message DeleteProviderConfigResponse { + KeyProviderConfig provider_config = 1; +} + +service KeyManagementService { + // Key Management + // Provider Management + rpc CreateProviderConfig(CreateProviderConfigRequest) returns (CreateProviderConfigResponse) {} + rpc GetProviderConfig(GetProviderConfigRequest) returns (GetProviderConfigResponse) {} + rpc ListProviderConfigs(ListProviderConfigsRequest) returns (ListProviderConfigsResponse) {} + rpc UpdateProviderConfig(UpdateProviderConfigRequest) returns (UpdateProviderConfigResponse) {} + rpc DeleteProviderConfig(DeleteProviderConfigRequest) returns (DeleteProviderConfigResponse) {} +} diff --git a/otdf-python-proto/proto-files/policy/namespaces/namespaces.proto b/otdf-python-proto/proto-files/policy/namespaces/namespaces.proto new file mode 100644 index 0000000..56f0908 --- /dev/null +++ b/otdf-python-proto/proto-files/policy/namespaces/namespaces.proto @@ -0,0 +1,200 @@ +syntax = "proto3"; + +package policy.namespaces; + +import "buf/validate/validate.proto"; +import "common/common.proto"; +import "policy/objects.proto"; +import "policy/selectors.proto"; + +/* + Key Access Server Grants +*/ + +// Deprecated +message NamespaceKeyAccessServer { + option deprecated = true; + // Required + string namespace_id = 1 [(buf.validate.field).string.uuid = true]; + // Required + string key_access_server_id = 2 [(buf.validate.field).string.uuid = true]; +} + +/* + Key Maps +*/ + +message NamespaceKey { + // Required + string namespace_id = 1 [ + (buf.validate.field).string.uuid = true, + (buf.validate.field).required = true + ]; + // Required (The id from the Asymmetric Key object) + string key_id = 2 [ + (buf.validate.field).string.uuid = true, + (buf.validate.field).required = true + ]; +} + +/* + + Namespace Service Definitions + +*/ + +message GetNamespaceRequest { + // Temporary message level validation until we remove the deprecated id field + option (buf.validate.message).cel = { + id: "exclusive_fields" + expression: "!(has(this.id) && (has(this.namespace_id) || has(this.fqn)))" + message: "Either use deprecated 'id' field or one of 'namespace_id' or 'fqn', but not both" + }; + + option (buf.validate.message).cel = { + id: "required_fields" + expression: "has(this.id) || has(this.namespace_id) || has(this.fqn)" + message: "Either id or one of namespace_id or fqn must be set" + }; + + // Deprecated + string id = 1 [ + deprecated = true, + (buf.validate.field).ignore = IGNORE_IF_ZERO_VALUE, + (buf.validate.field).string.uuid = true + ]; + + oneof identifier { + //option (buf.validate.oneof).required = true; // TODO: enable this when we remove the deprecated field + string namespace_id = 2 [(buf.validate.field).string.uuid = true]; + string fqn = 3 [(buf.validate.field).string = { + min_len: 1 + uri: true + }]; + } +} + +message GetNamespaceResponse { + policy.Namespace namespace = 1; +} + +message ListNamespacesRequest { + // Optional + // ACTIVE by default when not specified + common.ActiveStateEnum state = 1; + + // Optional + policy.PageRequest pagination = 10; +} +message ListNamespacesResponse { + repeated policy.Namespace namespaces = 1; + + policy.PageResponse pagination = 10; +} + +message CreateNamespaceRequest { + // Required + string name = 1 [ + (buf.validate.field).required = true, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "namespace_format" + message: "Namespace must be a valid 
hostname. It should include at least one dot, with each segment (label) starting and ending with an alphanumeric character. Each label must be 1 to 63 characters long, allowing hyphens but not as the first or last character. The top-level domain (the last segment after the final dot) must consist of at least two alphabetic characters. The stored namespace will be normalized to lower case." + expression: "this.matches('^([a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?\\\\.)+[a-zA-Z]{2,}$')" + } + ]; + + // Optional + common.MetadataMutable metadata = 100; +} +message CreateNamespaceResponse { + policy.Namespace namespace = 1; +} + +message UpdateNamespaceRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + + // Optional + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateNamespaceResponse { + policy.Namespace namespace = 1; +} + +message DeactivateNamespaceRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message DeactivateNamespaceResponse {} + +/* + Assign Key Access Server to Namespace +*/ + +message AssignKeyAccessServerToNamespaceRequest { + NamespaceKeyAccessServer namespace_key_access_server = 1; +} + +message AssignKeyAccessServerToNamespaceResponse { + NamespaceKeyAccessServer namespace_key_access_server = 1; +} + +message RemoveKeyAccessServerFromNamespaceRequest { + NamespaceKeyAccessServer namespace_key_access_server = 1; +} + +message RemoveKeyAccessServerFromNamespaceResponse { + NamespaceKeyAccessServer namespace_key_access_server = 1; +} + +/* + Assign Key to Namespace +*/ +message AssignPublicKeyToNamespaceRequest { + NamespaceKey namespace_key = 1 [(buf.validate.field).required = true]; +} + +message AssignPublicKeyToNamespaceResponse { + NamespaceKey namespace_key = 1; +} + +message RemovePublicKeyFromNamespaceRequest { + NamespaceKey namespace_key = 1 [(buf.validate.field).required = true]; +} + +message RemovePublicKeyFromNamespaceResponse { + NamespaceKey namespace_key = 1; +} + +service NamespaceService { + rpc GetNamespace(GetNamespaceRequest) returns (GetNamespaceResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc ListNamespaces(ListNamespacesRequest) returns (ListNamespacesResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc CreateNamespace(CreateNamespaceRequest) returns (CreateNamespaceResponse) {} + rpc UpdateNamespace(UpdateNamespaceRequest) returns (UpdateNamespaceResponse) {} + rpc DeactivateNamespace(DeactivateNamespaceRequest) returns (DeactivateNamespaceResponse) {} + + /*--------------------------------------* + * Namespace <> Key Access Server RPCs + *---------------------------------------*/ + rpc AssignKeyAccessServerToNamespace(AssignKeyAccessServerToNamespaceRequest) returns (AssignKeyAccessServerToNamespaceResponse) { + option deprecated = true; + } + + rpc RemoveKeyAccessServerFromNamespace(RemoveKeyAccessServerFromNamespaceRequest) returns (RemoveKeyAccessServerFromNamespaceResponse) { + option deprecated = true; + } + + /*--------------------------------------* + * Namespace <> Key RPCs + *---------------------------------------*/ + rpc AssignPublicKeyToNamespace(AssignPublicKeyToNamespaceRequest) returns (AssignPublicKeyToNamespaceResponse) {} + rpc RemovePublicKeyFromNamespace(RemovePublicKeyFromNamespaceRequest) returns (RemovePublicKeyFromNamespaceResponse) {} +} diff --git a/otdf-python-proto/proto-files/policy/objects.proto 
b/otdf-python-proto/proto-files/policy/objects.proto new file mode 100644 index 0000000..cb7ba7a --- /dev/null +++ b/otdf-python-proto/proto-files/policy/objects.proto @@ -0,0 +1,556 @@ +syntax = "proto3"; + +package policy; + +import "buf/validate/validate.proto"; +import "common/common.proto"; +import "google/protobuf/wrappers.proto"; + +message SimpleKasPublicKey { + Algorithm algorithm = 1; + string kid = 2; + string pem = 3; +} + +message SimpleKasKey { + string kas_uri = 1; // The URL of the Key Access Server + SimpleKasPublicKey public_key = 2; // The public key of the Key that belongs to the KAS + string kas_id = 3; // The ID of the Key Access Server +}; + +message KeyProviderConfig { + string id = 1; + string name = 2; + bytes config_json = 3; + + // Common metadata + common.Metadata metadata = 100; +} + +message Namespace { + // generated uuid in database + string id = 1; + // used to partition Attribute Definitions, support namespace-scoped AuthN, and + // enable federation + string name = 2; + + string fqn = 3; + + // active by default until explicitly deactivated + google.protobuf.BoolValue active = 4; + + common.Metadata metadata = 5; + + // Deprecated KAS grants for the namespace. Use kas_keys instead. + repeated KeyAccessServer grants = 6; + + // Keys for the namespace + repeated SimpleKasKey kas_keys = 7; +} + +message Attribute { + string id = 1; + + // namespace of the attribute + Namespace namespace = 2; + + // attribute name + string name = 3; + + // attribute rule enum + AttributeRuleTypeEnum rule = 4 [ + (buf.validate.field).enum.defined_only = true, + (buf.validate.field).required = true + ]; + + repeated Value values = 5; + + // Deprecated KAS grants for the attribute. Use kas_keys instead. + repeated KeyAccessServer grants = 6; + + string fqn = 7; + + // active by default until explicitly deactivated + google.protobuf.BoolValue active = 8; + + // Keys associated with the attribute + repeated SimpleKasKey kas_keys = 9; + + // Common metadata + common.Metadata metadata = 100; +} + +enum AttributeRuleTypeEnum { + ATTRIBUTE_RULE_TYPE_ENUM_UNSPECIFIED = 0; + ATTRIBUTE_RULE_TYPE_ENUM_ALL_OF = 1; + ATTRIBUTE_RULE_TYPE_ENUM_ANY_OF = 2; + ATTRIBUTE_RULE_TYPE_ENUM_HIERARCHY = 3; +} + +message Value { + // generated uuid in database + string id = 1; + + Attribute attribute = 2; + + string value = 3; + + // Deprecated + reserved "members"; + reserved 4; + + // Deprecated KAS grants for the value. Use kas_keys instead. + repeated KeyAccessServer grants = 5; + + string fqn = 6; + + // active by default until explicitly deactivated + google.protobuf.BoolValue active = 7; + + // subject mapping + repeated SubjectMapping subject_mappings = 8; + + repeated SimpleKasKey kas_keys = 9; + + repeated ResourceMapping resource_mappings = 10; + + // Common metadata + common.Metadata metadata = 100; +} + +// An action an entity can take +message Action { + // Generated uuid in database + string id = 3; + + enum StandardAction { + STANDARD_ACTION_UNSPECIFIED = 0; + + // Deprecated + // Migrate to 'read' action name + STANDARD_ACTION_DECRYPT = 1; + + // Deprecated + // Migrate to 'create' action name + STANDARD_ACTION_TRANSMIT = 2; + } + // Deprecated + oneof value { + // Deprecated + StandardAction standard = 1; + // Deprecated + string custom = 2; + } + + string name = 4; + + common.Metadata metadata = 100; +} + +/* + Subject Mapping (aka Access Control Subject Encoding aka ACSE): Structures + supporting the mapping of Subjects and Attributes (e.g. Entitlement) +*/ + +enum SubjectMappingOperatorEnum { + SUBJECT_MAPPING_OPERATOR_ENUM_UNSPECIFIED = 0; + // operator that returns true if a value in a list matches the string + SUBJECT_MAPPING_OPERATOR_ENUM_IN = 1; + // operator that returns true if a value is not in a list that is matched by + // string + SUBJECT_MAPPING_OPERATOR_ENUM_NOT_IN = 2; + // operator that returns true if a value in a list contains the substring + SUBJECT_MAPPING_OPERATOR_ENUM_IN_CONTAINS = 3; +} + +enum ConditionBooleanTypeEnum { + CONDITION_BOOLEAN_TYPE_ENUM_UNSPECIFIED = 0; + CONDITION_BOOLEAN_TYPE_ENUM_AND = 1; + CONDITION_BOOLEAN_TYPE_ENUM_OR = 2; +} + +/* + Subject Mapping: A Policy assigning Subject Set(s) to a permitted attribute + value + action(s) combination +*/ +message SubjectMapping { + string id = 1; + + // the Attribute Value mapped to; aka: "The Entity Entitlement Attribute" + Value attribute_value = 2; + + // the reusable SubjectConditionSet mapped to the given Attribute Value + SubjectConditionSet subject_condition_set = 3; + + // The actions permitted by subjects in this mapping + repeated Action actions = 4; + + common.Metadata metadata = 100; +} + +/** + A Condition defines a rule of: <the value at the flattened 'selector value' location> <operator> <subject_external_values> +*/ +message Condition { + // a selector for a field value on a flattened Entity Representation (such as + // from idP/LDAP) + string subject_external_selector_value = 1 [(buf.validate.field).required = true]; + + // the evaluation operator of relation + SubjectMappingOperatorEnum operator = 2 [ + (buf.validate.field).enum.defined_only = true, + (buf.validate.field).required = true + ]; + + // list of comparison values for the result of applying the + // subject_external_selector_value on a flattened Entity Representation + // (Subject), evaluated by the operator + repeated string subject_external_values = 3 [(buf.validate.field).repeated.min_items = 1]; +} + +// A collection of Conditions evaluated by the boolean_operator provided +message ConditionGroup { + repeated Condition conditions = 1 [(buf.validate.field).repeated.min_items = 1]; + + // the boolean evaluation type across the conditions + ConditionBooleanTypeEnum boolean_operator = 2 [ + (buf.validate.field).enum.defined_only = true, + (buf.validate.field).required = true + ]; +} + +// A collection of Condition Groups +message SubjectSet { + // multiple Condition Groups are evaluated with AND logic + repeated ConditionGroup condition_groups = 1 [(buf.validate.field).repeated.min_items = 1]; +} + +/* + A container for multiple Subject Sets, each containing Condition Groups, each + containing Conditions. Multiple Subject Sets in a SubjectConditionSet are + evaluated with AND logic. As each Subject Mapping has only one Attribute + Value, the SubjectConditionSet is reusable across multiple Subject Mappings / + Attribute Values and is an independent unit. +*/ +message SubjectConditionSet { + string id = 1; + + repeated SubjectSet subject_sets = 3 [(buf.validate.field).repeated.min_items = 1]; + + common.Metadata metadata = 100; +} + +/* + + A property of a Subject/Entity as its selector expression -> value result + pair. This would mirror external user attributes retrieved from an + authoritative source such as an IDP (Identity Provider) or User Store. + Examples include ADFS/LDAP, OKTA, etc. For now, a valid property must + contain both a selector expression & a resulting value.
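+ + For example, a property might pair the selector '.clientId' with the + resulting value 'opentdf-sdk' (illustrative values only).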
+
+   The external_selector_value is a specifier to select a value from a flattened
+   external representation of an Entity (such as from idP/LDAP), and the
+   external_value is the value selected by the external_selector_value on that
+   Entity Representation (Subject Context). These mirror the Condition.
+*/
+message SubjectProperty {
+  string external_selector_value = 1 [
+    (buf.validate.field).required = true,
+    (buf.validate.field).string = {min_len: 1}
+  ];
+  string external_value = 2;
+}
+
+/*
+   Resource Mapping Groups are namespaced collections of Resource Mappings
+   associated under a common group name.
+*/
+message ResourceMappingGroup {
+  string id = 1;
+
+  // the namespace containing the group of resource mappings
+  string namespace_id = 2 [(buf.validate.field).required = true];
+
+  // the common name for the group of resource mappings, which must be unique
+  // per namespace
+  string name = 3 [(buf.validate.field).required = true];
+
+  // Common metadata
+  common.Metadata metadata = 100;
+}
+
+/*
+   Resource Mappings (aka Access Control Resource Encodings aka ACRE) are
+   structures supporting the mapping of Resources and Attribute Values
+*/
+message ResourceMapping {
+  string id = 1;
+
+  common.Metadata metadata = 2;
+
+  policy.Value attribute_value = 3 [(buf.validate.field).required = true];
+
+  repeated string terms = 4;
+
+  ResourceMappingGroup group = 5;
+}
+
+// Describes whether this KAS is managed by the organization or whether the KAS
+// information was imported from an external party. These two modes are necessary
+// in order to encrypt a TDF DEK with an external party's KAS public key.
+enum SourceType {
+  SOURCE_TYPE_UNSPECIFIED = 0;
+  // The KAS is managed by the organization.
+  SOURCE_TYPE_INTERNAL = 1;
+  // The KAS is managed by an external party.
+  SOURCE_TYPE_EXTERNAL = 2;
+}
+
+/*
+   Key Access Server Registry
+*/
+message KeyAccessServer {
+  string id = 1;
+  // Address of a KAS instance
+  string uri = 2 [(buf.validate.field).cel = {
+    id: "uri_format"
+    message:
+      "URI must be a valid URL (e.g., 'https://demo.com/') followed by "
+      "additional segments. Each segment must start and end with an "
+      "alphanumeric character, can contain hyphens, alphanumeric "
+      "characters, and slashes."
+    expression:
+      "this.matches('^https?://"
+      "[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?(\\\\.[a-zA-"
+      "Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?)*(:[0-9]+)?(/"
+      ".*)?$')"
+  }];
+
+  // Deprecated
+  PublicKey public_key = 3;
+
+  // The source of the KAS: (INTERNAL, EXTERNAL)
+  SourceType source_type = 4;
+  // KAS keys associated with this KAS
+  repeated SimpleKasKey kas_keys = 5;
+
+  // Optional
+  // Unique name of the KAS instance
+  string name = 20;
+
+  // Common metadata
+  common.Metadata metadata = 100;
+}
+
+message Key {
+  // the database record ID, not the key ID (`kid`)
+  string id = 1;
+
+  google.protobuf.BoolValue is_active = 2;
+
+  google.protobuf.BoolValue was_mapped = 3;
+
+  KasPublicKey public_key = 4;
+
+  KeyAccessServer kas = 5;
+
+  // Common metadata
+  common.Metadata metadata = 100;
+}
+
+enum KasPublicKeyAlgEnum {
+  KAS_PUBLIC_KEY_ALG_ENUM_UNSPECIFIED = 0;
+  KAS_PUBLIC_KEY_ALG_ENUM_RSA_2048 = 1;
+  KAS_PUBLIC_KEY_ALG_ENUM_RSA_4096 = 2;
+
+  KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP256R1 = 5;
+  KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP384R1 = 6;
+  KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP521R1 = 7;
+}
+
+// Deprecated
+// A KAS public key and some associated metadata for further identification
+message KasPublicKey {
+  // x509 ASN.1 content in PEM envelope, usually
+  string pem = 1 [(buf.validate.field).string = {
+    min_len: 1
+    max_len: 8192
+  }];
+
+  // A unique string identifier for this key
+  string kid = 2 [(buf.validate.field).string = {
+    min_len: 1
+    max_len: 32
+  }];
+
+  // A known algorithm type with any additional parameters encoded.
+  // To start, these may be `rsa:2048` for encrypting ZTDF files and
+  // `ec:secp256r1` for nanoTDF, but more formats may be added as needed.
+  KasPublicKeyAlgEnum alg = 3 [(buf.validate.field).enum = {
+    defined_only: true
+    not_in: [0]
+  }];
+}
+
+// Deprecated
+// A list of known KAS public keys
+message KasPublicKeySet {
+  repeated KasPublicKey keys = 1;
+}
+
+// Deprecated
+message PublicKey {
+  // Deprecated
+  reserved "local";
+  reserved 2;
+
+  oneof public_key {
+    // kas public key url - optional since it can also be retrieved via public key
+    string remote = 1 [(buf.validate.field).cel = {
+      id: "uri_format"
+      message:
+        "URI must be a valid URL (e.g., 'https://demo.com/') followed "
+        "by additional segments. Each segment must start and end with "
+        "an alphanumeric character, can contain hyphens, alphanumeric "
+        "characters, and slashes."
+      expression:
+        "this.matches('^https://"
+        "[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?(\\\\.[a-zA-"
+        "Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?)*(/.*)?$')"
+    }];
+
+    // public key with additional information. Current preferred version
+    KasPublicKeySet cached = 3;
+  }
+}
+
+message RegisteredResource {
+  string id = 1;
+
+  string name = 2;
+
+  repeated RegisteredResourceValue values = 3;
+
+  // Common metadata
+  common.Metadata metadata = 100;
+}
+
+message RegisteredResourceValue {
+  message ActionAttributeValue {
+    string id = 1;
+
+    Action action = 2;
+
+    Value attribute_value = 3;
+
+    // Common metadata
+    common.Metadata metadata = 100;
+  }
+
+  string id = 1;
+
+  string value = 2;
+
+  RegisteredResource resource = 3;
+
+  repeated ActionAttributeValue action_attribute_values = 4;
+
+  // Common metadata
+  common.Metadata metadata = 100;
+}
+
+// Supported key algorithms.
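+// Editorial note (assumed correspondence, not normative): these values are
+// understood to match the string algorithm forms used elsewhere in this file,
+// e.g. ALGORITHM_RSA_2048 <-> `rsa:2048` and ALGORITHM_EC_P256 <->
+// `ec:secp256r1` (P-256 and secp256r1 are two names for the same curve).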
+enum Algorithm {
+  ALGORITHM_UNSPECIFIED = 0;
+  ALGORITHM_RSA_2048 = 1;
+  ALGORITHM_RSA_4096 = 2;
+  ALGORITHM_EC_P256 = 3;
+  ALGORITHM_EC_P384 = 4;
+  ALGORITHM_EC_P521 = 5;
+}
+
+// The status of the key
+enum KeyStatus {
+  KEY_STATUS_UNSPECIFIED = 0;
+  KEY_STATUS_ACTIVE = 1;
+  KEY_STATUS_ROTATED = 2;
+}
+
+// Describes the management and operational mode of a cryptographic key.
+enum KeyMode {
+  // KEY_MODE_UNSPECIFIED: Default, unspecified key mode. Indicates an uninitialized or error state.
+  KEY_MODE_UNSPECIFIED = 0;
+
+  // KEY_MODE_CONFIG_ROOT_KEY: Local key management where the private key is wrapped by a Key Encryption Key (KEK)
+  // sourced from local configuration. Unwrapping and all cryptographic operations are performed locally.
+  KEY_MODE_CONFIG_ROOT_KEY = 1;
+
+  // KEY_MODE_PROVIDER_ROOT_KEY: Local key management where the private key is wrapped by a Key Encryption Key (KEK)
+  // managed by an external provider (e.g., a Hardware Security Module or Cloud KMS).
+  // Key unwrapping is delegated to the external provider; subsequent cryptographic operations
+  // are performed locally using the unwrapped key.
+  KEY_MODE_PROVIDER_ROOT_KEY = 2;
+
+  // KEY_MODE_REMOTE: Remote key management where the private key is stored in, and all cryptographic
+  // operations are performed by, a remote Key Management Service (KMS) or HSM.
+  // The private key material never leaves the secure boundary of the remote system.
+  KEY_MODE_REMOTE = 3;
+
+  // KEY_MODE_PUBLIC_KEY_ONLY: Public key only mode. Used when only a public key is available or required,
+  // typically for wrapping operations (e.g., encrypting a Data Encryption Key (DEK) for an external KAS).
+  // The corresponding private key is not managed or accessible by this system.
+  KEY_MODE_PUBLIC_KEY_ONLY = 4;
+}
+
+message KasKey {
+  string kas_id = 1;
+  AsymmetricKey key = 2;
+  string kas_uri = 3;
+}
+
+message PublicKeyCtx {
+  // Required
+  string pem = 1 [(buf.validate.field).string = {min_len: 1}]; // Base64 encoded public key in PEM format
+}
+
+message PrivateKeyCtx {
+  // Required
+  string key_id = 1 [(buf.validate.field).string = {min_len: 1}]; // Key ID for the symmetric key wrapping this key.
+  // Optional
+  string wrapped_key = 2; // Base64 encoded wrapped key. Conditionally required if the key is managed locally (KEY_MODE_CONFIG_ROOT_KEY or KEY_MODE_PROVIDER_ROOT_KEY). Should not be present if key_mode is KEY_MODE_REMOTE.
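+  // Illustrative sketch (hypothetical values) of the proto3 JSON form for a
+  // locally wrapped key, where "keyId" names the wrapping KEK:
+  //   { "keyId": "kek-r1", "wrappedKey": "<base64 of the wrapped private key>" }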
+} + +message AsymmetricKey { + // Required + string id = 1; + // Required + string key_id = 2; + // Required + Algorithm key_algorithm = 3; + // Required + KeyStatus key_status = 4; + // Required + KeyMode key_mode = 5; // Specifies how the key is managed (local or remote) + // Required + PublicKeyCtx public_key_ctx = 6; // Specific structure based on key provider implementation + // Optional + PrivateKeyCtx private_key_ctx = 7; // Specific structure based on key provider implementation + // Optional + KeyProviderConfig provider_config = 8; // Configuration for the key provider + + // Common metadata fields + common.Metadata metadata = 100; +} + +message SymmetricKey { + string id = 1; + string key_id = 2; + KeyStatus key_status = 3; + KeyMode key_mode = 4; // Specifies how the key is managed (local or remote) + bytes key_ctx = 5; // Specific structure based on key provider implementation + KeyProviderConfig provider_config = 6; // Configuration for the key provider + + // Common metadata fields + common.Metadata metadata = 100; +} diff --git a/otdf-python-proto/proto-files/policy/registeredresources/registered_resources.proto b/otdf-python-proto/proto-files/policy/registeredresources/registered_resources.proto new file mode 100644 index 0000000..c694019 --- /dev/null +++ b/otdf-python-proto/proto-files/policy/registeredresources/registered_resources.proto @@ -0,0 +1,330 @@ +syntax = "proto3"; + +package policy.registeredresources; + +import "buf/validate/validate.proto"; + +import "common/common.proto"; +import "policy/objects.proto"; +import "policy/selectors.proto"; + +/// +/// Registered Resource +/// + +message CreateRegisteredResourceRequest { + // Required + string name = 1 [ + (buf.validate.field).required = true, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "rr_name_format", + message: "Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored name will be normalized to lower case.", + expression: "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + + // Optional + // Registered Resource Values (when provided) must be alphanumeric strings, allowing hyphens and underscores but not as the first or last character. + // The stored value will be normalized to lower case. + repeated string values = 2 [ + (buf.validate.field).repeated = { + min_items: 0, + unique: true, + items: { + string: + { + max_len: 253, + pattern: "^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$" + } + }, + } + ]; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; +} +message CreateRegisteredResourceResponse { + policy.RegisteredResource resource = 1; +} + +message GetRegisteredResourceRequest { + oneof identifier { + option (buf.validate.oneof).required = true; + + string id = 1 [ + (buf.validate.field).string.uuid = true + ]; + + string name = 2 [ + (buf.validate.field).required = false, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "rr_name_format", + message: "Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored name will be normalized to lower case.", + expression: "size(this) > 0 ? 
this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$') : true" + } + ]; + } +} +message GetRegisteredResourceResponse { + policy.RegisteredResource resource = 1; +} + +message ListRegisteredResourcesRequest { + // Optional + policy.PageRequest pagination = 10; +} +message ListRegisteredResourcesResponse { + repeated policy.RegisteredResource resources = 1; + + policy.PageResponse pagination = 10; +} + +message UpdateRegisteredResourceRequest { + // Required + string id = 1 [ + (buf.validate.field).string.uuid = true + ]; + + // Optional + string name = 2 [ + (buf.validate.field).required = false, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "rr_name_format", + message: "Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored name will be normalized to lower case.", + expression: "size(this) > 0 ? this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$') : true" + } + ]; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateRegisteredResourceResponse { + policy.RegisteredResource resource = 1; +} + +message DeleteRegisteredResourceRequest { + // Required + string id = 1 [ + (buf.validate.field).string.uuid = true + ]; +} +message DeleteRegisteredResourceResponse { + policy.RegisteredResource resource = 1; +} + +/// +/// Registered Resource Values +/// + +message ActionAttributeValue { + // Required + oneof action_identifier { + option (buf.validate.oneof).required = true; + + string action_id = 1 [ + (buf.validate.field).string.uuid = true + ]; + + string action_name = 2 [ + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "action_name_format" + message: "Action name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case." + expression: "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + } + + // Required + oneof attribute_value_identifier { + option (buf.validate.oneof).required = true; + + string attribute_value_id = 3 [ + (buf.validate.field).string.uuid = true + ]; + + string attribute_value_fqn = 4 [ + (buf.validate.field).string = { + min_len : 1 + uri : true + } + ]; + } +} + +message CreateRegisteredResourceValueRequest { + // Required + string resource_id = 1 [ + (buf.validate.field).string.uuid = true + ]; + + // Required + string value = 2 [ + (buf.validate.field).required = true, + (buf.validate.field).string.max_len = 253, + (buf.validate.field).cel = { + id: "rr_value_format", + message: "Registered Resource Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored value will be normalized to lower case.", + expression: "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')" + } + ]; + + // Optional + // The associated Action <> AttributeValue combinations to be utilized in authorization/entitlement decisioning + // (i.e. 
action read -> attribute value https://example.com/attr/department/value/marketing)
+  repeated ActionAttributeValue action_attribute_values = 3;
+
+  // Optional
+  // Common metadata
+  common.MetadataMutable metadata = 100;
+}
+message CreateRegisteredResourceValueResponse {
+  policy.RegisteredResourceValue value = 1;
+}
+
+message GetRegisteredResourceValueRequest {
+  oneof identifier {
+    option (buf.validate.oneof).required = true;
+
+    string id = 1 [
+      (buf.validate.field).string.uuid = true
+    ];
+
+    string fqn = 2 [
+      (buf.validate.field).string = {
+        min_len : 1
+        uri : true
+      }
+    ];
+  }
+}
+message GetRegisteredResourceValueResponse {
+  policy.RegisteredResourceValue value = 1;
+}
+
+message GetRegisteredResourceValuesByFQNsRequest {
+  // Required
+  repeated string fqns = 1 [
+    (buf.validate.field).repeated = {
+      min_items: 1,
+      unique: true,
+      items: {
+        string:
+          {
+            min_len: 1,
+            uri: true
+          }
+      },
+    }
+  ];
+}
+message GetRegisteredResourceValuesByFQNsResponse {
+  map<string, policy.RegisteredResourceValue> fqn_value_map = 1;
+}
+
+message ListRegisteredResourceValuesRequest {
+  // Optional
+  string resource_id = 1 [(buf.validate.field).cel = {
+    id: "optional_uuid_format",
+    message: "Optional field must be a valid UUID",
+    expression: "size(this) == 0 || this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')"
+  }];
+
+  // Optional
+  policy.PageRequest pagination = 10;
+}
+message ListRegisteredResourceValuesResponse {
+  repeated policy.RegisteredResourceValue values = 1;
+
+  policy.PageResponse pagination = 10;
+}
+
+message UpdateRegisteredResourceValueRequest {
+  // Required
+  string id = 1 [
+    (buf.validate.field).string.uuid = true
+  ];
+
+  // Optional
+  string value = 2 [
+    (buf.validate.field).required = false,
+    (buf.validate.field).string.max_len = 253,
+    (buf.validate.field).cel = {
+      id: "rr_value_format",
+      message: "Registered Resource Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored value will be normalized to lower case.",
+      expression: "size(this) > 0 ? this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$') : true"
+    }
+  ];
+
+  // Optional
+  // Action Attribute Values provided here will replace all existing records in the database. To delete all action attribute values, set this field to an empty list.
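+  // Replacement example (illustrative, hypothetical FQNs): if the stored
+  // associations are [read -> .../value/marketing, create -> .../value/marketing]
+  // and this request carries only [read -> .../value/hr], the stored set
+  // afterwards is exactly [read -> .../value/hr].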
+ repeated ActionAttributeValue action_attribute_values = 3; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateRegisteredResourceValueResponse { + policy.RegisteredResourceValue value = 1; +} + +message DeleteRegisteredResourceValueRequest { + // Required + string id = 1 [ + (buf.validate.field).string.uuid = true + ]; +} +message DeleteRegisteredResourceValueResponse { + policy.RegisteredResourceValue value = 1; +} + + +/// +/// Registered Resources Service +/// + +service RegisteredResourcesService { + // Registered Resources + + rpc CreateRegisteredResource(CreateRegisteredResourceRequest) returns (CreateRegisteredResourceResponse) { + } + + rpc GetRegisteredResource(GetRegisteredResourceRequest) returns (GetRegisteredResourceResponse) { + } + + rpc ListRegisteredResources(ListRegisteredResourcesRequest) returns (ListRegisteredResourcesResponse) { + } + + rpc UpdateRegisteredResource(UpdateRegisteredResourceRequest) returns (UpdateRegisteredResourceResponse) { + } + + rpc DeleteRegisteredResource(DeleteRegisteredResourceRequest) returns (DeleteRegisteredResourceResponse) { + } + + // Registered Resource Values + + rpc CreateRegisteredResourceValue(CreateRegisteredResourceValueRequest) returns (CreateRegisteredResourceValueResponse) { + } + + rpc GetRegisteredResourceValue(GetRegisteredResourceValueRequest) returns (GetRegisteredResourceValueResponse) { + } + + rpc GetRegisteredResourceValuesByFQNs(GetRegisteredResourceValuesByFQNsRequest) returns (GetRegisteredResourceValuesByFQNsResponse) { + } + + rpc ListRegisteredResourceValues(ListRegisteredResourceValuesRequest) returns (ListRegisteredResourceValuesResponse) { + } + + rpc UpdateRegisteredResourceValue(UpdateRegisteredResourceValueRequest) returns (UpdateRegisteredResourceValueResponse) { + } + + rpc DeleteRegisteredResourceValue(DeleteRegisteredResourceValueRequest) returns (DeleteRegisteredResourceValueResponse) { + } +} diff --git a/otdf-python-proto/proto-files/policy/resourcemapping/resource_mapping.proto b/otdf-python-proto/proto-files/policy/resourcemapping/resource_mapping.proto new file mode 100644 index 0000000..5fa321f --- /dev/null +++ b/otdf-python-proto/proto-files/policy/resourcemapping/resource_mapping.proto @@ -0,0 +1,276 @@ +syntax = "proto3"; + +package policy.resourcemapping; + +import "buf/validate/validate.proto"; + +import "common/common.proto"; +import "policy/objects.proto"; +import "policy/selectors.proto"; + +/* + Resource Mapping Groups +*/ + +// TODO: optional validation below should be through a custom validator, which is too bleeding edge at present without full plugin support + +message ListResourceMappingGroupsRequest { + // Optional + string namespace_id = 1 [(buf.validate.field).cel = { + id: "optional_uuid_format", + message: "Optional field must be a valid UUID", + expression: "size(this) == 0 || this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')" + }]; + + // Optional + policy.PageRequest pagination = 10; +} + +message ListResourceMappingGroupsResponse { + repeated ResourceMappingGroup resource_mapping_groups = 1; + + policy.PageResponse pagination = 10; +} + +message GetResourceMappingGroupRequest { + // Required + string id = 1 [ + (buf.validate.field).string.uuid = true + ]; +} + +message GetResourceMappingGroupResponse { + ResourceMappingGroup resource_mapping_group = 1; +} + +message CreateResourceMappingGroupRequest { + // Required + string 
namespace_id = 1 [
+    (buf.validate.field).string.uuid = true
+  ];
+
+  // Required
+  string name = 2 [(buf.validate.field).required = true];
+
+  // Common metadata
+  common.MetadataMutable metadata = 100;
+}
+
+message CreateResourceMappingGroupResponse {
+  ResourceMappingGroup resource_mapping_group = 1;
+}
+
+message UpdateResourceMappingGroupRequest {
+  // Required
+  string id = 1 [
+    (buf.validate.field).string.uuid = true
+  ];
+
+  // Optional
+  string namespace_id = 2 [(buf.validate.field).cel = {
+    id: "optional_uuid_format",
+    message: "Optional field must be a valid UUID",
+    expression: "size(this) == 0 || this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')"
+  }];
+
+  // Optional
+  string name = 3 [
+    (buf.validate.field).string.max_len = 253,
+    (buf.validate.field).cel = {
+      id: "optional_name_format",
+      message: "Optional field must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored group name will be normalized to lower case.",
+      expression: "size(this) == 0 || this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')"
+    }
+  ];
+
+  // Common metadata
+  common.MetadataMutable metadata = 100;
+  common.MetadataUpdateEnum metadata_update_behavior = 101;
+}
+
+message UpdateResourceMappingGroupResponse {
+  ResourceMappingGroup resource_mapping_group = 1;
+}
+
+message DeleteResourceMappingGroupRequest {
+  // Required
+  string id = 1 [
+    (buf.validate.field).string.uuid = true
+  ];
+}
+
+message DeleteResourceMappingGroupResponse {
+  ResourceMappingGroup resource_mapping_group = 1;
+}
+
+/*
+   Resource Mappings
+*/
+
+message ListResourceMappingsRequest {
+  // Optional
+  string group_id = 1 [(buf.validate.field).cel = {
+    id: "optional_uuid_format",
+    message: "Optional field must be a valid UUID",
+    expression: "size(this) == 0 || this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')"
+  }];
+
+  // Optional
+  policy.PageRequest pagination = 10;
+}
+
+message ListResourceMappingsResponse {
+  repeated policy.ResourceMapping resource_mappings = 1;
+
+  policy.PageResponse pagination = 10;
+}
+
+message ListResourceMappingsByGroupFqnsRequest {
+  // Required
+  // Structure of the RM Group FQN is 'https://<namespace>/resm/<group name>'
+  repeated string fqns = 1 [(buf.validate.field).repeated = {
+    min_items: 1,
+    items: {
+      cel: [
+        {
+          id: "resourcemappinggroup_fqn",
+          message: "Resource Mapping Group FQN must be in the format 'https://<namespace>/resm/<group name>'",
+          expression: "this.matches('^https://([a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?\\\\.)+[a-zA-Z]{2,}/resm/[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')"
+        }
+      ]
+    },
+  }];
+}
+
+message ResourceMappingsByGroup {
+  policy.ResourceMappingGroup group = 1;
+  repeated policy.ResourceMapping mappings = 2;
+}
+
+message ListResourceMappingsByGroupFqnsResponse {
+  map<string, ResourceMappingsByGroup> fqn_resource_mapping_groups = 1;
+}
+
+message GetResourceMappingRequest {
+  // Required
+  string id = 1 [
+    (buf.validate.field).string.uuid = true
+  ];
+}
+
+message GetResourceMappingResponse {
+  policy.ResourceMapping resource_mapping = 1;
+}
+
+
+message CreateResourceMappingRequest {
+  // Required
+  string attribute_value_id = 1 [
+    (buf.validate.field).string.uuid = true
+  ];
+
+  // Required
+  repeated string terms = 2 [(buf.validate.field).repeated = {
+    min_items: 1,
+    max_items: 1000,
+  }];
+
+  // Optional
+  string group_id = 3 [(buf.validate.field).cel = {
+    id: "optional_uuid_format",
+    message: "Optional field must be a valid UUID",
+    expression: "size(this) == 0
|| this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')" + }]; + + // Optional + common.MetadataMutable metadata = 100; +} +message CreateResourceMappingResponse { + ResourceMapping resource_mapping = 1; +} + +message UpdateResourceMappingRequest { + // Required + string id = 1 [ + (buf.validate.field).string.uuid = true + ]; + + // Optional + string attribute_value_id = 4 [(buf.validate.field).cel = { + id: "optional_uuid_format", + message: "Optional field must be a valid UUID", + expression: "size(this) == 0 || this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')" + }]; + + // Optional + repeated string terms = 5 [(buf.validate.field).repeated = { + max_items: 1000, + }]; + + // Optional + string group_id = 6 [(buf.validate.field).cel = { + id: "optional_uuid_format", + message: "Optional field must be a valid UUID", + expression: "size(this) == 0 || this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')" + }]; + + // Optional + // Common Metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateResourceMappingResponse { + ResourceMapping resource_mapping = 1; +} + +message DeleteResourceMappingRequest { + // Required + string id = 1 [ + (buf.validate.field).string.uuid = true + ]; +} +message DeleteResourceMappingResponse { + ResourceMapping resource_mapping = 1; +} + +service ResourceMappingService { + /* + Resource Mapping Groups + */ + + rpc ListResourceMappingGroups(ListResourceMappingGroupsRequest) returns (ListResourceMappingGroupsResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc GetResourceMappingGroup(GetResourceMappingGroupRequest) returns (GetResourceMappingGroupResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc CreateResourceMappingGroup(CreateResourceMappingGroupRequest) returns (CreateResourceMappingGroupResponse) {} + + rpc UpdateResourceMappingGroup(UpdateResourceMappingGroupRequest) returns (UpdateResourceMappingGroupResponse) {} + + rpc DeleteResourceMappingGroup(DeleteResourceMappingGroupRequest) returns (DeleteResourceMappingGroupResponse) {} + + /* + Resource Mappings + */ + + rpc ListResourceMappings(ListResourceMappingsRequest) returns (ListResourceMappingsResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc ListResourceMappingsByGroupFqns(ListResourceMappingsByGroupFqnsRequest) returns (ListResourceMappingsByGroupFqnsResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc GetResourceMapping(GetResourceMappingRequest) returns (GetResourceMappingResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc CreateResourceMapping(CreateResourceMappingRequest) returns (CreateResourceMappingResponse) {} + + rpc UpdateResourceMapping(UpdateResourceMappingRequest) returns (UpdateResourceMappingResponse) {} + + rpc DeleteResourceMapping(DeleteResourceMappingRequest) returns (DeleteResourceMappingResponse) {} +} diff --git a/otdf-python-proto/proto-files/policy/selectors.proto b/otdf-python-proto/proto-files/policy/selectors.proto new file mode 100644 index 0000000..5e717c6 --- /dev/null +++ b/otdf-python-proto/proto-files/policy/selectors.proto @@ -0,0 +1,70 @@ +syntax = "proto3"; + +package policy; + +message AttributeNamespaceSelector { + message AttributeSelector { + // Deprecated + bool with_key_access_grants = 1; + message ValueSelector { + // Deprecated + bool with_key_access_grants = 1; + bool 
with_subject_maps = 2; + bool with_resource_maps = 3; + } + ValueSelector with_values = 10; + } + AttributeSelector with_attributes = 10; +} + +message AttributeDefinitionSelector { + // Deprecated + bool with_key_access_grants = 1; + + message NamespaceSelector {} + NamespaceSelector with_namespace = 10; + + message ValueSelector { + // Deprecated + bool with_key_access_grants = 1; + bool with_subject_maps = 2; + bool with_resource_maps = 3; + } + ValueSelector with_values = 11; +} + +message AttributeValueSelector { + // Deprecated + bool with_key_access_grants = 1; + bool with_subject_maps = 2; + bool with_resource_maps = 3; + + message AttributeSelector { + // Deprecated + bool with_key_access_grants = 1; + + message NamespaceSelector {} + NamespaceSelector with_namespace = 10; + } + AttributeSelector with_attribute = 10; +} + +message PageRequest { + // Optional + // Set to configured default limit if not provided + // Maximum limit set in platform config and enforced by services + int32 limit = 1; + // Optional + // Defaulted if not provided + int32 offset = 2; +} + +message PageResponse { + // Requested pagination offset + int32 current_offset = 1; + // Calculated with request limit + offset or defaults + // Empty when none remain after current page + int32 next_offset = 2; + // Total count of entire list + int32 total = 3; +} diff --git a/otdf-python-proto/proto-files/policy/subjectmapping/subject_mapping.proto b/otdf-python-proto/proto-files/policy/subjectmapping/subject_mapping.proto new file mode 100644 index 0000000..f725cc2 --- /dev/null +++ b/otdf-python-proto/proto-files/policy/subjectmapping/subject_mapping.proto @@ -0,0 +1,215 @@ +syntax = "proto3"; + +package policy.subjectmapping; + +import "buf/validate/validate.proto"; +import "common/common.proto"; +import "policy/objects.proto"; +import "policy/selectors.proto"; + +// MatchSubjectMappingsRequest liberally returns a list of SubjectMappings based on the provided SubjectProperties. +// The SubjectMappings are returned if an external selector field matches. 
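+//
+// Illustrative example (hypothetical data): a request carrying the
+// SubjectProperty pair
+//   { external_selector_value: '.department', external_value: 'marketing' }
+// would match any SubjectMapping whose SubjectConditionSet contains a
+// Condition with subject_external_selector_value == '.department', since the
+// match here is on the selector field rather than a full condition evaluation.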
+message MatchSubjectMappingsRequest { + repeated policy.SubjectProperty subject_properties = 1 [(buf.validate.field).repeated.min_items = 1]; +} + +message MatchSubjectMappingsResponse { + repeated policy.SubjectMapping subject_mappings = 1; +} + +/* + Subject Mappings CRUD Operations +*/ + +message GetSubjectMappingRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message GetSubjectMappingResponse { + policy.SubjectMapping subject_mapping = 1; +} + +message ListSubjectMappingsRequest { + // Optional + policy.PageRequest pagination = 10; +} +message ListSubjectMappingsResponse { + repeated policy.SubjectMapping subject_mappings = 1; + + policy.PageResponse pagination = 10; +} + +message CreateSubjectMappingRequest { + // Required + // Attribute Value to be mapped to + string attribute_value_id = 1 [(buf.validate.field).string.uuid = true]; + // Required + // The actions permitted by subjects in this mapping + repeated policy.Action actions = 2 [ + (buf.validate.field).repeated.min_items = 1, + (buf.validate.field).cel = { + id: "action_name_or_id_not_empty" + message: "Action name or ID must not be empty if provided" + expression: "this.all(item, item.name != '' || item.id != '')" + } + ]; + + // Either of the following: + // Reuse existing SubjectConditionSet (NOTE: prioritized over new_subject_condition_set) + string existing_subject_condition_set_id = 3 [(buf.validate.field).cel = { + id: "optional_uuid_format" + message: "Optional field must be a valid UUID" + expression: "size(this) == 0 || this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')" + }]; + // Create new SubjectConditionSet (NOTE: ignored if existing_subject_condition_set_id is provided) + SubjectConditionSetCreate new_subject_condition_set = 4; + + // Optional + common.MetadataMutable metadata = 100; +} +message CreateSubjectMappingResponse { + policy.SubjectMapping subject_mapping = 1; +} + +message UpdateSubjectMappingRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + + // Optional + // Replaces the existing SubjectConditionSet id with a new one + string subject_condition_set_id = 2 [(buf.validate.field).cel = { + id: "optional_uuid_format" + message: "Optional field must be a valid UUID" + expression: "size(this) == 0 || this.matches('[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}')" + }]; + // Optional + // Replaces entire list of actions permitted by subjects + repeated policy.Action actions = 3 [(buf.validate.field).cel = { + id: "action_name_or_id_not_empty" + message: "Action name or ID must not be empty if provided" + expression: "this.size() == 0 || this.all(item, item.name != '' || item.id != '')" + }]; + + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateSubjectMappingResponse { + // Only ID of the updated Subject Mapping provided + policy.SubjectMapping subject_mapping = 1; +} + +message DeleteSubjectMappingRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message DeleteSubjectMappingResponse { + // Only ID of the updated Subject Mapping provided + policy.SubjectMapping subject_mapping = 1; +} + +/** + SubjectConditionSet CRUD operations +*/ + +message GetSubjectConditionSetRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message GetSubjectConditionSetResponse { + policy.SubjectConditionSet subject_condition_set 
= 1; + // contextualized Subject Mappings associated with this SubjectConditionSet + repeated policy.SubjectMapping associated_subject_mappings = 2; +} + +message ListSubjectConditionSetsRequest { + // Optional + policy.PageRequest pagination = 10; +} +message ListSubjectConditionSetsResponse { + repeated policy.SubjectConditionSet subject_condition_sets = 1; + + policy.PageResponse pagination = 10; +} + +message SubjectConditionSetCreate { + // Required + repeated policy.SubjectSet subject_sets = 1 [(buf.validate.field).repeated.min_items = 1]; + + // Optional + // Common metadata + common.MetadataMutable metadata = 100; +} +message CreateSubjectConditionSetRequest { + SubjectConditionSetCreate subject_condition_set = 1 [(buf.validate.field).required = true]; +} +message CreateSubjectConditionSetResponse { + SubjectConditionSet subject_condition_set = 1; +} + +message UpdateSubjectConditionSetRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; + + // Optional + // If provided, replaces entire existing structure of Subject Sets, Condition Groups, & Conditions + repeated policy.SubjectSet subject_sets = 2; + + // Common metadata + common.MetadataMutable metadata = 100; + common.MetadataUpdateEnum metadata_update_behavior = 101; +} +message UpdateSubjectConditionSetResponse { + // Only ID of updated Subject Condition Set provided + policy.SubjectConditionSet subject_condition_set = 1; +} + +message DeleteSubjectConditionSetRequest { + // Required + string id = 1 [(buf.validate.field).string.uuid = true]; +} +message DeleteSubjectConditionSetResponse { + // Only ID of deleted Subject Condition Set provided + policy.SubjectConditionSet subject_condition_set = 1; +} + +// Prune any Subject Condition Sets not utilized within a Subject Mapping +message DeleteAllUnmappedSubjectConditionSetsRequest {} +message DeleteAllUnmappedSubjectConditionSetsResponse { + // Only IDs of any deleted Subject Condition Set provided + repeated policy.SubjectConditionSet subject_condition_sets = 1; +} + +service SubjectMappingService { + // Find matching Subject Mappings for a given Subject + rpc MatchSubjectMappings(MatchSubjectMappingsRequest) returns (MatchSubjectMappingsResponse) {} + + rpc ListSubjectMappings(ListSubjectMappingsRequest) returns (ListSubjectMappingsResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + rpc GetSubjectMapping(GetSubjectMappingRequest) returns (GetSubjectMappingResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc CreateSubjectMapping(CreateSubjectMappingRequest) returns (CreateSubjectMappingResponse) {} + + rpc UpdateSubjectMapping(UpdateSubjectMappingRequest) returns (UpdateSubjectMappingResponse) {} + + rpc DeleteSubjectMapping(DeleteSubjectMappingRequest) returns (DeleteSubjectMappingResponse) {} + + rpc ListSubjectConditionSets(ListSubjectConditionSetsRequest) returns (ListSubjectConditionSetsResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc GetSubjectConditionSet(GetSubjectConditionSetRequest) returns (GetSubjectConditionSetResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + rpc CreateSubjectConditionSet(CreateSubjectConditionSetRequest) returns (CreateSubjectConditionSetResponse) {} + + rpc UpdateSubjectConditionSet(UpdateSubjectConditionSetRequest) returns (UpdateSubjectConditionSetResponse) {} + + rpc DeleteSubjectConditionSet(DeleteSubjectConditionSetRequest) returns (DeleteSubjectConditionSetResponse) {} + + rpc 
DeleteAllUnmappedSubjectConditionSets(DeleteAllUnmappedSubjectConditionSetsRequest) returns (DeleteAllUnmappedSubjectConditionSetsResponse) {}
+}
diff --git a/otdf-python-proto/proto-files/policy/unsafe/unsafe.proto b/otdf-python-proto/proto-files/policy/unsafe/unsafe.proto
new file mode 100644
index 0000000..c11bfb9
--- /dev/null
+++ b/otdf-python-proto/proto-files/policy/unsafe/unsafe.proto
@@ -0,0 +1,209 @@
+syntax = "proto3";
+
+package policy.unsafe;
+
+import "buf/validate/validate.proto";
+
+import "policy/objects.proto";
+
+// Namespaces Unsafe RPCs
+
+// WARNING!!
+// Updating the Namespace of an Attribute will retroactively alter access to existing TDFs of the old and new Namespace name.
+// Existing Attribute Definitions and their Values under the Namespace will now be associated with the new Namespace name.
+message UnsafeUpdateNamespaceRequest {
+  // Required
+  string id = 1 [(buf.validate.field).string.uuid = true];
+
+  // Required
+  string name = 2 [
+    (buf.validate.field).string.max_len = 253,
+    (buf.validate.field).cel = {
+      id: "namespace_name_format"
+      message: "Namespace must be a valid hostname. It should include at least one dot, with each segment (label) starting and ending with an alphanumeric character. Each label must be 1 to 63 characters long, allowing hyphens but not as the first or last character. The top-level domain (the last segment after the final dot) must consist of at least two alphabetic characters. The stored namespace will be normalized to lower case."
+      expression: "this.matches('^([a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?\\\\.)+[a-zA-Z]{2,}$')"
+    }
+  ];
+}
+message UnsafeUpdateNamespaceResponse {
+  policy.Namespace namespace = 1;
+}
+
+// WARNING!!
+// Reactivating a Namespace can potentially open up an access path to existing TDFs containing any Attributes under the Namespace.
+// Active state of any Definitions and their Values under this Namespace will NOT be changed.
+message UnsafeReactivateNamespaceRequest {
+  // Required
+  string id = 1 [(buf.validate.field).string.uuid = true];
+}
+message UnsafeReactivateNamespaceResponse {
+  policy.Namespace namespace = 1;
+}
+
+// WARNING!!
+// Deleting a Namespace will free up the Attribute Namespace, Definitions, and Values for reuse, which can introduce an access path to existing TDFs containing the deleted policy.
+// Definitions and their Values under this Namespace will be cascadingly deleted.
+message UnsafeDeleteNamespaceRequest {
+  // Required
+  // UUID of the Namespace
+  string id = 1 [(buf.validate.field).string.uuid = true];
+  // Required
+  // Fully Qualified Name (FQN) of Namespace (i.e. https://<namespace>), normalized to lower case.
+  string fqn = 2 [(buf.validate.field).required = true];
+}
+message UnsafeDeleteNamespaceResponse {
+  policy.Namespace namespace = 1;
+}
+
+// Attribute Definitions Unsafe RPCs
+
+// WARNING!!
+// Updating an Attribute can have dangerous consequences. Use with caution.
+message UnsafeUpdateAttributeRequest {
+  // Required
+  string id = 1 [(buf.validate.field).string.uuid = true];
+
+  // Optional
+  // WARNING!!
+  // Updating the name of an Attribute will retroactively alter access to existing TDFs of the old and new Attribute name.
+  string name = 2 [
+    (buf.validate.field).required = false,
+    (buf.validate.field).string.max_len = 253,
+    (buf.validate.field).cel = {
+      id: "attribute_name_format"
+      message: "Attribute name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character.
The stored attribute name will be normalized to lower case."
+      expression: "size(this) > 0 ? this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$') : true"
+    }
+  ];
+  // Optional
+  // WARNING!!
+  // Updating the rule of an Attribute will retroactively alter access to existing TDFs of the Attribute name.
+  AttributeRuleTypeEnum rule = 3 [(buf.validate.field).enum.defined_only = true];
+  // Optional
+  // WARNING!!
+  // Unsafe reordering requires the full list of values in the new order they should be stored. Updating the order of values in a HIERARCHY-rule Attribute Definition
+  // will retroactively alter access to existing TDFs containing those values. Replacing values on an attribute in place is not supported; values can be unsafely
+  // deleted, created, and unsafely re-ordered as necessary.
+  repeated string values_order = 4;
+}
+message UnsafeUpdateAttributeResponse {
+  policy.Attribute attribute = 1;
+}
+
+// WARNING!!
+// Reactivating an Attribute can potentially open up an access path to existing TDFs containing the Attribute name.
+// Active state of any Values under this Attribute Definition will NOT be changed.
+message UnsafeReactivateAttributeRequest {
+  // Required
+  string id = 1 [(buf.validate.field).string.uuid = true];
+}
+message UnsafeReactivateAttributeResponse {
+  policy.Attribute attribute = 1;
+}
+
+// WARNING!!
+// Deleting an Attribute will free up the Attribute name for reuse, which can introduce an access path to existing TDFs containing the deleted Attribute name.
+// Values under this Attribute will be cascadingly deleted.
+// Any KAS Grants associated with this Attribute will be cascadingly deleted.
+message UnsafeDeleteAttributeRequest {
+  // Required
+  // UUID of the Attribute
+  string id = 1 [(buf.validate.field).string.uuid = true];
+  // Required
+  // Fully Qualified Name (FQN) of Attribute Definition (i.e. https://<namespace>/attr/<name>), normalized to lower case.
+  string fqn = 2 [(buf.validate.field).required = true];
+}
+message UnsafeDeleteAttributeResponse {
+  policy.Attribute attribute = 1;
+}
+
+///
+/// Value Unsafe RPC messages
+///
+
+// WARNING!!
+// Updating an Attribute Value will retroactively alter access to existing TDFs containing the old and new Attribute Value.
+message UnsafeUpdateAttributeValueRequest {
+  // Required
+  string id = 1 [(buf.validate.field).string.uuid = true];
+
+  // Required
+  string value = 2 [
+    (buf.validate.field).string.max_len = 253,
+    (buf.validate.field).cel = {
+      id: "value_format"
+      message: "Attribute Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored attribute value will be normalized to lower case."
+      expression: "this.matches('^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$')"
+    }
+  ];
+}
+message UnsafeUpdateAttributeValueResponse {
+  policy.Value value = 1;
+}
+
+// WARNING!!
+// Reactivating an Attribute Value can potentially open up an access path to existing TDFs containing the Attribute Value.
+message UnsafeReactivateAttributeValueRequest {
+  // Required
+  string id = 1 [(buf.validate.field).string.uuid = true];
+}
+message UnsafeReactivateAttributeValueResponse {
+  policy.Value value = 1;
+}
+
+// WARNING!!
+// Deleting an Attribute Value will free up the Attribute Value for reuse, which can introduce an access path to existing TDFs containing the deleted Attribute Value.
+// Any KAS Grants associated with this Attribute Value will be cascadingly deleted.
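+//
+// Illustrative scenario (hypothetical FQN): after deleting the value
+// https://demo.com/attr/department/value/marketing, a value later re-created
+// with that same FQN could open an access path to older TDFs that referenced
+// the deleted one.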
+message UnsafeDeleteAttributeValueRequest {
+  // Required
+  // UUID of the Attribute Value
+  string id = 1 [(buf.validate.field).string.uuid = true];
+  // Required
+  // Fully Qualified Name (FQN) of Attribute Value (i.e. https://<namespace>/attr/<name>/value/<value>), normalized to lower case.
+  string fqn = 2 [(buf.validate.field).required = true];
+}
+message UnsafeDeleteAttributeValueResponse {
+  policy.Value value = 1;
+}
+
+// WARNING!!
+message UnsafeDeleteKasKeyRequest {
+  // Required
+  // UUID of the Key
+  string id = 1 [(buf.validate.field).string.uuid = true];
+}
+
+message UnsafeDeleteKasKeyResponse {
+  policy.Key key = 1;
+}
+
+///
+/// Unsafe Service
+///
+service UnsafeService {
+  /*--------------------------------------*
+   * Namespace RPCs
+   *---------------------------------------*/
+  rpc UnsafeUpdateNamespace(UnsafeUpdateNamespaceRequest) returns (UnsafeUpdateNamespaceResponse) {}
+  rpc UnsafeReactivateNamespace(UnsafeReactivateNamespaceRequest) returns (UnsafeReactivateNamespaceResponse) {}
+  rpc UnsafeDeleteNamespace(UnsafeDeleteNamespaceRequest) returns (UnsafeDeleteNamespaceResponse) {}
+
+  /*--------------------------------------*
+   * Attribute RPCs
+   *---------------------------------------*/
+  rpc UnsafeUpdateAttribute(UnsafeUpdateAttributeRequest) returns (UnsafeUpdateAttributeResponse) {}
+  rpc UnsafeReactivateAttribute(UnsafeReactivateAttributeRequest) returns (UnsafeReactivateAttributeResponse) {}
+  rpc UnsafeDeleteAttribute(UnsafeDeleteAttributeRequest) returns (UnsafeDeleteAttributeResponse) {}
+
+  /*--------------------------------------*
+   * Value RPCs
+   *---------------------------------------*/
+  rpc UnsafeUpdateAttributeValue(UnsafeUpdateAttributeValueRequest) returns (UnsafeUpdateAttributeValueResponse) {}
+  rpc UnsafeReactivateAttributeValue(UnsafeReactivateAttributeValueRequest) returns (UnsafeReactivateAttributeValueResponse) {}
+  rpc UnsafeDeleteAttributeValue(UnsafeDeleteAttributeValueRequest) returns (UnsafeDeleteAttributeValueResponse) {}
+
+  /*--------------------------------------*
+   * Kas Key RPCs
+   *---------------------------------------*/
+  rpc UnsafeDeleteKasKey(UnsafeDeleteKasKeyRequest) returns (UnsafeDeleteKasKeyResponse) {}
+}
diff --git a/otdf-python-proto/proto-files/wellknownconfiguration/wellknown_configuration.proto b/otdf-python-proto/proto-files/wellknownconfiguration/wellknown_configuration.proto
new file mode 100644
index 0000000..b6d2c22
--- /dev/null
+++ b/otdf-python-proto/proto-files/wellknownconfiguration/wellknown_configuration.proto
@@ -0,0 +1,23 @@
+syntax = "proto3";
+
+package wellknownconfiguration;
+
+import "google/api/annotations.proto";
+import "google/protobuf/struct.proto";
+
+message WellKnownConfig {
+  map<string, google.protobuf.Struct> configuration = 1;
+}
+
+message GetWellKnownConfigurationRequest {}
+
+message GetWellKnownConfigurationResponse {
+  google.protobuf.Struct configuration = 1;
+}
+
+service WellKnownService {
+  rpc GetWellKnownConfiguration(GetWellKnownConfigurationRequest) returns (GetWellKnownConfigurationResponse) {
+    option (google.api.http) = {get: "/.well-known/opentdf-configuration"};
+    option idempotency_level = NO_SIDE_EFFECTS;
+  }
+}
diff --git a/otdf-python-proto/pyproject.toml b/otdf-python-proto/pyproject.toml
new file mode 100644
index 0000000..f120b6e
--- /dev/null
+++ b/otdf-python-proto/pyproject.toml
@@ -0,0 +1,32 @@
+[project]
+name = "otdf-python-proto"
+version = "0.3.1"
+description = "Generated protobuf files for OpenTDF Python SDK"
+readme = "README.md"
+authors = [
+    { name = "b-long", email =
"b-long@users.noreply.github.com" } +] +requires-python = ">=3.10" +dependencies = [ + "connect-python[compiler]>=0.4.2", + "protobuf>=6.31.1", + "googleapis-common-protos>=1.66.0", + # Legacy gRPC support (may be removed in future versions) + "grpcio>=1.74.0", + "grpcio-tools>=1.74.0", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[dependency-groups] +dev = [ + "mypy-protobuf>=3.6.0", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/otdf_python_proto"] + +[tool.hatch.metadata] +allow-direct-references = true diff --git a/otdf-python-proto/scripts/build_connect_proto.sh b/otdf-python-proto/scripts/build_connect_proto.sh new file mode 100755 index 0000000..d5b4591 --- /dev/null +++ b/otdf-python-proto/scripts/build_connect_proto.sh @@ -0,0 +1,95 @@ +#!/bin/bash + +# Build Connect RPC protobuf files for OpenTDF Python SDK +# This script sets up the environment and generates Connect RPC clients + +set -e + +echo "OpenTDF Connect RPC Proto Builder" +echo "=================================" + +# Get the directory of this script +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROTO_GEN_DIR="$(dirname "$SCRIPT_DIR")" + +echo "Working in: $PROTO_GEN_DIR" + +# Check if we're in the right directory +if [[ ! -f "$PROTO_GEN_DIR/buf.yaml" ]]; then + echo "Error: buf.yaml not found. Are you in the proto-gen directory?" + exit 1 +fi + +# Check for required tools +echo "Checking dependencies..." + +if ! command -v buf &> /dev/null; then + echo "Error: buf is not installed." + echo "Install it with:" + echo " brew install bufbuild/buf/buf" + echo " # or" + echo " go install github.com/bufbuild/buf/cmd/buf@latest" + exit 1 +fi + +echo "✓ buf is available" + +# Check if uv is available +if ! command -v uv &> /dev/null; then + echo "Error: uv is not installed." + echo "Install it with:" + echo " curl -LsSf https://astral.sh/uv/install.sh | sh" + exit 1 +fi + +echo "✓ uv is available" + +# Install dependencies if needed +echo "Installing/updating dependencies..." +cd "$PROTO_GEN_DIR" +uv sync --dev + +# Check if connect-python is available +if ! uv run python -c "import connectrpc" 2>/dev/null; then + echo "Installing connect-python[compiler]..." + uv add "connect-python[compiler]>=0.4.2" +fi + +echo "✓ connect-python is available" + +# Clean up previous generated files +echo "Cleaning up previous generated files..." +if [[ -d "generated" ]]; then + rm -rf generated/* +fi + +# Create generated directory +mkdir -p generated + +# Run the generation +echo "Generating Connect RPC protobuf files..." +uv run python scripts/generate_connect_proto.py "$@" + +if [[ $? -eq 0 ]]; then + echo "" + echo "✓ Connect RPC generation complete!" + echo "" + echo "Generated files:" + echo " - generated/*_pb2.py (Protobuf message classes)" + echo " - generated/*_pb2.pyi (Type stubs)" + echo " - generated/*_connect.py (Connect RPC clients)" + echo "" + echo "Legacy gRPC files (if generated):" + echo " - generated/legacy_grpc/*_pb2_grpc.py (gRPC stubs)" + echo "" + echo "Usage examples:" + echo " cd .." + echo " python examples/connect_rpc_client_example.py" + echo "" + echo "For more information, see:" + echo " - CONNECT_RPC_MIGRATION.md" + echo " - https://connectrpc.com/docs/" +else + echo "✗ Connect RPC generation failed!" 
+ exit 1 +fi diff --git a/otdf-python-proto/scripts/generate_connect_proto.py b/otdf-python-proto/scripts/generate_connect_proto.py new file mode 100644 index 0000000..ff0d0d8 --- /dev/null +++ b/otdf-python-proto/scripts/generate_connect_proto.py @@ -0,0 +1,299 @@ +#!/usr/bin/env python3 +""" +Enhanced script to generate Python Connect RPC clients from .proto definitions. + +This script: +1. Downloads the latest proto files from OpenTDF platform +2. Generates standard Python protobuf files +3. Generates Connect RPC Python clients (preferred) +4. Optionally generates legacy gRPC clients for backward compatibility +""" + +import subprocess +import sys +from pathlib import Path + + +def check_dependencies() -> bool: + """Check if required dependencies are available.""" + dependencies = [ + ("buf", "buf --version"), + ("connect-python", "uv run python -c 'import connectrpc'"), + ] + + missing = [] + for name, check_cmd in dependencies: + try: + subprocess.run(check_cmd, shell=True, capture_output=True, check=True) + print(f"✓ {name} is available") + except (subprocess.CalledProcessError, FileNotFoundError): # noqa: PERF203 + missing.append(name) + print(f"✗ {name} is missing") + + if missing: + print("\nMissing dependencies. Install them with:") + for dep in missing: + if dep == "buf": + print(" # Install buf: https://buf.build/docs/installation") + print(" # macOS: brew install bufbuild/buf/buf") + print(" # Or: go install github.com/bufbuild/buf/cmd/buf@latest") + elif dep == "connect-python": + print(" uv add connect-python[compiler]") + return False + + return True + + +def copy_opentdf_proto_files(proto_gen_dir: Path) -> bool: + """Clone OpenTDF platform repository and copy all proto files.""" + GIT_TAG = "service/v0.7.2" + REPO_URL = "https://github.com/opentdf/platform.git" + + temp_repo_dir = proto_gen_dir / "temp_platform_repo" + proto_files_dir = proto_gen_dir / "proto-files" + proto_files_dir.mkdir(exist_ok=True) + + copied_files = 0 + + try: + # Remove existing temp directory if it exists + if temp_repo_dir.exists(): + subprocess.run(["rm", "-rf", str(temp_repo_dir)], check=True) + + print(f"Cloning OpenTDF platform repository (tag: {GIT_TAG})...") + + # Shallow clone the specific tag + subprocess.run( + [ + "git", + "clone", + "--depth", + "1", + "--branch", + GIT_TAG, + REPO_URL, + str(temp_repo_dir), + ], + capture_output=True, + text=True, + check=True, + ) + + # Find all .proto files in the service directory and copy them immediately + service_dir = temp_repo_dir / "service" + if service_dir.exists(): + for proto_file in service_dir.glob("**/*.proto"): + try: + # Get the relative path from the service directory + relative_path = proto_file.relative_to(service_dir) + + # Create the destination path + dest_path = proto_files_dir / relative_path + + # Create any necessary parent directories + dest_path.parent.mkdir(parents=True, exist_ok=True) + + print(f" Copying {relative_path}...") + + # Copy the file content + with open(proto_file) as src: + content = src.read() + + with open(dest_path, "w") as dst: + dst.write(content) + + copied_files += 1 + + except Exception as e: # noqa: PERF203 + print(f" Warning: Failed to copy {relative_path}: {e}") + + print(f"Found and copied {copied_files} proto files from repository") + return copied_files > 0 + + except subprocess.CalledProcessError as e: + print(f"Error cloning repository: {e}") + print(f"stdout: {e.stdout}") + print(f"stderr: {e.stderr}") + return False + except Exception as e: + print(f"Error copying proto files: {e}") + 
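+        # Note: the finally block below still runs after this return, so the
+        # temporary clone is cleaned up even on failure.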
return False + finally: + # Clean up temp directory + if temp_repo_dir.exists(): + subprocess.run(["rm", "-rf", str(temp_repo_dir)], check=False) + + return False + + +def download_proto_files(proto_gen_dir: Path) -> bool: + """Download proto files from OpenTDF platform.""" + print("Copying proto files from OpenTDF platform...") + + try: + return copy_opentdf_proto_files(proto_gen_dir) + except Exception as e: + print(f"Error getting proto files: {e}") + return False + + +def run_buf_generate(proto_gen_dir: Path) -> bool: + """Run buf generate to create protobuf and Connect RPC files.""" + print("Generating protobuf and Connect RPC files...") + + try: + # First, get the path to protoc-gen-connect_python + result = subprocess.run( + ["uv", "run", "which", "protoc-gen-connect_python"], + cwd=proto_gen_dir, + capture_output=True, + text=True, + check=True, + ) + connect_plugin_path = result.stdout.strip() + print(f"Using Connect plugin at: {connect_plugin_path}") + + # Update buf.gen.yaml with the correct path + buf_gen_path = proto_gen_dir / "buf.gen.yaml" + with open(buf_gen_path) as f: + content = f.read() + + # Replace the local plugin path + updated_content = content.replace( + "- local: protoc-gen-connect_python", f"- local: {connect_plugin_path}" + ) + + with open(buf_gen_path, "w") as f: + f.write(updated_content) + + # Run buf generate + subprocess.run( + ["buf", "generate"], + cwd=proto_gen_dir, + capture_output=True, + text=True, + check=True, + ) + + print("✓ Successfully generated protobuf and Connect RPC files") + return True + + except subprocess.CalledProcessError as e: + print("✗ buf generate failed:") + print(f"stdout: {e.stdout}") + print(f"stderr: {e.stderr}") + return False + except FileNotFoundError: + print("✗ buf command not found. Please install buf.") + return False + + +def create_init_files(generated_dir: Path) -> None: + """Create __init__.py files in generated directories.""" + # Create __init__.py in main generated directory + (generated_dir / "__init__.py").touch() + + # Create __init__.py files in any subdirectories + for subdir in generated_dir.iterdir(): + if subdir.is_dir(): + (subdir / "__init__.py").touch() + + +def _fix_ignore_if_default_value(proto_files_dir): + """ + TODO: Fix buf validation: Updated the proto files to use the correct enum value: + + Changed IGNORE_IF_DEFAULT_VALUE → IGNORE_IF_ZERO_VALUE in: + attributes.proto + key_access_server_registry.proto + namespaces.proto + + See release notes: + * https://github.com/bufbuild/protovalidate/releases/tag/v0.14.2 + + > IGNORE_IF_DEFAULT_VALUE is removed. + > In most cases, you can replace it with IGNORE_IF_ZERO_VALUE. See #396 for details. 
+ > https://github.com/bufbuild/protovalidate/pull/396 + + """ + # raise NotImplementedError + + # Iterate all .proto files in the directory + for proto_file in proto_files_dir.glob("**/*.proto"): + try: + with open(proto_file, "r") as file: # noqa: UP015 + content = file.read() + + # Replace the old enum value with the new one + updated_content = content.replace( + "IGNORE_IF_DEFAULT_VALUE", "IGNORE_IF_ZERO_VALUE" + ) + + # Write the updated content back to the file + with open(proto_file, "w") as file: + file.write(updated_content) + + print(f"Updated {proto_file.name} to use IGNORE_IF_ZERO_VALUE") + + except Exception as e: # noqa: PERF203 + print(f"Error updating {proto_file.name}: {e}") + + +def main(): + """Main function to coordinate the generation process.""" + print("OpenTDF Connect RPC Client Generator") + print("===================================") + + # Get the proto-gen directory (parent of scripts) + proto_gen_dir = Path(__file__).parent.parent + proto_files_dir = proto_gen_dir / "proto-files" + generated_dir = proto_gen_dir / "generated" + + # Check dependencies + if not check_dependencies(): + return 1 + + # Ensure directories exist + proto_files_dir.mkdir(exist_ok=True) + generated_dir.mkdir(exist_ok=True) + + # Download proto files (optional - can use existing files) + if ( + "--download" in sys.argv or not any(proto_files_dir.glob("**/*.proto")) + ) and not download_proto_files(proto_gen_dir): + return 1 + + # Check if we have any proto files + proto_files = list(proto_files_dir.glob("**/*.proto")) + if not proto_files: + print("No .proto files found. Use --download to fetch from OpenTDF platform.") + return 1 + + print(f"Found {len(proto_files)} proto files:") + for proto_file in proto_files: + print(f" - {proto_file.name}") + + # Fix IGNORE_IF_DEFAULT_VALUE in proto files + _fix_ignore_if_default_value(proto_files_dir) + + # Generate protobuf and Connect RPC files using buf + if not run_buf_generate(proto_gen_dir): + return 1 + + # Create __init__.py files + create_init_files(generated_dir) + + print("\n✓ Connect RPC client generation complete!") + print(f" Generated files are in: {generated_dir}") + print(f" Connect RPC clients: {generated_dir}/*_connect.py") + print(f" Protobuf files: {generated_dir}/*_pb2.py") + print(f" Type stubs: {generated_dir}/*_pb2.pyi") + + if (generated_dir / "legacy_grpc").exists(): + print(f" Legacy gRPC files: {generated_dir}/legacy_grpc/*_pb2_grpc.py") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/__init__.py b/otdf-python-proto/scripts/setup_connect_rpc.py old mode 100644 new mode 100755 similarity index 100% rename from __init__.py rename to otdf-python-proto/scripts/setup_connect_rpc.py diff --git a/otdf-python-proto/src/otdf_python_proto/__init__.py b/otdf-python-proto/src/otdf_python_proto/__init__.py new file mode 100644 index 0000000..7e24e49 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/__init__.py @@ -0,0 +1,37 @@ +"""OpenTDF Python Protocol Buffers Package. + +This package contains generated Python code from OpenTDF protocol buffer definitions. +It includes modules for authorization, common types, entities, policy management, +and other OpenTDF services. +""" +from importlib import metadata + +try: + __version__ = metadata.version("otdf-python-proto") +except metadata.PackageNotFoundError: + # package is not installed, e.g., in development + __version__ = "0.0.0" + +# Import submodules to make them available +from . import authorization +from . import common +from . 
import entity +from . import entityresolution +from . import kas +from . import legacy_grpc +from . import logger +from . import policy +from . import wellknownconfiguration + +# Export main module categories +__all__ = [ + "authorization", + "common", + "entity", + "entityresolution", + "kas", + "legacy_grpc", + "logger", + "policy", + "wellknownconfiguration", +] diff --git a/otdf-python-proto/src/otdf_python_proto/authorization/__init__.py b/otdf-python-proto/src/otdf_python_proto/authorization/__init__.py new file mode 100644 index 0000000..025b306 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/authorization/__init__.py @@ -0,0 +1 @@ +"""authorization protobuf definitions.""" diff --git a/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2.py b/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2.py new file mode 100644 index 0000000..2e9f43a --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: authorization/authorization.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'authorization/authorization.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!authorization/authorization.proto\x12\rauthorization\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto\x1a\x14policy/objects.proto\")\n\x05Token\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x10\n\x03jwt\x18\x02 \x01(\tR\x03jwt\"\xc9\x03\n\x06\x45ntity\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12%\n\remail_address\x18\x02 \x01(\tH\x00R\x0c\x65mailAddress\x12\x1d\n\tuser_name\x18\x03 \x01(\tH\x00R\x08userName\x12,\n\x11remote_claims_url\x18\x04 \x01(\tH\x00R\x0fremoteClaimsUrl\x12\x14\n\x04uuid\x18\x05 \x01(\tH\x00R\x04uuid\x12.\n\x06\x63laims\x18\x06 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\x06\x63laims\x12\x35\n\x06\x63ustom\x18\x07 \x01(\x0b\x32\x1b.authorization.EntityCustomH\x00R\x06\x63ustom\x12\x1d\n\tclient_id\x18\x08 \x01(\tH\x00R\x08\x63lientId\x12:\n\x08\x63\x61tegory\x18\t \x01(\x0e\x32\x1e.authorization.Entity.CategoryR\x08\x63\x61tegory\"T\n\x08\x43\x61tegory\x12\x18\n\x14\x43\x41TEGORY_UNSPECIFIED\x10\x00\x12\x14\n\x10\x43\x41TEGORY_SUBJECT\x10\x01\x12\x18\n\x14\x43\x41TEGORY_ENVIRONMENT\x10\x02\x42\r\n\x0b\x65ntity_type\"B\n\x0c\x45ntityCustom\x12\x32\n\textension\x18\x01 \x01(\x0b\x32\x14.google.protobuf.AnyR\textension\"P\n\x0b\x45ntityChain\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x31\n\x08\x65ntities\x18\x02 \x03(\x0b\x32\x15.authorization.EntityR\x08\x65ntities\"\xcf\x01\n\x0f\x44\x65\x63isionRequest\x12(\n\x07\x61\x63tions\x18\x01 \x03(\x0b\x32\x0e.policy.ActionR\x07\x61\x63tions\x12?\n\rentity_chains\x18\x02 
\x03(\x0b\x32\x1a.authorization.EntityChainR\x0c\x65ntityChains\x12Q\n\x13resource_attributes\x18\x03 \x03(\x0b\x32 .authorization.ResourceAttributeR\x12resourceAttributes\"\xce\x02\n\x10\x44\x65\x63isionResponse\x12&\n\x0f\x65ntity_chain_id\x18\x01 \x01(\tR\rentityChainId\x12\x34\n\x16resource_attributes_id\x18\x02 \x01(\tR\x14resourceAttributesId\x12&\n\x06\x61\x63tion\x18\x03 \x01(\x0b\x32\x0e.policy.ActionR\x06\x61\x63tion\x12\x44\n\x08\x64\x65\x63ision\x18\x04 \x01(\x0e\x32(.authorization.DecisionResponse.DecisionR\x08\x64\x65\x63ision\x12 \n\x0bobligations\x18\x05 \x03(\tR\x0bobligations\"L\n\x08\x44\x65\x63ision\x12\x18\n\x14\x44\x45\x43ISION_UNSPECIFIED\x10\x00\x12\x11\n\rDECISION_DENY\x10\x01\x12\x13\n\x0f\x44\x45\x43ISION_PERMIT\x10\x02\"b\n\x13GetDecisionsRequest\x12K\n\x11\x64\x65\x63ision_requests\x18\x01 \x03(\x0b\x32\x1e.authorization.DecisionRequestR\x10\x64\x65\x63isionRequests\"f\n\x14GetDecisionsResponse\x12N\n\x12\x64\x65\x63ision_responses\x18\x01 \x03(\x0b\x32\x1f.authorization.DecisionResponseR\x11\x64\x65\x63isionResponses\"\xfa\x01\n\x16GetEntitlementsRequest\x12\x31\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x15.authorization.EntityR\x08\x65ntities\x12;\n\x05scope\x18\x02 \x01(\x0b\x32 .authorization.ResourceAttributeH\x00R\x05scope\x88\x01\x01\x12\x45\n\x1cwith_comprehensive_hierarchy\x18\x03 \x01(\x08H\x01R\x1awithComprehensiveHierarchy\x88\x01\x01\x42\x08\n\x06_scopeB\x1f\n\x1d_with_comprehensive_hierarchy\"c\n\x12\x45ntityEntitlements\x12\x1b\n\tentity_id\x18\x01 \x01(\tR\x08\x65ntityId\x12\x30\n\x14\x61ttribute_value_fqns\x18\x02 \x03(\tR\x12\x61ttributeValueFqns\"{\n\x11ResourceAttribute\x12\x34\n\x16resource_attributes_id\x18\x01 \x01(\tR\x14resourceAttributesId\x12\x30\n\x14\x61ttribute_value_fqns\x18\x02 \x03(\tR\x12\x61ttributeValueFqns\"`\n\x17GetEntitlementsResponse\x12\x45\n\x0c\x65ntitlements\x18\x01 \x03(\x0b\x32!.authorization.EntityEntitlementsR\x0c\x65ntitlements\"\xc1\x01\n\x14TokenDecisionRequest\x12(\n\x07\x61\x63tions\x18\x01 \x03(\x0b\x32\x0e.policy.ActionR\x07\x61\x63tions\x12,\n\x06tokens\x18\x02 \x03(\x0b\x32\x14.authorization.TokenR\x06tokens\x12Q\n\x13resource_attributes\x18\x03 \x03(\x0b\x32 .authorization.ResourceAttributeR\x12resourceAttributes\"n\n\x1aGetDecisionsByTokenRequest\x12P\n\x11\x64\x65\x63ision_requests\x18\x01 \x03(\x0b\x32#.authorization.TokenDecisionRequestR\x10\x64\x65\x63isionRequests\"m\n\x1bGetDecisionsByTokenResponse\x12N\n\x12\x64\x65\x63ision_responses\x18\x01 \x03(\x0b\x32\x1f.authorization.DecisionResponseR\x11\x64\x65\x63isionResponses2\x9c\x03\n\x14\x41uthorizationService\x12u\n\x0cGetDecisions\x12\".authorization.GetDecisionsRequest\x1a#.authorization.GetDecisionsResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v1/authorization:\x01*\x12\x8d\x01\n\x13GetDecisionsByToken\x12).authorization.GetDecisionsByTokenRequest\x1a*.authorization.GetDecisionsByTokenResponse\"\x1f\x82\xd3\xe4\x93\x02\x19\"\x17/v1/token/authorization\x12}\n\x0fGetEntitlements\x12%.authorization.GetEntitlementsRequest\x1a&.authorization.GetEntitlementsResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v1/entitlements:\x01*B{\n\x11\x63om.authorizationB\x12\x41uthorizationProtoP\x01\xa2\x02\x03\x41XX\xaa\x02\rAuthorization\xca\x02\rAuthorization\xe2\x02\x19\x41uthorization\\GPBMetadata\xea\x02\rAuthorizationb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'authorization.authorization_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + 
_globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\021com.authorizationB\022AuthorizationProtoP\001\242\002\003AXX\252\002\rAuthorization\312\002\rAuthorization\342\002\031Authorization\\GPBMetadata\352\002\rAuthorization' + _globals['_AUTHORIZATIONSERVICE'].methods_by_name['GetDecisions']._loaded_options = None + _globals['_AUTHORIZATIONSERVICE'].methods_by_name['GetDecisions']._serialized_options = b'\202\323\344\223\002\026\"\021/v1/authorization:\001*' + _globals['_AUTHORIZATIONSERVICE'].methods_by_name['GetDecisionsByToken']._loaded_options = None + _globals['_AUTHORIZATIONSERVICE'].methods_by_name['GetDecisionsByToken']._serialized_options = b'\202\323\344\223\002\031\"\027/v1/token/authorization' + _globals['_AUTHORIZATIONSERVICE'].methods_by_name['GetEntitlements']._loaded_options = None + _globals['_AUTHORIZATIONSERVICE'].methods_by_name['GetEntitlements']._serialized_options = b'\202\323\344\223\002\025\"\020/v1/entitlements:\001*' + _globals['_TOKEN']._serialized_start=131 + _globals['_TOKEN']._serialized_end=172 + _globals['_ENTITY']._serialized_start=175 + _globals['_ENTITY']._serialized_end=632 + _globals['_ENTITY_CATEGORY']._serialized_start=533 + _globals['_ENTITY_CATEGORY']._serialized_end=617 + _globals['_ENTITYCUSTOM']._serialized_start=634 + _globals['_ENTITYCUSTOM']._serialized_end=700 + _globals['_ENTITYCHAIN']._serialized_start=702 + _globals['_ENTITYCHAIN']._serialized_end=782 + _globals['_DECISIONREQUEST']._serialized_start=785 + _globals['_DECISIONREQUEST']._serialized_end=992 + _globals['_DECISIONRESPONSE']._serialized_start=995 + _globals['_DECISIONRESPONSE']._serialized_end=1329 + _globals['_DECISIONRESPONSE_DECISION']._serialized_start=1253 + _globals['_DECISIONRESPONSE_DECISION']._serialized_end=1329 + _globals['_GETDECISIONSREQUEST']._serialized_start=1331 + _globals['_GETDECISIONSREQUEST']._serialized_end=1429 + _globals['_GETDECISIONSRESPONSE']._serialized_start=1431 + _globals['_GETDECISIONSRESPONSE']._serialized_end=1533 + _globals['_GETENTITLEMENTSREQUEST']._serialized_start=1536 + _globals['_GETENTITLEMENTSREQUEST']._serialized_end=1786 + _globals['_ENTITYENTITLEMENTS']._serialized_start=1788 + _globals['_ENTITYENTITLEMENTS']._serialized_end=1887 + _globals['_RESOURCEATTRIBUTE']._serialized_start=1889 + _globals['_RESOURCEATTRIBUTE']._serialized_end=2012 + _globals['_GETENTITLEMENTSRESPONSE']._serialized_start=2014 + _globals['_GETENTITLEMENTSRESPONSE']._serialized_end=2110 + _globals['_TOKENDECISIONREQUEST']._serialized_start=2113 + _globals['_TOKENDECISIONREQUEST']._serialized_end=2306 + _globals['_GETDECISIONSBYTOKENREQUEST']._serialized_start=2308 + _globals['_GETDECISIONSBYTOKENREQUEST']._serialized_end=2418 + _globals['_GETDECISIONSBYTOKENRESPONSE']._serialized_start=2420 + _globals['_GETDECISIONSBYTOKENRESPONSE']._serialized_end=2529 + _globals['_AUTHORIZATIONSERVICE']._serialized_start=2532 + _globals['_AUTHORIZATIONSERVICE']._serialized_end=2944 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2.pyi new file mode 100644 index 0000000..a84d24b --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2.pyi @@ -0,0 +1,161 @@ +from google.api import annotations_pb2 as _annotations_pb2 +from google.protobuf import any_pb2 as _any_pb2 +from policy import objects_pb2 as _objects_pb2 +from google.protobuf.internal import 
containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class Token(_message.Message): + __slots__ = ("id", "jwt") + ID_FIELD_NUMBER: _ClassVar[int] + JWT_FIELD_NUMBER: _ClassVar[int] + id: str + jwt: str + def __init__(self, id: _Optional[str] = ..., jwt: _Optional[str] = ...) -> None: ... + +class Entity(_message.Message): + __slots__ = ("id", "email_address", "user_name", "remote_claims_url", "uuid", "claims", "custom", "client_id", "category") + class Category(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + CATEGORY_UNSPECIFIED: _ClassVar[Entity.Category] + CATEGORY_SUBJECT: _ClassVar[Entity.Category] + CATEGORY_ENVIRONMENT: _ClassVar[Entity.Category] + CATEGORY_UNSPECIFIED: Entity.Category + CATEGORY_SUBJECT: Entity.Category + CATEGORY_ENVIRONMENT: Entity.Category + ID_FIELD_NUMBER: _ClassVar[int] + EMAIL_ADDRESS_FIELD_NUMBER: _ClassVar[int] + USER_NAME_FIELD_NUMBER: _ClassVar[int] + REMOTE_CLAIMS_URL_FIELD_NUMBER: _ClassVar[int] + UUID_FIELD_NUMBER: _ClassVar[int] + CLAIMS_FIELD_NUMBER: _ClassVar[int] + CUSTOM_FIELD_NUMBER: _ClassVar[int] + CLIENT_ID_FIELD_NUMBER: _ClassVar[int] + CATEGORY_FIELD_NUMBER: _ClassVar[int] + id: str + email_address: str + user_name: str + remote_claims_url: str + uuid: str + claims: _any_pb2.Any + custom: EntityCustom + client_id: str + category: Entity.Category + def __init__(self, id: _Optional[str] = ..., email_address: _Optional[str] = ..., user_name: _Optional[str] = ..., remote_claims_url: _Optional[str] = ..., uuid: _Optional[str] = ..., claims: _Optional[_Union[_any_pb2.Any, _Mapping]] = ..., custom: _Optional[_Union[EntityCustom, _Mapping]] = ..., client_id: _Optional[str] = ..., category: _Optional[_Union[Entity.Category, str]] = ...) -> None: ... + +class EntityCustom(_message.Message): + __slots__ = ("extension",) + EXTENSION_FIELD_NUMBER: _ClassVar[int] + extension: _any_pb2.Any + def __init__(self, extension: _Optional[_Union[_any_pb2.Any, _Mapping]] = ...) -> None: ... + +class EntityChain(_message.Message): + __slots__ = ("id", "entities") + ID_FIELD_NUMBER: _ClassVar[int] + ENTITIES_FIELD_NUMBER: _ClassVar[int] + id: str + entities: _containers.RepeatedCompositeFieldContainer[Entity] + def __init__(self, id: _Optional[str] = ..., entities: _Optional[_Iterable[_Union[Entity, _Mapping]]] = ...) -> None: ... + +class DecisionRequest(_message.Message): + __slots__ = ("actions", "entity_chains", "resource_attributes") + ACTIONS_FIELD_NUMBER: _ClassVar[int] + ENTITY_CHAINS_FIELD_NUMBER: _ClassVar[int] + RESOURCE_ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + actions: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Action] + entity_chains: _containers.RepeatedCompositeFieldContainer[EntityChain] + resource_attributes: _containers.RepeatedCompositeFieldContainer[ResourceAttribute] + def __init__(self, actions: _Optional[_Iterable[_Union[_objects_pb2.Action, _Mapping]]] = ..., entity_chains: _Optional[_Iterable[_Union[EntityChain, _Mapping]]] = ..., resource_attributes: _Optional[_Iterable[_Union[ResourceAttribute, _Mapping]]] = ...) -> None: ... 
+ +class DecisionResponse(_message.Message): + __slots__ = ("entity_chain_id", "resource_attributes_id", "action", "decision", "obligations") + class Decision(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + DECISION_UNSPECIFIED: _ClassVar[DecisionResponse.Decision] + DECISION_DENY: _ClassVar[DecisionResponse.Decision] + DECISION_PERMIT: _ClassVar[DecisionResponse.Decision] + DECISION_UNSPECIFIED: DecisionResponse.Decision + DECISION_DENY: DecisionResponse.Decision + DECISION_PERMIT: DecisionResponse.Decision + ENTITY_CHAIN_ID_FIELD_NUMBER: _ClassVar[int] + RESOURCE_ATTRIBUTES_ID_FIELD_NUMBER: _ClassVar[int] + ACTION_FIELD_NUMBER: _ClassVar[int] + DECISION_FIELD_NUMBER: _ClassVar[int] + OBLIGATIONS_FIELD_NUMBER: _ClassVar[int] + entity_chain_id: str + resource_attributes_id: str + action: _objects_pb2.Action + decision: DecisionResponse.Decision + obligations: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, entity_chain_id: _Optional[str] = ..., resource_attributes_id: _Optional[str] = ..., action: _Optional[_Union[_objects_pb2.Action, _Mapping]] = ..., decision: _Optional[_Union[DecisionResponse.Decision, str]] = ..., obligations: _Optional[_Iterable[str]] = ...) -> None: ... + +class GetDecisionsRequest(_message.Message): + __slots__ = ("decision_requests",) + DECISION_REQUESTS_FIELD_NUMBER: _ClassVar[int] + decision_requests: _containers.RepeatedCompositeFieldContainer[DecisionRequest] + def __init__(self, decision_requests: _Optional[_Iterable[_Union[DecisionRequest, _Mapping]]] = ...) -> None: ... + +class GetDecisionsResponse(_message.Message): + __slots__ = ("decision_responses",) + DECISION_RESPONSES_FIELD_NUMBER: _ClassVar[int] + decision_responses: _containers.RepeatedCompositeFieldContainer[DecisionResponse] + def __init__(self, decision_responses: _Optional[_Iterable[_Union[DecisionResponse, _Mapping]]] = ...) -> None: ... + +class GetEntitlementsRequest(_message.Message): + __slots__ = ("entities", "scope", "with_comprehensive_hierarchy") + ENTITIES_FIELD_NUMBER: _ClassVar[int] + SCOPE_FIELD_NUMBER: _ClassVar[int] + WITH_COMPREHENSIVE_HIERARCHY_FIELD_NUMBER: _ClassVar[int] + entities: _containers.RepeatedCompositeFieldContainer[Entity] + scope: ResourceAttribute + with_comprehensive_hierarchy: bool + def __init__(self, entities: _Optional[_Iterable[_Union[Entity, _Mapping]]] = ..., scope: _Optional[_Union[ResourceAttribute, _Mapping]] = ..., with_comprehensive_hierarchy: bool = ...) -> None: ... + +class EntityEntitlements(_message.Message): + __slots__ = ("entity_id", "attribute_value_fqns") + ENTITY_ID_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUE_FQNS_FIELD_NUMBER: _ClassVar[int] + entity_id: str + attribute_value_fqns: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, entity_id: _Optional[str] = ..., attribute_value_fqns: _Optional[_Iterable[str]] = ...) -> None: ... + +class ResourceAttribute(_message.Message): + __slots__ = ("resource_attributes_id", "attribute_value_fqns") + RESOURCE_ATTRIBUTES_ID_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUE_FQNS_FIELD_NUMBER: _ClassVar[int] + resource_attributes_id: str + attribute_value_fqns: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, resource_attributes_id: _Optional[str] = ..., attribute_value_fqns: _Optional[_Iterable[str]] = ...) -> None: ... 
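These stubs pair with the synchronous Connect client generated further below. A minimal usage sketch for fetching entitlements, assuming the generated modules are importable under the same paths the generated code itself uses (`authorization.authorization_pb2`), that connectrpc's HeaderInput accepts a plain mapping, and with a placeholder endpoint, username, and bearer token:

    from authorization import authorization_pb2 as authz_pb2
    from authorization.authorization_pb2_connect import AuthorizationServiceClient

    # Placeholder endpoint and credentials, for illustration only.
    client = AuthorizationServiceClient(base_url="https://platform.example.com")
    request = authz_pb2.GetEntitlementsRequest(
        entities=[authz_pb2.Entity(user_name="alice")]  # oneof entity_type: set one field
    )
    response = client.get_entitlements(
        request, extra_headers={"authorization": "Bearer <token>"}
    )
    for entitlement in response.entitlements:
        print(entitlement.entity_id, list(entitlement.attribute_value_fqns))
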
+ +class GetEntitlementsResponse(_message.Message): + __slots__ = ("entitlements",) + ENTITLEMENTS_FIELD_NUMBER: _ClassVar[int] + entitlements: _containers.RepeatedCompositeFieldContainer[EntityEntitlements] + def __init__(self, entitlements: _Optional[_Iterable[_Union[EntityEntitlements, _Mapping]]] = ...) -> None: ... + +class TokenDecisionRequest(_message.Message): + __slots__ = ("actions", "tokens", "resource_attributes") + ACTIONS_FIELD_NUMBER: _ClassVar[int] + TOKENS_FIELD_NUMBER: _ClassVar[int] + RESOURCE_ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + actions: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Action] + tokens: _containers.RepeatedCompositeFieldContainer[Token] + resource_attributes: _containers.RepeatedCompositeFieldContainer[ResourceAttribute] + def __init__(self, actions: _Optional[_Iterable[_Union[_objects_pb2.Action, _Mapping]]] = ..., tokens: _Optional[_Iterable[_Union[Token, _Mapping]]] = ..., resource_attributes: _Optional[_Iterable[_Union[ResourceAttribute, _Mapping]]] = ...) -> None: ... + +class GetDecisionsByTokenRequest(_message.Message): + __slots__ = ("decision_requests",) + DECISION_REQUESTS_FIELD_NUMBER: _ClassVar[int] + decision_requests: _containers.RepeatedCompositeFieldContainer[TokenDecisionRequest] + def __init__(self, decision_requests: _Optional[_Iterable[_Union[TokenDecisionRequest, _Mapping]]] = ...) -> None: ... + +class GetDecisionsByTokenResponse(_message.Message): + __slots__ = ("decision_responses",) + DECISION_RESPONSES_FIELD_NUMBER: _ClassVar[int] + decision_responses: _containers.RepeatedCompositeFieldContainer[DecisionResponse] + def __init__(self, decision_responses: _Optional[_Iterable[_Union[DecisionResponse, _Mapping]]] = ...) -> None: ... diff --git a/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2_connect.py new file mode 100644 index 0000000..a338f78 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/authorization/authorization_pb2_connect.py @@ -0,0 +1,191 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. 
+ if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import authorization.authorization_pb2 + +class AuthorizationServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_get_decisions( + self, req: authorization.authorization_pb2.GetDecisionsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.authorization_pb2.GetDecisionsResponse]: + """Low-level method to call GetDecisions, granting access to errors and metadata""" + url = self.base_url + "/authorization.AuthorizationService/GetDecisions" + return self._connect_client.call_unary(url, req, authorization.authorization_pb2.GetDecisionsResponse,extra_headers, timeout_seconds) + + + def get_decisions( + self, req: authorization.authorization_pb2.GetDecisionsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.authorization_pb2.GetDecisionsResponse: + response = self.call_get_decisions(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_decisions_by_token( + self, req: authorization.authorization_pb2.GetDecisionsByTokenRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.authorization_pb2.GetDecisionsByTokenResponse]: + """Low-level method to call GetDecisionsByToken, granting access to errors and metadata""" + url = self.base_url + "/authorization.AuthorizationService/GetDecisionsByToken" + return self._connect_client.call_unary(url, req, authorization.authorization_pb2.GetDecisionsByTokenResponse,extra_headers, timeout_seconds) + + + def get_decisions_by_token( + self, req: authorization.authorization_pb2.GetDecisionsByTokenRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.authorization_pb2.GetDecisionsByTokenResponse: + response = self.call_get_decisions_by_token(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_entitlements( + self, req: authorization.authorization_pb2.GetEntitlementsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.authorization_pb2.GetEntitlementsResponse]: + """Low-level method to call GetEntitlements, granting access to errors and metadata""" + url = self.base_url + "/authorization.AuthorizationService/GetEntitlements" + return self._connect_client.call_unary(url, req, authorization.authorization_pb2.GetEntitlementsResponse,extra_headers, timeout_seconds) + + + def get_entitlements( + self, req: authorization.authorization_pb2.GetEntitlementsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.authorization_pb2.GetEntitlementsResponse: + response = self.call_get_entitlements(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise 
ConnectProtocolError('missing response message') + return msg + + +class AsyncAuthorizationServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_get_decisions( + self, req: authorization.authorization_pb2.GetDecisionsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.authorization_pb2.GetDecisionsResponse]: + """Low-level method to call GetDecisions, granting access to errors and metadata""" + url = self.base_url + "/authorization.AuthorizationService/GetDecisions" + return await self._connect_client.call_unary(url, req, authorization.authorization_pb2.GetDecisionsResponse,extra_headers, timeout_seconds) + + async def get_decisions( + self, req: authorization.authorization_pb2.GetDecisionsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.authorization_pb2.GetDecisionsResponse: + response = await self.call_get_decisions(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_decisions_by_token( + self, req: authorization.authorization_pb2.GetDecisionsByTokenRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.authorization_pb2.GetDecisionsByTokenResponse]: + """Low-level method to call GetDecisionsByToken, granting access to errors and metadata""" + url = self.base_url + "/authorization.AuthorizationService/GetDecisionsByToken" + return await self._connect_client.call_unary(url, req, authorization.authorization_pb2.GetDecisionsByTokenResponse,extra_headers, timeout_seconds) + + async def get_decisions_by_token( + self, req: authorization.authorization_pb2.GetDecisionsByTokenRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.authorization_pb2.GetDecisionsByTokenResponse: + response = await self.call_get_decisions_by_token(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_entitlements( + self, req: authorization.authorization_pb2.GetEntitlementsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.authorization_pb2.GetEntitlementsResponse]: + """Low-level method to call GetEntitlements, granting access to errors and metadata""" + url = self.base_url + "/authorization.AuthorizationService/GetEntitlements" + return await self._connect_client.call_unary(url, req, authorization.authorization_pb2.GetEntitlementsResponse,extra_headers, timeout_seconds) + + async def get_entitlements( + self, req: authorization.authorization_pb2.GetEntitlementsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.authorization_pb2.GetEntitlementsResponse: + response = await self.call_get_entitlements(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return 
msg + + +@typing.runtime_checkable +class AuthorizationServiceProtocol(typing.Protocol): + def get_decisions(self, req: ClientRequest[authorization.authorization_pb2.GetDecisionsRequest]) -> ServerResponse[authorization.authorization_pb2.GetDecisionsResponse]: + ... + def get_decisions_by_token(self, req: ClientRequest[authorization.authorization_pb2.GetDecisionsByTokenRequest]) -> ServerResponse[authorization.authorization_pb2.GetDecisionsByTokenResponse]: + ... + def get_entitlements(self, req: ClientRequest[authorization.authorization_pb2.GetEntitlementsRequest]) -> ServerResponse[authorization.authorization_pb2.GetEntitlementsResponse]: + ... + +AUTHORIZATION_SERVICE_PATH_PREFIX = "/authorization.AuthorizationService" + +def wsgi_authorization_service(implementation: AuthorizationServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/authorization.AuthorizationService/GetDecisions", implementation.get_decisions, authorization.authorization_pb2.GetDecisionsRequest) + app.register_unary_rpc("/authorization.AuthorizationService/GetDecisionsByToken", implementation.get_decisions_by_token, authorization.authorization_pb2.GetDecisionsByTokenRequest) + app.register_unary_rpc("/authorization.AuthorizationService/GetEntitlements", implementation.get_entitlements, authorization.authorization_pb2.GetEntitlementsRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2.py b/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2.py new file mode 100644 index 0000000..184bb3e --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: authorization/v2/authorization.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'authorization/v2/authorization.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from entity import entity_pb2 as entity_dot_entity__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$authorization/v2/authorization.proto\x12\x10\x61uthorization.v2\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x65ntity/entity.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x14policy/objects.proto\"\x9a\x03\n\x10\x45ntityIdentifier\x12\xa2\x01\n\x0c\x65ntity_chain\x18\x01 \x01(\x0b\x32\x13.entity.EntityChainBh\xbaHe\xba\x01\x62\n\x15\x65ntity_chain_required\x12\x19\x65ntities must be provided\x1a.has(this.entities) && this.entities.size() > 0H\x00R\x0b\x65ntityChain\x12O\n\x1dregistered_resource_value_fqn\x18\x02 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x1aregisteredResourceValueFqn\x12{\n\x05token\x18\x03 \x01(\x0b\x32\r.entity.TokenBT\xbaHQ\xba\x01N\n\x0etoken_required\x12\x16token must be provided\x1a$has(this.jwt) && this.jwt.size() > 0H\x00R\x05tokenB\x13\n\nidentifier\x12\x05\xbaH\x02\x08\x01\"\x81\x03\n\x12\x45ntityEntitlements\x12!\n\x0c\x65phemeral_id\x18\x01 \x01(\tR\x0b\x65phemeralId\x12\x8b\x01\n\x1f\x61\x63tions_per_attribute_value_fqn\x18\x02 \x03(\x0b\x32\x45.authorization.v2.EntityEntitlements.ActionsPerAttributeValueFqnEntryR\x1b\x61\x63tionsPerAttributeValueFqn\x1a\x37\n\x0b\x41\x63tionsList\x12(\n\x07\x61\x63tions\x18\x01 \x03(\x0b\x32\x0e.policy.ActionR\x07\x61\x63tions\x1a\x80\x01\n ActionsPerAttributeValueFqnEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x46\n\x05value\x18\x02 \x01(\x0b\x32\x30.authorization.v2.EntityEntitlements.ActionsListR\x05value:\x02\x38\x01\"\xa6\x03\n\x08Resource\x12!\n\x0c\x65phemeral_id\x18\x01 \x01(\tR\x0b\x65phemeralId\x12\xf2\x01\n\x10\x61ttribute_values\x18\x02 \x01(\x0b\x32*.authorization.v2.Resource.AttributeValuesB\x98\x01\xbaH\x94\x01\xba\x01\x90\x01\n\x19\x61ttribute_values_required\x12\x38if provided, resource.attribute_values must not be empty\x1a\x39this.fqns.size() > 0 && this.fqns.all(item, item.isUri())H\x00R\x0f\x61ttributeValues\x12O\n\x1dregistered_resource_value_fqn\x18\x03 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x1aregisteredResourceValueFqn\x1a%\n\x0f\x41ttributeValues\x12\x12\n\x04\x66qns\x18\x01 \x03(\tR\x04\x66qnsB\n\n\x08resource\"~\n\x10ResourceDecision\x12\x32\n\x15\x65phemeral_resource_id\x18\x01 \x01(\tR\x13\x65phemeralResourceId\x12\x36\n\x08\x64\x65\x63ision\x18\x02 \x01(\x0e\x32\x1a.authorization.v2.DecisionR\x08\x64\x65\x63ision\"\xc4\x02\n\x12GetDecisionRequest\x12W\n\x11\x65ntity_identifier\x18\x01 \x01(\x0b\x32\".authorization.v2.EntityIdentifierB\x06\xbaH\x03\xc8\x01\x01R\x10\x65ntityIdentifier\x12.\n\x06\x61\x63tion\x18\x02 
\x01(\x0b\x32\x0e.policy.ActionB\x06\xbaH\x03\xc8\x01\x01R\x06\x61\x63tion\x12>\n\x08resource\x18\x03 \x01(\x0b\x32\x1a.authorization.v2.ResourceB\x06\xbaH\x03\xc8\x01\x01R\x08resource:e\xbaHb\x1a`\n)get_decision_request.action_name_required\x12\x1c\x61\x63tion.name must be provided\x1a\x15has(this.action.name)\"U\n\x13GetDecisionResponse\x12>\n\x08\x64\x65\x63ision\x18\x01 \x01(\x0b\x32\".authorization.v2.ResourceDecisionR\x08\x64\x65\x63ision\"\xde\x02\n\x1fGetDecisionMultiResourceRequest\x12W\n\x11\x65ntity_identifier\x18\x01 \x01(\x0b\x32\".authorization.v2.EntityIdentifierB\x06\xbaH\x03\xc8\x01\x01R\x10\x65ntityIdentifier\x12.\n\x06\x61\x63tion\x18\x02 \x01(\x0b\x32\x0e.policy.ActionB\x06\xbaH\x03\xc8\x01\x01R\x06\x61\x63tion\x12\x45\n\tresources\x18\x03 \x03(\x0b\x32\x1a.authorization.v2.ResourceB\x0b\xbaH\x08\x92\x01\x02\x08\x01\xc8\x01\x01R\tresources:k\xbaHh\x1a\x66\n/get_decision_multi_request.action_name_required\x12\x1c\x61\x63tion.name must be provided\x1a\x15has(this.action.name)\"\xb6\x01\n GetDecisionMultiResourceResponse\x12?\n\rall_permitted\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\x0c\x61llPermitted\x12Q\n\x12resource_decisions\x18\x02 \x03(\x0b\x32\".authorization.v2.ResourceDecisionR\x11resourceDecisions\"x\n\x16GetDecisionBulkRequest\x12^\n\x11\x64\x65\x63ision_requests\x18\x01 \x03(\x0b\x32\x31.authorization.v2.GetDecisionMultiResourceRequestR\x10\x64\x65\x63isionRequests\"|\n\x17GetDecisionBulkResponse\x12\x61\n\x12\x64\x65\x63ision_responses\x18\x01 \x03(\x0b\x32\x32.authorization.v2.GetDecisionMultiResourceResponseR\x11\x64\x65\x63isionResponses\"\xd9\x01\n\x16GetEntitlementsRequest\x12W\n\x11\x65ntity_identifier\x18\x01 \x01(\x0b\x32\".authorization.v2.EntityIdentifierB\x06\xbaH\x03\xc8\x01\x01R\x10\x65ntityIdentifier\x12\x45\n\x1cwith_comprehensive_hierarchy\x18\x02 \x01(\x08H\x00R\x1awithComprehensiveHierarchy\x88\x01\x01\x42\x1f\n\x1d_with_comprehensive_hierarchy\"c\n\x17GetEntitlementsResponse\x12H\n\x0c\x65ntitlements\x18\x01 \x03(\x0b\x32$.authorization.v2.EntityEntitlementsR\x0c\x65ntitlements*L\n\x08\x44\x65\x63ision\x12\x18\n\x14\x44\x45\x43ISION_UNSPECIFIED\x10\x00\x12\x11\n\rDECISION_DENY\x10\x01\x12\x13\n\x0f\x44\x45\x43ISION_PERMIT\x10\x02\x32\xce\x03\n\x14\x41uthorizationService\x12\\\n\x0bGetDecision\x12$.authorization.v2.GetDecisionRequest\x1a%.authorization.v2.GetDecisionResponse\"\x00\x12\x83\x01\n\x18GetDecisionMultiResource\x12\x31.authorization.v2.GetDecisionMultiResourceRequest\x1a\x32.authorization.v2.GetDecisionMultiResourceResponse\"\x00\x12h\n\x0fGetDecisionBulk\x12(.authorization.v2.GetDecisionBulkRequest\x1a).authorization.v2.GetDecisionBulkResponse\"\x00\x12h\n\x0fGetEntitlements\x12(.authorization.v2.GetEntitlementsRequest\x1a).authorization.v2.GetEntitlementsResponse\"\x00\x42\x8b\x01\n\x14\x63om.authorization.v2B\x12\x41uthorizationProtoP\x01\xa2\x02\x03\x41XX\xaa\x02\x10\x41uthorization.V2\xca\x02\x10\x41uthorization\\V2\xe2\x02\x1c\x41uthorization\\V2\\GPBMetadata\xea\x02\x11\x41uthorization::V2b\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'authorization.v2.authorization_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = 
b'\n\024com.authorization.v2B\022AuthorizationProtoP\001\242\002\003AXX\252\002\020Authorization.V2\312\002\020Authorization\\V2\342\002\034Authorization\\V2\\GPBMetadata\352\002\021Authorization::V2' + _globals['_ENTITYIDENTIFIER'].oneofs_by_name['identifier']._loaded_options = None + _globals['_ENTITYIDENTIFIER'].oneofs_by_name['identifier']._serialized_options = b'\272H\002\010\001' + _globals['_ENTITYIDENTIFIER'].fields_by_name['entity_chain']._loaded_options = None + _globals['_ENTITYIDENTIFIER'].fields_by_name['entity_chain']._serialized_options = b'\272He\272\001b\n\025entity_chain_required\022\031entities must be provided\032.has(this.entities) && this.entities.size() > 0' + _globals['_ENTITYIDENTIFIER'].fields_by_name['registered_resource_value_fqn']._loaded_options = None + _globals['_ENTITYIDENTIFIER'].fields_by_name['registered_resource_value_fqn']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_ENTITYIDENTIFIER'].fields_by_name['token']._loaded_options = None + _globals['_ENTITYIDENTIFIER'].fields_by_name['token']._serialized_options = b'\272HQ\272\001N\n\016token_required\022\026token must be provided\032$has(this.jwt) && this.jwt.size() > 0' + _globals['_ENTITYENTITLEMENTS_ACTIONSPERATTRIBUTEVALUEFQNENTRY']._loaded_options = None + _globals['_ENTITYENTITLEMENTS_ACTIONSPERATTRIBUTEVALUEFQNENTRY']._serialized_options = b'8\001' + _globals['_RESOURCE'].fields_by_name['attribute_values']._loaded_options = None + _globals['_RESOURCE'].fields_by_name['attribute_values']._serialized_options = b'\272H\224\001\272\001\220\001\n\031attribute_values_required\0228if provided, resource.attribute_values must not be empty\0329this.fqns.size() > 0 && this.fqns.all(item, item.isUri())' + _globals['_RESOURCE'].fields_by_name['registered_resource_value_fqn']._loaded_options = None + _globals['_RESOURCE'].fields_by_name['registered_resource_value_fqn']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_GETDECISIONREQUEST'].fields_by_name['entity_identifier']._loaded_options = None + _globals['_GETDECISIONREQUEST'].fields_by_name['entity_identifier']._serialized_options = b'\272H\003\310\001\001' + _globals['_GETDECISIONREQUEST'].fields_by_name['action']._loaded_options = None + _globals['_GETDECISIONREQUEST'].fields_by_name['action']._serialized_options = b'\272H\003\310\001\001' + _globals['_GETDECISIONREQUEST'].fields_by_name['resource']._loaded_options = None + _globals['_GETDECISIONREQUEST'].fields_by_name['resource']._serialized_options = b'\272H\003\310\001\001' + _globals['_GETDECISIONREQUEST']._loaded_options = None + _globals['_GETDECISIONREQUEST']._serialized_options = b'\272Hb\032`\n)get_decision_request.action_name_required\022\034action.name must be provided\032\025has(this.action.name)' + _globals['_GETDECISIONMULTIRESOURCEREQUEST'].fields_by_name['entity_identifier']._loaded_options = None + _globals['_GETDECISIONMULTIRESOURCEREQUEST'].fields_by_name['entity_identifier']._serialized_options = b'\272H\003\310\001\001' + _globals['_GETDECISIONMULTIRESOURCEREQUEST'].fields_by_name['action']._loaded_options = None + _globals['_GETDECISIONMULTIRESOURCEREQUEST'].fields_by_name['action']._serialized_options = b'\272H\003\310\001\001' + _globals['_GETDECISIONMULTIRESOURCEREQUEST'].fields_by_name['resources']._loaded_options = None + _globals['_GETDECISIONMULTIRESOURCEREQUEST'].fields_by_name['resources']._serialized_options = b'\272H\010\222\001\002\010\001\310\001\001' + _globals['_GETDECISIONMULTIRESOURCEREQUEST']._loaded_options = None + 
_globals['_GETDECISIONMULTIRESOURCEREQUEST']._serialized_options = b'\272Hh\032f\n/get_decision_multi_request.action_name_required\022\034action.name must be provided\032\025has(this.action.name)' + _globals['_GETENTITLEMENTSREQUEST'].fields_by_name['entity_identifier']._loaded_options = None + _globals['_GETENTITLEMENTSREQUEST'].fields_by_name['entity_identifier']._serialized_options = b'\272H\003\310\001\001' + _globals['_DECISION']._serialized_start=3037 + _globals['_DECISION']._serialized_end=3113 + _globals['_ENTITYIDENTIFIER']._serialized_start=163 + _globals['_ENTITYIDENTIFIER']._serialized_end=573 + _globals['_ENTITYENTITLEMENTS']._serialized_start=576 + _globals['_ENTITYENTITLEMENTS']._serialized_end=961 + _globals['_ENTITYENTITLEMENTS_ACTIONSLIST']._serialized_start=775 + _globals['_ENTITYENTITLEMENTS_ACTIONSLIST']._serialized_end=830 + _globals['_ENTITYENTITLEMENTS_ACTIONSPERATTRIBUTEVALUEFQNENTRY']._serialized_start=833 + _globals['_ENTITYENTITLEMENTS_ACTIONSPERATTRIBUTEVALUEFQNENTRY']._serialized_end=961 + _globals['_RESOURCE']._serialized_start=964 + _globals['_RESOURCE']._serialized_end=1386 + _globals['_RESOURCE_ATTRIBUTEVALUES']._serialized_start=1337 + _globals['_RESOURCE_ATTRIBUTEVALUES']._serialized_end=1374 + _globals['_RESOURCEDECISION']._serialized_start=1388 + _globals['_RESOURCEDECISION']._serialized_end=1514 + _globals['_GETDECISIONREQUEST']._serialized_start=1517 + _globals['_GETDECISIONREQUEST']._serialized_end=1841 + _globals['_GETDECISIONRESPONSE']._serialized_start=1843 + _globals['_GETDECISIONRESPONSE']._serialized_end=1928 + _globals['_GETDECISIONMULTIRESOURCEREQUEST']._serialized_start=1931 + _globals['_GETDECISIONMULTIRESOURCEREQUEST']._serialized_end=2281 + _globals['_GETDECISIONMULTIRESOURCERESPONSE']._serialized_start=2284 + _globals['_GETDECISIONMULTIRESOURCERESPONSE']._serialized_end=2466 + _globals['_GETDECISIONBULKREQUEST']._serialized_start=2468 + _globals['_GETDECISIONBULKREQUEST']._serialized_end=2588 + _globals['_GETDECISIONBULKRESPONSE']._serialized_start=2590 + _globals['_GETDECISIONBULKRESPONSE']._serialized_end=2714 + _globals['_GETENTITLEMENTSREQUEST']._serialized_start=2717 + _globals['_GETENTITLEMENTSREQUEST']._serialized_end=2934 + _globals['_GETENTITLEMENTSRESPONSE']._serialized_start=2936 + _globals['_GETENTITLEMENTSRESPONSE']._serialized_end=3035 + _globals['_AUTHORIZATIONSERVICE']._serialized_start=3116 + _globals['_AUTHORIZATIONSERVICE']._serialized_end=3578 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2.pyi new file mode 100644 index 0000000..925685d --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2.pyi @@ -0,0 +1,134 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from entity import entity_pb2 as _entity_pb2 +from google.protobuf import wrappers_pb2 as _wrappers_pb2 +from policy import objects_pb2 as _objects_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class Decision(int, 
metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + DECISION_UNSPECIFIED: _ClassVar[Decision] + DECISION_DENY: _ClassVar[Decision] + DECISION_PERMIT: _ClassVar[Decision] +DECISION_UNSPECIFIED: Decision +DECISION_DENY: Decision +DECISION_PERMIT: Decision + +class EntityIdentifier(_message.Message): + __slots__ = ("entity_chain", "registered_resource_value_fqn", "token") + ENTITY_CHAIN_FIELD_NUMBER: _ClassVar[int] + REGISTERED_RESOURCE_VALUE_FQN_FIELD_NUMBER: _ClassVar[int] + TOKEN_FIELD_NUMBER: _ClassVar[int] + entity_chain: _entity_pb2.EntityChain + registered_resource_value_fqn: str + token: _entity_pb2.Token + def __init__(self, entity_chain: _Optional[_Union[_entity_pb2.EntityChain, _Mapping]] = ..., registered_resource_value_fqn: _Optional[str] = ..., token: _Optional[_Union[_entity_pb2.Token, _Mapping]] = ...) -> None: ... + +class EntityEntitlements(_message.Message): + __slots__ = ("ephemeral_id", "actions_per_attribute_value_fqn") + class ActionsList(_message.Message): + __slots__ = ("actions",) + ACTIONS_FIELD_NUMBER: _ClassVar[int] + actions: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Action] + def __init__(self, actions: _Optional[_Iterable[_Union[_objects_pb2.Action, _Mapping]]] = ...) -> None: ... + class ActionsPerAttributeValueFqnEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: EntityEntitlements.ActionsList + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[EntityEntitlements.ActionsList, _Mapping]] = ...) -> None: ... + EPHEMERAL_ID_FIELD_NUMBER: _ClassVar[int] + ACTIONS_PER_ATTRIBUTE_VALUE_FQN_FIELD_NUMBER: _ClassVar[int] + ephemeral_id: str + actions_per_attribute_value_fqn: _containers.MessageMap[str, EntityEntitlements.ActionsList] + def __init__(self, ephemeral_id: _Optional[str] = ..., actions_per_attribute_value_fqn: _Optional[_Mapping[str, EntityEntitlements.ActionsList]] = ...) -> None: ... + +class Resource(_message.Message): + __slots__ = ("ephemeral_id", "attribute_values", "registered_resource_value_fqn") + class AttributeValues(_message.Message): + __slots__ = ("fqns",) + FQNS_FIELD_NUMBER: _ClassVar[int] + fqns: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, fqns: _Optional[_Iterable[str]] = ...) -> None: ... + EPHEMERAL_ID_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUES_FIELD_NUMBER: _ClassVar[int] + REGISTERED_RESOURCE_VALUE_FQN_FIELD_NUMBER: _ClassVar[int] + ephemeral_id: str + attribute_values: Resource.AttributeValues + registered_resource_value_fqn: str + def __init__(self, ephemeral_id: _Optional[str] = ..., attribute_values: _Optional[_Union[Resource.AttributeValues, _Mapping]] = ..., registered_resource_value_fqn: _Optional[str] = ...) -> None: ... + +class ResourceDecision(_message.Message): + __slots__ = ("ephemeral_resource_id", "decision") + EPHEMERAL_RESOURCE_ID_FIELD_NUMBER: _ClassVar[int] + DECISION_FIELD_NUMBER: _ClassVar[int] + ephemeral_resource_id: str + decision: Decision + def __init__(self, ephemeral_resource_id: _Optional[str] = ..., decision: _Optional[_Union[Decision, str]] = ...) -> None: ... 
+ +class GetDecisionRequest(_message.Message): + __slots__ = ("entity_identifier", "action", "resource") + ENTITY_IDENTIFIER_FIELD_NUMBER: _ClassVar[int] + ACTION_FIELD_NUMBER: _ClassVar[int] + RESOURCE_FIELD_NUMBER: _ClassVar[int] + entity_identifier: EntityIdentifier + action: _objects_pb2.Action + resource: Resource + def __init__(self, entity_identifier: _Optional[_Union[EntityIdentifier, _Mapping]] = ..., action: _Optional[_Union[_objects_pb2.Action, _Mapping]] = ..., resource: _Optional[_Union[Resource, _Mapping]] = ...) -> None: ... + +class GetDecisionResponse(_message.Message): + __slots__ = ("decision",) + DECISION_FIELD_NUMBER: _ClassVar[int] + decision: ResourceDecision + def __init__(self, decision: _Optional[_Union[ResourceDecision, _Mapping]] = ...) -> None: ... + +class GetDecisionMultiResourceRequest(_message.Message): + __slots__ = ("entity_identifier", "action", "resources") + ENTITY_IDENTIFIER_FIELD_NUMBER: _ClassVar[int] + ACTION_FIELD_NUMBER: _ClassVar[int] + RESOURCES_FIELD_NUMBER: _ClassVar[int] + entity_identifier: EntityIdentifier + action: _objects_pb2.Action + resources: _containers.RepeatedCompositeFieldContainer[Resource] + def __init__(self, entity_identifier: _Optional[_Union[EntityIdentifier, _Mapping]] = ..., action: _Optional[_Union[_objects_pb2.Action, _Mapping]] = ..., resources: _Optional[_Iterable[_Union[Resource, _Mapping]]] = ...) -> None: ... + +class GetDecisionMultiResourceResponse(_message.Message): + __slots__ = ("all_permitted", "resource_decisions") + ALL_PERMITTED_FIELD_NUMBER: _ClassVar[int] + RESOURCE_DECISIONS_FIELD_NUMBER: _ClassVar[int] + all_permitted: _wrappers_pb2.BoolValue + resource_decisions: _containers.RepeatedCompositeFieldContainer[ResourceDecision] + def __init__(self, all_permitted: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., resource_decisions: _Optional[_Iterable[_Union[ResourceDecision, _Mapping]]] = ...) -> None: ... + +class GetDecisionBulkRequest(_message.Message): + __slots__ = ("decision_requests",) + DECISION_REQUESTS_FIELD_NUMBER: _ClassVar[int] + decision_requests: _containers.RepeatedCompositeFieldContainer[GetDecisionMultiResourceRequest] + def __init__(self, decision_requests: _Optional[_Iterable[_Union[GetDecisionMultiResourceRequest, _Mapping]]] = ...) -> None: ... + +class GetDecisionBulkResponse(_message.Message): + __slots__ = ("decision_responses",) + DECISION_RESPONSES_FIELD_NUMBER: _ClassVar[int] + decision_responses: _containers.RepeatedCompositeFieldContainer[GetDecisionMultiResourceResponse] + def __init__(self, decision_responses: _Optional[_Iterable[_Union[GetDecisionMultiResourceResponse, _Mapping]]] = ...) -> None: ... + +class GetEntitlementsRequest(_message.Message): + __slots__ = ("entity_identifier", "with_comprehensive_hierarchy") + ENTITY_IDENTIFIER_FIELD_NUMBER: _ClassVar[int] + WITH_COMPREHENSIVE_HIERARCHY_FIELD_NUMBER: _ClassVar[int] + entity_identifier: EntityIdentifier + with_comprehensive_hierarchy: bool + def __init__(self, entity_identifier: _Optional[_Union[EntityIdentifier, _Mapping]] = ..., with_comprehensive_hierarchy: bool = ...) -> None: ... + +class GetEntitlementsResponse(_message.Message): + __slots__ = ("entitlements",) + ENTITLEMENTS_FIELD_NUMBER: _ClassVar[int] + entitlements: _containers.RepeatedCompositeFieldContainer[EntityEntitlements] + def __init__(self, entitlements: _Optional[_Iterable[_Union[EntityEntitlements, _Mapping]]] = ...) -> None: ... 
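Unlike v1, the v2 API hangs every request off an EntityIdentifier oneof (entity chain, registered resource value FQN, or token) plus a required action and resource, as the validation options above encode. A minimal single-resource decision sketch against the v2 Connect client defined in the next file; the endpoint, JWT, action name, and attribute FQN are placeholder values, and the `jwt` and `name` fields on entity.Token and policy.Action are inferred from the CEL constraints rather than shown in this diff:

    from authorization.v2 import authorization_pb2 as authz_v2
    from authorization.v2.authorization_pb2_connect import AuthorizationServiceClient
    from entity import entity_pb2
    from policy import objects_pb2

    # Placeholder endpoint, for illustration only.
    client = AuthorizationServiceClient(base_url="https://platform.example.com")
    request = authz_v2.GetDecisionRequest(
        entity_identifier=authz_v2.EntityIdentifier(
            token=entity_pb2.Token(jwt="<jwt>")  # oneof identifier: set exactly one field
        ),
        action=objects_pb2.Action(name="read"),
        resource=authz_v2.Resource(
            ephemeral_id="res-1",
            attribute_values=authz_v2.Resource.AttributeValues(
                fqns=["https://example.com/attr/classification/value/secret"]
            ),
        ),
    )
    response = client.get_decision(request)
    # ResourceDecision carries the per-resource verdict as a Decision enum.
    print(response.decision.decision == authz_v2.DECISION_PERMIT)
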
diff --git a/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2_connect.py new file mode 100644 index 0000000..64fabc6 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/authorization/v2/authorization_pb2_connect.py @@ -0,0 +1,233 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import authorization.v2.authorization_pb2 + +class AuthorizationServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_get_decision( + self, req: authorization.v2.authorization_pb2.GetDecisionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.v2.authorization_pb2.GetDecisionResponse]: + """Low-level method to call GetDecision, granting access to errors and metadata""" + url = self.base_url + "/authorization.v2.AuthorizationService/GetDecision" + return self._connect_client.call_unary(url, req, authorization.v2.authorization_pb2.GetDecisionResponse,extra_headers, timeout_seconds) + + + def get_decision( + self, req: authorization.v2.authorization_pb2.GetDecisionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.v2.authorization_pb2.GetDecisionResponse: + response = self.call_get_decision(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_decision_multi_resource( + self, req: authorization.v2.authorization_pb2.GetDecisionMultiResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.v2.authorization_pb2.GetDecisionMultiResourceResponse]: + """Low-level method to call GetDecisionMultiResource, granting access to errors and metadata""" + url = self.base_url + "/authorization.v2.AuthorizationService/GetDecisionMultiResource" + return self._connect_client.call_unary(url, req, authorization.v2.authorization_pb2.GetDecisionMultiResourceResponse,extra_headers, timeout_seconds) + + + def get_decision_multi_resource( + 
self, req: authorization.v2.authorization_pb2.GetDecisionMultiResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.v2.authorization_pb2.GetDecisionMultiResourceResponse: + response = self.call_get_decision_multi_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_decision_bulk( + self, req: authorization.v2.authorization_pb2.GetDecisionBulkRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.v2.authorization_pb2.GetDecisionBulkResponse]: + """Low-level method to call GetDecisionBulk, granting access to errors and metadata""" + url = self.base_url + "/authorization.v2.AuthorizationService/GetDecisionBulk" + return self._connect_client.call_unary(url, req, authorization.v2.authorization_pb2.GetDecisionBulkResponse,extra_headers, timeout_seconds) + + + def get_decision_bulk( + self, req: authorization.v2.authorization_pb2.GetDecisionBulkRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.v2.authorization_pb2.GetDecisionBulkResponse: + response = self.call_get_decision_bulk(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_entitlements( + self, req: authorization.v2.authorization_pb2.GetEntitlementsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.v2.authorization_pb2.GetEntitlementsResponse]: + """Low-level method to call GetEntitlements, granting access to errors and metadata""" + url = self.base_url + "/authorization.v2.AuthorizationService/GetEntitlements" + return self._connect_client.call_unary(url, req, authorization.v2.authorization_pb2.GetEntitlementsResponse,extra_headers, timeout_seconds) + + + def get_entitlements( + self, req: authorization.v2.authorization_pb2.GetEntitlementsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.v2.authorization_pb2.GetEntitlementsResponse: + response = self.call_get_entitlements(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncAuthorizationServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_get_decision( + self, req: authorization.v2.authorization_pb2.GetDecisionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.v2.authorization_pb2.GetDecisionResponse]: + """Low-level method to call GetDecision, granting access to errors and metadata""" + url = self.base_url + "/authorization.v2.AuthorizationService/GetDecision" + return await self._connect_client.call_unary(url, req, authorization.v2.authorization_pb2.GetDecisionResponse,extra_headers, timeout_seconds) + + async def get_decision( + self, req: 
authorization.v2.authorization_pb2.GetDecisionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.v2.authorization_pb2.GetDecisionResponse: + response = await self.call_get_decision(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_decision_multi_resource( + self, req: authorization.v2.authorization_pb2.GetDecisionMultiResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.v2.authorization_pb2.GetDecisionMultiResourceResponse]: + """Low-level method to call GetDecisionMultiResource, granting access to errors and metadata""" + url = self.base_url + "/authorization.v2.AuthorizationService/GetDecisionMultiResource" + return await self._connect_client.call_unary(url, req, authorization.v2.authorization_pb2.GetDecisionMultiResourceResponse,extra_headers, timeout_seconds) + + async def get_decision_multi_resource( + self, req: authorization.v2.authorization_pb2.GetDecisionMultiResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.v2.authorization_pb2.GetDecisionMultiResourceResponse: + response = await self.call_get_decision_multi_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_decision_bulk( + self, req: authorization.v2.authorization_pb2.GetDecisionBulkRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.v2.authorization_pb2.GetDecisionBulkResponse]: + """Low-level method to call GetDecisionBulk, granting access to errors and metadata""" + url = self.base_url + "/authorization.v2.AuthorizationService/GetDecisionBulk" + return await self._connect_client.call_unary(url, req, authorization.v2.authorization_pb2.GetDecisionBulkResponse,extra_headers, timeout_seconds) + + async def get_decision_bulk( + self, req: authorization.v2.authorization_pb2.GetDecisionBulkRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.v2.authorization_pb2.GetDecisionBulkResponse: + response = await self.call_get_decision_bulk(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_entitlements( + self, req: authorization.v2.authorization_pb2.GetEntitlementsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[authorization.v2.authorization_pb2.GetEntitlementsResponse]: + """Low-level method to call GetEntitlements, granting access to errors and metadata""" + url = self.base_url + "/authorization.v2.AuthorizationService/GetEntitlements" + return await self._connect_client.call_unary(url, req, authorization.v2.authorization_pb2.GetEntitlementsResponse,extra_headers, timeout_seconds) + + async def get_entitlements( + self, req: authorization.v2.authorization_pb2.GetEntitlementsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> authorization.v2.authorization_pb2.GetEntitlementsResponse: + response = await 
self.call_get_entitlements(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class AuthorizationServiceProtocol(typing.Protocol): + def get_decision(self, req: ClientRequest[authorization.v2.authorization_pb2.GetDecisionRequest]) -> ServerResponse[authorization.v2.authorization_pb2.GetDecisionResponse]: + ... + def get_decision_multi_resource(self, req: ClientRequest[authorization.v2.authorization_pb2.GetDecisionMultiResourceRequest]) -> ServerResponse[authorization.v2.authorization_pb2.GetDecisionMultiResourceResponse]: + ... + def get_decision_bulk(self, req: ClientRequest[authorization.v2.authorization_pb2.GetDecisionBulkRequest]) -> ServerResponse[authorization.v2.authorization_pb2.GetDecisionBulkResponse]: + ... + def get_entitlements(self, req: ClientRequest[authorization.v2.authorization_pb2.GetEntitlementsRequest]) -> ServerResponse[authorization.v2.authorization_pb2.GetEntitlementsResponse]: + ... + +AUTHORIZATION_SERVICE_PATH_PREFIX = "/authorization.v2.AuthorizationService" + +def wsgi_authorization_service(implementation: AuthorizationServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/authorization.v2.AuthorizationService/GetDecision", implementation.get_decision, authorization.v2.authorization_pb2.GetDecisionRequest) + app.register_unary_rpc("/authorization.v2.AuthorizationService/GetDecisionMultiResource", implementation.get_decision_multi_resource, authorization.v2.authorization_pb2.GetDecisionMultiResourceRequest) + app.register_unary_rpc("/authorization.v2.AuthorizationService/GetDecisionBulk", implementation.get_decision_bulk, authorization.v2.authorization_pb2.GetDecisionBulkRequest) + app.register_unary_rpc("/authorization.v2.AuthorizationService/GetEntitlements", implementation.get_entitlements, authorization.v2.authorization_pb2.GetEntitlementsRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/common/__init__.py b/otdf-python-proto/src/otdf_python_proto/common/__init__.py new file mode 100644 index 0000000..6acca50 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/common/__init__.py @@ -0,0 +1 @@ +"""common protobuf definitions.""" diff --git a/otdf-python-proto/src/otdf_python_proto/common/common_pb2.py b/otdf-python-proto/src/otdf_python_proto/common/common_pb2.py new file mode 100644 index 0000000..51caa90 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/common/common_pb2.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: common/common.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'common/common.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x63ommon/common.proto\x12\x06\x63ommon\x1a\x1fgoogle/protobuf/timestamp.proto\"\xf1\x01\n\x08Metadata\x12\x39\n\ncreated_at\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x34\n\x06labels\x18\x03 \x03(\x0b\x32\x1c.common.Metadata.LabelsEntryR\x06labels\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x89\x01\n\x0fMetadataMutable\x12;\n\x06labels\x18\x03 \x03(\x0b\x32#.common.MetadataMutable.LabelsEntryR\x06labels\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01*}\n\x12MetadataUpdateEnum\x12$\n METADATA_UPDATE_ENUM_UNSPECIFIED\x10\x00\x12\x1f\n\x1bMETADATA_UPDATE_ENUM_EXTEND\x10\x01\x12 \n\x1cMETADATA_UPDATE_ENUM_REPLACE\x10\x02*\x8d\x01\n\x0f\x41\x63tiveStateEnum\x12!\n\x1d\x41\x43TIVE_STATE_ENUM_UNSPECIFIED\x10\x00\x12\x1c\n\x18\x41\x43TIVE_STATE_ENUM_ACTIVE\x10\x01\x12\x1e\n\x1a\x41\x43TIVE_STATE_ENUM_INACTIVE\x10\x02\x12\x19\n\x15\x41\x43TIVE_STATE_ENUM_ANY\x10\x03\x42Q\n\ncom.commonB\x0b\x43ommonProtoP\x01\xa2\x02\x03\x43XX\xaa\x02\x06\x43ommon\xca\x02\x06\x43ommon\xe2\x02\x12\x43ommon\\GPBMetadata\xea\x02\x06\x43ommonb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'common.common_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\ncom.commonB\013CommonProtoP\001\242\002\003CXX\252\002\006Common\312\002\006Common\342\002\022Common\\GPBMetadata\352\002\006Common' + _globals['_METADATA_LABELSENTRY']._loaded_options = None + _globals['_METADATA_LABELSENTRY']._serialized_options = b'8\001' + _globals['_METADATAMUTABLE_LABELSENTRY']._loaded_options = None + _globals['_METADATAMUTABLE_LABELSENTRY']._serialized_options = b'8\001' + _globals['_METADATAUPDATEENUM']._serialized_start=448 + _globals['_METADATAUPDATEENUM']._serialized_end=573 + _globals['_ACTIVESTATEENUM']._serialized_start=576 + _globals['_ACTIVESTATEENUM']._serialized_end=717 + _globals['_METADATA']._serialized_start=65 + _globals['_METADATA']._serialized_end=306 + _globals['_METADATA_LABELSENTRY']._serialized_start=249 + _globals['_METADATA_LABELSENTRY']._serialized_end=306 + _globals['_METADATAMUTABLE']._serialized_start=309 + _globals['_METADATAMUTABLE']._serialized_end=446 + _globals['_METADATAMUTABLE_LABELSENTRY']._serialized_start=249 + _globals['_METADATAMUTABLE_LABELSENTRY']._serialized_end=306 +# @@protoc_insertion_point(module_scope) diff --git 
a/otdf-python-proto/src/otdf_python_proto/common/common_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/common/common_pb2.pyi new file mode 100644 index 0000000..acd0f4e --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/common/common_pb2.pyi @@ -0,0 +1,61 @@ +import datetime + +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class MetadataUpdateEnum(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + METADATA_UPDATE_ENUM_UNSPECIFIED: _ClassVar[MetadataUpdateEnum] + METADATA_UPDATE_ENUM_EXTEND: _ClassVar[MetadataUpdateEnum] + METADATA_UPDATE_ENUM_REPLACE: _ClassVar[MetadataUpdateEnum] + +class ActiveStateEnum(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + ACTIVE_STATE_ENUM_UNSPECIFIED: _ClassVar[ActiveStateEnum] + ACTIVE_STATE_ENUM_ACTIVE: _ClassVar[ActiveStateEnum] + ACTIVE_STATE_ENUM_INACTIVE: _ClassVar[ActiveStateEnum] + ACTIVE_STATE_ENUM_ANY: _ClassVar[ActiveStateEnum] +METADATA_UPDATE_ENUM_UNSPECIFIED: MetadataUpdateEnum +METADATA_UPDATE_ENUM_EXTEND: MetadataUpdateEnum +METADATA_UPDATE_ENUM_REPLACE: MetadataUpdateEnum +ACTIVE_STATE_ENUM_UNSPECIFIED: ActiveStateEnum +ACTIVE_STATE_ENUM_ACTIVE: ActiveStateEnum +ACTIVE_STATE_ENUM_INACTIVE: ActiveStateEnum +ACTIVE_STATE_ENUM_ANY: ActiveStateEnum + +class Metadata(_message.Message): + __slots__ = ("created_at", "updated_at", "labels") + class LabelsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + CREATED_AT_FIELD_NUMBER: _ClassVar[int] + UPDATED_AT_FIELD_NUMBER: _ClassVar[int] + LABELS_FIELD_NUMBER: _ClassVar[int] + created_at: _timestamp_pb2.Timestamp + updated_at: _timestamp_pb2.Timestamp + labels: _containers.ScalarMap[str, str] + def __init__(self, created_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., updated_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., labels: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class MetadataMutable(_message.Message): + __slots__ = ("labels",) + class LabelsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + LABELS_FIELD_NUMBER: _ClassVar[int] + labels: _containers.ScalarMap[str, str] + def __init__(self, labels: _Optional[_Mapping[str, str]] = ...) -> None: ... 
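For orientation, a minimal sketch of using the Metadata stubs generated above. The import path is an assumption based on this diff's package layout; the generated Connect modules themselves import the top-level form (e.g. common.common_pb2), so adjust to your installed layout:

from otdf_python_proto.common import common_pb2  # assumed install path

# Build a Metadata message; labels is a map<string, string> field.
meta = common_pb2.Metadata(labels={"env": "dev"})
meta.created_at.GetCurrentTime()  # fill the google.protobuf.Timestamp in place

# Round-trip through the wire format to sanity-check the generated descriptors.
decoded = common_pb2.Metadata.FromString(meta.SerializeToString())
assert decoded.labels["env"] == "dev"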
diff --git a/otdf-python-proto/src/otdf_python_proto/entity/__init__.py b/otdf-python-proto/src/otdf_python_proto/entity/__init__.py new file mode 100644 index 0000000..328d445 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entity/__init__.py @@ -0,0 +1 @@ +"""entity protobuf definitions.""" diff --git a/otdf-python-proto/src/otdf_python_proto/entity/entity_pb2.py b/otdf-python-proto/src/otdf_python_proto/entity/entity_pb2.py new file mode 100644 index 0000000..693fdf4 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entity/entity_pb2.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: entity/entity.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'entity/entity.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x65ntity/entity.proto\x12\x06\x65ntity\x1a\x1b\x62uf/validate/validate.proto\x1a\x19google/protobuf/any.proto\"D\n\x05Token\x12!\n\x0c\x65phemeral_id\x18\x01 \x01(\tR\x0b\x65phemeralId\x12\x18\n\x03jwt\x18\x02 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x03jwt\"\xda\x02\n\x06\x45ntity\x12!\n\x0c\x65phemeral_id\x18\x01 \x01(\tR\x0b\x65phemeralId\x12%\n\remail_address\x18\x02 \x01(\tH\x00R\x0c\x65mailAddress\x12\x1d\n\tuser_name\x18\x03 \x01(\tH\x00R\x08userName\x12.\n\x06\x63laims\x18\x04 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\x06\x63laims\x12\x1d\n\tclient_id\x18\x05 \x01(\tH\x00R\x08\x63lientId\x12\x33\n\x08\x63\x61tegory\x18\x0b \x01(\x0e\x32\x17.entity.Entity.CategoryR\x08\x63\x61tegory\"T\n\x08\x43\x61tegory\x12\x18\n\x14\x43\x41TEGORY_UNSPECIFIED\x10\x00\x12\x14\n\x10\x43\x41TEGORY_SUBJECT\x10\x01\x12\x18\n\x14\x43\x41TEGORY_ENVIRONMENT\x10\x02\x42\r\n\x0b\x65ntity_type\"\\\n\x0b\x45ntityChain\x12!\n\x0c\x65phemeral_id\x18\x01 \x01(\tR\x0b\x65phemeralId\x12*\n\x08\x65ntities\x18\x02 \x03(\x0b\x32\x0e.entity.EntityR\x08\x65ntitiesBQ\n\ncom.entityB\x0b\x45ntityProtoP\x01\xa2\x02\x03\x45XX\xaa\x02\x06\x45ntity\xca\x02\x06\x45ntity\xe2\x02\x12\x45ntity\\GPBMetadata\xea\x02\x06\x45ntityb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'entity.entity_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\ncom.entityB\013EntityProtoP\001\242\002\003EXX\252\002\006Entity\312\002\006Entity\342\002\022Entity\\GPBMetadata\352\002\006Entity' + _globals['_TOKEN'].fields_by_name['jwt']._loaded_options = None + _globals['_TOKEN'].fields_by_name['jwt']._serialized_options = b'\272H\003\310\001\001' + _globals['_TOKEN']._serialized_start=87 + _globals['_TOKEN']._serialized_end=155 + _globals['_ENTITY']._serialized_start=158 + _globals['_ENTITY']._serialized_end=504 + 
_globals['_ENTITY_CATEGORY']._serialized_start=405 + _globals['_ENTITY_CATEGORY']._serialized_end=489 + _globals['_ENTITYCHAIN']._serialized_start=506 + _globals['_ENTITYCHAIN']._serialized_end=598 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/entity/entity_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/entity/entity_pb2.pyi new file mode 100644 index 0000000..3adde2a --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entity/entity_pb2.pyi @@ -0,0 +1,50 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from google.protobuf import any_pb2 as _any_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class Token(_message.Message): + __slots__ = ("ephemeral_id", "jwt") + EPHEMERAL_ID_FIELD_NUMBER: _ClassVar[int] + JWT_FIELD_NUMBER: _ClassVar[int] + ephemeral_id: str + jwt: str + def __init__(self, ephemeral_id: _Optional[str] = ..., jwt: _Optional[str] = ...) -> None: ... + +class Entity(_message.Message): + __slots__ = ("ephemeral_id", "email_address", "user_name", "claims", "client_id", "category") + class Category(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + CATEGORY_UNSPECIFIED: _ClassVar[Entity.Category] + CATEGORY_SUBJECT: _ClassVar[Entity.Category] + CATEGORY_ENVIRONMENT: _ClassVar[Entity.Category] + CATEGORY_UNSPECIFIED: Entity.Category + CATEGORY_SUBJECT: Entity.Category + CATEGORY_ENVIRONMENT: Entity.Category + EPHEMERAL_ID_FIELD_NUMBER: _ClassVar[int] + EMAIL_ADDRESS_FIELD_NUMBER: _ClassVar[int] + USER_NAME_FIELD_NUMBER: _ClassVar[int] + CLAIMS_FIELD_NUMBER: _ClassVar[int] + CLIENT_ID_FIELD_NUMBER: _ClassVar[int] + CATEGORY_FIELD_NUMBER: _ClassVar[int] + ephemeral_id: str + email_address: str + user_name: str + claims: _any_pb2.Any + client_id: str + category: Entity.Category + def __init__(self, ephemeral_id: _Optional[str] = ..., email_address: _Optional[str] = ..., user_name: _Optional[str] = ..., claims: _Optional[_Union[_any_pb2.Any, _Mapping]] = ..., client_id: _Optional[str] = ..., category: _Optional[_Union[Entity.Category, str]] = ...) -> None: ... + +class EntityChain(_message.Message): + __slots__ = ("ephemeral_id", "entities") + EPHEMERAL_ID_FIELD_NUMBER: _ClassVar[int] + ENTITIES_FIELD_NUMBER: _ClassVar[int] + ephemeral_id: str + entities: _containers.RepeatedCompositeFieldContainer[Entity] + def __init__(self, ephemeral_id: _Optional[str] = ..., entities: _Optional[_Iterable[_Union[Entity, _Mapping]]] = ...) -> None: ... 
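As a quick illustration of the entity stubs just added: Entity keeps its identifier fields in a single oneof (entity_type), so assigning one arm clears the others. A minimal sketch, assuming the same top-level import convention the generated Connect modules use:

from entity import entity_pb2  # top-level form used by the generated modules

subject = entity_pb2.Entity(
    ephemeral_id="ent-1",
    user_name="sample-user",                      # one arm of the entity_type oneof
    category=entity_pb2.Entity.CATEGORY_SUBJECT,  # nested enum exposed on the class
)
subject.client_id = "cli-client"  # setting another arm clears user_name
assert subject.WhichOneof("entity_type") == "client_id"

# EntityChain carries an ordered list of entities.
chain = entity_pb2.EntityChain(ephemeral_id="chain-1", entities=[subject])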
diff --git a/otdf-python-proto/src/otdf_python_proto/entityresolution/__init__.py b/otdf-python-proto/src/otdf_python_proto/entityresolution/__init__.py new file mode 100644 index 0000000..146c5b7 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entityresolution/__init__.py @@ -0,0 +1 @@ +"""entityresolution protobuf definitions.""" diff --git a/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2.py b/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2.py new file mode 100644 index 0000000..11e5907 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: entityresolution/entity_resolution.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'entityresolution/entity_resolution.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from authorization import authorization_pb2 as authorization_dot_authorization__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(entityresolution/entity_resolution.proto\x12\x10\x65ntityresolution\x1a!authorization/authorization.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/api/annotations.proto\"K\n\x16ResolveEntitiesRequest\x12\x31\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x15.authorization.EntityR\x08\x65ntities\"{\n\x14\x45ntityRepresentation\x12\x42\n\x10\x61\x64\x64itional_props\x18\x01 \x03(\x0b\x32\x17.google.protobuf.StructR\x0f\x61\x64\x64itionalProps\x12\x1f\n\x0boriginal_id\x18\x02 \x01(\tR\noriginalId\"x\n\x17ResolveEntitiesResponse\x12]\n\x16\x65ntity_representations\x18\x01 \x03(\x0b\x32&.entityresolution.EntityRepresentationR\x15\x65ntityRepresentations\"\x8b\x01\n\x13\x45ntityNotFoundError\x12\x12\n\x04\x63ode\x18\x01 \x01(\x05R\x04\x63ode\x12\x18\n\x07message\x18\x02 \x01(\tR\x07message\x12.\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32\x14.google.protobuf.AnyR\x07\x64\x65tails\x12\x16\n\x06\x65ntity\x18\x04 \x01(\tR\x06\x65ntity\"O\n\x1f\x43reateEntityChainFromJwtRequest\x12,\n\x06tokens\x18\x01 \x03(\x0b\x32\x14.authorization.TokenR\x06tokens\"c\n CreateEntityChainFromJwtResponse\x12?\n\rentity_chains\x18\x01 
\x03(\x0b\x32\x1a.authorization.EntityChainR\x0c\x65ntityChains2\xd6\x02\n\x17\x45ntityResolutionService\x12\x8c\x01\n\x0fResolveEntities\x12(.entityresolution.ResolveEntitiesRequest\x1a).entityresolution.ResolveEntitiesResponse\"$\x82\xd3\xe4\x93\x02\x1e\"\x19/entityresolution/resolve:\x01*\x12\xab\x01\n\x18\x43reateEntityChainFromJwt\x12\x31.entityresolution.CreateEntityChainFromJwtRequest\x1a\x32.entityresolution.CreateEntityChainFromJwtResponse\"(\x82\xd3\xe4\x93\x02\"\"\x1d/entityresolution/entitychain:\x01*B\x8d\x01\n\x14\x63om.entityresolutionB\x15\x45ntityResolutionProtoP\x01\xa2\x02\x03\x45XX\xaa\x02\x10\x45ntityresolution\xca\x02\x10\x45ntityresolution\xe2\x02\x1c\x45ntityresolution\\GPBMetadata\xea\x02\x10\x45ntityresolutionb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'entityresolution.entity_resolution_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\024com.entityresolutionB\025EntityResolutionProtoP\001\242\002\003EXX\252\002\020Entityresolution\312\002\020Entityresolution\342\002\034Entityresolution\\GPBMetadata\352\002\020Entityresolution' + _globals['_ENTITYRESOLUTIONSERVICE'].methods_by_name['ResolveEntities']._loaded_options = None + _globals['_ENTITYRESOLUTIONSERVICE'].methods_by_name['ResolveEntities']._serialized_options = b'\202\323\344\223\002\036\"\031/entityresolution/resolve:\001*' + _globals['_ENTITYRESOLUTIONSERVICE'].methods_by_name['CreateEntityChainFromJwt']._loaded_options = None + _globals['_ENTITYRESOLUTIONSERVICE'].methods_by_name['CreateEntityChainFromJwt']._serialized_options = b'\202\323\344\223\002\"\"\035/entityresolution/entitychain:\001*' + _globals['_RESOLVEENTITIESREQUEST']._serialized_start=184 + _globals['_RESOLVEENTITIESREQUEST']._serialized_end=259 + _globals['_ENTITYREPRESENTATION']._serialized_start=261 + _globals['_ENTITYREPRESENTATION']._serialized_end=384 + _globals['_RESOLVEENTITIESRESPONSE']._serialized_start=386 + _globals['_RESOLVEENTITIESRESPONSE']._serialized_end=506 + _globals['_ENTITYNOTFOUNDERROR']._serialized_start=509 + _globals['_ENTITYNOTFOUNDERROR']._serialized_end=648 + _globals['_CREATEENTITYCHAINFROMJWTREQUEST']._serialized_start=650 + _globals['_CREATEENTITYCHAINFROMJWTREQUEST']._serialized_end=729 + _globals['_CREATEENTITYCHAINFROMJWTRESPONSE']._serialized_start=731 + _globals['_CREATEENTITYCHAINFROMJWTRESPONSE']._serialized_end=830 + _globals['_ENTITYRESOLUTIONSERVICE']._serialized_start=833 + _globals['_ENTITYRESOLUTIONSERVICE']._serialized_end=1175 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2.pyi new file mode 100644 index 0000000..8b48ce6 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2.pyi @@ -0,0 +1,55 @@ +from authorization import authorization_pb2 as _authorization_pb2 +from google.protobuf import struct_pb2 as _struct_pb2 +from google.protobuf import any_pb2 as _any_pb2 +from google.api import annotations_pb2 as _annotations_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing 
import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class ResolveEntitiesRequest(_message.Message): + __slots__ = ("entities",) + ENTITIES_FIELD_NUMBER: _ClassVar[int] + entities: _containers.RepeatedCompositeFieldContainer[_authorization_pb2.Entity] + def __init__(self, entities: _Optional[_Iterable[_Union[_authorization_pb2.Entity, _Mapping]]] = ...) -> None: ... + +class EntityRepresentation(_message.Message): + __slots__ = ("additional_props", "original_id") + ADDITIONAL_PROPS_FIELD_NUMBER: _ClassVar[int] + ORIGINAL_ID_FIELD_NUMBER: _ClassVar[int] + additional_props: _containers.RepeatedCompositeFieldContainer[_struct_pb2.Struct] + original_id: str + def __init__(self, additional_props: _Optional[_Iterable[_Union[_struct_pb2.Struct, _Mapping]]] = ..., original_id: _Optional[str] = ...) -> None: ... + +class ResolveEntitiesResponse(_message.Message): + __slots__ = ("entity_representations",) + ENTITY_REPRESENTATIONS_FIELD_NUMBER: _ClassVar[int] + entity_representations: _containers.RepeatedCompositeFieldContainer[EntityRepresentation] + def __init__(self, entity_representations: _Optional[_Iterable[_Union[EntityRepresentation, _Mapping]]] = ...) -> None: ... + +class EntityNotFoundError(_message.Message): + __slots__ = ("code", "message", "details", "entity") + CODE_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + DETAILS_FIELD_NUMBER: _ClassVar[int] + ENTITY_FIELD_NUMBER: _ClassVar[int] + code: int + message: str + details: _containers.RepeatedCompositeFieldContainer[_any_pb2.Any] + entity: str + def __init__(self, code: _Optional[int] = ..., message: _Optional[str] = ..., details: _Optional[_Iterable[_Union[_any_pb2.Any, _Mapping]]] = ..., entity: _Optional[str] = ...) -> None: ... + +class CreateEntityChainFromJwtRequest(_message.Message): + __slots__ = ("tokens",) + TOKENS_FIELD_NUMBER: _ClassVar[int] + tokens: _containers.RepeatedCompositeFieldContainer[_authorization_pb2.Token] + def __init__(self, tokens: _Optional[_Iterable[_Union[_authorization_pb2.Token, _Mapping]]] = ...) -> None: ... + +class CreateEntityChainFromJwtResponse(_message.Message): + __slots__ = ("entity_chains",) + ENTITY_CHAINS_FIELD_NUMBER: _ClassVar[int] + entity_chains: _containers.RepeatedCompositeFieldContainer[_authorization_pb2.EntityChain] + def __init__(self, entity_chains: _Optional[_Iterable[_Union[_authorization_pb2.EntityChain, _Mapping]]] = ...) -> None: ... 
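These v1 messages are what the synchronous Connect client in the next file consumes. A hedged usage sketch: the base URL is a placeholder, and user_name is assumed to exist on the v1 authorization Entity (mirroring the v2 entity message above):

from authorization import authorization_pb2
from entityresolution import entity_resolution_pb2
from entityresolution.entity_resolution_pb2_connect import EntityResolutionServiceClient

client = EntityResolutionServiceClient("http://localhost:8080")  # placeholder endpoint

req = entity_resolution_pb2.ResolveEntitiesRequest(
    entities=[authorization_pb2.Entity(user_name="sample-user")]  # assumed field name
)
resp = client.resolve_entities(req)  # raises a Connect error on failure

for rep in resp.entity_representations:
    # additional_props is a repeated google.protobuf.Struct of resolved attributes
    print(rep.original_id, len(rep.additional_props))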
diff --git a/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2_connect.py new file mode 100644 index 0000000..a28cb79 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entityresolution/entity_resolution_pb2_connect.py @@ -0,0 +1,149 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import entityresolution.entity_resolution_pb2 + +class EntityResolutionServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_resolve_entities( + self, req: entityresolution.entity_resolution_pb2.ResolveEntitiesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[entityresolution.entity_resolution_pb2.ResolveEntitiesResponse]: + """Low-level method to call ResolveEntities, granting access to errors and metadata""" + url = self.base_url + "/entityresolution.EntityResolutionService/ResolveEntities" + return self._connect_client.call_unary(url, req, entityresolution.entity_resolution_pb2.ResolveEntitiesResponse,extra_headers, timeout_seconds) + + + def resolve_entities( + self, req: entityresolution.entity_resolution_pb2.ResolveEntitiesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> entityresolution.entity_resolution_pb2.ResolveEntitiesResponse: + response = self.call_resolve_entities(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_entity_chain_from_jwt( + self, req: entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtResponse]: + """Low-level method to call CreateEntityChainFromJwt, granting access to errors and metadata""" + url = self.base_url + "/entityresolution.EntityResolutionService/CreateEntityChainFromJwt" + return self._connect_client.call_unary(url, req, 
entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtResponse,extra_headers, timeout_seconds) + + + def create_entity_chain_from_jwt( + self, req: entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtResponse: + response = self.call_create_entity_chain_from_jwt(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncEntityResolutionServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_resolve_entities( + self, req: entityresolution.entity_resolution_pb2.ResolveEntitiesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[entityresolution.entity_resolution_pb2.ResolveEntitiesResponse]: + """Low-level method to call ResolveEntities, granting access to errors and metadata""" + url = self.base_url + "/entityresolution.EntityResolutionService/ResolveEntities" + return await self._connect_client.call_unary(url, req, entityresolution.entity_resolution_pb2.ResolveEntitiesResponse,extra_headers, timeout_seconds) + + async def resolve_entities( + self, req: entityresolution.entity_resolution_pb2.ResolveEntitiesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> entityresolution.entity_resolution_pb2.ResolveEntitiesResponse: + response = await self.call_resolve_entities(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_entity_chain_from_jwt( + self, req: entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtResponse]: + """Low-level method to call CreateEntityChainFromJwt, granting access to errors and metadata""" + url = self.base_url + "/entityresolution.EntityResolutionService/CreateEntityChainFromJwt" + return await self._connect_client.call_unary(url, req, entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtResponse,extra_headers, timeout_seconds) + + async def create_entity_chain_from_jwt( + self, req: entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtResponse: + response = await self.call_create_entity_chain_from_jwt(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class EntityResolutionServiceProtocol(typing.Protocol): + def resolve_entities(self, req: ClientRequest[entityresolution.entity_resolution_pb2.ResolveEntitiesRequest]) -> ServerResponse[entityresolution.entity_resolution_pb2.ResolveEntitiesResponse]: + ... 
+ def create_entity_chain_from_jwt(self, req: ClientRequest[entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtRequest]) -> ServerResponse[entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtResponse]: + ... + +ENTITY_RESOLUTION_SERVICE_PATH_PREFIX = "/entityresolution.EntityResolutionService" + +def wsgi_entity_resolution_service(implementation: EntityResolutionServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/entityresolution.EntityResolutionService/ResolveEntities", implementation.resolve_entities, entityresolution.entity_resolution_pb2.ResolveEntitiesRequest) + app.register_unary_rpc("/entityresolution.EntityResolutionService/CreateEntityChainFromJwt", implementation.create_entity_chain_from_jwt, entityresolution.entity_resolution_pb2.CreateEntityChainFromJwtRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2.py b/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2.py new file mode 100644 index 0000000..84fed25 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: entityresolution/v2/entity_resolution.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'entityresolution/v2/entity_resolution.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from entity import entity_pb2 as entity_dot_entity__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+entityresolution/v2/entity_resolution.proto\x12\x13\x65ntityresolution.v2\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x65ntity/entity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\"{\n\x14\x45ntityRepresentation\x12\x1f\n\x0boriginal_id\x18\x01 \x01(\tR\noriginalId\x12\x42\n\x10\x61\x64\x64itional_props\x18\x02 \x03(\x0b\x32\x17.google.protobuf.StructR\x0f\x61\x64\x64itionalProps\"Q\n\x16ResolveEntitiesRequest\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x0e.entity.EntityB\x0b\xbaH\x08\x92\x01\x02\x08\x01\xc8\x01\x01R\x08\x65ntities\"{\n\x17ResolveEntitiesResponse\x12`\n\x16\x65ntity_representations\x18\x01 \x03(\x0b\x32).entityresolution.v2.EntityRepresentationR\x15\x65ntityRepresentations\"\x8b\x01\n\x13\x45ntityNotFoundError\x12\x12\n\x04\x63ode\x18\x01 \x01(\x05R\x04\x63ode\x12\x18\n\x07message\x18\x02 \x01(\tR\x07message\x12.\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32\x14.google.protobuf.AnyR\x07\x64\x65tails\x12\x16\n\x06\x65ntity\x18\x04 \x01(\tR\x06\x65ntity\"L\n#CreateEntityChainsFromTokensRequest\x12%\n\x06tokens\x18\x01 \x03(\x0b\x32\r.entity.TokenR\x06tokens\"`\n$CreateEntityChainsFromTokensResponse\x12\x38\n\rentity_chains\x18\x01 
\x03(\x0b\x32\x13.entity.EntityChainR\x0c\x65ntityChains2\xa1\x02\n\x17\x45ntityResolutionService\x12n\n\x0fResolveEntities\x12+.entityresolution.v2.ResolveEntitiesRequest\x1a,.entityresolution.v2.ResolveEntitiesResponse\"\x00\x12\x95\x01\n\x1c\x43reateEntityChainsFromTokens\x12\x38.entityresolution.v2.CreateEntityChainsFromTokensRequest\x1a\x39.entityresolution.v2.CreateEntityChainsFromTokensResponse\"\x00\x42\x9d\x01\n\x17\x63om.entityresolution.v2B\x15\x45ntityResolutionProtoP\x01\xa2\x02\x03\x45XX\xaa\x02\x13\x45ntityresolution.V2\xca\x02\x13\x45ntityresolution\\V2\xe2\x02\x1f\x45ntityresolution\\V2\\GPBMetadata\xea\x02\x14\x45ntityresolution::V2b\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'entityresolution.v2.entity_resolution_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\027com.entityresolution.v2B\025EntityResolutionProtoP\001\242\002\003EXX\252\002\023Entityresolution.V2\312\002\023Entityresolution\\V2\342\002\037Entityresolution\\V2\\GPBMetadata\352\002\024Entityresolution::V2' + _globals['_RESOLVEENTITIESREQUEST'].fields_by_name['entities']._loaded_options = None + _globals['_RESOLVEENTITIESREQUEST'].fields_by_name['entities']._serialized_options = b'\272H\010\222\001\002\010\001\310\001\001' + _globals['_ENTITYREPRESENTATION']._serialized_start=175 + _globals['_ENTITYREPRESENTATION']._serialized_end=298 + _globals['_RESOLVEENTITIESREQUEST']._serialized_start=300 + _globals['_RESOLVEENTITIESREQUEST']._serialized_end=381 + _globals['_RESOLVEENTITIESRESPONSE']._serialized_start=383 + _globals['_RESOLVEENTITIESRESPONSE']._serialized_end=506 + _globals['_ENTITYNOTFOUNDERROR']._serialized_start=509 + _globals['_ENTITYNOTFOUNDERROR']._serialized_end=648 + _globals['_CREATEENTITYCHAINSFROMTOKENSREQUEST']._serialized_start=650 + _globals['_CREATEENTITYCHAINSFROMTOKENSREQUEST']._serialized_end=726 + _globals['_CREATEENTITYCHAINSFROMTOKENSRESPONSE']._serialized_start=728 + _globals['_CREATEENTITYCHAINSFROMTOKENSRESPONSE']._serialized_end=824 + _globals['_ENTITYRESOLUTIONSERVICE']._serialized_start=827 + _globals['_ENTITYRESOLUTIONSERVICE']._serialized_end=1116 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2.pyi new file mode 100644 index 0000000..f6374cf --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2.pyi @@ -0,0 +1,55 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from entity import entity_pb2 as _entity_pb2 +from google.protobuf import any_pb2 as _any_pb2 +from google.protobuf import struct_pb2 as _struct_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class EntityRepresentation(_message.Message): + __slots__ = ("original_id", "additional_props") + ORIGINAL_ID_FIELD_NUMBER: _ClassVar[int] + ADDITIONAL_PROPS_FIELD_NUMBER: _ClassVar[int] + original_id: str + additional_props: 
_containers.RepeatedCompositeFieldContainer[_struct_pb2.Struct] + def __init__(self, original_id: _Optional[str] = ..., additional_props: _Optional[_Iterable[_Union[_struct_pb2.Struct, _Mapping]]] = ...) -> None: ... + +class ResolveEntitiesRequest(_message.Message): + __slots__ = ("entities",) + ENTITIES_FIELD_NUMBER: _ClassVar[int] + entities: _containers.RepeatedCompositeFieldContainer[_entity_pb2.Entity] + def __init__(self, entities: _Optional[_Iterable[_Union[_entity_pb2.Entity, _Mapping]]] = ...) -> None: ... + +class ResolveEntitiesResponse(_message.Message): + __slots__ = ("entity_representations",) + ENTITY_REPRESENTATIONS_FIELD_NUMBER: _ClassVar[int] + entity_representations: _containers.RepeatedCompositeFieldContainer[EntityRepresentation] + def __init__(self, entity_representations: _Optional[_Iterable[_Union[EntityRepresentation, _Mapping]]] = ...) -> None: ... + +class EntityNotFoundError(_message.Message): + __slots__ = ("code", "message", "details", "entity") + CODE_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + DETAILS_FIELD_NUMBER: _ClassVar[int] + ENTITY_FIELD_NUMBER: _ClassVar[int] + code: int + message: str + details: _containers.RepeatedCompositeFieldContainer[_any_pb2.Any] + entity: str + def __init__(self, code: _Optional[int] = ..., message: _Optional[str] = ..., details: _Optional[_Iterable[_Union[_any_pb2.Any, _Mapping]]] = ..., entity: _Optional[str] = ...) -> None: ... + +class CreateEntityChainsFromTokensRequest(_message.Message): + __slots__ = ("tokens",) + TOKENS_FIELD_NUMBER: _ClassVar[int] + tokens: _containers.RepeatedCompositeFieldContainer[_entity_pb2.Token] + def __init__(self, tokens: _Optional[_Iterable[_Union[_entity_pb2.Token, _Mapping]]] = ...) -> None: ... + +class CreateEntityChainsFromTokensResponse(_message.Message): + __slots__ = ("entity_chains",) + ENTITY_CHAINS_FIELD_NUMBER: _ClassVar[int] + entity_chains: _containers.RepeatedCompositeFieldContainer[_entity_pb2.EntityChain] + def __init__(self, entity_chains: _Optional[_Iterable[_Union[_entity_pb2.EntityChain, _Mapping]]] = ...) -> None: ... diff --git a/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2_connect.py new file mode 100644 index 0000000..2fa1944 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/entityresolution/v2/entity_resolution_pb2_connect.py @@ -0,0 +1,149 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. 
+ if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import entityresolution.v2.entity_resolution_pb2 + +class EntityResolutionServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_resolve_entities( + self, req: entityresolution.v2.entity_resolution_pb2.ResolveEntitiesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[entityresolution.v2.entity_resolution_pb2.ResolveEntitiesResponse]: + """Low-level method to call ResolveEntities, granting access to errors and metadata""" + url = self.base_url + "/entityresolution.v2.EntityResolutionService/ResolveEntities" + return self._connect_client.call_unary(url, req, entityresolution.v2.entity_resolution_pb2.ResolveEntitiesResponse,extra_headers, timeout_seconds) + + + def resolve_entities( + self, req: entityresolution.v2.entity_resolution_pb2.ResolveEntitiesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> entityresolution.v2.entity_resolution_pb2.ResolveEntitiesResponse: + response = self.call_resolve_entities(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_entity_chains_from_tokens( + self, req: entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensResponse]: + """Low-level method to call CreateEntityChainsFromTokens, granting access to errors and metadata""" + url = self.base_url + "/entityresolution.v2.EntityResolutionService/CreateEntityChainsFromTokens" + return self._connect_client.call_unary(url, req, entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensResponse,extra_headers, timeout_seconds) + + + def create_entity_chains_from_tokens( + self, req: entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensResponse: + response = self.call_create_entity_chains_from_tokens(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncEntityResolutionServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_resolve_entities( + self, req: entityresolution.v2.entity_resolution_pb2.ResolveEntitiesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[entityresolution.v2.entity_resolution_pb2.ResolveEntitiesResponse]: + """Low-level method to call ResolveEntities, granting access to errors and metadata""" + url = self.base_url + "/entityresolution.v2.EntityResolutionService/ResolveEntities" 
+ return await self._connect_client.call_unary(url, req, entityresolution.v2.entity_resolution_pb2.ResolveEntitiesResponse,extra_headers, timeout_seconds) + + async def resolve_entities( + self, req: entityresolution.v2.entity_resolution_pb2.ResolveEntitiesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> entityresolution.v2.entity_resolution_pb2.ResolveEntitiesResponse: + response = await self.call_resolve_entities(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_entity_chains_from_tokens( + self, req: entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensResponse]: + """Low-level method to call CreateEntityChainsFromTokens, granting access to errors and metadata""" + url = self.base_url + "/entityresolution.v2.EntityResolutionService/CreateEntityChainsFromTokens" + return await self._connect_client.call_unary(url, req, entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensResponse,extra_headers, timeout_seconds) + + async def create_entity_chains_from_tokens( + self, req: entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensResponse: + response = await self.call_create_entity_chains_from_tokens(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class EntityResolutionServiceProtocol(typing.Protocol): + def resolve_entities(self, req: ClientRequest[entityresolution.v2.entity_resolution_pb2.ResolveEntitiesRequest]) -> ServerResponse[entityresolution.v2.entity_resolution_pb2.ResolveEntitiesResponse]: + ... + def create_entity_chains_from_tokens(self, req: ClientRequest[entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensRequest]) -> ServerResponse[entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensResponse]: + ... 
+ +ENTITY_RESOLUTION_SERVICE_PATH_PREFIX = "/entityresolution.v2.EntityResolutionService" + +def wsgi_entity_resolution_service(implementation: EntityResolutionServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/entityresolution.v2.EntityResolutionService/ResolveEntities", implementation.resolve_entities, entityresolution.v2.entity_resolution_pb2.ResolveEntitiesRequest) + app.register_unary_rpc("/entityresolution.v2.EntityResolutionService/CreateEntityChainsFromTokens", implementation.create_entity_chains_from_tokens, entityresolution.v2.entity_resolution_pb2.CreateEntityChainsFromTokensRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/kas/__init__.py b/otdf-python-proto/src/otdf_python_proto/kas/__init__.py new file mode 100644 index 0000000..b40d7eb --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/kas/__init__.py @@ -0,0 +1,9 @@ +"""KAS (Key Access Service) protobuf definitions.""" + +from .kas_pb2 import * +from .kas_pb2_connect import AccessServiceClient, AsyncAccessServiceClient + +__all__ = [ + "AccessServiceClient", + "AsyncAccessServiceClient", +] diff --git a/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2.py b/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2.py new file mode 100644 index 0000000..310630c --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: kas/kas.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'kas/kas.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from protoc_gen_openapiv2.options import annotations_pb2 as protoc__gen__openapiv2_dot_options_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rkas/kas.proto\x12\x03kas\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\r\n\x0bInfoRequest\"(\n\x0cInfoResponse\x12\x18\n\x07version\x18\x01 \x01(\tR\x07version\"6\n\x16LegacyPublicKeyRequest\x12\x1c\n\talgorithm\x18\x01 \x01(\tR\talgorithm\";\n\rPolicyBinding\x12\x16\n\talgorithm\x18\x01 \x01(\tR\x03\x61lg\x12\x12\n\x04hash\x18\x02 \x01(\tR\x04hash\"\xd3\x02\n\tKeyAccess\x12-\n\x12\x65ncrypted_metadata\x18\x01 \x01(\tR\x11\x65ncryptedMetadata\x12\x39\n\x0epolicy_binding\x18\x02 \x01(\x0b\x32\x12.kas.PolicyBindingR\rpolicyBinding\x12\x1a\n\x08protocol\x18\x03 \x01(\tR\x08protocol\x12\x16\n\x08key_type\x18\x04 \x01(\tR\x04type\x12\x14\n\x07kas_url\x18\x05 \x01(\tR\x03url\x12\x10\n\x03kid\x18\x06 \x01(\tR\x03kid\x12\x15\n\x08split_id\x18\x07 \x01(\tR\x03sid\x12\x1f\n\x0bwrapped_key\x18\x08 \x01(\x0cR\nwrappedKey\x12\x16\n\x06header\x18\t 
\x01(\x0cR\x06header\x12\x30\n\x14\x65phemeral_public_key\x18\n \x01(\tR\x12\x65phemeralPublicKey\"\x86\x05\n\x15UnsignedRewrapRequest\x12*\n\x11\x63lient_public_key\x18\x01 \x01(\tR\x0f\x63lientPublicKey\x12H\n\x08requests\x18\x02 \x03(\x0b\x32,.kas.UnsignedRewrapRequest.WithPolicyRequestR\x08requests\x12\x31\n\nkey_access\x18\x03 \x01(\x0b\x32\x0e.kas.KeyAccessB\x02\x18\x01R\tkeyAccess\x12\x1a\n\x06policy\x18\x04 \x01(\tB\x02\x18\x01R\x06policy\x12 \n\talgorithm\x18\x05 \x01(\tB\x02\x18\x01R\talgorithm\x1a\x30\n\nWithPolicy\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04\x62ody\x18\x02 \x01(\tR\x04\x62ody\x1a\x82\x01\n\x13WithKeyAccessObject\x12/\n\x14key_access_object_id\x18\x01 \x01(\tR\x11keyAccessObjectId\x12:\n\x11key_access_object\x18\x02 \x01(\x0b\x32\x0e.kas.KeyAccessR\x0fkeyAccessObject\x1a\xce\x01\n\x11WithPolicyRequest\x12\\\n\x12key_access_objects\x18\x01 \x03(\x0b\x32..kas.UnsignedRewrapRequest.WithKeyAccessObjectR\x10keyAccessObjects\x12=\n\x06policy\x18\x02 \x01(\x0b\x32%.kas.UnsignedRewrapRequest.WithPolicyR\x06policy\x12\x1c\n\talgorithm\x18\x03 \x01(\tR\talgorithm\"\xb1\x01\n\x10PublicKeyRequest\x12Q\n\talgorithm\x18\x01 \x01(\tB3\x92\x41\x30\x32.algorithm type rsa: or ec:R\talgorithm\x12&\n\x03\x66mt\x18\x02 \x01(\tB\x14\x92\x41\x11\x32\x0fresponse formatR\x03\x66mt\x12\"\n\x01v\x18\x03 \x01(\tB\x14\x92\x41\x11\x32\x0frequest versionR\x01v\"D\n\x11PublicKeyResponse\x12\x1d\n\npublic_key\x18\x01 \x01(\tR\tpublicKey\x12\x10\n\x03kid\x18\x02 \x01(\tR\x03kid\"O\n\rRewrapRequest\x12\x30\n\x14signed_request_token\x18\x01 \x01(\tR\x12signedRequestTokenJ\x04\x08\x02\x10\x03R\x06\x62\x65\x61rer\"\xc7\x02\n\x15KeyAccessRewrapResult\x12\x44\n\x08metadata\x18\x01 \x03(\x0b\x32(.kas.KeyAccessRewrapResult.MetadataEntryR\x08metadata\x12/\n\x14key_access_object_id\x18\x02 \x01(\tR\x11keyAccessObjectId\x12\x16\n\x06status\x18\x03 \x01(\tR\x06status\x12(\n\x0fkas_wrapped_key\x18\x04 \x01(\x0cH\x00R\rkasWrappedKey\x12\x16\n\x05\x65rror\x18\x05 \x01(\tH\x00R\x05\x65rror\x1aS\n\rMetadataEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x05value:\x02\x38\x01\x42\x08\n\x06result\"g\n\x12PolicyRewrapResult\x12\x1b\n\tpolicy_id\x18\x01 \x01(\tR\x08policyId\x12\x34\n\x07results\x18\x02 \x03(\x0b\x32\x1a.kas.KeyAccessRewrapResultR\x07results\"\xea\x02\n\x0eRewrapResponse\x12\x41\n\x08metadata\x18\x01 \x03(\x0b\x32!.kas.RewrapResponse.MetadataEntryB\x02\x18\x01R\x08metadata\x12\x30\n\x12\x65ntity_wrapped_key\x18\x02 \x01(\x0c\x42\x02\x18\x01R\x10\x65ntityWrappedKey\x12,\n\x12session_public_key\x18\x03 \x01(\tR\x10sessionPublicKey\x12)\n\x0eschema_version\x18\x04 \x01(\tB\x02\x18\x01R\rschemaVersion\x12\x35\n\tresponses\x18\x05 \x03(\x0b\x32\x17.kas.PolicyRewrapResultR\tresponses\x1aS\n\rMetadataEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 
\x01(\x0b\x32\x16.google.protobuf.ValueR\x05value:\x02\x38\x01\x32\xd1\x02\n\rAccessService\x12i\n\tPublicKey\x12\x15.kas.PublicKeyRequest\x1a\x16.kas.PublicKeyResponse\"-\x90\x02\x01\x92\x41\tJ\x07\n\x03\x32\x30\x30\x12\x00\x82\xd3\xe4\x93\x02\x18\x12\x16/kas/v2/kas_public_key\x12{\n\x0fLegacyPublicKey\x12\x1b.kas.LegacyPublicKeyRequest\x1a\x1c.google.protobuf.StringValue\"-\x88\x02\x01\x90\x02\x01\x92\x41\tJ\x07\n\x03\x32\x30\x30\x12\x00\x82\xd3\xe4\x93\x02\x15\x12\x13/kas/kas_public_key\x12X\n\x06Rewrap\x12\x12.kas.RewrapRequest\x1a\x13.kas.RewrapResponse\"%\x92\x41\tJ\x07\n\x03\x32\x30\x30\x12\x00\x82\xd3\xe4\x93\x02\x13\"\x0e/kas/v2/rewrap:\x01*B\xb5\x01\n\x07\x63om.kasB\x08KasProtoP\x01\xa2\x02\x03KXX\xaa\x02\x03Kas\xca\x02\x03Kas\xe2\x02\x0fKas\\GPBMetadata\xea\x02\x03Kas\x92\x41s\x12q\n\x1aOpenTDF Key Access Service*L\n\x12\x42SD 3-Clause Clear\x12\x36https://github.com/opentdf/backend/blob/master/LICENSE2\x05\x31.5.0b\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'kas.kas_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\007com.kasB\010KasProtoP\001\242\002\003KXX\252\002\003Kas\312\002\003Kas\342\002\017Kas\\GPBMetadata\352\002\003Kas\222As\022q\n\032OpenTDF Key Access Service*L\n\022BSD 3-Clause Clear\0226https://github.com/opentdf/backend/blob/master/LICENSE2\0051.5.0' + _globals['_UNSIGNEDREWRAPREQUEST'].fields_by_name['key_access']._loaded_options = None + _globals['_UNSIGNEDREWRAPREQUEST'].fields_by_name['key_access']._serialized_options = b'\030\001' + _globals['_UNSIGNEDREWRAPREQUEST'].fields_by_name['policy']._loaded_options = None + _globals['_UNSIGNEDREWRAPREQUEST'].fields_by_name['policy']._serialized_options = b'\030\001' + _globals['_UNSIGNEDREWRAPREQUEST'].fields_by_name['algorithm']._loaded_options = None + _globals['_UNSIGNEDREWRAPREQUEST'].fields_by_name['algorithm']._serialized_options = b'\030\001' + _globals['_PUBLICKEYREQUEST'].fields_by_name['algorithm']._loaded_options = None + _globals['_PUBLICKEYREQUEST'].fields_by_name['algorithm']._serialized_options = b'\222A02.algorithm type rsa: or ec:' + _globals['_PUBLICKEYREQUEST'].fields_by_name['fmt']._loaded_options = None + _globals['_PUBLICKEYREQUEST'].fields_by_name['fmt']._serialized_options = b'\222A\0212\017response format' + _globals['_PUBLICKEYREQUEST'].fields_by_name['v']._loaded_options = None + _globals['_PUBLICKEYREQUEST'].fields_by_name['v']._serialized_options = b'\222A\0212\017request version' + _globals['_KEYACCESSREWRAPRESULT_METADATAENTRY']._loaded_options = None + _globals['_KEYACCESSREWRAPRESULT_METADATAENTRY']._serialized_options = b'8\001' + _globals['_REWRAPRESPONSE_METADATAENTRY']._loaded_options = None + _globals['_REWRAPRESPONSE_METADATAENTRY']._serialized_options = b'8\001' + _globals['_REWRAPRESPONSE'].fields_by_name['metadata']._loaded_options = None + _globals['_REWRAPRESPONSE'].fields_by_name['metadata']._serialized_options = b'\030\001' + _globals['_REWRAPRESPONSE'].fields_by_name['entity_wrapped_key']._loaded_options = None + _globals['_REWRAPRESPONSE'].fields_by_name['entity_wrapped_key']._serialized_options = b'\030\001' + _globals['_REWRAPRESPONSE'].fields_by_name['schema_version']._loaded_options = None + _globals['_REWRAPRESPONSE'].fields_by_name['schema_version']._serialized_options = b'\030\001' + 
_globals['_ACCESSSERVICE'].methods_by_name['PublicKey']._loaded_options = None + _globals['_ACCESSSERVICE'].methods_by_name['PublicKey']._serialized_options = b'\220\002\001\222A\tJ\007\n\003200\022\000\202\323\344\223\002\030\022\026/kas/v2/kas_public_key' + _globals['_ACCESSSERVICE'].methods_by_name['LegacyPublicKey']._loaded_options = None + _globals['_ACCESSSERVICE'].methods_by_name['LegacyPublicKey']._serialized_options = b'\210\002\001\220\002\001\222A\tJ\007\n\003200\022\000\202\323\344\223\002\025\022\023/kas/kas_public_key' + _globals['_ACCESSSERVICE'].methods_by_name['Rewrap']._loaded_options = None + _globals['_ACCESSSERVICE'].methods_by_name['Rewrap']._serialized_options = b'\222A\tJ\007\n\003200\022\000\202\323\344\223\002\023\"\016/kas/v2/rewrap:\001*' + _globals['_INFOREQUEST']._serialized_start=162 + _globals['_INFOREQUEST']._serialized_end=175 + _globals['_INFORESPONSE']._serialized_start=177 + _globals['_INFORESPONSE']._serialized_end=217 + _globals['_LEGACYPUBLICKEYREQUEST']._serialized_start=219 + _globals['_LEGACYPUBLICKEYREQUEST']._serialized_end=273 + _globals['_POLICYBINDING']._serialized_start=275 + _globals['_POLICYBINDING']._serialized_end=334 + _globals['_KEYACCESS']._serialized_start=337 + _globals['_KEYACCESS']._serialized_end=676 + _globals['_UNSIGNEDREWRAPREQUEST']._serialized_start=679 + _globals['_UNSIGNEDREWRAPREQUEST']._serialized_end=1325 + _globals['_UNSIGNEDREWRAPREQUEST_WITHPOLICY']._serialized_start=935 + _globals['_UNSIGNEDREWRAPREQUEST_WITHPOLICY']._serialized_end=983 + _globals['_UNSIGNEDREWRAPREQUEST_WITHKEYACCESSOBJECT']._serialized_start=986 + _globals['_UNSIGNEDREWRAPREQUEST_WITHKEYACCESSOBJECT']._serialized_end=1116 + _globals['_UNSIGNEDREWRAPREQUEST_WITHPOLICYREQUEST']._serialized_start=1119 + _globals['_UNSIGNEDREWRAPREQUEST_WITHPOLICYREQUEST']._serialized_end=1325 + _globals['_PUBLICKEYREQUEST']._serialized_start=1328 + _globals['_PUBLICKEYREQUEST']._serialized_end=1505 + _globals['_PUBLICKEYRESPONSE']._serialized_start=1507 + _globals['_PUBLICKEYRESPONSE']._serialized_end=1575 + _globals['_REWRAPREQUEST']._serialized_start=1577 + _globals['_REWRAPREQUEST']._serialized_end=1656 + _globals['_KEYACCESSREWRAPRESULT']._serialized_start=1659 + _globals['_KEYACCESSREWRAPRESULT']._serialized_end=1986 + _globals['_KEYACCESSREWRAPRESULT_METADATAENTRY']._serialized_start=1893 + _globals['_KEYACCESSREWRAPRESULT_METADATAENTRY']._serialized_end=1976 + _globals['_POLICYREWRAPRESULT']._serialized_start=1988 + _globals['_POLICYREWRAPRESULT']._serialized_end=2091 + _globals['_REWRAPRESPONSE']._serialized_start=2094 + _globals['_REWRAPRESPONSE']._serialized_end=2456 + _globals['_REWRAPRESPONSE_METADATAENTRY']._serialized_start=1893 + _globals['_REWRAPRESPONSE_METADATAENTRY']._serialized_end=1976 + _globals['_ACCESSSERVICE']._serialized_start=2459 + _globals['_ACCESSSERVICE']._serialized_end=2796 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2.pyi new file mode 100644 index 0000000..eeb55aa --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2.pyi @@ -0,0 +1,170 @@ +from google.api import annotations_pb2 as _annotations_pb2 +from google.protobuf import struct_pb2 as _struct_pb2 +from google.protobuf import wrappers_pb2 as _wrappers_pb2 +from protoc_gen_openapiv2.options import annotations_pb2 as _annotations_pb2_1 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as 
_descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class InfoRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class InfoResponse(_message.Message): + __slots__ = ("version",) + VERSION_FIELD_NUMBER: _ClassVar[int] + version: str + def __init__(self, version: _Optional[str] = ...) -> None: ... + +class LegacyPublicKeyRequest(_message.Message): + __slots__ = ("algorithm",) + ALGORITHM_FIELD_NUMBER: _ClassVar[int] + algorithm: str + def __init__(self, algorithm: _Optional[str] = ...) -> None: ... + +class PolicyBinding(_message.Message): + __slots__ = ("algorithm", "hash") + ALGORITHM_FIELD_NUMBER: _ClassVar[int] + HASH_FIELD_NUMBER: _ClassVar[int] + algorithm: str + hash: str + def __init__(self, algorithm: _Optional[str] = ..., hash: _Optional[str] = ...) -> None: ... + +class KeyAccess(_message.Message): + __slots__ = ("encrypted_metadata", "policy_binding", "protocol", "key_type", "kas_url", "kid", "split_id", "wrapped_key", "header", "ephemeral_public_key") + ENCRYPTED_METADATA_FIELD_NUMBER: _ClassVar[int] + POLICY_BINDING_FIELD_NUMBER: _ClassVar[int] + PROTOCOL_FIELD_NUMBER: _ClassVar[int] + KEY_TYPE_FIELD_NUMBER: _ClassVar[int] + KAS_URL_FIELD_NUMBER: _ClassVar[int] + KID_FIELD_NUMBER: _ClassVar[int] + SPLIT_ID_FIELD_NUMBER: _ClassVar[int] + WRAPPED_KEY_FIELD_NUMBER: _ClassVar[int] + HEADER_FIELD_NUMBER: _ClassVar[int] + EPHEMERAL_PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + encrypted_metadata: str + policy_binding: PolicyBinding + protocol: str + key_type: str + kas_url: str + kid: str + split_id: str + wrapped_key: bytes + header: bytes + ephemeral_public_key: str + def __init__(self, encrypted_metadata: _Optional[str] = ..., policy_binding: _Optional[_Union[PolicyBinding, _Mapping]] = ..., protocol: _Optional[str] = ..., key_type: _Optional[str] = ..., kas_url: _Optional[str] = ..., kid: _Optional[str] = ..., split_id: _Optional[str] = ..., wrapped_key: _Optional[bytes] = ..., header: _Optional[bytes] = ..., ephemeral_public_key: _Optional[str] = ...) -> None: ... + +class UnsignedRewrapRequest(_message.Message): + __slots__ = ("client_public_key", "requests", "key_access", "policy", "algorithm") + class WithPolicy(_message.Message): + __slots__ = ("id", "body") + ID_FIELD_NUMBER: _ClassVar[int] + BODY_FIELD_NUMBER: _ClassVar[int] + id: str + body: str + def __init__(self, id: _Optional[str] = ..., body: _Optional[str] = ...) -> None: ... + class WithKeyAccessObject(_message.Message): + __slots__ = ("key_access_object_id", "key_access_object") + KEY_ACCESS_OBJECT_ID_FIELD_NUMBER: _ClassVar[int] + KEY_ACCESS_OBJECT_FIELD_NUMBER: _ClassVar[int] + key_access_object_id: str + key_access_object: KeyAccess + def __init__(self, key_access_object_id: _Optional[str] = ..., key_access_object: _Optional[_Union[KeyAccess, _Mapping]] = ...) -> None: ... 
+ class WithPolicyRequest(_message.Message): + __slots__ = ("key_access_objects", "policy", "algorithm") + KEY_ACCESS_OBJECTS_FIELD_NUMBER: _ClassVar[int] + POLICY_FIELD_NUMBER: _ClassVar[int] + ALGORITHM_FIELD_NUMBER: _ClassVar[int] + key_access_objects: _containers.RepeatedCompositeFieldContainer[UnsignedRewrapRequest.WithKeyAccessObject] + policy: UnsignedRewrapRequest.WithPolicy + algorithm: str + def __init__(self, key_access_objects: _Optional[_Iterable[_Union[UnsignedRewrapRequest.WithKeyAccessObject, _Mapping]]] = ..., policy: _Optional[_Union[UnsignedRewrapRequest.WithPolicy, _Mapping]] = ..., algorithm: _Optional[str] = ...) -> None: ... + CLIENT_PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + REQUESTS_FIELD_NUMBER: _ClassVar[int] + KEY_ACCESS_FIELD_NUMBER: _ClassVar[int] + POLICY_FIELD_NUMBER: _ClassVar[int] + ALGORITHM_FIELD_NUMBER: _ClassVar[int] + client_public_key: str + requests: _containers.RepeatedCompositeFieldContainer[UnsignedRewrapRequest.WithPolicyRequest] + key_access: KeyAccess + policy: str + algorithm: str + def __init__(self, client_public_key: _Optional[str] = ..., requests: _Optional[_Iterable[_Union[UnsignedRewrapRequest.WithPolicyRequest, _Mapping]]] = ..., key_access: _Optional[_Union[KeyAccess, _Mapping]] = ..., policy: _Optional[str] = ..., algorithm: _Optional[str] = ...) -> None: ... + +class PublicKeyRequest(_message.Message): + __slots__ = ("algorithm", "fmt", "v") + ALGORITHM_FIELD_NUMBER: _ClassVar[int] + FMT_FIELD_NUMBER: _ClassVar[int] + V_FIELD_NUMBER: _ClassVar[int] + algorithm: str + fmt: str + v: str + def __init__(self, algorithm: _Optional[str] = ..., fmt: _Optional[str] = ..., v: _Optional[str] = ...) -> None: ... + +class PublicKeyResponse(_message.Message): + __slots__ = ("public_key", "kid") + PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + KID_FIELD_NUMBER: _ClassVar[int] + public_key: str + kid: str + def __init__(self, public_key: _Optional[str] = ..., kid: _Optional[str] = ...) -> None: ... + +class RewrapRequest(_message.Message): + __slots__ = ("signed_request_token",) + SIGNED_REQUEST_TOKEN_FIELD_NUMBER: _ClassVar[int] + signed_request_token: str + def __init__(self, signed_request_token: _Optional[str] = ...) -> None: ... + +class KeyAccessRewrapResult(_message.Message): + __slots__ = ("metadata", "key_access_object_id", "status", "kas_wrapped_key", "error") + class MetadataEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _struct_pb2.Value + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_struct_pb2.Value, _Mapping]] = ...) -> None: ... + METADATA_FIELD_NUMBER: _ClassVar[int] + KEY_ACCESS_OBJECT_ID_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + KAS_WRAPPED_KEY_FIELD_NUMBER: _ClassVar[int] + ERROR_FIELD_NUMBER: _ClassVar[int] + metadata: _containers.MessageMap[str, _struct_pb2.Value] + key_access_object_id: str + status: str + kas_wrapped_key: bytes + error: str + def __init__(self, metadata: _Optional[_Mapping[str, _struct_pb2.Value]] = ..., key_access_object_id: _Optional[str] = ..., status: _Optional[str] = ..., kas_wrapped_key: _Optional[bytes] = ..., error: _Optional[str] = ...) -> None: ... 
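(Editor's note: the following sketch is illustrative and not part of the generated diff.) The stubs above declare keyword-only constructors for every message, so the shape of a rewrap payload can be read straight off the types. A minimal sketch, assuming the otdf_python_proto.kas package layout introduced earlier in this diff; every literal value (PEM, policy body, IDs) is a placeholder:

# Sketch: assembling an UnsignedRewrapRequest from the generated kas_pb2 types.
# All literals below are placeholders, not working credentials or policy.
from otdf_python_proto.kas import kas_pb2

unsigned = kas_pb2.UnsignedRewrapRequest(
    client_public_key="-----BEGIN PUBLIC KEY-----...",  # placeholder PEM
    requests=[
        kas_pb2.UnsignedRewrapRequest.WithPolicyRequest(
            algorithm="rsa:2048",
            policy=kas_pb2.UnsignedRewrapRequest.WithPolicy(id="policy-0", body="..."),
            key_access_objects=[
                kas_pb2.UnsignedRewrapRequest.WithKeyAccessObject(
                    key_access_object_id="kao-0",
                    key_access_object=kas_pb2.KeyAccess(kid="r1", protocol="kas"),
                )
            ],
        )
    ],
)
# Note that the repeated `requests` field is the current form; the top-level
# key_access, policy, and algorithm fields carry the deprecated option in the
# descriptor above.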
+ +class PolicyRewrapResult(_message.Message): + __slots__ = ("policy_id", "results") + POLICY_ID_FIELD_NUMBER: _ClassVar[int] + RESULTS_FIELD_NUMBER: _ClassVar[int] + policy_id: str + results: _containers.RepeatedCompositeFieldContainer[KeyAccessRewrapResult] + def __init__(self, policy_id: _Optional[str] = ..., results: _Optional[_Iterable[_Union[KeyAccessRewrapResult, _Mapping]]] = ...) -> None: ... + +class RewrapResponse(_message.Message): + __slots__ = ("metadata", "entity_wrapped_key", "session_public_key", "schema_version", "responses") + class MetadataEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _struct_pb2.Value + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_struct_pb2.Value, _Mapping]] = ...) -> None: ... + METADATA_FIELD_NUMBER: _ClassVar[int] + ENTITY_WRAPPED_KEY_FIELD_NUMBER: _ClassVar[int] + SESSION_PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + SCHEMA_VERSION_FIELD_NUMBER: _ClassVar[int] + RESPONSES_FIELD_NUMBER: _ClassVar[int] + metadata: _containers.MessageMap[str, _struct_pb2.Value] + entity_wrapped_key: bytes + session_public_key: str + schema_version: str + responses: _containers.RepeatedCompositeFieldContainer[PolicyRewrapResult] + def __init__(self, metadata: _Optional[_Mapping[str, _struct_pb2.Value]] = ..., entity_wrapped_key: _Optional[bytes] = ..., session_public_key: _Optional[str] = ..., schema_version: _Optional[str] = ..., responses: _Optional[_Iterable[_Union[PolicyRewrapResult, _Mapping]]] = ...) -> None: ... diff --git a/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2_connect.py new file mode 100644 index 0000000..e7983b7 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/kas/kas_pb2_connect.py @@ -0,0 +1,192 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import google.protobuf.wrappers_pb2 +from . 
import kas_pb2 + +class AccessServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_public_key( + self, req: kas_pb2.PublicKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[kas_pb2.PublicKeyResponse]: + """Low-level method to call PublicKey, granting access to errors and metadata""" + url = self.base_url + "/kas.AccessService/PublicKey" + return self._connect_client.call_unary(url, req, kas_pb2.PublicKeyResponse,extra_headers, timeout_seconds) + + + def public_key( + self, req: kas_pb2.PublicKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> kas_pb2.PublicKeyResponse: + response = self.call_public_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_legacy_public_key( + self, req: kas_pb2.LegacyPublicKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[google.protobuf.wrappers_pb2.StringValue]: + """Low-level method to call LegacyPublicKey, granting access to errors and metadata""" + url = self.base_url + "/kas.AccessService/LegacyPublicKey" + return self._connect_client.call_unary(url, req, google.protobuf.wrappers_pb2.StringValue,extra_headers, timeout_seconds) + + + def legacy_public_key( + self, req: kas_pb2.LegacyPublicKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> google.protobuf.wrappers_pb2.StringValue: + response = self.call_legacy_public_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_rewrap( + self, req: kas_pb2.RewrapRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[kas_pb2.RewrapResponse]: + """Low-level method to call Rewrap, granting access to errors and metadata""" + url = self.base_url + "/kas.AccessService/Rewrap" + return self._connect_client.call_unary(url, req, kas_pb2.RewrapResponse,extra_headers, timeout_seconds) + + + def rewrap( + self, req: kas_pb2.RewrapRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> kas_pb2.RewrapResponse: + response = self.call_rewrap(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncAccessServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_public_key( + self, req: kas_pb2.PublicKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[kas_pb2.PublicKeyResponse]: + """Low-level method to call PublicKey, granting access to errors and metadata""" + url = self.base_url + "/kas.AccessService/PublicKey" + return await self._connect_client.call_unary(url, req, 
kas_pb2.PublicKeyResponse,extra_headers, timeout_seconds) + + async def public_key( + self, req: kas_pb2.PublicKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> kas_pb2.PublicKeyResponse: + response = await self.call_public_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_legacy_public_key( + self, req: kas_pb2.LegacyPublicKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[google.protobuf.wrappers_pb2.StringValue]: + """Low-level method to call LegacyPublicKey, granting access to errors and metadata""" + url = self.base_url + "/kas.AccessService/LegacyPublicKey" + return await self._connect_client.call_unary(url, req, google.protobuf.wrappers_pb2.StringValue,extra_headers, timeout_seconds) + + async def legacy_public_key( + self, req: kas_pb2.LegacyPublicKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> google.protobuf.wrappers_pb2.StringValue: + response = await self.call_legacy_public_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_rewrap( + self, req: kas_pb2.RewrapRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[kas_pb2.RewrapResponse]: + """Low-level method to call Rewrap, granting access to errors and metadata""" + url = self.base_url + "/kas.AccessService/Rewrap" + return await self._connect_client.call_unary(url, req, kas_pb2.RewrapResponse,extra_headers, timeout_seconds) + + async def rewrap( + self, req: kas_pb2.RewrapRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> kas_pb2.RewrapResponse: + response = await self.call_rewrap(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class AccessServiceProtocol(typing.Protocol): + def public_key(self, req: ClientRequest[kas_pb2.PublicKeyRequest]) -> ServerResponse[kas_pb2.PublicKeyResponse]: + ... + def legacy_public_key(self, req: ClientRequest[kas_pb2.LegacyPublicKeyRequest]) -> ServerResponse[google.protobuf.wrappers_pb2.StringValue]: + ... + def rewrap(self, req: ClientRequest[kas_pb2.RewrapRequest]) -> ServerResponse[kas_pb2.RewrapResponse]: + ... 
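(Editor's note: an illustrative sketch, not part of the generated file.) The synchronous client defined above needs only a base URL. A minimal sketch, assuming a platform listening at the hypothetical address http://localhost:8080; the high-level wrapper raises the RPC error (or ConnectProtocolError when the message is missing) rather than returning it:

# Sketch: fetching the KAS public key via the generated Connect client.
# The base_url is an assumption for a locally running platform.
from otdf_python_proto.kas.kas_pb2 import PublicKeyRequest
from otdf_python_proto.kas.kas_pb2_connect import AccessServiceClient

client = AccessServiceClient(base_url="http://localhost:8080")
resp = client.public_key(PublicKeyRequest(algorithm="rsa:2048"))
print(resp.kid, resp.public_key[:40])

# For error/metadata inspection, the call_* variant returns a UnaryOutput
# instead of raising:
out = client.call_public_key(PublicKeyRequest(algorithm="rsa:2048"))
if out.error() is None:
    print(out.message().kid)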
+ +ACCESS_SERVICE_PATH_PREFIX = "/kas.AccessService" + +def wsgi_access_service(implementation: AccessServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/kas.AccessService/PublicKey", implementation.public_key, kas_pb2.PublicKeyRequest) + app.register_unary_rpc("/kas.AccessService/LegacyPublicKey", implementation.legacy_public_key, kas_pb2.LegacyPublicKeyRequest) + app.register_unary_rpc("/kas.AccessService/Rewrap", implementation.rewrap, kas_pb2.RewrapRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/__init__.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/__init__.py new file mode 100644 index 0000000..fcae387 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/__init__.py @@ -0,0 +1 @@ +"""legacy_grpc protobuf definitions.""" diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/authorization/authorization_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/authorization/authorization_pb2_grpc.py new file mode 100644 index 0000000..4b20995 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/authorization/authorization_pb2_grpc.py @@ -0,0 +1,163 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from authorization import authorization_pb2 as authorization_dot_authorization__pb2 + + +class AuthorizationServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetDecisions = channel.unary_unary( + '/authorization.AuthorizationService/GetDecisions', + request_serializer=authorization_dot_authorization__pb2.GetDecisionsRequest.SerializeToString, + response_deserializer=authorization_dot_authorization__pb2.GetDecisionsResponse.FromString, + _registered_method=True) + self.GetDecisionsByToken = channel.unary_unary( + '/authorization.AuthorizationService/GetDecisionsByToken', + request_serializer=authorization_dot_authorization__pb2.GetDecisionsByTokenRequest.SerializeToString, + response_deserializer=authorization_dot_authorization__pb2.GetDecisionsByTokenResponse.FromString, + _registered_method=True) + self.GetEntitlements = channel.unary_unary( + '/authorization.AuthorizationService/GetEntitlements', + request_serializer=authorization_dot_authorization__pb2.GetEntitlementsRequest.SerializeToString, + response_deserializer=authorization_dot_authorization__pb2.GetEntitlementsResponse.FromString, + _registered_method=True) + + +class AuthorizationServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetDecisions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetDecisionsByToken(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetEntitlements(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not 
implemented!') + + +def add_AuthorizationServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetDecisions': grpc.unary_unary_rpc_method_handler( + servicer.GetDecisions, + request_deserializer=authorization_dot_authorization__pb2.GetDecisionsRequest.FromString, + response_serializer=authorization_dot_authorization__pb2.GetDecisionsResponse.SerializeToString, + ), + 'GetDecisionsByToken': grpc.unary_unary_rpc_method_handler( + servicer.GetDecisionsByToken, + request_deserializer=authorization_dot_authorization__pb2.GetDecisionsByTokenRequest.FromString, + response_serializer=authorization_dot_authorization__pb2.GetDecisionsByTokenResponse.SerializeToString, + ), + 'GetEntitlements': grpc.unary_unary_rpc_method_handler( + servicer.GetEntitlements, + request_deserializer=authorization_dot_authorization__pb2.GetEntitlementsRequest.FromString, + response_serializer=authorization_dot_authorization__pb2.GetEntitlementsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'authorization.AuthorizationService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('authorization.AuthorizationService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class AuthorizationService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetDecisions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/authorization.AuthorizationService/GetDecisions', + authorization_dot_authorization__pb2.GetDecisionsRequest.SerializeToString, + authorization_dot_authorization__pb2.GetDecisionsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetDecisionsByToken(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/authorization.AuthorizationService/GetDecisionsByToken', + authorization_dot_authorization__pb2.GetDecisionsByTokenRequest.SerializeToString, + authorization_dot_authorization__pb2.GetDecisionsByTokenResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetEntitlements(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/authorization.AuthorizationService/GetEntitlements', + authorization_dot_authorization__pb2.GetEntitlementsRequest.SerializeToString, + authorization_dot_authorization__pb2.GetEntitlementsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/authorization/v2/authorization_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/authorization/v2/authorization_pb2_grpc.py new 
file mode 100644 index 0000000..15488b5 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/authorization/v2/authorization_pb2_grpc.py @@ -0,0 +1,206 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from authorization.v2 import authorization_pb2 as authorization_dot_v2_dot_authorization__pb2 + + +class AuthorizationServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetDecision = channel.unary_unary( + '/authorization.v2.AuthorizationService/GetDecision', + request_serializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionRequest.SerializeToString, + response_deserializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionResponse.FromString, + _registered_method=True) + self.GetDecisionMultiResource = channel.unary_unary( + '/authorization.v2.AuthorizationService/GetDecisionMultiResource', + request_serializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionMultiResourceRequest.SerializeToString, + response_deserializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionMultiResourceResponse.FromString, + _registered_method=True) + self.GetDecisionBulk = channel.unary_unary( + '/authorization.v2.AuthorizationService/GetDecisionBulk', + request_serializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionBulkRequest.SerializeToString, + response_deserializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionBulkResponse.FromString, + _registered_method=True) + self.GetEntitlements = channel.unary_unary( + '/authorization.v2.AuthorizationService/GetEntitlements', + request_serializer=authorization_dot_v2_dot_authorization__pb2.GetEntitlementsRequest.SerializeToString, + response_deserializer=authorization_dot_v2_dot_authorization__pb2.GetEntitlementsResponse.FromString, + _registered_method=True) + + +class AuthorizationServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetDecision(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetDecisionMultiResource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetDecisionBulk(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetEntitlements(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_AuthorizationServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetDecision': grpc.unary_unary_rpc_method_handler( + servicer.GetDecision, + request_deserializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionRequest.FromString, + 
response_serializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionResponse.SerializeToString, + ), + 'GetDecisionMultiResource': grpc.unary_unary_rpc_method_handler( + servicer.GetDecisionMultiResource, + request_deserializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionMultiResourceRequest.FromString, + response_serializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionMultiResourceResponse.SerializeToString, + ), + 'GetDecisionBulk': grpc.unary_unary_rpc_method_handler( + servicer.GetDecisionBulk, + request_deserializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionBulkRequest.FromString, + response_serializer=authorization_dot_v2_dot_authorization__pb2.GetDecisionBulkResponse.SerializeToString, + ), + 'GetEntitlements': grpc.unary_unary_rpc_method_handler( + servicer.GetEntitlements, + request_deserializer=authorization_dot_v2_dot_authorization__pb2.GetEntitlementsRequest.FromString, + response_serializer=authorization_dot_v2_dot_authorization__pb2.GetEntitlementsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'authorization.v2.AuthorizationService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('authorization.v2.AuthorizationService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class AuthorizationService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetDecision(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/authorization.v2.AuthorizationService/GetDecision', + authorization_dot_v2_dot_authorization__pb2.GetDecisionRequest.SerializeToString, + authorization_dot_v2_dot_authorization__pb2.GetDecisionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetDecisionMultiResource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/authorization.v2.AuthorizationService/GetDecisionMultiResource', + authorization_dot_v2_dot_authorization__pb2.GetDecisionMultiResourceRequest.SerializeToString, + authorization_dot_v2_dot_authorization__pb2.GetDecisionMultiResourceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetDecisionBulk(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/authorization.v2.AuthorizationService/GetDecisionBulk', + authorization_dot_v2_dot_authorization__pb2.GetDecisionBulkRequest.SerializeToString, + authorization_dot_v2_dot_authorization__pb2.GetDecisionBulkResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetEntitlements(request, + target, + 
options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/authorization.v2.AuthorizationService/GetEntitlements', + authorization_dot_v2_dot_authorization__pb2.GetEntitlementsRequest.SerializeToString, + authorization_dot_v2_dot_authorization__pb2.GetEntitlementsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/common/common_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/common/common_pb2_grpc.py new file mode 100644 index 0000000..2daafff --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/common/common_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entity/entity_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entity/entity_pb2_grpc.py new file mode 100644 index 0000000..2daafff --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entity/entity_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entityresolution/entity_resolution_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entityresolution/entity_resolution_pb2_grpc.py new file mode 100644 index 0000000..1f04f2b --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entityresolution/entity_resolution_pb2_grpc.py @@ -0,0 +1,122 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from entityresolution import entity_resolution_pb2 as entityresolution_dot_entity__resolution__pb2 + + +class EntityResolutionServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ResolveEntities = channel.unary_unary( + '/entityresolution.EntityResolutionService/ResolveEntities', + request_serializer=entityresolution_dot_entity__resolution__pb2.ResolveEntitiesRequest.SerializeToString, + response_deserializer=entityresolution_dot_entity__resolution__pb2.ResolveEntitiesResponse.FromString, + _registered_method=True) + self.CreateEntityChainFromJwt = channel.unary_unary( + '/entityresolution.EntityResolutionService/CreateEntityChainFromJwt', + request_serializer=entityresolution_dot_entity__resolution__pb2.CreateEntityChainFromJwtRequest.SerializeToString, + response_deserializer=entityresolution_dot_entity__resolution__pb2.CreateEntityChainFromJwtResponse.FromString, + _registered_method=True) + + +class EntityResolutionServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def ResolveEntities(self, request, context): + """Deprecated: use v2 ResolveEntities instead + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateEntityChainFromJwt(self, request, context): + """Deprecated: use v2 CreateEntityChainsFromTokens instead + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_EntityResolutionServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ResolveEntities': grpc.unary_unary_rpc_method_handler( + servicer.ResolveEntities, + request_deserializer=entityresolution_dot_entity__resolution__pb2.ResolveEntitiesRequest.FromString, + response_serializer=entityresolution_dot_entity__resolution__pb2.ResolveEntitiesResponse.SerializeToString, + ), + 'CreateEntityChainFromJwt': grpc.unary_unary_rpc_method_handler( + servicer.CreateEntityChainFromJwt, + request_deserializer=entityresolution_dot_entity__resolution__pb2.CreateEntityChainFromJwtRequest.FromString, + response_serializer=entityresolution_dot_entity__resolution__pb2.CreateEntityChainFromJwtResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'entityresolution.EntityResolutionService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('entityresolution.EntityResolutionService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
+class EntityResolutionService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def ResolveEntities(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/entityresolution.EntityResolutionService/ResolveEntities', + entityresolution_dot_entity__resolution__pb2.ResolveEntitiesRequest.SerializeToString, + entityresolution_dot_entity__resolution__pb2.ResolveEntitiesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateEntityChainFromJwt(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/entityresolution.EntityResolutionService/CreateEntityChainFromJwt', + entityresolution_dot_entity__resolution__pb2.CreateEntityChainFromJwtRequest.SerializeToString, + entityresolution_dot_entity__resolution__pb2.CreateEntityChainFromJwtResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entityresolution/v2/entity_resolution_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entityresolution/v2/entity_resolution_pb2_grpc.py new file mode 100644 index 0000000..b81ed6a --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/entityresolution/v2/entity_resolution_pb2_grpc.py @@ -0,0 +1,120 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from entityresolution.v2 import entity_resolution_pb2 as entityresolution_dot_v2_dot_entity__resolution__pb2 + + +class EntityResolutionServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ResolveEntities = channel.unary_unary( + '/entityresolution.v2.EntityResolutionService/ResolveEntities', + request_serializer=entityresolution_dot_v2_dot_entity__resolution__pb2.ResolveEntitiesRequest.SerializeToString, + response_deserializer=entityresolution_dot_v2_dot_entity__resolution__pb2.ResolveEntitiesResponse.FromString, + _registered_method=True) + self.CreateEntityChainsFromTokens = channel.unary_unary( + '/entityresolution.v2.EntityResolutionService/CreateEntityChainsFromTokens', + request_serializer=entityresolution_dot_v2_dot_entity__resolution__pb2.CreateEntityChainsFromTokensRequest.SerializeToString, + response_deserializer=entityresolution_dot_v2_dot_entity__resolution__pb2.CreateEntityChainsFromTokensResponse.FromString, + _registered_method=True) + + +class EntityResolutionServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def ResolveEntities(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateEntityChainsFromTokens(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_EntityResolutionServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ResolveEntities': grpc.unary_unary_rpc_method_handler( + servicer.ResolveEntities, + request_deserializer=entityresolution_dot_v2_dot_entity__resolution__pb2.ResolveEntitiesRequest.FromString, + response_serializer=entityresolution_dot_v2_dot_entity__resolution__pb2.ResolveEntitiesResponse.SerializeToString, + ), + 'CreateEntityChainsFromTokens': grpc.unary_unary_rpc_method_handler( + servicer.CreateEntityChainsFromTokens, + request_deserializer=entityresolution_dot_v2_dot_entity__resolution__pb2.CreateEntityChainsFromTokensRequest.FromString, + response_serializer=entityresolution_dot_v2_dot_entity__resolution__pb2.CreateEntityChainsFromTokensResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'entityresolution.v2.EntityResolutionService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('entityresolution.v2.EntityResolutionService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
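(Editor's note: an illustrative sketch; the EntityResolutionService convenience class that follows, flagged experimental in the comment above, wraps the same two RPCs as staticmethods.) A minimal sketch of driving the v2 stub over a plain channel; the address is an assumption, and an empty request is shown because the request message's fields are declared elsewhere in this package:

# Sketch: calling v2 ResolveEntities through the legacy gRPC stub.
# Import paths follow this generated module's own convention; adjust to the
# installed layout. The address is an assumed local platform endpoint.
import grpc
from entityresolution.v2 import entity_resolution_pb2, entity_resolution_pb2_grpc

with grpc.insecure_channel("localhost:8080") as channel:
    stub = entity_resolution_pb2_grpc.EntityResolutionServiceStub(channel)
    resp = stub.ResolveEntities(entity_resolution_pb2.ResolveEntitiesRequest())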
+class EntityResolutionService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def ResolveEntities(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/entityresolution.v2.EntityResolutionService/ResolveEntities', + entityresolution_dot_v2_dot_entity__resolution__pb2.ResolveEntitiesRequest.SerializeToString, + entityresolution_dot_v2_dot_entity__resolution__pb2.ResolveEntitiesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateEntityChainsFromTokens(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/entityresolution.v2.EntityResolutionService/CreateEntityChainsFromTokens', + entityresolution_dot_v2_dot_entity__resolution__pb2.CreateEntityChainsFromTokensRequest.SerializeToString, + entityresolution_dot_v2_dot_entity__resolution__pb2.CreateEntityChainsFromTokensResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/kas/kas_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/kas/kas_pb2_grpc.py new file mode 100644 index 0000000..7a6b3f9 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/kas/kas_pb2_grpc.py @@ -0,0 +1,172 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from kas import kas_pb2 as kas_dot_kas__pb2 + + +class AccessServiceStub(object): + """Get app info from the root path + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.PublicKey = channel.unary_unary( + '/kas.AccessService/PublicKey', + request_serializer=kas_dot_kas__pb2.PublicKeyRequest.SerializeToString, + response_deserializer=kas_dot_kas__pb2.PublicKeyResponse.FromString, + _registered_method=True) + self.LegacyPublicKey = channel.unary_unary( + '/kas.AccessService/LegacyPublicKey', + request_serializer=kas_dot_kas__pb2.LegacyPublicKeyRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_wrappers__pb2.StringValue.FromString, + _registered_method=True) + self.Rewrap = channel.unary_unary( + '/kas.AccessService/Rewrap', + request_serializer=kas_dot_kas__pb2.RewrapRequest.SerializeToString, + response_deserializer=kas_dot_kas__pb2.RewrapResponse.FromString, + _registered_method=True) + + +class AccessServiceServicer(object): + """Get app info from the root path + """ + + def PublicKey(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LegacyPublicKey(self, request, context): + """Endpoint intended for gRPC Gateway's REST endpoint to provide v1 compatibility with older TDF clients + + This endpoint is not recommended for use in new applications, prefer the v2 endpoint ('PublicKey') instead. + + buf:lint:ignore RPC_RESPONSE_STANDARD_NAME + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Rewrap(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_AccessServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'PublicKey': grpc.unary_unary_rpc_method_handler( + servicer.PublicKey, + request_deserializer=kas_dot_kas__pb2.PublicKeyRequest.FromString, + response_serializer=kas_dot_kas__pb2.PublicKeyResponse.SerializeToString, + ), + 'LegacyPublicKey': grpc.unary_unary_rpc_method_handler( + servicer.LegacyPublicKey, + request_deserializer=kas_dot_kas__pb2.LegacyPublicKeyRequest.FromString, + response_serializer=google_dot_protobuf_dot_wrappers__pb2.StringValue.SerializeToString, + ), + 'Rewrap': grpc.unary_unary_rpc_method_handler( + servicer.Rewrap, + request_deserializer=kas_dot_kas__pb2.RewrapRequest.FromString, + response_serializer=kas_dot_kas__pb2.RewrapResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'kas.AccessService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('kas.AccessService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
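(Editor's note: an illustrative sketch; the AccessService class that follows, flagged experimental in the comment above, exposes the same three RPCs as staticmethods.) A minimal sketch of the legacy gRPC path to KAS, mirroring the Connect client earlier in this diff; the channel address is an assumption:

# Sketch: legacy gRPC call to KAS PublicKey.
# Import paths follow this generated module's own convention ("from kas import
# kas_pb2"); adjust to the installed layout. The address is assumed.
import grpc
from kas import kas_pb2, kas_pb2_grpc

with grpc.insecure_channel("localhost:8080") as channel:
    stub = kas_pb2_grpc.AccessServiceStub(channel)
    resp = stub.PublicKey(kas_pb2.PublicKeyRequest(algorithm="ec:secp256r1"))
    print(resp.kid)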
+class AccessService(object): + """Get app info from the root path + """ + + @staticmethod + def PublicKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/kas.AccessService/PublicKey', + kas_dot_kas__pb2.PublicKeyRequest.SerializeToString, + kas_dot_kas__pb2.PublicKeyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def LegacyPublicKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/kas.AccessService/LegacyPublicKey', + kas_dot_kas__pb2.LegacyPublicKeyRequest.SerializeToString, + google_dot_protobuf_dot_wrappers__pb2.StringValue.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def Rewrap(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/kas.AccessService/Rewrap', + kas_dot_kas__pb2.RewrapRequest.SerializeToString, + kas_dot_kas__pb2.RewrapResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/logger/audit/test_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/logger/audit/test_pb2_grpc.py new file mode 100644 index 0000000..2daafff --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/logger/audit/test_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/actions/actions_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/actions/actions_pb2_grpc.py new file mode 100644 index 0000000..58ef85e --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/actions/actions_pb2_grpc.py @@ -0,0 +1,249 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.actions import actions_pb2 as policy_dot_actions_dot_actions__pb2 + + +class ActionServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetAction = channel.unary_unary( + '/policy.actions.ActionService/GetAction', + request_serializer=policy_dot_actions_dot_actions__pb2.GetActionRequest.SerializeToString, + response_deserializer=policy_dot_actions_dot_actions__pb2.GetActionResponse.FromString, + _registered_method=True) + self.ListActions = channel.unary_unary( + '/policy.actions.ActionService/ListActions', + request_serializer=policy_dot_actions_dot_actions__pb2.ListActionsRequest.SerializeToString, + response_deserializer=policy_dot_actions_dot_actions__pb2.ListActionsResponse.FromString, + _registered_method=True) + self.CreateAction = channel.unary_unary( + '/policy.actions.ActionService/CreateAction', + request_serializer=policy_dot_actions_dot_actions__pb2.CreateActionRequest.SerializeToString, + response_deserializer=policy_dot_actions_dot_actions__pb2.CreateActionResponse.FromString, + _registered_method=True) + self.UpdateAction = channel.unary_unary( + '/policy.actions.ActionService/UpdateAction', + request_serializer=policy_dot_actions_dot_actions__pb2.UpdateActionRequest.SerializeToString, + response_deserializer=policy_dot_actions_dot_actions__pb2.UpdateActionResponse.FromString, + _registered_method=True) + self.DeleteAction = channel.unary_unary( + '/policy.actions.ActionService/DeleteAction', + request_serializer=policy_dot_actions_dot_actions__pb2.DeleteActionRequest.SerializeToString, + response_deserializer=policy_dot_actions_dot_actions__pb2.DeleteActionResponse.FromString, + _registered_method=True) + + +class ActionServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetAction(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListActions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateAction(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateAction(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteAction(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ActionServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetAction': grpc.unary_unary_rpc_method_handler( + servicer.GetAction, + request_deserializer=policy_dot_actions_dot_actions__pb2.GetActionRequest.FromString, + response_serializer=policy_dot_actions_dot_actions__pb2.GetActionResponse.SerializeToString, + ), + 'ListActions': grpc.unary_unary_rpc_method_handler( + servicer.ListActions, + request_deserializer=policy_dot_actions_dot_actions__pb2.ListActionsRequest.FromString, + 
response_serializer=policy_dot_actions_dot_actions__pb2.ListActionsResponse.SerializeToString, + ), + 'CreateAction': grpc.unary_unary_rpc_method_handler( + servicer.CreateAction, + request_deserializer=policy_dot_actions_dot_actions__pb2.CreateActionRequest.FromString, + response_serializer=policy_dot_actions_dot_actions__pb2.CreateActionResponse.SerializeToString, + ), + 'UpdateAction': grpc.unary_unary_rpc_method_handler( + servicer.UpdateAction, + request_deserializer=policy_dot_actions_dot_actions__pb2.UpdateActionRequest.FromString, + response_serializer=policy_dot_actions_dot_actions__pb2.UpdateActionResponse.SerializeToString, + ), + 'DeleteAction': grpc.unary_unary_rpc_method_handler( + servicer.DeleteAction, + request_deserializer=policy_dot_actions_dot_actions__pb2.DeleteActionRequest.FromString, + response_serializer=policy_dot_actions_dot_actions__pb2.DeleteActionResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.actions.ActionService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.actions.ActionService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class ActionService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetAction(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.actions.ActionService/GetAction', + policy_dot_actions_dot_actions__pb2.GetActionRequest.SerializeToString, + policy_dot_actions_dot_actions__pb2.GetActionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListActions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.actions.ActionService/ListActions', + policy_dot_actions_dot_actions__pb2.ListActionsRequest.SerializeToString, + policy_dot_actions_dot_actions__pb2.ListActionsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateAction(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.actions.ActionService/CreateAction', + policy_dot_actions_dot_actions__pb2.CreateActionRequest.SerializeToString, + policy_dot_actions_dot_actions__pb2.CreateActionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateAction(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.actions.ActionService/UpdateAction', + 
policy_dot_actions_dot_actions__pb2.UpdateActionRequest.SerializeToString, + policy_dot_actions_dot_actions__pb2.UpdateActionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteAction(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.actions.ActionService/DeleteAction', + policy_dot_actions_dot_actions__pb2.DeleteActionRequest.SerializeToString, + policy_dot_actions_dot_actions__pb2.DeleteActionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/attributes/attributes_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/attributes/attributes_pb2_grpc.py new file mode 100644 index 0000000..5920319 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/attributes/attributes_pb2_grpc.py @@ -0,0 +1,873 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.attributes import attributes_pb2 as policy_dot_attributes_dot_attributes__pb2 + + +class AttributesServiceStub(object): + """/ + / Attribute Service + / + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListAttributes = channel.unary_unary( + '/policy.attributes.AttributesService/ListAttributes', + request_serializer=policy_dot_attributes_dot_attributes__pb2.ListAttributesRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.ListAttributesResponse.FromString, + _registered_method=True) + self.ListAttributeValues = channel.unary_unary( + '/policy.attributes.AttributesService/ListAttributeValues', + request_serializer=policy_dot_attributes_dot_attributes__pb2.ListAttributeValuesRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.ListAttributeValuesResponse.FromString, + _registered_method=True) + self.GetAttribute = channel.unary_unary( + '/policy.attributes.AttributesService/GetAttribute', + request_serializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeResponse.FromString, + _registered_method=True) + self.GetAttributeValuesByFqns = channel.unary_unary( + '/policy.attributes.AttributesService/GetAttributeValuesByFqns', + request_serializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeValuesByFqnsRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeValuesByFqnsResponse.FromString, + _registered_method=True) + self.CreateAttribute = channel.unary_unary( + '/policy.attributes.AttributesService/CreateAttribute', + request_serializer=policy_dot_attributes_dot_attributes__pb2.CreateAttributeRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.CreateAttributeResponse.FromString, + _registered_method=True) + self.UpdateAttribute = channel.unary_unary( + '/policy.attributes.AttributesService/UpdateAttribute', 
+ request_serializer=policy_dot_attributes_dot_attributes__pb2.UpdateAttributeRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.UpdateAttributeResponse.FromString, + _registered_method=True) + self.DeactivateAttribute = channel.unary_unary( + '/policy.attributes.AttributesService/DeactivateAttribute', + request_serializer=policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeResponse.FromString, + _registered_method=True) + self.GetAttributeValue = channel.unary_unary( + '/policy.attributes.AttributesService/GetAttributeValue', + request_serializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeValueRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeValueResponse.FromString, + _registered_method=True) + self.CreateAttributeValue = channel.unary_unary( + '/policy.attributes.AttributesService/CreateAttributeValue', + request_serializer=policy_dot_attributes_dot_attributes__pb2.CreateAttributeValueRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.CreateAttributeValueResponse.FromString, + _registered_method=True) + self.UpdateAttributeValue = channel.unary_unary( + '/policy.attributes.AttributesService/UpdateAttributeValue', + request_serializer=policy_dot_attributes_dot_attributes__pb2.UpdateAttributeValueRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.UpdateAttributeValueResponse.FromString, + _registered_method=True) + self.DeactivateAttributeValue = channel.unary_unary( + '/policy.attributes.AttributesService/DeactivateAttributeValue', + request_serializer=policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeValueRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeValueResponse.FromString, + _registered_method=True) + self.AssignKeyAccessServerToAttribute = channel.unary_unary( + '/policy.attributes.AttributesService/AssignKeyAccessServerToAttribute', + request_serializer=policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToAttributeRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToAttributeResponse.FromString, + _registered_method=True) + self.RemoveKeyAccessServerFromAttribute = channel.unary_unary( + '/policy.attributes.AttributesService/RemoveKeyAccessServerFromAttribute', + request_serializer=policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromAttributeRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromAttributeResponse.FromString, + _registered_method=True) + self.AssignKeyAccessServerToValue = channel.unary_unary( + '/policy.attributes.AttributesService/AssignKeyAccessServerToValue', + request_serializer=policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToValueRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToValueResponse.FromString, + _registered_method=True) + self.RemoveKeyAccessServerFromValue = channel.unary_unary( + '/policy.attributes.AttributesService/RemoveKeyAccessServerFromValue', + request_serializer=policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromValueRequest.SerializeToString, + 
response_deserializer=policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromValueResponse.FromString, + _registered_method=True) + self.AssignPublicKeyToAttribute = channel.unary_unary( + '/policy.attributes.AttributesService/AssignPublicKeyToAttribute', + request_serializer=policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToAttributeRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToAttributeResponse.FromString, + _registered_method=True) + self.RemovePublicKeyFromAttribute = channel.unary_unary( + '/policy.attributes.AttributesService/RemovePublicKeyFromAttribute', + request_serializer=policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromAttributeRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromAttributeResponse.FromString, + _registered_method=True) + self.AssignPublicKeyToValue = channel.unary_unary( + '/policy.attributes.AttributesService/AssignPublicKeyToValue', + request_serializer=policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToValueRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToValueResponse.FromString, + _registered_method=True) + self.RemovePublicKeyFromValue = channel.unary_unary( + '/policy.attributes.AttributesService/RemovePublicKeyFromValue', + request_serializer=policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromValueRequest.SerializeToString, + response_deserializer=policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromValueResponse.FromString, + _registered_method=True) + + +class AttributesServiceServicer(object): + """/ + / Attribute Service + / + """ + + def ListAttributes(self, request, context): + """--------------------------------------* + Attribute RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListAttributeValues(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetAttribute(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetAttributeValuesByFqns(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateAttribute(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateAttribute(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeactivateAttribute(self, request, context): + """Missing associated documentation comment in .proto file.""" + 
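# Generated default: each servicer method fails with UNIMPLEMENTED
+        # until a concrete subclass overrides it.
+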
context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetAttributeValue(self, request, context): + """--------------------------------------* + Value RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateAttributeValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateAttributeValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeactivateAttributeValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AssignKeyAccessServerToAttribute(self, request, context): + """--------------------------------------* + Attribute <> Key Access Server RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RemoveKeyAccessServerFromAttribute(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AssignKeyAccessServerToValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RemoveKeyAccessServerFromValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AssignPublicKeyToAttribute(self, request, context): + """--------------------------------------* + Attribute <> Key RPCs + --------------------------------------- + + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RemovePublicKeyFromAttribute(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AssignPublicKeyToValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RemovePublicKeyFromValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + 
context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_AttributesServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListAttributes': grpc.unary_unary_rpc_method_handler( + servicer.ListAttributes, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.ListAttributesRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.ListAttributesResponse.SerializeToString, + ), + 'ListAttributeValues': grpc.unary_unary_rpc_method_handler( + servicer.ListAttributeValues, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.ListAttributeValuesRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.ListAttributeValuesResponse.SerializeToString, + ), + 'GetAttribute': grpc.unary_unary_rpc_method_handler( + servicer.GetAttribute, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeResponse.SerializeToString, + ), + 'GetAttributeValuesByFqns': grpc.unary_unary_rpc_method_handler( + servicer.GetAttributeValuesByFqns, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeValuesByFqnsRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeValuesByFqnsResponse.SerializeToString, + ), + 'CreateAttribute': grpc.unary_unary_rpc_method_handler( + servicer.CreateAttribute, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.CreateAttributeRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.CreateAttributeResponse.SerializeToString, + ), + 'UpdateAttribute': grpc.unary_unary_rpc_method_handler( + servicer.UpdateAttribute, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.UpdateAttributeRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.UpdateAttributeResponse.SerializeToString, + ), + 'DeactivateAttribute': grpc.unary_unary_rpc_method_handler( + servicer.DeactivateAttribute, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeResponse.SerializeToString, + ), + 'GetAttributeValue': grpc.unary_unary_rpc_method_handler( + servicer.GetAttributeValue, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeValueRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.GetAttributeValueResponse.SerializeToString, + ), + 'CreateAttributeValue': grpc.unary_unary_rpc_method_handler( + servicer.CreateAttributeValue, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.CreateAttributeValueRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.CreateAttributeValueResponse.SerializeToString, + ), + 'UpdateAttributeValue': grpc.unary_unary_rpc_method_handler( + servicer.UpdateAttributeValue, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.UpdateAttributeValueRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.UpdateAttributeValueResponse.SerializeToString, + ), + 'DeactivateAttributeValue': grpc.unary_unary_rpc_method_handler( + servicer.DeactivateAttributeValue, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeValueRequest.FromString, + 
response_serializer=policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeValueResponse.SerializeToString, + ), + 'AssignKeyAccessServerToAttribute': grpc.unary_unary_rpc_method_handler( + servicer.AssignKeyAccessServerToAttribute, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToAttributeRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToAttributeResponse.SerializeToString, + ), + 'RemoveKeyAccessServerFromAttribute': grpc.unary_unary_rpc_method_handler( + servicer.RemoveKeyAccessServerFromAttribute, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromAttributeRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromAttributeResponse.SerializeToString, + ), + 'AssignKeyAccessServerToValue': grpc.unary_unary_rpc_method_handler( + servicer.AssignKeyAccessServerToValue, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToValueRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToValueResponse.SerializeToString, + ), + 'RemoveKeyAccessServerFromValue': grpc.unary_unary_rpc_method_handler( + servicer.RemoveKeyAccessServerFromValue, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromValueRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromValueResponse.SerializeToString, + ), + 'AssignPublicKeyToAttribute': grpc.unary_unary_rpc_method_handler( + servicer.AssignPublicKeyToAttribute, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToAttributeRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToAttributeResponse.SerializeToString, + ), + 'RemovePublicKeyFromAttribute': grpc.unary_unary_rpc_method_handler( + servicer.RemovePublicKeyFromAttribute, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromAttributeRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromAttributeResponse.SerializeToString, + ), + 'AssignPublicKeyToValue': grpc.unary_unary_rpc_method_handler( + servicer.AssignPublicKeyToValue, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToValueRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToValueResponse.SerializeToString, + ), + 'RemovePublicKeyFromValue': grpc.unary_unary_rpc_method_handler( + servicer.RemovePublicKeyFromValue, + request_deserializer=policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromValueRequest.FromString, + response_serializer=policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromValueResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.attributes.AttributesService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.attributes.AttributesService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
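+# Usage sketch for the experimental convenience API below (illustrative only:
+# the 'localhost:8080' target and insecure transport are assumptions, not part
+# of this generated module):
+#
+#   request = policy_dot_attributes_dot_attributes__pb2.ListAttributesRequest()
+#   response = AttributesService.ListAttributes(
+#       request, 'localhost:8080', insecure=True)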
+class AttributesService(object): + """/ + / Attribute Service + / + """ + + @staticmethod + def ListAttributes(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/ListAttributes', + policy_dot_attributes_dot_attributes__pb2.ListAttributesRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.ListAttributesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListAttributeValues(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/ListAttributeValues', + policy_dot_attributes_dot_attributes__pb2.ListAttributeValuesRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.ListAttributeValuesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/GetAttribute', + policy_dot_attributes_dot_attributes__pb2.GetAttributeRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.GetAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetAttributeValuesByFqns(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/GetAttributeValuesByFqns', + policy_dot_attributes_dot_attributes__pb2.GetAttributeValuesByFqnsRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.GetAttributeValuesByFqnsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/CreateAttribute', + policy_dot_attributes_dot_attributes__pb2.CreateAttributeRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.CreateAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + 
wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/UpdateAttribute', + policy_dot_attributes_dot_attributes__pb2.UpdateAttributeRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.UpdateAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeactivateAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/DeactivateAttribute', + policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetAttributeValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/GetAttributeValue', + policy_dot_attributes_dot_attributes__pb2.GetAttributeValueRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.GetAttributeValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateAttributeValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/CreateAttributeValue', + policy_dot_attributes_dot_attributes__pb2.CreateAttributeValueRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.CreateAttributeValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateAttributeValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/UpdateAttributeValue', + policy_dot_attributes_dot_attributes__pb2.UpdateAttributeValueRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.UpdateAttributeValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeactivateAttributeValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/DeactivateAttributeValue', + 
policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeValueRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.DeactivateAttributeValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def AssignKeyAccessServerToAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/AssignKeyAccessServerToAttribute', + policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToAttributeRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def RemoveKeyAccessServerFromAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/RemoveKeyAccessServerFromAttribute', + policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromAttributeRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def AssignKeyAccessServerToValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/AssignKeyAccessServerToValue', + policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToValueRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.AssignKeyAccessServerToValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def RemoveKeyAccessServerFromValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/RemoveKeyAccessServerFromValue', + policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromValueRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.RemoveKeyAccessServerFromValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def AssignPublicKeyToAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + 
'/policy.attributes.AttributesService/AssignPublicKeyToAttribute', + policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToAttributeRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def RemovePublicKeyFromAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/RemovePublicKeyFromAttribute', + policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromAttributeRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def AssignPublicKeyToValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/AssignPublicKeyToValue', + policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToValueRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.AssignPublicKeyToValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def RemovePublicKeyFromValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.attributes.AttributesService/RemovePublicKeyFromValue', + policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromValueRequest.SerializeToString, + policy_dot_attributes_dot_attributes__pb2.RemovePublicKeyFromValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/kasregistry/key_access_server_registry_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/kasregistry/key_access_server_registry_pb2_grpc.py new file mode 100644 index 0000000..686c567 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/kasregistry/key_access_server_registry_pb2_grpc.py @@ -0,0 +1,602 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.kasregistry import key_access_server_registry_pb2 as policy_dot_kasregistry_dot_key__access__server__registry__pb2 + + +class KeyAccessServerRegistryServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
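+        A client typically holds one stub per channel, e.g.
+        KeyAccessServerRegistryServiceStub(grpc.insecure_channel(target)),
+        where the channel target is deployment-specific.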
+ """ + self.ListKeyAccessServers = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServers', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServersRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServersResponse.FromString, + _registered_method=True) + self.GetKeyAccessServer = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/GetKeyAccessServer', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyAccessServerRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyAccessServerResponse.FromString, + _registered_method=True) + self.CreateKeyAccessServer = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/CreateKeyAccessServer', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyAccessServerRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyAccessServerResponse.FromString, + _registered_method=True) + self.UpdateKeyAccessServer = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/UpdateKeyAccessServer', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyAccessServerRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyAccessServerResponse.FromString, + _registered_method=True) + self.DeleteKeyAccessServer = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/DeleteKeyAccessServer', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.DeleteKeyAccessServerRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.DeleteKeyAccessServerResponse.FromString, + _registered_method=True) + self.ListKeyAccessServerGrants = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServerGrants', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServerGrantsRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServerGrantsResponse.FromString, + _registered_method=True) + self.CreateKey = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/CreateKey', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyResponse.FromString, + _registered_method=True) + self.GetKey = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/GetKey', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyResponse.FromString, + _registered_method=True) + self.ListKeys = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/ListKeys', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeysRequest.SerializeToString, + 
response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeysResponse.FromString, + _registered_method=True) + self.UpdateKey = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/UpdateKey', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyResponse.FromString, + _registered_method=True) + self.RotateKey = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/RotateKey', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.RotateKeyRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.RotateKeyResponse.FromString, + _registered_method=True) + self.SetBaseKey = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/SetBaseKey', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.SetBaseKeyRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.SetBaseKeyResponse.FromString, + _registered_method=True) + self.GetBaseKey = channel.unary_unary( + '/policy.kasregistry.KeyAccessServerRegistryService/GetBaseKey', + request_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetBaseKeyRequest.SerializeToString, + response_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetBaseKeyResponse.FromString, + _registered_method=True) + + +class KeyAccessServerRegistryServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def ListKeyAccessServers(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetKeyAccessServer(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateKeyAccessServer(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateKeyAccessServer(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteKeyAccessServer(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListKeyAccessServerGrants(self, request, context): + """Deprecated + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateKey(self, request, context): + """KAS Key Management + Request to create a new key in the Key Access Service. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetKey(self, request, context): + """Request to retrieve a key from the Key Access Service. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListKeys(self, request, context): + """Request to list keys in the Key Access Service. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateKey(self, request, context): + """Request to update a key in the Key Access Service. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RotateKey(self, request, context): + """Request to rotate a key in the Key Access Service. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetBaseKey(self, request, context): + """Request to set the default a default kas key. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetBaseKey(self, request, context): + """Get Default kas keys + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_KeyAccessServerRegistryServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListKeyAccessServers': grpc.unary_unary_rpc_method_handler( + servicer.ListKeyAccessServers, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServersRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServersResponse.SerializeToString, + ), + 'GetKeyAccessServer': grpc.unary_unary_rpc_method_handler( + servicer.GetKeyAccessServer, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyAccessServerRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyAccessServerResponse.SerializeToString, + ), + 'CreateKeyAccessServer': grpc.unary_unary_rpc_method_handler( + servicer.CreateKeyAccessServer, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyAccessServerRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyAccessServerResponse.SerializeToString, + ), + 'UpdateKeyAccessServer': grpc.unary_unary_rpc_method_handler( + servicer.UpdateKeyAccessServer, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyAccessServerRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyAccessServerResponse.SerializeToString, + ), + 'DeleteKeyAccessServer': grpc.unary_unary_rpc_method_handler( + servicer.DeleteKeyAccessServer, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.DeleteKeyAccessServerRequest.FromString, + 
response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.DeleteKeyAccessServerResponse.SerializeToString, + ), + 'ListKeyAccessServerGrants': grpc.unary_unary_rpc_method_handler( + servicer.ListKeyAccessServerGrants, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServerGrantsRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServerGrantsResponse.SerializeToString, + ), + 'CreateKey': grpc.unary_unary_rpc_method_handler( + servicer.CreateKey, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyResponse.SerializeToString, + ), + 'GetKey': grpc.unary_unary_rpc_method_handler( + servicer.GetKey, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyResponse.SerializeToString, + ), + 'ListKeys': grpc.unary_unary_rpc_method_handler( + servicer.ListKeys, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeysRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeysResponse.SerializeToString, + ), + 'UpdateKey': grpc.unary_unary_rpc_method_handler( + servicer.UpdateKey, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyResponse.SerializeToString, + ), + 'RotateKey': grpc.unary_unary_rpc_method_handler( + servicer.RotateKey, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.RotateKeyRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.RotateKeyResponse.SerializeToString, + ), + 'SetBaseKey': grpc.unary_unary_rpc_method_handler( + servicer.SetBaseKey, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.SetBaseKeyRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.SetBaseKeyResponse.SerializeToString, + ), + 'GetBaseKey': grpc.unary_unary_rpc_method_handler( + servicer.GetBaseKey, + request_deserializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetBaseKeyRequest.FromString, + response_serializer=policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetBaseKeyResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.kasregistry.KeyAccessServerRegistryService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.kasregistry.KeyAccessServerRegistryService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
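+# Usage sketch (illustrative; the endpoint and insecure transport below are
+# assumptions, not part of this generated module):
+#
+#   resp = KeyAccessServerRegistryService.GetBaseKey(
+#       policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetBaseKeyRequest(),
+#       'localhost:8080', insecure=True)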
+class KeyAccessServerRegistryService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def ListKeyAccessServers(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServers', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServersRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServersResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetKeyAccessServer(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/GetKeyAccessServer', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyAccessServerRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyAccessServerResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateKeyAccessServer(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/CreateKeyAccessServer', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyAccessServerRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyAccessServerResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateKeyAccessServer(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/UpdateKeyAccessServer', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyAccessServerRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyAccessServerResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteKeyAccessServer(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/DeleteKeyAccessServer', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.DeleteKeyAccessServerRequest.SerializeToString, + 
policy_dot_kasregistry_dot_key__access__server__registry__pb2.DeleteKeyAccessServerResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListKeyAccessServerGrants(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServerGrants', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServerGrantsRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeyAccessServerGrantsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/CreateKey', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.CreateKeyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/GetKey', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetKeyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListKeys(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/ListKeys', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeysRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.ListKeysResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/UpdateKey', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyRequest.SerializeToString, + 
policy_dot_kasregistry_dot_key__access__server__registry__pb2.UpdateKeyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def RotateKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/RotateKey', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.RotateKeyRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.RotateKeyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def SetBaseKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/SetBaseKey', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.SetBaseKeyRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.SetBaseKeyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetBaseKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.kasregistry.KeyAccessServerRegistryService/GetBaseKey', + policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetBaseKeyRequest.SerializeToString, + policy_dot_kasregistry_dot_key__access__server__registry__pb2.GetBaseKeyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/keymanagement/key_management_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/keymanagement/key_management_pb2_grpc.py new file mode 100644 index 0000000..6b3d6ec --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/keymanagement/key_management_pb2_grpc.py @@ -0,0 +1,251 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.keymanagement import key_management_pb2 as policy_dot_keymanagement_dot_key__management__pb2 + + +class KeyManagementServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateProviderConfig = channel.unary_unary( + '/policy.keymanagement.KeyManagementService/CreateProviderConfig', + request_serializer=policy_dot_keymanagement_dot_key__management__pb2.CreateProviderConfigRequest.SerializeToString, + response_deserializer=policy_dot_keymanagement_dot_key__management__pb2.CreateProviderConfigResponse.FromString, + _registered_method=True) + self.GetProviderConfig = channel.unary_unary( + '/policy.keymanagement.KeyManagementService/GetProviderConfig', + request_serializer=policy_dot_keymanagement_dot_key__management__pb2.GetProviderConfigRequest.SerializeToString, + response_deserializer=policy_dot_keymanagement_dot_key__management__pb2.GetProviderConfigResponse.FromString, + _registered_method=True) + self.ListProviderConfigs = channel.unary_unary( + '/policy.keymanagement.KeyManagementService/ListProviderConfigs', + request_serializer=policy_dot_keymanagement_dot_key__management__pb2.ListProviderConfigsRequest.SerializeToString, + response_deserializer=policy_dot_keymanagement_dot_key__management__pb2.ListProviderConfigsResponse.FromString, + _registered_method=True) + self.UpdateProviderConfig = channel.unary_unary( + '/policy.keymanagement.KeyManagementService/UpdateProviderConfig', + request_serializer=policy_dot_keymanagement_dot_key__management__pb2.UpdateProviderConfigRequest.SerializeToString, + response_deserializer=policy_dot_keymanagement_dot_key__management__pb2.UpdateProviderConfigResponse.FromString, + _registered_method=True) + self.DeleteProviderConfig = channel.unary_unary( + '/policy.keymanagement.KeyManagementService/DeleteProviderConfig', + request_serializer=policy_dot_keymanagement_dot_key__management__pb2.DeleteProviderConfigRequest.SerializeToString, + response_deserializer=policy_dot_keymanagement_dot_key__management__pb2.DeleteProviderConfigResponse.FromString, + _registered_method=True) + + +class KeyManagementServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def CreateProviderConfig(self, request, context): + """Key Management + Provider Management + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetProviderConfig(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListProviderConfigs(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateProviderConfig(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteProviderConfig(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_KeyManagementServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateProviderConfig': grpc.unary_unary_rpc_method_handler( + servicer.CreateProviderConfig, + 
request_deserializer=policy_dot_keymanagement_dot_key__management__pb2.CreateProviderConfigRequest.FromString, + response_serializer=policy_dot_keymanagement_dot_key__management__pb2.CreateProviderConfigResponse.SerializeToString, + ), + 'GetProviderConfig': grpc.unary_unary_rpc_method_handler( + servicer.GetProviderConfig, + request_deserializer=policy_dot_keymanagement_dot_key__management__pb2.GetProviderConfigRequest.FromString, + response_serializer=policy_dot_keymanagement_dot_key__management__pb2.GetProviderConfigResponse.SerializeToString, + ), + 'ListProviderConfigs': grpc.unary_unary_rpc_method_handler( + servicer.ListProviderConfigs, + request_deserializer=policy_dot_keymanagement_dot_key__management__pb2.ListProviderConfigsRequest.FromString, + response_serializer=policy_dot_keymanagement_dot_key__management__pb2.ListProviderConfigsResponse.SerializeToString, + ), + 'UpdateProviderConfig': grpc.unary_unary_rpc_method_handler( + servicer.UpdateProviderConfig, + request_deserializer=policy_dot_keymanagement_dot_key__management__pb2.UpdateProviderConfigRequest.FromString, + response_serializer=policy_dot_keymanagement_dot_key__management__pb2.UpdateProviderConfigResponse.SerializeToString, + ), + 'DeleteProviderConfig': grpc.unary_unary_rpc_method_handler( + servicer.DeleteProviderConfig, + request_deserializer=policy_dot_keymanagement_dot_key__management__pb2.DeleteProviderConfigRequest.FromString, + response_serializer=policy_dot_keymanagement_dot_key__management__pb2.DeleteProviderConfigResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.keymanagement.KeyManagementService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.keymanagement.KeyManagementService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
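+# [Editor's note -- not generator output] A minimal usage sketch for the
+# KeyManagementService client defined above; the stub is the conventional
+# entry point, while the experimental class below exposes the same RPCs as
+# one-shot static methods. The target address and bearer token are
+# assumptions for a local dev platform, not part of this module:
+#
+#     import grpc
+#     from policy.keymanagement import key_management_pb2 as km_pb2
+#     from policy.keymanagement import key_management_pb2_grpc as km_grpc
+#
+#     with grpc.insecure_channel("localhost:8080") as channel:  # assumed local endpoint
+#         stub = km_grpc.KeyManagementServiceStub(channel)
+#         resp = stub.ListProviderConfigs(
+#             km_pb2.ListProviderConfigsRequest(),
+#             metadata=(("authorization", f"Bearer {token}"),),  # token: assumed OIDC access token
+#         )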
+class KeyManagementService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def CreateProviderConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.keymanagement.KeyManagementService/CreateProviderConfig', + policy_dot_keymanagement_dot_key__management__pb2.CreateProviderConfigRequest.SerializeToString, + policy_dot_keymanagement_dot_key__management__pb2.CreateProviderConfigResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetProviderConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.keymanagement.KeyManagementService/GetProviderConfig', + policy_dot_keymanagement_dot_key__management__pb2.GetProviderConfigRequest.SerializeToString, + policy_dot_keymanagement_dot_key__management__pb2.GetProviderConfigResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListProviderConfigs(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.keymanagement.KeyManagementService/ListProviderConfigs', + policy_dot_keymanagement_dot_key__management__pb2.ListProviderConfigsRequest.SerializeToString, + policy_dot_keymanagement_dot_key__management__pb2.ListProviderConfigsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateProviderConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.keymanagement.KeyManagementService/UpdateProviderConfig', + policy_dot_keymanagement_dot_key__management__pb2.UpdateProviderConfigRequest.SerializeToString, + policy_dot_keymanagement_dot_key__management__pb2.UpdateProviderConfigResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteProviderConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.keymanagement.KeyManagementService/DeleteProviderConfig', + policy_dot_keymanagement_dot_key__management__pb2.DeleteProviderConfigRequest.SerializeToString, + policy_dot_keymanagement_dot_key__management__pb2.DeleteProviderConfigResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + 
_registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/namespaces/namespaces_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/namespaces/namespaces_pb2_grpc.py new file mode 100644 index 0000000..8ddc67d --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/namespaces/namespaces_pb2_grpc.py @@ -0,0 +1,427 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.namespaces import namespaces_pb2 as policy_dot_namespaces_dot_namespaces__pb2 + + +class NamespaceServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetNamespace = channel.unary_unary( + '/policy.namespaces.NamespaceService/GetNamespace', + request_serializer=policy_dot_namespaces_dot_namespaces__pb2.GetNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.GetNamespaceResponse.FromString, + _registered_method=True) + self.ListNamespaces = channel.unary_unary( + '/policy.namespaces.NamespaceService/ListNamespaces', + request_serializer=policy_dot_namespaces_dot_namespaces__pb2.ListNamespacesRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.ListNamespacesResponse.FromString, + _registered_method=True) + self.CreateNamespace = channel.unary_unary( + '/policy.namespaces.NamespaceService/CreateNamespace', + request_serializer=policy_dot_namespaces_dot_namespaces__pb2.CreateNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.CreateNamespaceResponse.FromString, + _registered_method=True) + self.UpdateNamespace = channel.unary_unary( + '/policy.namespaces.NamespaceService/UpdateNamespace', + request_serializer=policy_dot_namespaces_dot_namespaces__pb2.UpdateNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.UpdateNamespaceResponse.FromString, + _registered_method=True) + self.DeactivateNamespace = channel.unary_unary( + '/policy.namespaces.NamespaceService/DeactivateNamespace', + request_serializer=policy_dot_namespaces_dot_namespaces__pb2.DeactivateNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.DeactivateNamespaceResponse.FromString, + _registered_method=True) + self.AssignKeyAccessServerToNamespace = channel.unary_unary( + '/policy.namespaces.NamespaceService/AssignKeyAccessServerToNamespace', + request_serializer=policy_dot_namespaces_dot_namespaces__pb2.AssignKeyAccessServerToNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.AssignKeyAccessServerToNamespaceResponse.FromString, + _registered_method=True) + self.RemoveKeyAccessServerFromNamespace = channel.unary_unary( + '/policy.namespaces.NamespaceService/RemoveKeyAccessServerFromNamespace', + request_serializer=policy_dot_namespaces_dot_namespaces__pb2.RemoveKeyAccessServerFromNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.RemoveKeyAccessServerFromNamespaceResponse.FromString, + _registered_method=True) + self.AssignPublicKeyToNamespace = channel.unary_unary( + '/policy.namespaces.NamespaceService/AssignPublicKeyToNamespace', + 
request_serializer=policy_dot_namespaces_dot_namespaces__pb2.AssignPublicKeyToNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.AssignPublicKeyToNamespaceResponse.FromString, + _registered_method=True) + self.RemovePublicKeyFromNamespace = channel.unary_unary( + '/policy.namespaces.NamespaceService/RemovePublicKeyFromNamespace', + request_serializer=policy_dot_namespaces_dot_namespaces__pb2.RemovePublicKeyFromNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_namespaces_dot_namespaces__pb2.RemovePublicKeyFromNamespaceResponse.FromString, + _registered_method=True) + + +class NamespaceServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetNamespace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListNamespaces(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateNamespace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateNamespace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeactivateNamespace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AssignKeyAccessServerToNamespace(self, request, context): + """--------------------------------------* + Namespace <> Key Access Server RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RemoveKeyAccessServerFromNamespace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AssignPublicKeyToNamespace(self, request, context): + """--------------------------------------* + Namespace <> Key RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RemovePublicKeyFromNamespace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_NamespaceServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetNamespace': grpc.unary_unary_rpc_method_handler( + servicer.GetNamespace, + 
request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.GetNamespaceRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.GetNamespaceResponse.SerializeToString, + ), + 'ListNamespaces': grpc.unary_unary_rpc_method_handler( + servicer.ListNamespaces, + request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.ListNamespacesRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.ListNamespacesResponse.SerializeToString, + ), + 'CreateNamespace': grpc.unary_unary_rpc_method_handler( + servicer.CreateNamespace, + request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.CreateNamespaceRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.CreateNamespaceResponse.SerializeToString, + ), + 'UpdateNamespace': grpc.unary_unary_rpc_method_handler( + servicer.UpdateNamespace, + request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.UpdateNamespaceRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.UpdateNamespaceResponse.SerializeToString, + ), + 'DeactivateNamespace': grpc.unary_unary_rpc_method_handler( + servicer.DeactivateNamespace, + request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.DeactivateNamespaceRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.DeactivateNamespaceResponse.SerializeToString, + ), + 'AssignKeyAccessServerToNamespace': grpc.unary_unary_rpc_method_handler( + servicer.AssignKeyAccessServerToNamespace, + request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.AssignKeyAccessServerToNamespaceRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.AssignKeyAccessServerToNamespaceResponse.SerializeToString, + ), + 'RemoveKeyAccessServerFromNamespace': grpc.unary_unary_rpc_method_handler( + servicer.RemoveKeyAccessServerFromNamespace, + request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.RemoveKeyAccessServerFromNamespaceRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.RemoveKeyAccessServerFromNamespaceResponse.SerializeToString, + ), + 'AssignPublicKeyToNamespace': grpc.unary_unary_rpc_method_handler( + servicer.AssignPublicKeyToNamespace, + request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.AssignPublicKeyToNamespaceRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.AssignPublicKeyToNamespaceResponse.SerializeToString, + ), + 'RemovePublicKeyFromNamespace': grpc.unary_unary_rpc_method_handler( + servicer.RemovePublicKeyFromNamespace, + request_deserializer=policy_dot_namespaces_dot_namespaces__pb2.RemovePublicKeyFromNamespaceRequest.FromString, + response_serializer=policy_dot_namespaces_dot_namespaces__pb2.RemovePublicKeyFromNamespaceResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.namespaces.NamespaceService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.namespaces.NamespaceService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
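+# [Editor's note -- not generator output] Hedged usage sketch for the
+# NamespaceService client above; the endpoint, auth metadata, and response
+# field names are illustrative assumptions for a local dev setup:
+#
+#     import grpc
+#     from policy.namespaces import namespaces_pb2, namespaces_pb2_grpc
+#
+#     with grpc.insecure_channel("localhost:8080") as channel:  # assumed local endpoint
+#         stub = namespaces_pb2_grpc.NamespaceServiceStub(channel)
+#         resp = stub.ListNamespaces(
+#             namespaces_pb2.ListNamespacesRequest(),
+#             metadata=(("authorization", f"Bearer {token}"),),  # assumed OIDC token
+#         )
+#         for ns in resp.namespaces:  # 'namespaces'/'fqn' assumed from ListNamespacesResponse
+#             print(ns.fqn)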
+class NamespaceService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/GetNamespace', + policy_dot_namespaces_dot_namespaces__pb2.GetNamespaceRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.GetNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListNamespaces(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/ListNamespaces', + policy_dot_namespaces_dot_namespaces__pb2.ListNamespacesRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.ListNamespacesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/CreateNamespace', + policy_dot_namespaces_dot_namespaces__pb2.CreateNamespaceRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.CreateNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/UpdateNamespace', + policy_dot_namespaces_dot_namespaces__pb2.UpdateNamespaceRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.UpdateNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeactivateNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/DeactivateNamespace', + policy_dot_namespaces_dot_namespaces__pb2.DeactivateNamespaceRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.DeactivateNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def AssignKeyAccessServerToNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + 
wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/AssignKeyAccessServerToNamespace', + policy_dot_namespaces_dot_namespaces__pb2.AssignKeyAccessServerToNamespaceRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.AssignKeyAccessServerToNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def RemoveKeyAccessServerFromNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/RemoveKeyAccessServerFromNamespace', + policy_dot_namespaces_dot_namespaces__pb2.RemoveKeyAccessServerFromNamespaceRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.RemoveKeyAccessServerFromNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def AssignPublicKeyToNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/AssignPublicKeyToNamespace', + policy_dot_namespaces_dot_namespaces__pb2.AssignPublicKeyToNamespaceRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.AssignPublicKeyToNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def RemovePublicKeyFromNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.namespaces.NamespaceService/RemovePublicKeyFromNamespace', + policy_dot_namespaces_dot_namespaces__pb2.RemovePublicKeyFromNamespaceRequest.SerializeToString, + policy_dot_namespaces_dot_namespaces__pb2.RemovePublicKeyFromNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/objects_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/objects_pb2_grpc.py new file mode 100644 index 0000000..2daafff --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/objects_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
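+# [Editor's note -- not generator output] This module intentionally contains
+# no client or server classes: policy/objects.proto appears to define only
+# message types and no services, so the gRPC plugin emits just this header.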
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/registeredresources/registered_resources_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/registeredresources/registered_resources_pb2_grpc.py new file mode 100644 index 0000000..6c52364 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/registeredresources/registered_resources_pb2_grpc.py @@ -0,0 +1,524 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.registeredresources import registered_resources_pb2 as policy_dot_registeredresources_dot_registered__resources__pb2 + + +class RegisteredResourcesServiceStub(object): + """/ + / Registered Resources Service + / + + Registered Resources + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateRegisteredResource = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResource', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceResponse.FromString, + _registered_method=True) + self.GetRegisteredResource = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/GetRegisteredResource', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceResponse.FromString, + _registered_method=True) + self.ListRegisteredResources = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/ListRegisteredResources', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourcesRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourcesResponse.FromString, + _registered_method=True) + self.UpdateRegisteredResource = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResource', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceResponse.FromString, + _registered_method=True) + self.DeleteRegisteredResource = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResource', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceResponse.FromString, + _registered_method=True) + self.CreateRegisteredResourceValue = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResourceValue', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceValueRequest.SerializeToString, + 
response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceValueResponse.FromString, + _registered_method=True) + self.GetRegisteredResourceValue = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValue', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValueRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValueResponse.FromString, + _registered_method=True) + self.GetRegisteredResourceValuesByFQNs = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValuesByFQNs', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValuesByFQNsRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValuesByFQNsResponse.FromString, + _registered_method=True) + self.ListRegisteredResourceValues = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/ListRegisteredResourceValues', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourceValuesRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourceValuesResponse.FromString, + _registered_method=True) + self.UpdateRegisteredResourceValue = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResourceValue', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceValueRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceValueResponse.FromString, + _registered_method=True) + self.DeleteRegisteredResourceValue = channel.unary_unary( + '/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResourceValue', + request_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceValueRequest.SerializeToString, + response_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceValueResponse.FromString, + _registered_method=True) + + +class RegisteredResourcesServiceServicer(object): + """/ + / Registered Resources Service + / + + Registered Resources + """ + + def CreateRegisteredResource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetRegisteredResource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListRegisteredResources(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateRegisteredResource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + 
context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteRegisteredResource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateRegisteredResourceValue(self, request, context): + """Registered Resource Values + + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetRegisteredResourceValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetRegisteredResourceValuesByFQNs(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListRegisteredResourceValues(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateRegisteredResourceValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteRegisteredResourceValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_RegisteredResourcesServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateRegisteredResource': grpc.unary_unary_rpc_method_handler( + servicer.CreateRegisteredResource, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceResponse.SerializeToString, + ), + 'GetRegisteredResource': grpc.unary_unary_rpc_method_handler( + servicer.GetRegisteredResource, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceResponse.SerializeToString, + ), + 'ListRegisteredResources': grpc.unary_unary_rpc_method_handler( + servicer.ListRegisteredResources, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourcesRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourcesResponse.SerializeToString, + ), + 'UpdateRegisteredResource': grpc.unary_unary_rpc_method_handler( + servicer.UpdateRegisteredResource, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceRequest.FromString, + 
response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceResponse.SerializeToString, + ), + 'DeleteRegisteredResource': grpc.unary_unary_rpc_method_handler( + servicer.DeleteRegisteredResource, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceResponse.SerializeToString, + ), + 'CreateRegisteredResourceValue': grpc.unary_unary_rpc_method_handler( + servicer.CreateRegisteredResourceValue, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceValueRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceValueResponse.SerializeToString, + ), + 'GetRegisteredResourceValue': grpc.unary_unary_rpc_method_handler( + servicer.GetRegisteredResourceValue, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValueRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValueResponse.SerializeToString, + ), + 'GetRegisteredResourceValuesByFQNs': grpc.unary_unary_rpc_method_handler( + servicer.GetRegisteredResourceValuesByFQNs, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValuesByFQNsRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValuesByFQNsResponse.SerializeToString, + ), + 'ListRegisteredResourceValues': grpc.unary_unary_rpc_method_handler( + servicer.ListRegisteredResourceValues, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourceValuesRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourceValuesResponse.SerializeToString, + ), + 'UpdateRegisteredResourceValue': grpc.unary_unary_rpc_method_handler( + servicer.UpdateRegisteredResourceValue, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceValueRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceValueResponse.SerializeToString, + ), + 'DeleteRegisteredResourceValue': grpc.unary_unary_rpc_method_handler( + servicer.DeleteRegisteredResourceValue, + request_deserializer=policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceValueRequest.FromString, + response_serializer=policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceValueResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.registeredresources.RegisteredResourcesService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.registeredresources.RegisteredResourcesService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
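+# [Editor's note -- not generator output] Hedged sketch of calling the
+# RegisteredResourcesService through the experimental one-shot API whose
+# per-RPC static methods follow below; the signature mirrors the generated
+# code, but the target and token are assumptions for a local dev platform:
+#
+#     from policy.registeredresources import registered_resources_pb2 as rr_pb2
+#     from policy.registeredresources import registered_resources_pb2_grpc as rr_grpc
+#
+#     resp = rr_grpc.RegisteredResourcesService.ListRegisteredResources(
+#         rr_pb2.ListRegisteredResourcesRequest(),
+#         "localhost:8080",  # assumed local endpoint
+#         insecure=True,     # dev only; pass channel_credentials instead in production
+#         metadata=(("authorization", f"Bearer {token}"),),  # assumed OIDC token
+#     )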
+class RegisteredResourcesService(object): + """/ + / Registered Resources Service + / + + Registered Resources + """ + + @staticmethod + def CreateRegisteredResource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResource', + policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetRegisteredResource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/GetRegisteredResource', + policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListRegisteredResources(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/ListRegisteredResources', + policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourcesRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourcesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateRegisteredResource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResource', + policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteRegisteredResource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResource', + 
policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateRegisteredResourceValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResourceValue', + policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceValueRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.CreateRegisteredResourceValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetRegisteredResourceValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValue', + policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValueRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetRegisteredResourceValuesByFQNs(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValuesByFQNs', + policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValuesByFQNsRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.GetRegisteredResourceValuesByFQNsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListRegisteredResourceValues(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/ListRegisteredResourceValues', + policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourceValuesRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.ListRegisteredResourceValuesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateRegisteredResourceValue(request, + target, + options=(), + 
channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResourceValue', + policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceValueRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.UpdateRegisteredResourceValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteRegisteredResourceValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResourceValue', + policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceValueRequest.SerializeToString, + policy_dot_registeredresources_dot_registered__resources__pb2.DeleteRegisteredResourceValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/resourcemapping/resource_mapping_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/resourcemapping/resource_mapping_pb2_grpc.py new file mode 100644 index 0000000..47e64e8 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/resourcemapping/resource_mapping_pb2_grpc.py @@ -0,0 +1,516 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.resourcemapping import resource_mapping_pb2 as policy_dot_resourcemapping_dot_resource__mapping__pb2 + + +class ResourceMappingServiceStub(object): + """ + Resource Mapping Groups + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ListResourceMappingGroups = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/ListResourceMappingGroups', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingGroupsRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingGroupsResponse.FromString, + _registered_method=True) + self.GetResourceMappingGroup = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/GetResourceMappingGroup', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingGroupRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingGroupResponse.FromString, + _registered_method=True) + self.CreateResourceMappingGroup = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/CreateResourceMappingGroup', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingGroupRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingGroupResponse.FromString, + _registered_method=True) + self.UpdateResourceMappingGroup = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/UpdateResourceMappingGroup', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingGroupRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingGroupResponse.FromString, + _registered_method=True) + self.DeleteResourceMappingGroup = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/DeleteResourceMappingGroup', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingGroupRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingGroupResponse.FromString, + _registered_method=True) + self.ListResourceMappings = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/ListResourceMappings', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsResponse.FromString, + _registered_method=True) + self.ListResourceMappingsByGroupFqns = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/ListResourceMappingsByGroupFqns', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsByGroupFqnsRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsByGroupFqnsResponse.FromString, + _registered_method=True) + self.GetResourceMapping = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/GetResourceMapping', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingResponse.FromString, + _registered_method=True) + self.CreateResourceMapping = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/CreateResourceMapping', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingRequest.SerializeToString, + 
response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingResponse.FromString, + _registered_method=True) + self.UpdateResourceMapping = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/UpdateResourceMapping', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingResponse.FromString, + _registered_method=True) + self.DeleteResourceMapping = channel.unary_unary( + '/policy.resourcemapping.ResourceMappingService/DeleteResourceMapping', + request_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingRequest.SerializeToString, + response_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingResponse.FromString, + _registered_method=True) + + +class ResourceMappingServiceServicer(object): + """ + Resource Mapping Groups + """ + + def ListResourceMappingGroups(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetResourceMappingGroup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateResourceMappingGroup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateResourceMappingGroup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteResourceMappingGroup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListResourceMappings(self, request, context): + """ + Resource Mappings + + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListResourceMappingsByGroupFqns(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetResourceMapping(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateResourceMapping(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateResourceMapping(self, request, context): + 
"""Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteResourceMapping(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ResourceMappingServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListResourceMappingGroups': grpc.unary_unary_rpc_method_handler( + servicer.ListResourceMappingGroups, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingGroupsRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingGroupsResponse.SerializeToString, + ), + 'GetResourceMappingGroup': grpc.unary_unary_rpc_method_handler( + servicer.GetResourceMappingGroup, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingGroupRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingGroupResponse.SerializeToString, + ), + 'CreateResourceMappingGroup': grpc.unary_unary_rpc_method_handler( + servicer.CreateResourceMappingGroup, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingGroupRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingGroupResponse.SerializeToString, + ), + 'UpdateResourceMappingGroup': grpc.unary_unary_rpc_method_handler( + servicer.UpdateResourceMappingGroup, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingGroupRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingGroupResponse.SerializeToString, + ), + 'DeleteResourceMappingGroup': grpc.unary_unary_rpc_method_handler( + servicer.DeleteResourceMappingGroup, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingGroupRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingGroupResponse.SerializeToString, + ), + 'ListResourceMappings': grpc.unary_unary_rpc_method_handler( + servicer.ListResourceMappings, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsResponse.SerializeToString, + ), + 'ListResourceMappingsByGroupFqns': grpc.unary_unary_rpc_method_handler( + servicer.ListResourceMappingsByGroupFqns, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsByGroupFqnsRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsByGroupFqnsResponse.SerializeToString, + ), + 'GetResourceMapping': grpc.unary_unary_rpc_method_handler( + servicer.GetResourceMapping, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingResponse.SerializeToString, + ), + 'CreateResourceMapping': grpc.unary_unary_rpc_method_handler( + 
servicer.CreateResourceMapping, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingResponse.SerializeToString, + ), + 'UpdateResourceMapping': grpc.unary_unary_rpc_method_handler( + servicer.UpdateResourceMapping, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingResponse.SerializeToString, + ), + 'DeleteResourceMapping': grpc.unary_unary_rpc_method_handler( + servicer.DeleteResourceMapping, + request_deserializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingRequest.FromString, + response_serializer=policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.resourcemapping.ResourceMappingService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.resourcemapping.ResourceMappingService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class ResourceMappingService(object): + """ + Resource Mapping Groups + """ + + @staticmethod + def ListResourceMappingGroups(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/ListResourceMappingGroups', + policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingGroupsRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingGroupsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetResourceMappingGroup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/GetResourceMappingGroup', + policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingGroupRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingGroupResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateResourceMappingGroup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/CreateResourceMappingGroup', + policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingGroupRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingGroupResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, 
+ _registered_method=True) + + @staticmethod + def UpdateResourceMappingGroup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/UpdateResourceMappingGroup', + policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingGroupRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingGroupResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteResourceMappingGroup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/DeleteResourceMappingGroup', + policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingGroupRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingGroupResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListResourceMappings(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/ListResourceMappings', + policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListResourceMappingsByGroupFqns(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/ListResourceMappingsByGroupFqns', + policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsByGroupFqnsRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.ListResourceMappingsByGroupFqnsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetResourceMapping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/GetResourceMapping', + policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.GetResourceMappingResponse.FromString, + options, + channel_credentials, + insecure, + 
call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateResourceMapping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/CreateResourceMapping', + policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.CreateResourceMappingResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateResourceMapping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/UpdateResourceMapping', + policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.UpdateResourceMappingResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteResourceMapping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.resourcemapping.ResourceMappingService/DeleteResourceMapping', + policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingRequest.SerializeToString, + policy_dot_resourcemapping_dot_resource__mapping__pb2.DeleteResourceMappingResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/selectors_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/selectors_pb2_grpc.py new file mode 100644 index 0000000..2daafff --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/selectors_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/subjectmapping/subject_mapping_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/subjectmapping/subject_mapping_pb2_grpc.py new file mode 100644 index 0000000..aeb26b3 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/subjectmapping/subject_mapping_pb2_grpc.py @@ -0,0 +1,551 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.subjectmapping import subject_mapping_pb2 as policy_dot_subjectmapping_dot_subject__mapping__pb2 + + +class SubjectMappingServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.MatchSubjectMappings = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/MatchSubjectMappings', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.MatchSubjectMappingsRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.MatchSubjectMappingsResponse.FromString, + _registered_method=True) + self.ListSubjectMappings = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/ListSubjectMappings', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectMappingsRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectMappingsResponse.FromString, + _registered_method=True) + self.GetSubjectMapping = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/GetSubjectMapping', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectMappingRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectMappingResponse.FromString, + _registered_method=True) + self.CreateSubjectMapping = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/CreateSubjectMapping', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectMappingRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectMappingResponse.FromString, + _registered_method=True) + self.UpdateSubjectMapping = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/UpdateSubjectMapping', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectMappingRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectMappingResponse.FromString, + _registered_method=True) + self.DeleteSubjectMapping = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/DeleteSubjectMapping', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectMappingRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectMappingResponse.FromString, + _registered_method=True) + self.ListSubjectConditionSets = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/ListSubjectConditionSets', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectConditionSetsRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectConditionSetsResponse.FromString, + _registered_method=True) + self.GetSubjectConditionSet = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/GetSubjectConditionSet', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectConditionSetRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectConditionSetResponse.FromString, + _registered_method=True) + self.CreateSubjectConditionSet = 
channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/CreateSubjectConditionSet', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectConditionSetRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectConditionSetResponse.FromString, + _registered_method=True) + self.UpdateSubjectConditionSet = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/UpdateSubjectConditionSet', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectConditionSetRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectConditionSetResponse.FromString, + _registered_method=True) + self.DeleteSubjectConditionSet = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/DeleteSubjectConditionSet', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectConditionSetRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectConditionSetResponse.FromString, + _registered_method=True) + self.DeleteAllUnmappedSubjectConditionSets = channel.unary_unary( + '/policy.subjectmapping.SubjectMappingService/DeleteAllUnmappedSubjectConditionSets', + request_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteAllUnmappedSubjectConditionSetsRequest.SerializeToString, + response_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteAllUnmappedSubjectConditionSetsResponse.FromString, + _registered_method=True) + + +class SubjectMappingServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def MatchSubjectMappings(self, request, context): + """Find matching Subject Mappings for a given Subject + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubjectMappings(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubjectMapping(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSubjectMapping(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubjectMapping(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubjectMapping(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubjectConditionSets(self, request, context): + """Missing associated documentation comment in .proto file.""" + 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubjectConditionSet(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSubjectConditionSet(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubjectConditionSet(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubjectConditionSet(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteAllUnmappedSubjectConditionSets(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SubjectMappingServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'MatchSubjectMappings': grpc.unary_unary_rpc_method_handler( + servicer.MatchSubjectMappings, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.MatchSubjectMappingsRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.MatchSubjectMappingsResponse.SerializeToString, + ), + 'ListSubjectMappings': grpc.unary_unary_rpc_method_handler( + servicer.ListSubjectMappings, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectMappingsRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectMappingsResponse.SerializeToString, + ), + 'GetSubjectMapping': grpc.unary_unary_rpc_method_handler( + servicer.GetSubjectMapping, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectMappingRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectMappingResponse.SerializeToString, + ), + 'CreateSubjectMapping': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubjectMapping, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectMappingRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectMappingResponse.SerializeToString, + ), + 'UpdateSubjectMapping': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubjectMapping, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectMappingRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectMappingResponse.SerializeToString, + ), + 'DeleteSubjectMapping': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubjectMapping, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectMappingRequest.FromString, + 
response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectMappingResponse.SerializeToString, + ), + 'ListSubjectConditionSets': grpc.unary_unary_rpc_method_handler( + servicer.ListSubjectConditionSets, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectConditionSetsRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectConditionSetsResponse.SerializeToString, + ), + 'GetSubjectConditionSet': grpc.unary_unary_rpc_method_handler( + servicer.GetSubjectConditionSet, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectConditionSetRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectConditionSetResponse.SerializeToString, + ), + 'CreateSubjectConditionSet': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubjectConditionSet, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectConditionSetRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectConditionSetResponse.SerializeToString, + ), + 'UpdateSubjectConditionSet': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubjectConditionSet, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectConditionSetRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectConditionSetResponse.SerializeToString, + ), + 'DeleteSubjectConditionSet': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubjectConditionSet, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectConditionSetRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectConditionSetResponse.SerializeToString, + ), + 'DeleteAllUnmappedSubjectConditionSets': grpc.unary_unary_rpc_method_handler( + servicer.DeleteAllUnmappedSubjectConditionSets, + request_deserializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteAllUnmappedSubjectConditionSetsRequest.FromString, + response_serializer=policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteAllUnmappedSubjectConditionSetsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.subjectmapping.SubjectMappingService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.subjectmapping.SubjectMappingService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
+class SubjectMappingService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def MatchSubjectMappings(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/MatchSubjectMappings', + policy_dot_subjectmapping_dot_subject__mapping__pb2.MatchSubjectMappingsRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.MatchSubjectMappingsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListSubjectMappings(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/ListSubjectMappings', + policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectMappingsRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectMappingsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetSubjectMapping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/GetSubjectMapping', + policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectMappingRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectMappingResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateSubjectMapping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/CreateSubjectMapping', + policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectMappingRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectMappingResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateSubjectMapping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/UpdateSubjectMapping', + policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectMappingRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectMappingResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + 
timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteSubjectMapping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/DeleteSubjectMapping', + policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectMappingRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectMappingResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListSubjectConditionSets(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/ListSubjectConditionSets', + policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectConditionSetsRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.ListSubjectConditionSetsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetSubjectConditionSet(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/GetSubjectConditionSet', + policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectConditionSetRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.GetSubjectConditionSetResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateSubjectConditionSet(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/CreateSubjectConditionSet', + policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectConditionSetRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.CreateSubjectConditionSetResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateSubjectConditionSet(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/UpdateSubjectConditionSet', + policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectConditionSetRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.UpdateSubjectConditionSetResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + 
compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteSubjectConditionSet(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/DeleteSubjectConditionSet', + policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectConditionSetRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteSubjectConditionSetResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteAllUnmappedSubjectConditionSets(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.subjectmapping.SubjectMappingService/DeleteAllUnmappedSubjectConditionSets', + policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteAllUnmappedSubjectConditionSetsRequest.SerializeToString, + policy_dot_subjectmapping_dot_subject__mapping__pb2.DeleteAllUnmappedSubjectConditionSetsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/unsafe/unsafe_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/unsafe/unsafe_pb2_grpc.py new file mode 100644 index 0000000..4051d73 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/policy/unsafe/unsafe_pb2_grpc.py @@ -0,0 +1,485 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from policy.unsafe import unsafe_pb2 as policy_dot_unsafe_dot_unsafe__pb2 + + +class UnsafeServiceStub(object): + """/ + / Unsafe Service + / + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.UnsafeUpdateNamespace = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeUpdateNamespace', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateNamespaceResponse.FromString, + _registered_method=True) + self.UnsafeReactivateNamespace = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeReactivateNamespace', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateNamespaceResponse.FromString, + _registered_method=True) + self.UnsafeDeleteNamespace = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeDeleteNamespace', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteNamespaceRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteNamespaceResponse.FromString, + _registered_method=True) + self.UnsafeUpdateAttribute = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeUpdateAttribute', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeResponse.FromString, + _registered_method=True) + self.UnsafeReactivateAttribute = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeReactivateAttribute', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeResponse.FromString, + _registered_method=True) + self.UnsafeDeleteAttribute = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeDeleteAttribute', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeResponse.FromString, + _registered_method=True) + self.UnsafeUpdateAttributeValue = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeUpdateAttributeValue', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeValueRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeValueResponse.FromString, + _registered_method=True) + self.UnsafeReactivateAttributeValue = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeReactivateAttributeValue', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeValueRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeValueResponse.FromString, + _registered_method=True) + self.UnsafeDeleteAttributeValue = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeDeleteAttributeValue', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeValueRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeValueResponse.FromString, + _registered_method=True) + self.UnsafeDeleteKasKey = channel.unary_unary( + '/policy.unsafe.UnsafeService/UnsafeDeleteKasKey', + request_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteKasKeyRequest.SerializeToString, + response_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteKasKeyResponse.FromString, + _registered_method=True) + + +class UnsafeServiceServicer(object): + """/ + / Unsafe Service + / 
+ """ + + def UnsafeUpdateNamespace(self, request, context): + """--------------------------------------* + Namespace RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeReactivateNamespace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeDeleteNamespace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeUpdateAttribute(self, request, context): + """--------------------------------------* + Attribute RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeReactivateAttribute(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeDeleteAttribute(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeUpdateAttributeValue(self, request, context): + """--------------------------------------* + Value RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeReactivateAttributeValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeDeleteAttributeValue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UnsafeDeleteKasKey(self, request, context): + """--------------------------------------* + Kas Key RPCs + --------------------------------------- + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_UnsafeServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'UnsafeUpdateNamespace': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeUpdateNamespace, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateNamespaceRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateNamespaceResponse.SerializeToString, + ), + 'UnsafeReactivateNamespace': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeReactivateNamespace, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateNamespaceRequest.FromString, + 
response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateNamespaceResponse.SerializeToString, + ), + 'UnsafeDeleteNamespace': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeDeleteNamespace, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteNamespaceRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteNamespaceResponse.SerializeToString, + ), + 'UnsafeUpdateAttribute': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeUpdateAttribute, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeResponse.SerializeToString, + ), + 'UnsafeReactivateAttribute': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeReactivateAttribute, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeResponse.SerializeToString, + ), + 'UnsafeDeleteAttribute': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeDeleteAttribute, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeResponse.SerializeToString, + ), + 'UnsafeUpdateAttributeValue': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeUpdateAttributeValue, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeValueRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeValueResponse.SerializeToString, + ), + 'UnsafeReactivateAttributeValue': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeReactivateAttributeValue, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeValueRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeValueResponse.SerializeToString, + ), + 'UnsafeDeleteAttributeValue': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeDeleteAttributeValue, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeValueRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeValueResponse.SerializeToString, + ), + 'UnsafeDeleteKasKey': grpc.unary_unary_rpc_method_handler( + servicer.UnsafeDeleteKasKey, + request_deserializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteKasKeyRequest.FromString, + response_serializer=policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteKasKeyResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'policy.unsafe.UnsafeService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('policy.unsafe.UnsafeService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
+class UnsafeService(object): + """/ + / Unsafe Service + / + """ + + @staticmethod + def UnsafeUpdateNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeUpdateNamespace', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateNamespaceRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeReactivateNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeReactivateNamespace', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateNamespaceRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeDeleteNamespace(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeDeleteNamespace', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteNamespaceRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteNamespaceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeUpdateAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeUpdateAttribute', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeReactivateAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeReactivateAttribute', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeDeleteAttribute(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + 
wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeDeleteAttribute', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeUpdateAttributeValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeUpdateAttributeValue', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeValueRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeUpdateAttributeValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeReactivateAttributeValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeReactivateAttributeValue', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeValueRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeReactivateAttributeValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeDeleteAttributeValue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeDeleteAttributeValue', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeValueRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteAttributeValueResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UnsafeDeleteKasKey(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/policy.unsafe.UnsafeService/UnsafeDeleteKasKey', + policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteKasKeyRequest.SerializeToString, + policy_dot_unsafe_dot_unsafe__pb2.UnsafeDeleteKasKeyResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/legacy_grpc/wellknownconfiguration/wellknown_configuration_pb2_grpc.py b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/wellknownconfiguration/wellknown_configuration_pb2_grpc.py new file mode 100644 index 0000000..3a81cb0 --- /dev/null +++ 
b/otdf-python-proto/src/otdf_python_proto/legacy_grpc/wellknownconfiguration/wellknown_configuration_pb2_grpc.py @@ -0,0 +1,77 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from wellknownconfiguration import wellknown_configuration_pb2 as wellknownconfiguration_dot_wellknown__configuration__pb2 + + +class WellKnownServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetWellKnownConfiguration = channel.unary_unary( + '/wellknownconfiguration.WellKnownService/GetWellKnownConfiguration', + request_serializer=wellknownconfiguration_dot_wellknown__configuration__pb2.GetWellKnownConfigurationRequest.SerializeToString, + response_deserializer=wellknownconfiguration_dot_wellknown__configuration__pb2.GetWellKnownConfigurationResponse.FromString, + _registered_method=True) + + +class WellKnownServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetWellKnownConfiguration(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_WellKnownServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetWellKnownConfiguration': grpc.unary_unary_rpc_method_handler( + servicer.GetWellKnownConfiguration, + request_deserializer=wellknownconfiguration_dot_wellknown__configuration__pb2.GetWellKnownConfigurationRequest.FromString, + response_serializer=wellknownconfiguration_dot_wellknown__configuration__pb2.GetWellKnownConfigurationResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'wellknownconfiguration.WellKnownService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('wellknownconfiguration.WellKnownService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
+class WellKnownService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetWellKnownConfiguration(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/wellknownconfiguration.WellKnownService/GetWellKnownConfiguration', + wellknownconfiguration_dot_wellknown__configuration__pb2.GetWellKnownConfigurationRequest.SerializeToString, + wellknownconfiguration_dot_wellknown__configuration__pb2.GetWellKnownConfigurationResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/otdf-python-proto/src/otdf_python_proto/logger/__init__.py b/otdf-python-proto/src/otdf_python_proto/logger/__init__.py new file mode 100644 index 0000000..4d00b66 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/logger/__init__.py @@ -0,0 +1 @@ +"""logger protobuf definitions.""" diff --git a/otdf-python-proto/src/otdf_python_proto/logger/audit/test_pb2.py b/otdf-python-proto/src/otdf_python_proto/logger/audit/test_pb2.py new file mode 100644 index 0000000..c51d00b --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/logger/audit/test_pb2.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: logger/audit/test.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'logger/audit/test.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from common import common_pb2 as common_dot_common__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17logger/audit/test.proto\x12\x14service.logger.audit\x1a\x13\x63ommon/common.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xc4\x02\n\x10TestPolicyObject\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x32\n\x06\x61\x63tive\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\x06\x61\x63tive\x12K\n\x07version\x18\x03 \x01(\x0e\x32\x31.service.logger.audit.TestPolicyObjectVersionEnumR\x07version\x12\x12\n\x04tags\x18\x05 \x03(\tR\x04tags\x12\x1c\n\x08username\x18\x06 \x01(\tH\x00R\x08username\x12\x30\n\x04user\x18\x07 \x01(\x0b\x32\x1a.service.logger.audit.UserH\x00R\x04user\x12,\n\x08metadata\x18\x04 \x01(\x0b\x32\x10.common.MetadataR\x08metadataB\r\n\x0bpolicy_user\"*\n\x04User\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name*\xa0\x01\n\x1bTestPolicyObjectVersionEnum\x12/\n+TEST_POLICY_OBJECT_VERSION_ENUM_UNSPECIFIED\x10\x00\x12\'\n#TEST_POLICY_OBJECT_VERSION_ENUM_OLD\x10\x01\x12\'\n#TEST_POLICY_OBJECT_VERSION_ENUM_NEW\x10\x02\x42\x97\x01\n\x18\x63om.service.logger.auditB\tTestProtoP\x01\xa2\x02\x03SLA\xaa\x02\x14Service.Logger.Audit\xca\x02\x14Service\\Logger\\Audit\xe2\x02 
Service\\Logger\\Audit\\GPBMetadata\xea\x02\x16Service::Logger::Auditb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'logger.audit.test_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\030com.service.logger.auditB\tTestProtoP\001\242\002\003SLA\252\002\024Service.Logger.Audit\312\002\024Service\\Logger\\Audit\342\002 Service\\Logger\\Audit\\GPBMetadata\352\002\026Service::Logger::Audit' + _globals['_TESTPOLICYOBJECTVERSIONENUM']._serialized_start=474 + _globals['_TESTPOLICYOBJECTVERSIONENUM']._serialized_end=634 + _globals['_TESTPOLICYOBJECT']._serialized_start=103 + _globals['_TESTPOLICYOBJECT']._serialized_end=427 + _globals['_USER']._serialized_start=429 + _globals['_USER']._serialized_end=471 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/logger/audit/test_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/logger/audit/test_pb2.pyi new file mode 100644 index 0000000..0065d87 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/logger/audit/test_pb2.pyi @@ -0,0 +1,45 @@ +from common import common_pb2 as _common_pb2 +from google.protobuf import wrappers_pb2 as _wrappers_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class TestPolicyObjectVersionEnum(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + TEST_POLICY_OBJECT_VERSION_ENUM_UNSPECIFIED: _ClassVar[TestPolicyObjectVersionEnum] + TEST_POLICY_OBJECT_VERSION_ENUM_OLD: _ClassVar[TestPolicyObjectVersionEnum] + TEST_POLICY_OBJECT_VERSION_ENUM_NEW: _ClassVar[TestPolicyObjectVersionEnum] +TEST_POLICY_OBJECT_VERSION_ENUM_UNSPECIFIED: TestPolicyObjectVersionEnum +TEST_POLICY_OBJECT_VERSION_ENUM_OLD: TestPolicyObjectVersionEnum +TEST_POLICY_OBJECT_VERSION_ENUM_NEW: TestPolicyObjectVersionEnum + +class TestPolicyObject(_message.Message): + __slots__ = ("id", "active", "version", "tags", "username", "user", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + ACTIVE_FIELD_NUMBER: _ClassVar[int] + VERSION_FIELD_NUMBER: _ClassVar[int] + TAGS_FIELD_NUMBER: _ClassVar[int] + USERNAME_FIELD_NUMBER: _ClassVar[int] + USER_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + active: _wrappers_pb2.BoolValue + version: TestPolicyObjectVersionEnum + tags: _containers.RepeatedScalarFieldContainer[str] + username: str + user: User + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., active: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., version: _Optional[_Union[TestPolicyObjectVersionEnum, str]] = ..., tags: _Optional[_Iterable[str]] = ..., username: _Optional[str] = ..., user: _Optional[_Union[User, _Mapping]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class User(_message.Message): + __slots__ = ("id", "name") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ...) 
-> None: ... diff --git a/otdf-python-proto/src/otdf_python_proto/policy/__init__.py b/otdf-python-proto/src/otdf_python_proto/policy/__init__.py new file mode 100644 index 0000000..e4cc131 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/__init__.py @@ -0,0 +1 @@ +"""policy protobuf definitions.""" diff --git a/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2.py new file mode 100644 index 0000000..dd5a166 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/actions/actions.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/actions/actions.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 +from policy import selectors_pb2 as policy_dot_selectors__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cpolicy/actions/actions.proto\x12\x0epolicy.actions\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x14policy/objects.proto\x1a\x16policy/selectors.proto\"\xeb\x02\n\x10GetActionRequest\x12\x1a\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x02id\x12\xa5\x02\n\x04name\x18\x02 \x01(\tB\x8e\x02\xbaH\x8a\x02r\x03\x18\xfd\x01\xba\x01\x81\x02\n\x12\x61\x63tion_name_format\x12\xad\x01\x41\x63tion name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case.\x1a;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')H\x00R\x04nameB\x13\n\nidentifier\x12\x05\xbaH\x02\x08\x01\"~\n\x11GetActionResponse\x12&\n\x06\x61\x63tion\x18\x01 \x01(\x0b\x32\x0e.policy.ActionR\x06\x61\x63tion\x12\x41\n\x10subject_mappings\x18\x02 \x03(\x0b\x32\x16.policy.SubjectMappingR\x0fsubjectMappings\"I\n\x12ListActionsRequest\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\xbd\x01\n\x13ListActionsResponse\x12\x39\n\x10\x61\x63tions_standard\x18\x01 \x03(\x0b\x32\x0e.policy.ActionR\x0f\x61\x63tionsStandard\x12\x35\n\x0e\x61\x63tions_custom\x18\x02 \x03(\x0b\x32\x0e.policy.ActionR\ractionsCustom\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\xf3\x02\n\x13\x43reateActionRequest\x12\xa6\x02\n\x04name\x18\x01 \x01(\tB\x91\x02\xbaH\x8d\x02r\x03\x18\xfd\x01\xba\x01\x81\x02\n\x12\x61\x63tion_name_format\x12\xad\x01\x41\x63tion name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. 
The stored action name will be normalized to lower case.\x1a;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\xc8\x01\x01R\x04name\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\">\n\x14\x43reateActionResponse\x12&\n\x06\x61\x63tion\x18\x01 \x01(\x0b\x32\x0e.policy.ActionR\x06\x61\x63tion\"\xf3\x03\n\x13UpdateActionRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xb6\x02\n\x04name\x18\x02 \x01(\tB\xa1\x02\xbaH\x9d\x02r\x03\x18\xfd\x01\xba\x01\x94\x02\n\x12\x61\x63tion_name_format\x12\xad\x01\x41\x63tion name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case.\x1aNsize(this) == 0 || this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')R\x04name\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\">\n\x14UpdateActionResponse\x12&\n\x06\x61\x63tion\x18\x01 \x01(\x0b\x32\x0e.policy.ActionR\x06\x61\x63tion\"/\n\x13\x44\x65leteActionRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\">\n\x14\x44\x65leteActionResponse\x12&\n\x06\x61\x63tion\x18\x01 \x01(\x0b\x32\x0e.policy.ActionR\x06\x61\x63tion2\xd4\x03\n\rActionService\x12R\n\tGetAction\x12 .policy.actions.GetActionRequest\x1a!.policy.actions.GetActionResponse\"\x00\x12X\n\x0bListActions\x12\".policy.actions.ListActionsRequest\x1a#.policy.actions.ListActionsResponse\"\x00\x12[\n\x0c\x43reateAction\x12#.policy.actions.CreateActionRequest\x1a$.policy.actions.CreateActionResponse\"\x00\x12[\n\x0cUpdateAction\x12#.policy.actions.UpdateActionRequest\x1a$.policy.actions.UpdateActionResponse\"\x00\x12[\n\x0c\x44\x65leteAction\x12#.policy.actions.DeleteActionRequest\x1a$.policy.actions.DeleteActionResponse\"\x00\x42{\n\x12\x63om.policy.actionsB\x0c\x41\x63tionsProtoP\x01\xa2\x02\x03PAX\xaa\x02\x0ePolicy.Actions\xca\x02\x0ePolicy\\Actions\xe2\x02\x1aPolicy\\Actions\\GPBMetadata\xea\x02\x0fPolicy::Actionsb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.actions.actions_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\022com.policy.actionsB\014ActionsProtoP\001\242\002\003PAX\252\002\016Policy.Actions\312\002\016Policy\\Actions\342\002\032Policy\\Actions\\GPBMetadata\352\002\017Policy::Actions' + _globals['_GETACTIONREQUEST'].oneofs_by_name['identifier']._loaded_options = None + _globals['_GETACTIONREQUEST'].oneofs_by_name['identifier']._serialized_options = b'\272H\002\010\001' + _globals['_GETACTIONREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETACTIONREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_GETACTIONREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_GETACTIONREQUEST'].fields_by_name['name']._serialized_options = b'\272H\212\002r\003\030\375\001\272\001\201\002\n\022action_name_format\022\255\001Action name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. 
The stored action name will be normalized to lower case.\032;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')' + _globals['_CREATEACTIONREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_CREATEACTIONREQUEST'].fields_by_name['name']._serialized_options = b'\272H\215\002r\003\030\375\001\272\001\201\002\n\022action_name_format\022\255\001Action name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case.\032;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\310\001\001' + _globals['_UPDATEACTIONREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATEACTIONREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATEACTIONREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_UPDATEACTIONREQUEST'].fields_by_name['name']._serialized_options = b'\272H\235\002r\003\030\375\001\272\001\224\002\n\022action_name_format\022\255\001Action name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case.\032Nsize(this) == 0 || this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')' + _globals['_DELETEACTIONREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETEACTIONREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_GETACTIONREQUEST']._serialized_start=145 + _globals['_GETACTIONREQUEST']._serialized_end=508 + _globals['_GETACTIONRESPONSE']._serialized_start=510 + _globals['_GETACTIONRESPONSE']._serialized_end=636 + _globals['_LISTACTIONSREQUEST']._serialized_start=638 + _globals['_LISTACTIONSREQUEST']._serialized_end=711 + _globals['_LISTACTIONSRESPONSE']._serialized_start=714 + _globals['_LISTACTIONSRESPONSE']._serialized_end=903 + _globals['_CREATEACTIONREQUEST']._serialized_start=906 + _globals['_CREATEACTIONREQUEST']._serialized_end=1277 + _globals['_CREATEACTIONRESPONSE']._serialized_start=1279 + _globals['_CREATEACTIONRESPONSE']._serialized_end=1341 + _globals['_UPDATEACTIONREQUEST']._serialized_start=1344 + _globals['_UPDATEACTIONREQUEST']._serialized_end=1843 + _globals['_UPDATEACTIONRESPONSE']._serialized_start=1845 + _globals['_UPDATEACTIONRESPONSE']._serialized_end=1907 + _globals['_DELETEACTIONREQUEST']._serialized_start=1909 + _globals['_DELETEACTIONREQUEST']._serialized_end=1956 + _globals['_DELETEACTIONRESPONSE']._serialized_start=1958 + _globals['_DELETEACTIONRESPONSE']._serialized_end=2020 + _globals['_ACTIONSERVICE']._serialized_start=2023 + _globals['_ACTIONSERVICE']._serialized_end=2491 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2.pyi new file mode 100644 index 0000000..28f4342 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2.pyi @@ -0,0 +1,87 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from common import common_pb2 as _common_pb2 +from policy import objects_pb2 as _objects_pb2 +from policy import selectors_pb2 as _selectors_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as 
_ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class GetActionRequest(_message.Message): + __slots__ = ("id", "name") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... + +class GetActionResponse(_message.Message): + __slots__ = ("action", "subject_mappings") + ACTION_FIELD_NUMBER: _ClassVar[int] + SUBJECT_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + action: _objects_pb2.Action + subject_mappings: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectMapping] + def __init__(self, action: _Optional[_Union[_objects_pb2.Action, _Mapping]] = ..., subject_mappings: _Optional[_Iterable[_Union[_objects_pb2.SubjectMapping, _Mapping]]] = ...) -> None: ... + +class ListActionsRequest(_message.Message): + __slots__ = ("pagination",) + PAGINATION_FIELD_NUMBER: _ClassVar[int] + pagination: _selectors_pb2.PageRequest + def __init__(self, pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListActionsResponse(_message.Message): + __slots__ = ("actions_standard", "actions_custom", "pagination") + ACTIONS_STANDARD_FIELD_NUMBER: _ClassVar[int] + ACTIONS_CUSTOM_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + actions_standard: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Action] + actions_custom: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Action] + pagination: _selectors_pb2.PageResponse + def __init__(self, actions_standard: _Optional[_Iterable[_Union[_objects_pb2.Action, _Mapping]]] = ..., actions_custom: _Optional[_Iterable[_Union[_objects_pb2.Action, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... + +class CreateActionRequest(_message.Message): + __slots__ = ("name", "metadata") + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + name: str + metadata: _common_pb2.MetadataMutable + def __init__(self, name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateActionResponse(_message.Message): + __slots__ = ("action",) + ACTION_FIELD_NUMBER: _ClassVar[int] + action: _objects_pb2.Action + def __init__(self, action: _Optional[_Union[_objects_pb2.Action, _Mapping]] = ...) -> None: ... + +class UpdateActionRequest(_message.Message): + __slots__ = ("id", "name", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateActionResponse(_message.Message): + __slots__ = ("action",) + ACTION_FIELD_NUMBER: _ClassVar[int] + action: _objects_pb2.Action + def __init__(self, action: _Optional[_Union[_objects_pb2.Action, _Mapping]] = ...) -> None: ... + +class DeleteActionRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... 
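These `.pyi` stubs are type hints only; at runtime the classes come from the generated `actions_pb2` module, and every name listed in `__slots__` is accepted as a keyword argument by `__init__`. A minimal construction sketch, assuming the generated package root is on `sys.path` exactly as laid out in this diff (adjust the import to `otdf_python_proto.policy.actions` if consuming the installed package):

from policy.actions import actions_pb2

# All fields are optional keywords, mirroring the stub signatures above.
req = actions_pb2.CreateActionRequest(name="watch")
assert req.name == "watch"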
+ +class DeleteActionResponse(_message.Message): + __slots__ = ("action",) + ACTION_FIELD_NUMBER: _ClassVar[int] + action: _objects_pb2.Action + def __init__(self, action: _Optional[_Union[_objects_pb2.Action, _Mapping]] = ...) -> None: ... diff --git a/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2_connect.py new file mode 100644 index 0000000..f9e5cd8 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/actions/actions_pb2_connect.py @@ -0,0 +1,275 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import policy.actions.actions_pb2 + +class ActionServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_get_action( + self, req: policy.actions.actions_pb2.GetActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.GetActionResponse]: + """Low-level method to call GetAction, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/GetAction" + return self._connect_client.call_unary(url, req, policy.actions.actions_pb2.GetActionResponse,extra_headers, timeout_seconds) + + + def get_action( + self, req: policy.actions.actions_pb2.GetActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.GetActionResponse: + response = self.call_get_action(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_actions( + self, req: policy.actions.actions_pb2.ListActionsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.ListActionsResponse]: + """Low-level method to call ListActions, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/ListActions" + return self._connect_client.call_unary(url, req, policy.actions.actions_pb2.ListActionsResponse,extra_headers, timeout_seconds) + + + def list_actions( 
+ self, req: policy.actions.actions_pb2.ListActionsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.ListActionsResponse: + response = self.call_list_actions(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_action( + self, req: policy.actions.actions_pb2.CreateActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.CreateActionResponse]: + """Low-level method to call CreateAction, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/CreateAction" + return self._connect_client.call_unary(url, req, policy.actions.actions_pb2.CreateActionResponse,extra_headers, timeout_seconds) + + + def create_action( + self, req: policy.actions.actions_pb2.CreateActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.CreateActionResponse: + response = self.call_create_action(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_action( + self, req: policy.actions.actions_pb2.UpdateActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.UpdateActionResponse]: + """Low-level method to call UpdateAction, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/UpdateAction" + return self._connect_client.call_unary(url, req, policy.actions.actions_pb2.UpdateActionResponse,extra_headers, timeout_seconds) + + + def update_action( + self, req: policy.actions.actions_pb2.UpdateActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.UpdateActionResponse: + response = self.call_update_action(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_delete_action( + self, req: policy.actions.actions_pb2.DeleteActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.DeleteActionResponse]: + """Low-level method to call DeleteAction, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/DeleteAction" + return self._connect_client.call_unary(url, req, policy.actions.actions_pb2.DeleteActionResponse,extra_headers, timeout_seconds) + + + def delete_action( + self, req: policy.actions.actions_pb2.DeleteActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.DeleteActionResponse: + response = self.call_delete_action(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncActionServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = 
ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_get_action( + self, req: policy.actions.actions_pb2.GetActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.GetActionResponse]: + """Low-level method to call GetAction, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/GetAction" + return await self._connect_client.call_unary(url, req, policy.actions.actions_pb2.GetActionResponse,extra_headers, timeout_seconds) + + async def get_action( + self, req: policy.actions.actions_pb2.GetActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.GetActionResponse: + response = await self.call_get_action(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_actions( + self, req: policy.actions.actions_pb2.ListActionsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.ListActionsResponse]: + """Low-level method to call ListActions, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/ListActions" + return await self._connect_client.call_unary(url, req, policy.actions.actions_pb2.ListActionsResponse,extra_headers, timeout_seconds) + + async def list_actions( + self, req: policy.actions.actions_pb2.ListActionsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.ListActionsResponse: + response = await self.call_list_actions(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_action( + self, req: policy.actions.actions_pb2.CreateActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.CreateActionResponse]: + """Low-level method to call CreateAction, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/CreateAction" + return await self._connect_client.call_unary(url, req, policy.actions.actions_pb2.CreateActionResponse,extra_headers, timeout_seconds) + + async def create_action( + self, req: policy.actions.actions_pb2.CreateActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.CreateActionResponse: + response = await self.call_create_action(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_action( + self, req: policy.actions.actions_pb2.UpdateActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.UpdateActionResponse]: + """Low-level method to call UpdateAction, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/UpdateAction" + return await self._connect_client.call_unary(url, req, 
policy.actions.actions_pb2.UpdateActionResponse,extra_headers, timeout_seconds) + + async def update_action( + self, req: policy.actions.actions_pb2.UpdateActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.UpdateActionResponse: + response = await self.call_update_action(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_delete_action( + self, req: policy.actions.actions_pb2.DeleteActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.actions.actions_pb2.DeleteActionResponse]: + """Low-level method to call DeleteAction, granting access to errors and metadata""" + url = self.base_url + "/policy.actions.ActionService/DeleteAction" + return await self._connect_client.call_unary(url, req, policy.actions.actions_pb2.DeleteActionResponse,extra_headers, timeout_seconds) + + async def delete_action( + self, req: policy.actions.actions_pb2.DeleteActionRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.actions.actions_pb2.DeleteActionResponse: + response = await self.call_delete_action(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class ActionServiceProtocol(typing.Protocol): + def get_action(self, req: ClientRequest[policy.actions.actions_pb2.GetActionRequest]) -> ServerResponse[policy.actions.actions_pb2.GetActionResponse]: + ... + def list_actions(self, req: ClientRequest[policy.actions.actions_pb2.ListActionsRequest]) -> ServerResponse[policy.actions.actions_pb2.ListActionsResponse]: + ... + def create_action(self, req: ClientRequest[policy.actions.actions_pb2.CreateActionRequest]) -> ServerResponse[policy.actions.actions_pb2.CreateActionResponse]: + ... + def update_action(self, req: ClientRequest[policy.actions.actions_pb2.UpdateActionRequest]) -> ServerResponse[policy.actions.actions_pb2.UpdateActionResponse]: + ... + def delete_action(self, req: ClientRequest[policy.actions.actions_pb2.DeleteActionRequest]) -> ServerResponse[policy.actions.actions_pb2.DeleteActionResponse]: + ... 
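A hedged usage sketch for the synchronous Connect client defined above. It assumes the `connectrpc` runtime this code was generated against is installed, that a platform Connect endpoint is reachable at the illustrative base URL, and that a bearer token was obtained out of band; `extra_headers` is assumed here to accept a plain dict of HTTP headers:

from policy.actions import actions_pb2
from policy.actions.actions_pb2_connect import ActionServiceClient

client = ActionServiceClient("http://localhost:8080")
resp = client.list_actions(
    actions_pb2.ListActionsRequest(),
    extra_headers={"Authorization": "Bearer <access-token>"},  # illustrative token
)
for action in resp.actions_standard:
    print(action)

The `call_*` variants return the `UnaryOutput` wrapper instead of raising, which is the path to error details and response metadata.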
+ +ACTION_SERVICE_PATH_PREFIX = "/policy.actions.ActionService" + +def wsgi_action_service(implementation: ActionServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.actions.ActionService/GetAction", implementation.get_action, policy.actions.actions_pb2.GetActionRequest) + app.register_unary_rpc("/policy.actions.ActionService/ListActions", implementation.list_actions, policy.actions.actions_pb2.ListActionsRequest) + app.register_unary_rpc("/policy.actions.ActionService/CreateAction", implementation.create_action, policy.actions.actions_pb2.CreateActionRequest) + app.register_unary_rpc("/policy.actions.ActionService/UpdateAction", implementation.update_action, policy.actions.actions_pb2.UpdateActionRequest) + app.register_unary_rpc("/policy.actions.ActionService/DeleteAction", implementation.delete_action, policy.actions.actions_pb2.DeleteActionRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/policy/attributes/attributes_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/attributes/attributes_pb2.py new file mode 100644 index 0000000..801433a --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/attributes/attributes_pb2.py @@ -0,0 +1,234 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/attributes/attributes.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/attributes/attributes.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 +from policy import selectors_pb2 as policy_dot_selectors__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"policy/attributes/attributes.proto\x12\x11policy.attributes\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x14policy/objects.proto\x1a\x16policy/selectors.proto\"\x86\x01\n\x18\x41ttributeKeyAccessServer\x12+\n\x0c\x61ttribute_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x0b\x61ttributeId\x12\x39\n\x14key_access_server_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x11keyAccessServerId:\x02\x18\x01\"z\n\x14ValueKeyAccessServer\x12#\n\x08value_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x07valueId\x12\x39\n\x14key_access_server_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x11keyAccessServerId:\x02\x18\x01\"b\n\x0c\x41ttributeKey\x12.\n\x0c\x61ttribute_id\x18\x01 \x01(\tB\x0b\xbaH\x08r\x03\xb0\x01\x01\xc8\x01\x01R\x0b\x61ttributeId\x12\"\n\x06key_id\x18\x02 \x01(\tB\x0b\xbaH\x08r\x03\xb0\x01\x01\xc8\x01\x01R\x05keyId\"V\n\x08ValueKey\x12&\n\x08value_id\x18\x01 \x01(\tB\x0b\xbaH\x08r\x03\xb0\x01\x01\xc8\x01\x01R\x07valueId\x12\"\n\x06key_id\x18\x02 
\x01(\tB\x0b\xbaH\x08r\x03\xb0\x01\x01\xc8\x01\x01R\x05keyId\"\x99\x01\n\x15ListAttributesRequest\x12-\n\x05state\x18\x01 \x01(\x0e\x32\x17.common.ActiveStateEnumR\x05state\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\x81\x01\n\x16ListAttributesResponse\x12\x31\n\nattributes\x18\x01 \x03(\x0b\x32\x11.policy.AttributeR\nattributes\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\xbe\x03\n\x13GetAttributeRequest\x12\x1d\n\x02id\x18\x01 \x01(\tB\r\x18\x01\xbaH\x08r\x03\xb0\x01\x01\xd8\x01\x01R\x02id\x12-\n\x0c\x61ttribute_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x0b\x61ttributeId\x12\x1e\n\x03\x66qn\x18\x03 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x03\x66qn:\xaa\x02\xbaH\xa6\x02\x1a\xa2\x01\n\x10\x65xclusive_fields\x12PEither use deprecated \'id\' field or one of \'attribute_id\' or \'fqn\', but not both\x1a\n\x04rule\x18\x03 \x01(\x0e\x32\x1d.policy.AttributeRuleTypeEnumB\x0b\xbaH\x08\x82\x01\x02\x10\x01\xc8\x01\x01R\x04rule\x12V\n\x06values\x18\x04 \x03(\tB>\xbaH;\x92\x01\x38\x08\x00\x18\x01\"2r0\x18\xfd\x01\x32+^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$R\x06values\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"J\n\x17\x43reateAttributeResponse\x12/\n\tattribute\x18\x01 \x01(\x0b\x32\x11.policy.AttributeR\tattribute\"\xbd\x01\n\x16UpdateAttributeRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"J\n\x17UpdateAttributeResponse\x12/\n\tattribute\x18\x01 \x01(\x0b\x32\x11.policy.AttributeR\tattribute\"6\n\x1a\x44\x65\x61\x63tivateAttributeRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"N\n\x1b\x44\x65\x61\x63tivateAttributeResponse\x12/\n\tattribute\x18\x01 \x01(\x0b\x32\x11.policy.AttributeR\tattribute\"\xab\x03\n\x18GetAttributeValueRequest\x12\x1d\n\x02id\x18\x01 \x01(\tB\r\x18\x01\xbaH\x08r\x03\xb0\x01\x01\xd8\x01\x01R\x02id\x12%\n\x08value_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x07valueId\x12\x1e\n\x03\x66qn\x18\x03 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x03\x66qn:\x9a\x02\xbaH\x96\x02\x1a\x9a\x01\n\x10\x65xclusive_fields\x12LEither use deprecated \'id\' field or one of \'value_id\' or \'fqn\', but not both\x1a\x38!(has(this.id) && (has(this.value_id) || has(this.fqn)))\x1aw\n\x0frequired_fields\x12/Either id or one of value_id or fqn must be set\x1a\x33has(this.id) || has(this.value_id) || has(this.fqn)B\x0c\n\nidentifier\"@\n\x19GetAttributeValueResponse\x12#\n\x05value\x18\x01 \x01(\x0b\x32\r.policy.ValueR\x05value\"\xad\x01\n\x1aListAttributeValuesRequest\x12+\n\x0c\x61ttribute_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x0b\x61ttributeId\x12-\n\x05state\x18\x02 \x01(\x0e\x32\x17.common.ActiveStateEnumR\x05state\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"z\n\x1bListAttributeValuesResponse\x12%\n\x06values\x18\x01 \x03(\x0b\x32\r.policy.ValueR\x06values\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\xc5\x03\n\x1b\x43reateAttributeValueRequest\x12+\n\x0c\x61ttribute_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x0b\x61ttributeId\x12\xb4\x02\n\x05value\x18\x02 
\x01(\tB\x9d\x02\xbaH\x99\x02r\x03\x18\xfd\x01\xba\x01\x8d\x02\n\x16\x61ttribute_value_format\x12\xb5\x01\x41ttribute value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored attribute value will be normalized to lower case.\x1a;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\xc8\x01\x01R\x05value\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadataJ\x04\x08\x03\x10\x04R\x07members\"C\n\x1c\x43reateAttributeValueResponse\x12#\n\x05value\x18\x01 \x01(\x0b\x32\r.policy.ValueR\x05value\"\xd1\x01\n\x1bUpdateAttributeValueRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehaviorJ\x04\x08\x04\x10\x05R\x07members\"C\n\x1cUpdateAttributeValueResponse\x12#\n\x05value\x18\x01 \x01(\x0b\x32\r.policy.ValueR\x05value\";\n\x1f\x44\x65\x61\x63tivateAttributeValueRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"G\n DeactivateAttributeValueResponse\x12#\n\x05value\x18\x01 \x01(\x0b\x32\r.policy.ValueR\x05value\"\x81\x01\n\x1fGetAttributeValuesByFqnsRequest\x12\x1f\n\x04\x66qns\x18\x01 \x03(\tB\x0b\xbaH\x08\x92\x01\x05\x08\x01\x10\xfa\x01R\x04\x66qns\x12=\n\nwith_value\x18\x02 \x01(\x0b\x32\x1e.policy.AttributeValueSelectorR\twithValue\"\x9b\x03\n GetAttributeValuesByFqnsResponse\x12}\n\x14\x66qn_attribute_values\x18\x01 \x03(\x0b\x32K.policy.attributes.GetAttributeValuesByFqnsResponse.FqnAttributeValuesEntryR\x12\x66qnAttributeValues\x1ai\n\x11\x41ttributeAndValue\x12/\n\tattribute\x18\x01 \x01(\x0b\x32\x11.policy.AttributeR\tattribute\x12#\n\x05value\x18\x02 \x01(\x0b\x32\r.policy.ValueR\x05value\x1a\x8c\x01\n\x17\x46qnAttributeValuesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12[\n\x05value\x18\x02 \x01(\x0b\x32\x45.policy.attributes.GetAttributeValuesByFqnsResponse.AttributeAndValueR\x05value:\x02\x38\x01\"\x99\x01\n\'AssignKeyAccessServerToAttributeRequest\x12j\n\x1b\x61ttribute_key_access_server\x18\x01 \x01(\x0b\x32+.policy.attributes.AttributeKeyAccessServerR\x18\x61ttributeKeyAccessServer:\x02\x18\x01\"\x9a\x01\n(AssignKeyAccessServerToAttributeResponse\x12j\n\x1b\x61ttribute_key_access_server\x18\x01 \x01(\x0b\x32+.policy.attributes.AttributeKeyAccessServerR\x18\x61ttributeKeyAccessServer:\x02\x18\x01\"\x9b\x01\n)RemoveKeyAccessServerFromAttributeRequest\x12j\n\x1b\x61ttribute_key_access_server\x18\x01 \x01(\x0b\x32+.policy.attributes.AttributeKeyAccessServerR\x18\x61ttributeKeyAccessServer:\x02\x18\x01\"\x9c\x01\n*RemoveKeyAccessServerFromAttributeResponse\x12j\n\x1b\x61ttribute_key_access_server\x18\x01 \x01(\x0b\x32+.policy.attributes.AttributeKeyAccessServerR\x18\x61ttributeKeyAccessServer:\x02\x18\x01\"\x89\x01\n#AssignKeyAccessServerToValueRequest\x12^\n\x17value_key_access_server\x18\x01 \x01(\x0b\x32\'.policy.attributes.ValueKeyAccessServerR\x14valueKeyAccessServer:\x02\x18\x01\"\x8a\x01\n$AssignKeyAccessServerToValueResponse\x12^\n\x17value_key_access_server\x18\x01 \x01(\x0b\x32\'.policy.attributes.ValueKeyAccessServerR\x14valueKeyAccessServer:\x02\x18\x01\"\x8b\x01\n%RemoveKeyAccessServerFromValueRequest\x12^\n\x17value_key_access_server\x18\x01 \x01(\x0b\x32\'.policy.attributes.ValueKeyAccessServerR\x14valueKeyAccessServer:\x02\x18\x01\"\x8c\x01\n&RemoveKeyAccessServerFromValueResponse\x12^\n\x17value_key_access_server\x18\x01 
\x01(\x0b\x32\'.policy.attributes.ValueKeyAccessServerR\x14valueKeyAccessServer:\x02\x18\x01\"q\n!AssignPublicKeyToAttributeRequest\x12L\n\rattribute_key\x18\x01 \x01(\x0b\x32\x1f.policy.attributes.AttributeKeyB\x06\xbaH\x03\xc8\x01\x01R\x0c\x61ttributeKey\"j\n\"AssignPublicKeyToAttributeResponse\x12\x44\n\rattribute_key\x18\x01 \x01(\x0b\x32\x1f.policy.attributes.AttributeKeyR\x0c\x61ttributeKey\"s\n#RemovePublicKeyFromAttributeRequest\x12L\n\rattribute_key\x18\x01 \x01(\x0b\x32\x1f.policy.attributes.AttributeKeyB\x06\xbaH\x03\xc8\x01\x01R\x0c\x61ttributeKey\"l\n$RemovePublicKeyFromAttributeResponse\x12\x44\n\rattribute_key\x18\x01 \x01(\x0b\x32\x1f.policy.attributes.AttributeKeyR\x0c\x61ttributeKey\"a\n\x1d\x41ssignPublicKeyToValueRequest\x12@\n\tvalue_key\x18\x01 \x01(\x0b\x32\x1b.policy.attributes.ValueKeyB\x06\xbaH\x03\xc8\x01\x01R\x08valueKey\"Z\n\x1e\x41ssignPublicKeyToValueResponse\x12\x38\n\tvalue_key\x18\x01 \x01(\x0b\x32\x1b.policy.attributes.ValueKeyR\x08valueKey\"c\n\x1fRemovePublicKeyFromValueRequest\x12@\n\tvalue_key\x18\x01 \x01(\x0b\x32\x1b.policy.attributes.ValueKeyB\x06\xbaH\x03\xc8\x01\x01R\x08valueKey\"\\\n RemovePublicKeyFromValueResponse\x12\x38\n\tvalue_key\x18\x01 \x01(\x0b\x32\x1b.policy.attributes.ValueKeyR\x08valueKey2\xf2\x13\n\x11\x41ttributesService\x12j\n\x0eListAttributes\x12(.policy.attributes.ListAttributesRequest\x1a).policy.attributes.ListAttributesResponse\"\x03\x90\x02\x01\x12y\n\x13ListAttributeValues\x12-.policy.attributes.ListAttributeValuesRequest\x1a..policy.attributes.ListAttributeValuesResponse\"\x03\x90\x02\x01\x12\x64\n\x0cGetAttribute\x12&.policy.attributes.GetAttributeRequest\x1a\'.policy.attributes.GetAttributeResponse\"\x03\x90\x02\x01\x12\xa1\x01\n\x18GetAttributeValuesByFqns\x12\x32.policy.attributes.GetAttributeValuesByFqnsRequest\x1a\x33.policy.attributes.GetAttributeValuesByFqnsResponse\"\x1c\x90\x02\x01\x82\xd3\xe4\x93\x02\x13\x12\x11/attributes/*/fqn\x12j\n\x0f\x43reateAttribute\x12).policy.attributes.CreateAttributeRequest\x1a*.policy.attributes.CreateAttributeResponse\"\x00\x12j\n\x0fUpdateAttribute\x12).policy.attributes.UpdateAttributeRequest\x1a*.policy.attributes.UpdateAttributeResponse\"\x00\x12v\n\x13\x44\x65\x61\x63tivateAttribute\x12-.policy.attributes.DeactivateAttributeRequest\x1a..policy.attributes.DeactivateAttributeResponse\"\x00\x12s\n\x11GetAttributeValue\x12+.policy.attributes.GetAttributeValueRequest\x1a,.policy.attributes.GetAttributeValueResponse\"\x03\x90\x02\x01\x12y\n\x14\x43reateAttributeValue\x12..policy.attributes.CreateAttributeValueRequest\x1a/.policy.attributes.CreateAttributeValueResponse\"\x00\x12y\n\x14UpdateAttributeValue\x12..policy.attributes.UpdateAttributeValueRequest\x1a/.policy.attributes.UpdateAttributeValueResponse\"\x00\x12\x85\x01\n\x18\x44\x65\x61\x63tivateAttributeValue\x12\x32.policy.attributes.DeactivateAttributeValueRequest\x1a\x33.policy.attributes.DeactivateAttributeValueResponse\"\x00\x12\xa0\x01\n 
AssignKeyAccessServerToAttribute\x12:.policy.attributes.AssignKeyAccessServerToAttributeRequest\x1a;.policy.attributes.AssignKeyAccessServerToAttributeResponse\"\x03\x88\x02\x01\x12\xa6\x01\n\"RemoveKeyAccessServerFromAttribute\x12<.policy.attributes.RemoveKeyAccessServerFromAttributeRequest\x1a=.policy.attributes.RemoveKeyAccessServerFromAttributeResponse\"\x03\x88\x02\x01\x12\x94\x01\n\x1c\x41ssignKeyAccessServerToValue\x12\x36.policy.attributes.AssignKeyAccessServerToValueRequest\x1a\x37.policy.attributes.AssignKeyAccessServerToValueResponse\"\x03\x88\x02\x01\x12\x9a\x01\n\x1eRemoveKeyAccessServerFromValue\x12\x38.policy.attributes.RemoveKeyAccessServerFromValueRequest\x1a\x39.policy.attributes.RemoveKeyAccessServerFromValueResponse\"\x03\x88\x02\x01\x12\x8b\x01\n\x1a\x41ssignPublicKeyToAttribute\x12\x34.policy.attributes.AssignPublicKeyToAttributeRequest\x1a\x35.policy.attributes.AssignPublicKeyToAttributeResponse\"\x00\x12\x91\x01\n\x1cRemovePublicKeyFromAttribute\x12\x36.policy.attributes.RemovePublicKeyFromAttributeRequest\x1a\x37.policy.attributes.RemovePublicKeyFromAttributeResponse\"\x00\x12\x7f\n\x16\x41ssignPublicKeyToValue\x12\x30.policy.attributes.AssignPublicKeyToValueRequest\x1a\x31.policy.attributes.AssignPublicKeyToValueResponse\"\x00\x12\x85\x01\n\x18RemovePublicKeyFromValue\x12\x32.policy.attributes.RemovePublicKeyFromValueRequest\x1a\x33.policy.attributes.RemovePublicKeyFromValueResponse\"\x00\x42\x8d\x01\n\x15\x63om.policy.attributesB\x0f\x41ttributesProtoP\x01\xa2\x02\x03PAX\xaa\x02\x11Policy.Attributes\xca\x02\x11Policy\\Attributes\xe2\x02\x1dPolicy\\Attributes\\GPBMetadata\xea\x02\x12Policy::Attributesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.attributes.attributes_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\025com.policy.attributesB\017AttributesProtoP\001\242\002\003PAX\252\002\021Policy.Attributes\312\002\021Policy\\Attributes\342\002\035Policy\\Attributes\\GPBMetadata\352\002\022Policy::Attributes' + _globals['_ATTRIBUTEKEYACCESSSERVER'].fields_by_name['attribute_id']._loaded_options = None + _globals['_ATTRIBUTEKEYACCESSSERVER'].fields_by_name['attribute_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_ATTRIBUTEKEYACCESSSERVER'].fields_by_name['key_access_server_id']._loaded_options = None + _globals['_ATTRIBUTEKEYACCESSSERVER'].fields_by_name['key_access_server_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_ATTRIBUTEKEYACCESSSERVER']._loaded_options = None + _globals['_ATTRIBUTEKEYACCESSSERVER']._serialized_options = b'\030\001' + _globals['_VALUEKEYACCESSSERVER'].fields_by_name['value_id']._loaded_options = None + _globals['_VALUEKEYACCESSSERVER'].fields_by_name['value_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_VALUEKEYACCESSSERVER'].fields_by_name['key_access_server_id']._loaded_options = None + _globals['_VALUEKEYACCESSSERVER'].fields_by_name['key_access_server_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_VALUEKEYACCESSSERVER']._loaded_options = None + _globals['_VALUEKEYACCESSSERVER']._serialized_options = b'\030\001' + _globals['_ATTRIBUTEKEY'].fields_by_name['attribute_id']._loaded_options = None + _globals['_ATTRIBUTEKEY'].fields_by_name['attribute_id']._serialized_options = b'\272H\010r\003\260\001\001\310\001\001' + 
_globals['_ATTRIBUTEKEY'].fields_by_name['key_id']._loaded_options = None + _globals['_ATTRIBUTEKEY'].fields_by_name['key_id']._serialized_options = b'\272H\010r\003\260\001\001\310\001\001' + _globals['_VALUEKEY'].fields_by_name['value_id']._loaded_options = None + _globals['_VALUEKEY'].fields_by_name['value_id']._serialized_options = b'\272H\010r\003\260\001\001\310\001\001' + _globals['_VALUEKEY'].fields_by_name['key_id']._loaded_options = None + _globals['_VALUEKEY'].fields_by_name['key_id']._serialized_options = b'\272H\010r\003\260\001\001\310\001\001' + _globals['_GETATTRIBUTEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETATTRIBUTEREQUEST'].fields_by_name['id']._serialized_options = b'\030\001\272H\010r\003\260\001\001\330\001\001' + _globals['_GETATTRIBUTEREQUEST'].fields_by_name['attribute_id']._loaded_options = None + _globals['_GETATTRIBUTEREQUEST'].fields_by_name['attribute_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_GETATTRIBUTEREQUEST'].fields_by_name['fqn']._loaded_options = None + _globals['_GETATTRIBUTEREQUEST'].fields_by_name['fqn']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_GETATTRIBUTEREQUEST']._loaded_options = None + _globals['_GETATTRIBUTEREQUEST']._serialized_options = b'\272H\246\002\032\242\001\n\020exclusive_fields\022PEither use deprecated \'id\' field or one of \'attribute_id\' or \'fqn\', but not both\032 None: ... + +class ValueKeyAccessServer(_message.Message): + __slots__ = ("value_id", "key_access_server_id") + VALUE_ID_FIELD_NUMBER: _ClassVar[int] + KEY_ACCESS_SERVER_ID_FIELD_NUMBER: _ClassVar[int] + value_id: str + key_access_server_id: str + def __init__(self, value_id: _Optional[str] = ..., key_access_server_id: _Optional[str] = ...) -> None: ... + +class AttributeKey(_message.Message): + __slots__ = ("attribute_id", "key_id") + ATTRIBUTE_ID_FIELD_NUMBER: _ClassVar[int] + KEY_ID_FIELD_NUMBER: _ClassVar[int] + attribute_id: str + key_id: str + def __init__(self, attribute_id: _Optional[str] = ..., key_id: _Optional[str] = ...) -> None: ... + +class ValueKey(_message.Message): + __slots__ = ("value_id", "key_id") + VALUE_ID_FIELD_NUMBER: _ClassVar[int] + KEY_ID_FIELD_NUMBER: _ClassVar[int] + value_id: str + key_id: str + def __init__(self, value_id: _Optional[str] = ..., key_id: _Optional[str] = ...) -> None: ... + +class ListAttributesRequest(_message.Message): + __slots__ = ("state", "namespace", "pagination") + STATE_FIELD_NUMBER: _ClassVar[int] + NAMESPACE_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + state: _common_pb2.ActiveStateEnum + namespace: str + pagination: _selectors_pb2.PageRequest + def __init__(self, state: _Optional[_Union[_common_pb2.ActiveStateEnum, str]] = ..., namespace: _Optional[str] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListAttributesResponse(_message.Message): + __slots__ = ("attributes", "pagination") + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + attributes: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Attribute] + pagination: _selectors_pb2.PageResponse + def __init__(self, attributes: _Optional[_Iterable[_Union[_objects_pb2.Attribute, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... 
+ +class GetAttributeRequest(_message.Message): + __slots__ = ("id", "attribute_id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + attribute_id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., attribute_id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... + +class GetAttributeResponse(_message.Message): + __slots__ = ("attribute",) + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + attribute: _objects_pb2.Attribute + def __init__(self, attribute: _Optional[_Union[_objects_pb2.Attribute, _Mapping]] = ...) -> None: ... + +class CreateAttributeRequest(_message.Message): + __slots__ = ("namespace_id", "name", "rule", "values", "metadata") + NAMESPACE_ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + RULE_FIELD_NUMBER: _ClassVar[int] + VALUES_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + namespace_id: str + name: str + rule: _objects_pb2.AttributeRuleTypeEnum + values: _containers.RepeatedScalarFieldContainer[str] + metadata: _common_pb2.MetadataMutable + def __init__(self, namespace_id: _Optional[str] = ..., name: _Optional[str] = ..., rule: _Optional[_Union[_objects_pb2.AttributeRuleTypeEnum, str]] = ..., values: _Optional[_Iterable[str]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateAttributeResponse(_message.Message): + __slots__ = ("attribute",) + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + attribute: _objects_pb2.Attribute + def __init__(self, attribute: _Optional[_Union[_objects_pb2.Attribute, _Mapping]] = ...) -> None: ... + +class UpdateAttributeRequest(_message.Message): + __slots__ = ("id", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateAttributeResponse(_message.Message): + __slots__ = ("attribute",) + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + attribute: _objects_pb2.Attribute + def __init__(self, attribute: _Optional[_Union[_objects_pb2.Attribute, _Mapping]] = ...) -> None: ... + +class DeactivateAttributeRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeactivateAttributeResponse(_message.Message): + __slots__ = ("attribute",) + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + attribute: _objects_pb2.Attribute + def __init__(self, attribute: _Optional[_Union[_objects_pb2.Attribute, _Mapping]] = ...) -> None: ... + +class GetAttributeValueRequest(_message.Message): + __slots__ = ("id", "value_id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + VALUE_ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + value_id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., value_id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... + +class GetAttributeValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.Value + def __init__(self, value: _Optional[_Union[_objects_pb2.Value, _Mapping]] = ...) -> None: ... 
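The serialized options above embed buf.validate constraints for both lookup requests: supply the deprecated `id` field, or exactly one of the newer identifiers (`attribute_id`/`fqn` for attributes, `value_id`/`fqn` for values), but not both. A hedged sketch of the intended request shapes (the FQN is illustrative; the constraint is enforced server-side, not by the generated class):

from policy.attributes import attributes_pb2

# Preferred: look up by fqn (or by attribute_id) -- one identifier only.
by_fqn = attributes_pb2.GetAttributeRequest(fqn="https://example.com/attr/classification")

# Rejected by the platform's validator: deprecated id combined with a new identifier.
# attributes_pb2.GetAttributeRequest(id="...", fqn="...")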
+ +class ListAttributeValuesRequest(_message.Message): + __slots__ = ("attribute_id", "state", "pagination") + ATTRIBUTE_ID_FIELD_NUMBER: _ClassVar[int] + STATE_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + attribute_id: str + state: _common_pb2.ActiveStateEnum + pagination: _selectors_pb2.PageRequest + def __init__(self, attribute_id: _Optional[str] = ..., state: _Optional[_Union[_common_pb2.ActiveStateEnum, str]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListAttributeValuesResponse(_message.Message): + __slots__ = ("values", "pagination") + VALUES_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + values: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Value] + pagination: _selectors_pb2.PageResponse + def __init__(self, values: _Optional[_Iterable[_Union[_objects_pb2.Value, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... + +class CreateAttributeValueRequest(_message.Message): + __slots__ = ("attribute_id", "value", "metadata") + ATTRIBUTE_ID_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + attribute_id: str + value: str + metadata: _common_pb2.MetadataMutable + def __init__(self, attribute_id: _Optional[str] = ..., value: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateAttributeValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.Value + def __init__(self, value: _Optional[_Union[_objects_pb2.Value, _Mapping]] = ...) -> None: ... + +class UpdateAttributeValueRequest(_message.Message): + __slots__ = ("id", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateAttributeValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.Value + def __init__(self, value: _Optional[_Union[_objects_pb2.Value, _Mapping]] = ...) -> None: ... + +class DeactivateAttributeValueRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeactivateAttributeValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.Value + def __init__(self, value: _Optional[_Union[_objects_pb2.Value, _Mapping]] = ...) -> None: ... + +class GetAttributeValuesByFqnsRequest(_message.Message): + __slots__ = ("fqns", "with_value") + FQNS_FIELD_NUMBER: _ClassVar[int] + WITH_VALUE_FIELD_NUMBER: _ClassVar[int] + fqns: _containers.RepeatedScalarFieldContainer[str] + with_value: _selectors_pb2.AttributeValueSelector + def __init__(self, fqns: _Optional[_Iterable[str]] = ..., with_value: _Optional[_Union[_selectors_pb2.AttributeValueSelector, _Mapping]] = ...) -> None: ... 
+ +class GetAttributeValuesByFqnsResponse(_message.Message): + __slots__ = ("fqn_attribute_values",) + class AttributeAndValue(_message.Message): + __slots__ = ("attribute", "value") + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + attribute: _objects_pb2.Attribute + value: _objects_pb2.Value + def __init__(self, attribute: _Optional[_Union[_objects_pb2.Attribute, _Mapping]] = ..., value: _Optional[_Union[_objects_pb2.Value, _Mapping]] = ...) -> None: ... + class FqnAttributeValuesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: GetAttributeValuesByFqnsResponse.AttributeAndValue + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[GetAttributeValuesByFqnsResponse.AttributeAndValue, _Mapping]] = ...) -> None: ... + FQN_ATTRIBUTE_VALUES_FIELD_NUMBER: _ClassVar[int] + fqn_attribute_values: _containers.MessageMap[str, GetAttributeValuesByFqnsResponse.AttributeAndValue] + def __init__(self, fqn_attribute_values: _Optional[_Mapping[str, GetAttributeValuesByFqnsResponse.AttributeAndValue]] = ...) -> None: ... + +class AssignKeyAccessServerToAttributeRequest(_message.Message): + __slots__ = ("attribute_key_access_server",) + ATTRIBUTE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + attribute_key_access_server: AttributeKeyAccessServer + def __init__(self, attribute_key_access_server: _Optional[_Union[AttributeKeyAccessServer, _Mapping]] = ...) -> None: ... + +class AssignKeyAccessServerToAttributeResponse(_message.Message): + __slots__ = ("attribute_key_access_server",) + ATTRIBUTE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + attribute_key_access_server: AttributeKeyAccessServer + def __init__(self, attribute_key_access_server: _Optional[_Union[AttributeKeyAccessServer, _Mapping]] = ...) -> None: ... + +class RemoveKeyAccessServerFromAttributeRequest(_message.Message): + __slots__ = ("attribute_key_access_server",) + ATTRIBUTE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + attribute_key_access_server: AttributeKeyAccessServer + def __init__(self, attribute_key_access_server: _Optional[_Union[AttributeKeyAccessServer, _Mapping]] = ...) -> None: ... + +class RemoveKeyAccessServerFromAttributeResponse(_message.Message): + __slots__ = ("attribute_key_access_server",) + ATTRIBUTE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + attribute_key_access_server: AttributeKeyAccessServer + def __init__(self, attribute_key_access_server: _Optional[_Union[AttributeKeyAccessServer, _Mapping]] = ...) -> None: ... + +class AssignKeyAccessServerToValueRequest(_message.Message): + __slots__ = ("value_key_access_server",) + VALUE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + value_key_access_server: ValueKeyAccessServer + def __init__(self, value_key_access_server: _Optional[_Union[ValueKeyAccessServer, _Mapping]] = ...) -> None: ... + +class AssignKeyAccessServerToValueResponse(_message.Message): + __slots__ = ("value_key_access_server",) + VALUE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + value_key_access_server: ValueKeyAccessServer + def __init__(self, value_key_access_server: _Optional[_Union[ValueKeyAccessServer, _Mapping]] = ...) -> None: ... 
+ +class RemoveKeyAccessServerFromValueRequest(_message.Message): + __slots__ = ("value_key_access_server",) + VALUE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + value_key_access_server: ValueKeyAccessServer + def __init__(self, value_key_access_server: _Optional[_Union[ValueKeyAccessServer, _Mapping]] = ...) -> None: ... + +class RemoveKeyAccessServerFromValueResponse(_message.Message): + __slots__ = ("value_key_access_server",) + VALUE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + value_key_access_server: ValueKeyAccessServer + def __init__(self, value_key_access_server: _Optional[_Union[ValueKeyAccessServer, _Mapping]] = ...) -> None: ... + +class AssignPublicKeyToAttributeRequest(_message.Message): + __slots__ = ("attribute_key",) + ATTRIBUTE_KEY_FIELD_NUMBER: _ClassVar[int] + attribute_key: AttributeKey + def __init__(self, attribute_key: _Optional[_Union[AttributeKey, _Mapping]] = ...) -> None: ... + +class AssignPublicKeyToAttributeResponse(_message.Message): + __slots__ = ("attribute_key",) + ATTRIBUTE_KEY_FIELD_NUMBER: _ClassVar[int] + attribute_key: AttributeKey + def __init__(self, attribute_key: _Optional[_Union[AttributeKey, _Mapping]] = ...) -> None: ... + +class RemovePublicKeyFromAttributeRequest(_message.Message): + __slots__ = ("attribute_key",) + ATTRIBUTE_KEY_FIELD_NUMBER: _ClassVar[int] + attribute_key: AttributeKey + def __init__(self, attribute_key: _Optional[_Union[AttributeKey, _Mapping]] = ...) -> None: ... + +class RemovePublicKeyFromAttributeResponse(_message.Message): + __slots__ = ("attribute_key",) + ATTRIBUTE_KEY_FIELD_NUMBER: _ClassVar[int] + attribute_key: AttributeKey + def __init__(self, attribute_key: _Optional[_Union[AttributeKey, _Mapping]] = ...) -> None: ... + +class AssignPublicKeyToValueRequest(_message.Message): + __slots__ = ("value_key",) + VALUE_KEY_FIELD_NUMBER: _ClassVar[int] + value_key: ValueKey + def __init__(self, value_key: _Optional[_Union[ValueKey, _Mapping]] = ...) -> None: ... + +class AssignPublicKeyToValueResponse(_message.Message): + __slots__ = ("value_key",) + VALUE_KEY_FIELD_NUMBER: _ClassVar[int] + value_key: ValueKey + def __init__(self, value_key: _Optional[_Union[ValueKey, _Mapping]] = ...) -> None: ... + +class RemovePublicKeyFromValueRequest(_message.Message): + __slots__ = ("value_key",) + VALUE_KEY_FIELD_NUMBER: _ClassVar[int] + value_key: ValueKey + def __init__(self, value_key: _Optional[_Union[ValueKey, _Mapping]] = ...) -> None: ... + +class RemovePublicKeyFromValueResponse(_message.Message): + __slots__ = ("value_key",) + VALUE_KEY_FIELD_NUMBER: _ClassVar[int] + value_key: ValueKey + def __init__(self, value_key: _Optional[_Union[ValueKey, _Mapping]] = ...) -> None: ... 
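For orientation, here is a minimal sketch of how the generated messages above and the Connect client added in the next file might be used together. It assumes the generated packages are importable as `policy.attributes` (matching the imports in the generated code), that the platform is reachable at `http://localhost:8080`, and that `extra_headers` accepts a plain dict; the FQN and bearer token below are placeholders, not values from this change.

# Hypothetical usage sketch; endpoint, header shape, and FQN are assumptions.
from policy.attributes.attributes_pb2 import GetAttributeRequest
from policy.attributes.attributes_pb2_connect import AttributesServiceClient

# The sync client defaults to a urllib3 pool and the Connect protobuf protocol.
client = AttributesServiceClient("http://localhost:8080")

# Look up a single attribute by its fully qualified name (FQN).
req = GetAttributeRequest(fqn="https://example.com/attr/classification")
resp = client.get_attribute(
    req,
    extra_headers={"authorization": "Bearer <access-token>"},  # assumed dict form
    timeout_seconds=10.0,
)
print(resp.attribute)

The `get_attribute` wrapper raises on RPC errors; when access to error details and response metadata is needed, the corresponding low-level `call_get_attribute` returns a `UnaryOutput` instead, as the generated docstrings note.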
diff --git a/otdf-python-proto/src/otdf_python_proto/policy/attributes/attributes_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/attributes/attributes_pb2_connect.py new file mode 100644 index 0000000..e10a553 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/attributes/attributes_pb2_connect.py @@ -0,0 +1,863 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import policy.attributes.attributes_pb2 + +class AttributesServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_list_attributes( + self, req: policy.attributes.attributes_pb2.ListAttributesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.ListAttributesResponse]: + """Low-level method to call ListAttributes, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/ListAttributes" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.ListAttributesResponse,extra_headers, timeout_seconds) + + + def list_attributes( + self, req: policy.attributes.attributes_pb2.ListAttributesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.ListAttributesResponse: + response = self.call_list_attributes(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_attribute_values( + self, req: policy.attributes.attributes_pb2.ListAttributeValuesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.ListAttributeValuesResponse]: + """Low-level method to call ListAttributeValues, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/ListAttributeValues" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.ListAttributeValuesResponse,extra_headers, timeout_seconds) + + + def list_attribute_values( + self, req: 
policy.attributes.attributes_pb2.ListAttributeValuesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.ListAttributeValuesResponse: + response = self.call_list_attribute_values(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_attribute( + self, req: policy.attributes.attributes_pb2.GetAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.GetAttributeResponse]: + """Low-level method to call GetAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/GetAttribute" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.GetAttributeResponse,extra_headers, timeout_seconds) + + + def get_attribute( + self, req: policy.attributes.attributes_pb2.GetAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.GetAttributeResponse: + response = self.call_get_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_attribute_values_by_fqns( + self, req: policy.attributes.attributes_pb2.GetAttributeValuesByFqnsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.GetAttributeValuesByFqnsResponse]: + """Low-level method to call GetAttributeValuesByFqns, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/GetAttributeValuesByFqns" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.GetAttributeValuesByFqnsResponse,extra_headers, timeout_seconds) + + + def get_attribute_values_by_fqns( + self, req: policy.attributes.attributes_pb2.GetAttributeValuesByFqnsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.GetAttributeValuesByFqnsResponse: + response = self.call_get_attribute_values_by_fqns(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_attribute( + self, req: policy.attributes.attributes_pb2.CreateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.CreateAttributeResponse]: + """Low-level method to call CreateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/CreateAttribute" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.CreateAttributeResponse,extra_headers, timeout_seconds) + + + def create_attribute( + self, req: policy.attributes.attributes_pb2.CreateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.CreateAttributeResponse: + response = self.call_create_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + 
raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_attribute( + self, req: policy.attributes.attributes_pb2.UpdateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.UpdateAttributeResponse]: + """Low-level method to call UpdateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/UpdateAttribute" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.UpdateAttributeResponse,extra_headers, timeout_seconds) + + + def update_attribute( + self, req: policy.attributes.attributes_pb2.UpdateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.UpdateAttributeResponse: + response = self.call_update_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_deactivate_attribute( + self, req: policy.attributes.attributes_pb2.DeactivateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.DeactivateAttributeResponse]: + """Low-level method to call DeactivateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/DeactivateAttribute" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.DeactivateAttributeResponse,extra_headers, timeout_seconds) + + + def deactivate_attribute( + self, req: policy.attributes.attributes_pb2.DeactivateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.DeactivateAttributeResponse: + response = self.call_deactivate_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_attribute_value( + self, req: policy.attributes.attributes_pb2.GetAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.GetAttributeValueResponse]: + """Low-level method to call GetAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/GetAttributeValue" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.GetAttributeValueResponse,extra_headers, timeout_seconds) + + + def get_attribute_value( + self, req: policy.attributes.attributes_pb2.GetAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.GetAttributeValueResponse: + response = self.call_get_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_attribute_value( + self, req: policy.attributes.attributes_pb2.CreateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> 
UnaryOutput[policy.attributes.attributes_pb2.CreateAttributeValueResponse]: + """Low-level method to call CreateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/CreateAttributeValue" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.CreateAttributeValueResponse,extra_headers, timeout_seconds) + + + def create_attribute_value( + self, req: policy.attributes.attributes_pb2.CreateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.CreateAttributeValueResponse: + response = self.call_create_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_attribute_value( + self, req: policy.attributes.attributes_pb2.UpdateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.UpdateAttributeValueResponse]: + """Low-level method to call UpdateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/UpdateAttributeValue" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.UpdateAttributeValueResponse,extra_headers, timeout_seconds) + + + def update_attribute_value( + self, req: policy.attributes.attributes_pb2.UpdateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.UpdateAttributeValueResponse: + response = self.call_update_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_deactivate_attribute_value( + self, req: policy.attributes.attributes_pb2.DeactivateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.DeactivateAttributeValueResponse]: + """Low-level method to call DeactivateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/DeactivateAttributeValue" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.DeactivateAttributeValueResponse,extra_headers, timeout_seconds) + + + def deactivate_attribute_value( + self, req: policy.attributes.attributes_pb2.DeactivateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.DeactivateAttributeValueResponse: + response = self.call_deactivate_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_assign_key_access_server_to_attribute( + self, req: policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeResponse]: + """Low-level method to call AssignKeyAccessServerToAttribute, granting access to errors and 
metadata""" + url = self.base_url + "/policy.attributes.AttributesService/AssignKeyAccessServerToAttribute" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeResponse,extra_headers, timeout_seconds) + + + def assign_key_access_server_to_attribute( + self, req: policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeResponse: + response = self.call_assign_key_access_server_to_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_remove_key_access_server_from_attribute( + self, req: policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeResponse]: + """Low-level method to call RemoveKeyAccessServerFromAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/RemoveKeyAccessServerFromAttribute" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeResponse,extra_headers, timeout_seconds) + + + def remove_key_access_server_from_attribute( + self, req: policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeResponse: + response = self.call_remove_key_access_server_from_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_assign_key_access_server_to_value( + self, req: policy.attributes.attributes_pb2.AssignKeyAccessServerToValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.AssignKeyAccessServerToValueResponse]: + """Low-level method to call AssignKeyAccessServerToValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/AssignKeyAccessServerToValue" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.AssignKeyAccessServerToValueResponse,extra_headers, timeout_seconds) + + + def assign_key_access_server_to_value( + self, req: policy.attributes.attributes_pb2.AssignKeyAccessServerToValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.AssignKeyAccessServerToValueResponse: + response = self.call_assign_key_access_server_to_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_remove_key_access_server_from_value( + self, req: policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> 
UnaryOutput[policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueResponse]: + """Low-level method to call RemoveKeyAccessServerFromValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/RemoveKeyAccessServerFromValue" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueResponse,extra_headers, timeout_seconds) + + + def remove_key_access_server_from_value( + self, req: policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueResponse: + response = self.call_remove_key_access_server_from_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_assign_public_key_to_attribute( + self, req: policy.attributes.attributes_pb2.AssignPublicKeyToAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.AssignPublicKeyToAttributeResponse]: + """Low-level method to call AssignPublicKeyToAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/AssignPublicKeyToAttribute" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.AssignPublicKeyToAttributeResponse,extra_headers, timeout_seconds) + + + def assign_public_key_to_attribute( + self, req: policy.attributes.attributes_pb2.AssignPublicKeyToAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.AssignPublicKeyToAttributeResponse: + response = self.call_assign_public_key_to_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_remove_public_key_from_attribute( + self, req: policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeResponse]: + """Low-level method to call RemovePublicKeyFromAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/RemovePublicKeyFromAttribute" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeResponse,extra_headers, timeout_seconds) + + + def remove_public_key_from_attribute( + self, req: policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeResponse: + response = self.call_remove_public_key_from_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_assign_public_key_to_value( + self, req: policy.attributes.attributes_pb2.AssignPublicKeyToValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> 
UnaryOutput[policy.attributes.attributes_pb2.AssignPublicKeyToValueResponse]: + """Low-level method to call AssignPublicKeyToValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/AssignPublicKeyToValue" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.AssignPublicKeyToValueResponse,extra_headers, timeout_seconds) + + + def assign_public_key_to_value( + self, req: policy.attributes.attributes_pb2.AssignPublicKeyToValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.AssignPublicKeyToValueResponse: + response = self.call_assign_public_key_to_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_remove_public_key_from_value( + self, req: policy.attributes.attributes_pb2.RemovePublicKeyFromValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.RemovePublicKeyFromValueResponse]: + """Low-level method to call RemovePublicKeyFromValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/RemovePublicKeyFromValue" + return self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.RemovePublicKeyFromValueResponse,extra_headers, timeout_seconds) + + + def remove_public_key_from_value( + self, req: policy.attributes.attributes_pb2.RemovePublicKeyFromValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.RemovePublicKeyFromValueResponse: + response = self.call_remove_public_key_from_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncAttributesServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_list_attributes( + self, req: policy.attributes.attributes_pb2.ListAttributesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.ListAttributesResponse]: + """Low-level method to call ListAttributes, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/ListAttributes" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.ListAttributesResponse,extra_headers, timeout_seconds) + + async def list_attributes( + self, req: policy.attributes.attributes_pb2.ListAttributesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.ListAttributesResponse: + response = await self.call_list_attributes(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_attribute_values( + self, req: policy.attributes.attributes_pb2.ListAttributeValuesRequest,extra_headers: 
HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.ListAttributeValuesResponse]: + """Low-level method to call ListAttributeValues, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/ListAttributeValues" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.ListAttributeValuesResponse,extra_headers, timeout_seconds) + + async def list_attribute_values( + self, req: policy.attributes.attributes_pb2.ListAttributeValuesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.ListAttributeValuesResponse: + response = await self.call_list_attribute_values(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_attribute( + self, req: policy.attributes.attributes_pb2.GetAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.GetAttributeResponse]: + """Low-level method to call GetAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/GetAttribute" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.GetAttributeResponse,extra_headers, timeout_seconds) + + async def get_attribute( + self, req: policy.attributes.attributes_pb2.GetAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.GetAttributeResponse: + response = await self.call_get_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_attribute_values_by_fqns( + self, req: policy.attributes.attributes_pb2.GetAttributeValuesByFqnsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.GetAttributeValuesByFqnsResponse]: + """Low-level method to call GetAttributeValuesByFqns, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/GetAttributeValuesByFqns" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.GetAttributeValuesByFqnsResponse,extra_headers, timeout_seconds) + + async def get_attribute_values_by_fqns( + self, req: policy.attributes.attributes_pb2.GetAttributeValuesByFqnsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.GetAttributeValuesByFqnsResponse: + response = await self.call_get_attribute_values_by_fqns(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_attribute( + self, req: policy.attributes.attributes_pb2.CreateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.CreateAttributeResponse]: + """Low-level method to call CreateAttribute, granting access to errors and metadata""" + url = 
self.base_url + "/policy.attributes.AttributesService/CreateAttribute" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.CreateAttributeResponse,extra_headers, timeout_seconds) + + async def create_attribute( + self, req: policy.attributes.attributes_pb2.CreateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.CreateAttributeResponse: + response = await self.call_create_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_attribute( + self, req: policy.attributes.attributes_pb2.UpdateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.UpdateAttributeResponse]: + """Low-level method to call UpdateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/UpdateAttribute" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.UpdateAttributeResponse,extra_headers, timeout_seconds) + + async def update_attribute( + self, req: policy.attributes.attributes_pb2.UpdateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.UpdateAttributeResponse: + response = await self.call_update_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_deactivate_attribute( + self, req: policy.attributes.attributes_pb2.DeactivateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.DeactivateAttributeResponse]: + """Low-level method to call DeactivateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/DeactivateAttribute" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.DeactivateAttributeResponse,extra_headers, timeout_seconds) + + async def deactivate_attribute( + self, req: policy.attributes.attributes_pb2.DeactivateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.DeactivateAttributeResponse: + response = await self.call_deactivate_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_attribute_value( + self, req: policy.attributes.attributes_pb2.GetAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.GetAttributeValueResponse]: + """Low-level method to call GetAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/GetAttributeValue" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.GetAttributeValueResponse,extra_headers, timeout_seconds) + + async def get_attribute_value( + self, req: 
policy.attributes.attributes_pb2.GetAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.GetAttributeValueResponse: + response = await self.call_get_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_attribute_value( + self, req: policy.attributes.attributes_pb2.CreateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.CreateAttributeValueResponse]: + """Low-level method to call CreateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/CreateAttributeValue" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.CreateAttributeValueResponse,extra_headers, timeout_seconds) + + async def create_attribute_value( + self, req: policy.attributes.attributes_pb2.CreateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.CreateAttributeValueResponse: + response = await self.call_create_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_attribute_value( + self, req: policy.attributes.attributes_pb2.UpdateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.UpdateAttributeValueResponse]: + """Low-level method to call UpdateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/UpdateAttributeValue" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.UpdateAttributeValueResponse,extra_headers, timeout_seconds) + + async def update_attribute_value( + self, req: policy.attributes.attributes_pb2.UpdateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.UpdateAttributeValueResponse: + response = await self.call_update_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_deactivate_attribute_value( + self, req: policy.attributes.attributes_pb2.DeactivateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.DeactivateAttributeValueResponse]: + """Low-level method to call DeactivateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/DeactivateAttributeValue" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.DeactivateAttributeValueResponse,extra_headers, timeout_seconds) + + async def deactivate_attribute_value( + self, req: policy.attributes.attributes_pb2.DeactivateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> 
policy.attributes.attributes_pb2.DeactivateAttributeValueResponse: + response = await self.call_deactivate_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_assign_key_access_server_to_attribute( + self, req: policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeResponse]: + """Low-level method to call AssignKeyAccessServerToAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/AssignKeyAccessServerToAttribute" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeResponse,extra_headers, timeout_seconds) + + async def assign_key_access_server_to_attribute( + self, req: policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeResponse: + response = await self.call_assign_key_access_server_to_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_remove_key_access_server_from_attribute( + self, req: policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeResponse]: + """Low-level method to call RemoveKeyAccessServerFromAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/RemoveKeyAccessServerFromAttribute" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeResponse,extra_headers, timeout_seconds) + + async def remove_key_access_server_from_attribute( + self, req: policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeResponse: + response = await self.call_remove_key_access_server_from_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_assign_key_access_server_to_value( + self, req: policy.attributes.attributes_pb2.AssignKeyAccessServerToValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.AssignKeyAccessServerToValueResponse]: + """Low-level method to call AssignKeyAccessServerToValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/AssignKeyAccessServerToValue" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.AssignKeyAccessServerToValueResponse,extra_headers, timeout_seconds) + + async def 
assign_key_access_server_to_value( + self, req: policy.attributes.attributes_pb2.AssignKeyAccessServerToValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.AssignKeyAccessServerToValueResponse: + response = await self.call_assign_key_access_server_to_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_remove_key_access_server_from_value( + self, req: policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueResponse]: + """Low-level method to call RemoveKeyAccessServerFromValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/RemoveKeyAccessServerFromValue" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueResponse,extra_headers, timeout_seconds) + + async def remove_key_access_server_from_value( + self, req: policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueResponse: + response = await self.call_remove_key_access_server_from_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_assign_public_key_to_attribute( + self, req: policy.attributes.attributes_pb2.AssignPublicKeyToAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.AssignPublicKeyToAttributeResponse]: + """Low-level method to call AssignPublicKeyToAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/AssignPublicKeyToAttribute" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.AssignPublicKeyToAttributeResponse,extra_headers, timeout_seconds) + + async def assign_public_key_to_attribute( + self, req: policy.attributes.attributes_pb2.AssignPublicKeyToAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.AssignPublicKeyToAttributeResponse: + response = await self.call_assign_public_key_to_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_remove_public_key_from_attribute( + self, req: policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeResponse]: + """Low-level method to call RemovePublicKeyFromAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/RemovePublicKeyFromAttribute" + return await self._connect_client.call_unary(url, req, 
policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeResponse,extra_headers, timeout_seconds) + + async def remove_public_key_from_attribute( + self, req: policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeResponse: + response = await self.call_remove_public_key_from_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_assign_public_key_to_value( + self, req: policy.attributes.attributes_pb2.AssignPublicKeyToValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.AssignPublicKeyToValueResponse]: + """Low-level method to call AssignPublicKeyToValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/AssignPublicKeyToValue" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.AssignPublicKeyToValueResponse,extra_headers, timeout_seconds) + + async def assign_public_key_to_value( + self, req: policy.attributes.attributes_pb2.AssignPublicKeyToValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.AssignPublicKeyToValueResponse: + response = await self.call_assign_public_key_to_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_remove_public_key_from_value( + self, req: policy.attributes.attributes_pb2.RemovePublicKeyFromValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.attributes.attributes_pb2.RemovePublicKeyFromValueResponse]: + """Low-level method to call RemovePublicKeyFromValue, granting access to errors and metadata""" + url = self.base_url + "/policy.attributes.AttributesService/RemovePublicKeyFromValue" + return await self._connect_client.call_unary(url, req, policy.attributes.attributes_pb2.RemovePublicKeyFromValueResponse,extra_headers, timeout_seconds) + + async def remove_public_key_from_value( + self, req: policy.attributes.attributes_pb2.RemovePublicKeyFromValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.attributes.attributes_pb2.RemovePublicKeyFromValueResponse: + response = await self.call_remove_public_key_from_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class AttributesServiceProtocol(typing.Protocol): + def list_attributes(self, req: ClientRequest[policy.attributes.attributes_pb2.ListAttributesRequest]) -> ServerResponse[policy.attributes.attributes_pb2.ListAttributesResponse]: + ... + def list_attribute_values(self, req: ClientRequest[policy.attributes.attributes_pb2.ListAttributeValuesRequest]) -> ServerResponse[policy.attributes.attributes_pb2.ListAttributeValuesResponse]: + ... 
+ def get_attribute(self, req: ClientRequest[policy.attributes.attributes_pb2.GetAttributeRequest]) -> ServerResponse[policy.attributes.attributes_pb2.GetAttributeResponse]: + ... + def get_attribute_values_by_fqns(self, req: ClientRequest[policy.attributes.attributes_pb2.GetAttributeValuesByFqnsRequest]) -> ServerResponse[policy.attributes.attributes_pb2.GetAttributeValuesByFqnsResponse]: + ... + def create_attribute(self, req: ClientRequest[policy.attributes.attributes_pb2.CreateAttributeRequest]) -> ServerResponse[policy.attributes.attributes_pb2.CreateAttributeResponse]: + ... + def update_attribute(self, req: ClientRequest[policy.attributes.attributes_pb2.UpdateAttributeRequest]) -> ServerResponse[policy.attributes.attributes_pb2.UpdateAttributeResponse]: + ... + def deactivate_attribute(self, req: ClientRequest[policy.attributes.attributes_pb2.DeactivateAttributeRequest]) -> ServerResponse[policy.attributes.attributes_pb2.DeactivateAttributeResponse]: + ... + def get_attribute_value(self, req: ClientRequest[policy.attributes.attributes_pb2.GetAttributeValueRequest]) -> ServerResponse[policy.attributes.attributes_pb2.GetAttributeValueResponse]: + ... + def create_attribute_value(self, req: ClientRequest[policy.attributes.attributes_pb2.CreateAttributeValueRequest]) -> ServerResponse[policy.attributes.attributes_pb2.CreateAttributeValueResponse]: + ... + def update_attribute_value(self, req: ClientRequest[policy.attributes.attributes_pb2.UpdateAttributeValueRequest]) -> ServerResponse[policy.attributes.attributes_pb2.UpdateAttributeValueResponse]: + ... + def deactivate_attribute_value(self, req: ClientRequest[policy.attributes.attributes_pb2.DeactivateAttributeValueRequest]) -> ServerResponse[policy.attributes.attributes_pb2.DeactivateAttributeValueResponse]: + ... + def assign_key_access_server_to_attribute(self, req: ClientRequest[policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeRequest]) -> ServerResponse[policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeResponse]: + ... + def remove_key_access_server_from_attribute(self, req: ClientRequest[policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeRequest]) -> ServerResponse[policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeResponse]: + ... + def assign_key_access_server_to_value(self, req: ClientRequest[policy.attributes.attributes_pb2.AssignKeyAccessServerToValueRequest]) -> ServerResponse[policy.attributes.attributes_pb2.AssignKeyAccessServerToValueResponse]: + ... + def remove_key_access_server_from_value(self, req: ClientRequest[policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueRequest]) -> ServerResponse[policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueResponse]: + ... + def assign_public_key_to_attribute(self, req: ClientRequest[policy.attributes.attributes_pb2.AssignPublicKeyToAttributeRequest]) -> ServerResponse[policy.attributes.attributes_pb2.AssignPublicKeyToAttributeResponse]: + ... + def remove_public_key_from_attribute(self, req: ClientRequest[policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeRequest]) -> ServerResponse[policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeResponse]: + ... + def assign_public_key_to_value(self, req: ClientRequest[policy.attributes.attributes_pb2.AssignPublicKeyToValueRequest]) -> ServerResponse[policy.attributes.attributes_pb2.AssignPublicKeyToValueResponse]: + ... 
+ def remove_public_key_from_value(self, req: ClientRequest[policy.attributes.attributes_pb2.RemovePublicKeyFromValueRequest]) -> ServerResponse[policy.attributes.attributes_pb2.RemovePublicKeyFromValueResponse]: + ... + +ATTRIBUTES_SERVICE_PATH_PREFIX = "/policy.attributes.AttributesService" + +def wsgi_attributes_service(implementation: AttributesServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.attributes.AttributesService/ListAttributes", implementation.list_attributes, policy.attributes.attributes_pb2.ListAttributesRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/ListAttributeValues", implementation.list_attribute_values, policy.attributes.attributes_pb2.ListAttributeValuesRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/GetAttribute", implementation.get_attribute, policy.attributes.attributes_pb2.GetAttributeRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/GetAttributeValuesByFqns", implementation.get_attribute_values_by_fqns, policy.attributes.attributes_pb2.GetAttributeValuesByFqnsRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/CreateAttribute", implementation.create_attribute, policy.attributes.attributes_pb2.CreateAttributeRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/UpdateAttribute", implementation.update_attribute, policy.attributes.attributes_pb2.UpdateAttributeRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/DeactivateAttribute", implementation.deactivate_attribute, policy.attributes.attributes_pb2.DeactivateAttributeRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/GetAttributeValue", implementation.get_attribute_value, policy.attributes.attributes_pb2.GetAttributeValueRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/CreateAttributeValue", implementation.create_attribute_value, policy.attributes.attributes_pb2.CreateAttributeValueRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/UpdateAttributeValue", implementation.update_attribute_value, policy.attributes.attributes_pb2.UpdateAttributeValueRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/DeactivateAttributeValue", implementation.deactivate_attribute_value, policy.attributes.attributes_pb2.DeactivateAttributeValueRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/AssignKeyAccessServerToAttribute", implementation.assign_key_access_server_to_attribute, policy.attributes.attributes_pb2.AssignKeyAccessServerToAttributeRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/RemoveKeyAccessServerFromAttribute", implementation.remove_key_access_server_from_attribute, policy.attributes.attributes_pb2.RemoveKeyAccessServerFromAttributeRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/AssignKeyAccessServerToValue", implementation.assign_key_access_server_to_value, policy.attributes.attributes_pb2.AssignKeyAccessServerToValueRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/RemoveKeyAccessServerFromValue", implementation.remove_key_access_server_from_value, policy.attributes.attributes_pb2.RemoveKeyAccessServerFromValueRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/AssignPublicKeyToAttribute", implementation.assign_public_key_to_attribute, policy.attributes.attributes_pb2.AssignPublicKeyToAttributeRequest) + 
app.register_unary_rpc("/policy.attributes.AttributesService/RemovePublicKeyFromAttribute", implementation.remove_public_key_from_attribute, policy.attributes.attributes_pb2.RemovePublicKeyFromAttributeRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/AssignPublicKeyToValue", implementation.assign_public_key_to_value, policy.attributes.attributes_pb2.AssignPublicKeyToValueRequest) + app.register_unary_rpc("/policy.attributes.AttributesService/RemovePublicKeyFromValue", implementation.remove_public_key_from_value, policy.attributes.attributes_pb2.RemovePublicKeyFromValueRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.py new file mode 100644 index 0000000..8f3a48e --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.py @@ -0,0 +1,266 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/kasregistry/key_access_server_registry.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/kasregistry/key_access_server_registry.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 +from policy import selectors_pb2 as policy_dot_selectors__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n3policy/kasregistry/key_access_server_registry.proto\x12\x12policy.kasregistry\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x14policy/objects.proto\x1a\x16policy/selectors.proto\"\xe4\x03\n\x19GetKeyAccessServerRequest\x12\x1d\n\x02id\x18\x01 \x01(\tB\r\x18\x01\xbaH\x08r\x03\xb0\x01\x01\xd8\x01\x01R\x02id\x12!\n\x06kas_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x05kasId\x12\x1d\n\x04name\x18\x03 \x01(\tB\x07\xbaH\x04r\x02\x10\x01H\x00R\x04name\x12\x1e\n\x03uri\x18\x04 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x03uri:\xb7\x02\xbaH\xb3\x02\x1a\xa8\x01\n\x10\x65xclusive_fields\x12JEither use deprecated \'id\' field or one of \'kas_id\' or \'uri\', but not both\x1aH!(has(this.id) && (has(this.kas_id) || has(this.uri) || has(this.name)))\x1a\x85\x01\n\x0frequired_fields\x12-Either id or one of kas_id or uri must be set\x1a\x43has(this.id) || has(this.kas_id) || has(this.uri) || has(this.name)B\x0c\n\nidentifier\"a\n\x1aGetKeyAccessServerResponse\x12\x43\n\x11key_access_server\x18\x01 \x01(\x0b\x32\x17.policy.KeyAccessServerR\x0fkeyAccessServer\"R\n\x1bListKeyAccessServersRequest\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\x9b\x01\n\x1cListKeyAccessServersResponse\x12\x45\n\x12key_access_servers\x18\x01 
\x03(\x0b\x32\x17.policy.KeyAccessServerR\x10keyAccessServers\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\x95\x06\n\x1c\x43reateKeyAccessServerRequest\x12\x87\x02\n\x03uri\x18\x01 \x01(\tB\xf4\x01\xbaH\xf0\x01\xba\x01\xec\x01\n\nuri_format\x12\xcf\x01URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\x1a\x0cthis.isUri()R\x03uri\x12\x30\n\npublic_key\x18\x02 \x01(\x0b\x32\x11.policy.PublicKeyR\tpublicKey\x12@\n\x0bsource_type\x18\x03 \x01(\x0e\x32\x12.policy.SourceTypeB\x0b\xbaH\x08\x82\x01\x02\x10\x01\xc8\x01\x00R\nsourceType\x12\xc1\x02\n\x04name\x18\x14 \x01(\tB\xac\x02\xbaH\xa8\x02r\x03\x18\xfd\x01\xba\x01\x9c\x02\n\x0fkas_name_format\x12\xb3\x01Registered KAS name must be an alphanumeric string, allowing hyphens, and underscores but not as the first or last character. The stored KAS name will be normalized to lower case.\x1aSsize(this) > 0 ? this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\xc8\x01\x00R\x04name\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"d\n\x1d\x43reateKeyAccessServerResponse\x12\x43\n\x11key_access_server\x18\x01 \x01(\x0b\x32\x17.policy.KeyAccessServerR\x0fkeyAccessServer\"\xa5\x07\n\x1cUpdateKeyAccessServerRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xac\x02\n\x03uri\x18\x02 \x01(\tB\x99\x02\xbaH\x95\x02\xba\x01\x91\x02\n\x13optional_uri_format\x12\xd8\x01Optional URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\x1a\x1fsize(this) == 0 || this.isUri()R\x03uri\x12\x30\n\npublic_key\x18\x03 \x01(\x0b\x32\x11.policy.PublicKeyR\tpublicKey\x12@\n\x0bsource_type\x18\x04 \x01(\x0e\x32\x12.policy.SourceTypeB\x0b\xbaH\x08\x82\x01\x02\x10\x01\xc8\x01\x00R\nsourceType\x12\xbc\x02\n\x04name\x18\x14 \x01(\tB\xa7\x02\xbaH\xa3\x02r\x03\x18\xfd\x01\xba\x01\x97\x02\n\x0fkas_name_format\x12\xb3\x01Registered KAS name must be an alphanumeric string, allowing hyphens, and underscores but not as the first or last character. 
The stored KAS name will be normalized to lower case.\x1aNsize(this) == 0 || this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\xc8\x01\x00R\x04name\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"d\n\x1dUpdateKeyAccessServerResponse\x12\x43\n\x11key_access_server\x18\x01 \x01(\x0b\x32\x17.policy.KeyAccessServerR\x0fkeyAccessServer\"8\n\x1c\x44\x65leteKeyAccessServerRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"d\n\x1d\x44\x65leteKeyAccessServerResponse\x12\x43\n\x11key_access_server\x18\x01 \x01(\x0b\x32\x17.policy.KeyAccessServerR\x0fkeyAccessServer\"7\n\x13GrantedPolicyObject\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x10\n\x03\x66qn\x18\x02 \x01(\tR\x03\x66qn\"\xd0\x02\n\x15KeyAccessServerGrants\x12\x43\n\x11key_access_server\x18\x01 \x01(\x0b\x32\x17.policy.KeyAccessServerR\x0fkeyAccessServer\x12R\n\x10namespace_grants\x18\x02 \x03(\x0b\x32\'.policy.kasregistry.GrantedPolicyObjectR\x0fnamespaceGrants\x12R\n\x10\x61ttribute_grants\x18\x03 \x03(\x0b\x32\'.policy.kasregistry.GrantedPolicyObjectR\x0f\x61ttributeGrants\x12J\n\x0cvalue_grants\x18\x04 \x03(\x0b\x32\'.policy.kasregistry.GrantedPolicyObjectR\x0bvalueGrants\"\x9e\x01\n\x16\x43reatePublicKeyRequest\x12\x1f\n\x06kas_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x05kasId\x12.\n\x03key\x18\x02 \x01(\x0b\x32\x14.policy.KasPublicKeyB\x06\xbaH\x03\xc8\x01\x01R\x03key\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"8\n\x17\x43reatePublicKeyResponse\x12\x1d\n\x03key\x18\x01 \x01(\x0b\x32\x0b.policy.KeyR\x03key\"?\n\x13GetPublicKeyRequest\x12\x1a\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x02idB\x0c\n\nidentifier\"5\n\x14GetPublicKeyResponse\x12\x1d\n\x03key\x18\x01 \x01(\x0b\x32\x0b.policy.KeyR\x03key\"\xca\x01\n\x15ListPublicKeysRequest\x12!\n\x06kas_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x05kasId\x12$\n\x08kas_name\x18\x02 \x01(\tB\x07\xbaH\x04r\x02\x10\x01H\x00R\x07kasName\x12%\n\x07kas_uri\x18\x03 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x06kasUri\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npaginationB\x0c\n\nkas_filter\"o\n\x16ListPublicKeysResponse\x12\x1f\n\x04keys\x18\x01 \x03(\x0b\x32\x0b.policy.KeyR\x04keys\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\x81\x02\n\x1bListPublicKeyMappingRequest\x12!\n\x06kas_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x05kasId\x12$\n\x08kas_name\x18\x02 \x01(\tB\x07\xbaH\x04r\x02\x10\x01H\x00R\x07kasName\x12%\n\x07kas_uri\x18\x03 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x06kasUri\x12/\n\rpublic_key_id\x18\x04 \x01(\tB\x0b\xbaH\x08r\x03\xb0\x01\x01\xd8\x01\x01R\x0bpublicKeyId\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npaginationB\x0c\n\nkas_filter\"\xf6\x05\n\x1cListPublicKeyMappingResponse\x12q\n\x13public_key_mappings\x18\x01 \x03(\x0b\x32\x41.policy.kasregistry.ListPublicKeyMappingResponse.PublicKeyMappingR\x11publicKeyMappings\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\x1a\xba\x01\n\x10PublicKeyMapping\x12\x15\n\x06kas_id\x18\x02 \x01(\tR\x05kasId\x12\x19\n\x08kas_name\x18\x03 \x01(\tR\x07kasName\x12\x17\n\x07kas_uri\x18\x04 \x01(\tR\x06kasUri\x12[\n\x0bpublic_keys\x18\x05 
\x03(\x0b\x32:.policy.kasregistry.ListPublicKeyMappingResponse.PublicKeyR\npublicKeys\x1a\xbe\x02\n\tPublicKey\x12\x1d\n\x03key\x18\x01 \x01(\x0b\x32\x0b.policy.KeyR\x03key\x12T\n\x06values\x18\x06 \x03(\x0b\x32<.policy.kasregistry.ListPublicKeyMappingResponse.AssociationR\x06values\x12^\n\x0b\x64\x65\x66initions\x18\x07 \x03(\x0b\x32<.policy.kasregistry.ListPublicKeyMappingResponse.AssociationR\x0b\x64\x65\x66initions\x12\\\n\nnamespaces\x18\x08 \x03(\x0b\x32<.policy.kasregistry.ListPublicKeyMappingResponse.AssociationR\nnamespaces\x1a/\n\x0b\x41ssociation\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x10\n\x03\x66qn\x18\x02 \x01(\tR\x03\x66qn\"\xbd\x01\n\x16UpdatePublicKeyRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"8\n\x17UpdatePublicKeyResponse\x12\x1d\n\x03key\x18\x01 \x01(\x0b\x32\x0b.policy.KeyR\x03key\"6\n\x1a\x44\x65\x61\x63tivatePublicKeyRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"<\n\x1b\x44\x65\x61\x63tivatePublicKeyResponse\x12\x1d\n\x03key\x18\x01 \x01(\x0b\x32\x0b.policy.KeyR\x03key\"4\n\x18\x41\x63tivatePublicKeyRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\":\n\x19\x41\x63tivatePublicKeyResponse\x12\x1d\n\x03key\x18\x01 \x01(\x0b\x32\x0b.policy.KeyR\x03key\"\xa5\x07\n ListKeyAccessServerGrantsRequest\x12\xcb\x01\n\x06kas_id\x18\x01 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x05kasId\x12\xb3\x02\n\x07kas_uri\x18\x02 \x01(\tB\x99\x02\xbaH\x95\x02\xba\x01\x91\x02\n\x13optional_uri_format\x12\xd8\x01Optional URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\x1a\x1fsize(this) == 0 || this.isUri()R\x06kasUri\x12\xc3\x02\n\x08kas_name\x18\x03 \x01(\tB\xa7\x02\xbaH\xa3\x02r\x03\x18\xfd\x01\xba\x01\x97\x02\n\x0fkas_name_format\x12\xb3\x01Registered KAS name must be an alphanumeric string, allowing hyphens, and underscores but not as the first or last character. 
The stored KAS name will be normalized to lower case.\x1aNsize(this) == 0 || this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\xc8\x01\x00R\x07kasName\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination:\x02\x18\x01\"\xa4\x01\n!ListKeyAccessServerGrantsResponse\x12\x45\n\x06grants\x18\x01 \x03(\x0b\x32).policy.kasregistry.KeyAccessServerGrantsB\x02\x18\x01R\x06grants\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination:\x02\x18\x01\"\xb1\x0c\n\x10\x43reateKeyRequest\x12\x1f\n\x06kas_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x05kasId\x12\x1e\n\x06key_id\x18\x02 \x01(\tB\x07\xbaH\x04r\x02\x10\x01R\x05keyId\x12\xa1\x01\n\rkey_algorithm\x18\x03 \x01(\x0e\x32\x11.policy.AlgorithmBi\xbaHf\xba\x01\x63\n\x15key_algorithm_defined\x12\x34The key_algorithm must be one of the defined values.\x1a\x14this in [1, 2, 3, 4]R\x0ckeyAlgorithm\x12\x93\x01\n\x08key_mode\x18\x04 \x01(\x0e\x32\x0f.policy.KeyModeBg\xbaHd\xba\x01\x61\n\x10key_mode_defined\x12\x35The key_mode must be one of the defined values (1-4).\x1a\x16this >= 1 && this <= 4R\x07keyMode\x12\x42\n\x0epublic_key_ctx\x18\x05 \x01(\x0b\x32\x14.policy.PublicKeyCtxB\x06\xbaH\x03\xc8\x01\x01R\x0cpublicKeyCtx\x12=\n\x0fprivate_key_ctx\x18\x06 \x01(\x0b\x32\x15.policy.PrivateKeyCtxR\rprivateKeyCtx\x12,\n\x12provider_config_id\x18\x07 \x01(\tR\x10providerConfigId\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata:\xbb\x07\xbaH\xb7\x07\x1a\x97\x03\n#private_key_ctx_optionally_required\x12\xbc\x01The wrapped_key is required if key_mode is KEY_MODE_CONFIG_ROOT_KEY or KEY_MODE_PROVIDER_ROOT_KEY. The wrapped_key must be empty if key_mode is KEY_MODE_REMOTE or KEY_MODE_PUBLIC_KEY_ONLY.\x1a\xb0\x01((this.key_mode == 1 || this.key_mode == 2) && this.private_key_ctx.wrapped_key != \'\') || ((this.key_mode == 3 || this.key_mode == 4) && this.private_key_ctx.wrapped_key == \'\')\x1a\xf4\x02\n&provider_config_id_optionally_required\x12\xa8\x01Provider config id is required if key_mode is KEY_MODE_PROVIDER_ROOT_KEY or KEY_MODE_REMOTE. 
It must be empty for KEY_MODE_CONFIG_ROOT_KEY and KEY_MODE_PUBLIC_KEY_ONLY.\x1a\x9e\x01((this.key_mode == 1 || this.key_mode == 4) && this.provider_config_id == \'\') || ((this.key_mode == 2 || this.key_mode == 3) && this.provider_config_id != \'\')\x1a\xa3\x01\n#private_key_ctx_for_public_key_only\x12Hprivate_key_ctx must not be set if key_mode is KEY_MODE_PUBLIC_KEY_ONLY.\x1a\x32!(this.key_mode == 4 && has(this.private_key_ctx))\"<\n\x11\x43reateKeyResponse\x12\'\n\x07kas_key\x18\x01 \x01(\x0b\x32\x0e.policy.KasKeyR\x06kasKey\"z\n\rGetKeyRequest\x12\x1a\n\x02id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x02id\x12\x38\n\x03key\x18\x03 \x01(\x0b\x32$.policy.kasregistry.KasKeyIdentifierH\x00R\x03keyB\x13\n\nidentifier\x12\x05\xbaH\x02\x08\x01\"9\n\x0eGetKeyResponse\x12\'\n\x07kas_key\x18\x01 \x01(\x0b\x32\x0e.policy.KasKeyR\x06kasKey\"\xeb\x02\n\x0fListKeysRequest\x12\xa4\x01\n\rkey_algorithm\x18\x01 \x01(\x0e\x32\x11.policy.AlgorithmBl\xbaHi\xba\x01\x66\n\x15key_algorithm_defined\x12\x34The key_algorithm must be one of the defined values.\x1a\x17this in [0, 1, 2, 3, 4]R\x0ckeyAlgorithm\x12!\n\x06kas_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x05kasId\x12$\n\x08kas_name\x18\x03 \x01(\tB\x07\xbaH\x04r\x02\x10\x01H\x00R\x07kasName\x12%\n\x07kas_uri\x18\x04 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x06kasUri\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npaginationB\x0c\n\nkas_filter\"s\n\x10ListKeysResponse\x12)\n\x08kas_keys\x18\x01 \x03(\x0b\x32\x0e.policy.KasKeyR\x07kasKeys\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\x86\x03\n\x10UpdateKeyRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior:\xcc\x01\xbaH\xc8\x01\x1a\xc5\x01\n\x18metadata_update_behavior\x12RMetadata update behavior must be either APPEND or REPLACE, when updating metadata.\x1aU((!has(this.metadata)) || (has(this.metadata) && this.metadata_update_behavior != 0))\"<\n\x11UpdateKeyResponse\x12\'\n\x07kas_key\x18\x01 \x01(\x0b\x32\x0e.policy.KasKeyR\x06kasKey\"\xa4\x01\n\x10KasKeyIdentifier\x12!\n\x06kas_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x05kasId\x12\x1d\n\x04name\x18\x03 \x01(\tB\x07\xbaH\x04r\x02\x10\x01H\x00R\x04name\x12\x1e\n\x03uri\x18\x04 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x03uri\x12\x19\n\x03kid\x18\x05 \x01(\tB\x07\xbaH\x04r\x02\x10\x01R\x03kidB\x13\n\nidentifier\x12\x05\xbaH\x02\x08\x01\"\xe2\x0e\n\x10RotateKeyRequest\x12\x1a\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x02id\x12\x38\n\x03key\x18\x02 \x01(\x0b\x32$.policy.kasregistry.KasKeyIdentifierH\x00R\x03key\x12\x44\n\x07new_key\x18\x03 \x01(\x0b\x32+.policy.kasregistry.RotateKeyRequest.NewKeyR\x06newKey\x1a\xcc\x04\n\x06NewKey\x12\x1e\n\x06key_id\x18\x01 \x01(\tB\x07\xbaH\x04r\x02\x10\x01R\x05keyId\x12\x9a\x01\n\talgorithm\x18\x02 \x01(\x0e\x32\x11.policy.AlgorithmBi\xbaHf\xba\x01\x63\n\x15key_algorithm_defined\x12\x34The key_algorithm must be one of the defined values.\x1a\x14this in [1, 2, 3, 4]R\talgorithm\x12\x9e\x01\n\x08key_mode\x18\x03 \x01(\x0e\x32\x0f.policy.KeyModeBr\xbaHo\x82\x01\x02\x10\x01\xba\x01g\n\x14new_key_mode_defined\x12\x39The new key_mode must be one of the defined values (1-4).\x1a\x14this in [1, 2, 3, 4]R\x07keyMode\x12\x42\n\x0epublic_key_ctx\x18\x04 
\x01(\x0b\x32\x14.policy.PublicKeyCtxB\x06\xbaH\x03\xc8\x01\x01R\x0cpublicKeyCtx\x12=\n\x0fprivate_key_ctx\x18\x05 \x01(\x0b\x32\x15.policy.PrivateKeyCtxR\rprivateKeyCtx\x12,\n\x12provider_config_id\x18\x06 \x01(\tR\x10providerConfigId\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata:\xcd\x08\xbaH\xc9\x08\x1a\xd8\x03\n#private_key_ctx_optionally_required\x12\xcd\x01\x46or the new key, the wrapped_key is required if key_mode is KEY_MODE_CONFIG_ROOT_KEY or KEY_MODE_PROVIDER_ROOT_KEY. The wrapped_key must be empty if key_mode is KEY_MODE_REMOTE or KEY_MODE_PUBLIC_KEY_ONLY.\x1a\xe0\x01((this.new_key.key_mode == 1 || this.new_key.key_mode == 2) && this.new_key.private_key_ctx.wrapped_key != \'\') || ((this.new_key.key_mode == 3 || this.new_key.key_mode == 4) && this.new_key.private_key_ctx.wrapped_key == \'\')\x1a\xb5\x03\n&provider_config_id_optionally_required\x12\xb9\x01\x46or the new key, provider config id is required if key_mode is KEY_MODE_PROVIDER_ROOT_KEY or KEY_MODE_REMOTE. It must be empty for KEY_MODE_CONFIG_ROOT_KEY and KEY_MODE_PUBLIC_KEY_ONLY.\x1a\xce\x01((this.new_key.key_mode == 1 || this.new_key.key_mode == 4) && this.new_key.provider_config_id == \'\') || ((this.new_key.key_mode == 2 || this.new_key.key_mode == 3) && this.new_key.provider_config_id != \'\')\x1a\xb3\x01\n#private_key_ctx_for_public_key_only\x12Hprivate_key_ctx must not be set if key_mode is KEY_MODE_PUBLIC_KEY_ONLY.\x1a\x42!(this.new_key.key_mode == 4 && has(this.new_key.private_key_ctx))B\x13\n\nactive_key\x12\x05\xbaH\x02\x08\x01\"2\n\x0e\x43hangeMappings\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x10\n\x03\x66qn\x18\x02 \x01(\tR\x03\x66qn\"\xe3\x02\n\x10RotatedResources\x12\x36\n\x0frotated_out_key\x18\x01 \x01(\x0b\x32\x0e.policy.KasKeyR\rrotatedOutKey\x12\x66\n\x1d\x61ttribute_definition_mappings\x18\x02 \x03(\x0b\x32\".policy.kasregistry.ChangeMappingsR\x1b\x61ttributeDefinitionMappings\x12\\\n\x18\x61ttribute_value_mappings\x18\x03 \x03(\x0b\x32\".policy.kasregistry.ChangeMappingsR\x16\x61ttributeValueMappings\x12Q\n\x12namespace_mappings\x18\x04 \x03(\x0b\x32\".policy.kasregistry.ChangeMappingsR\x11namespaceMappings\"\x8f\x01\n\x11RotateKeyResponse\x12\'\n\x07kas_key\x18\x01 \x01(\x0b\x32\x0e.policy.KasKeyR\x06kasKey\x12Q\n\x11rotated_resources\x18\x02 \x01(\x0b\x32$.policy.kasregistry.RotatedResourcesR\x10rotatedResources\"~\n\x11SetBaseKeyRequest\x12\x1a\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x02id\x12\x38\n\x03key\x18\x02 \x01(\x0b\x32$.policy.kasregistry.KasKeyIdentifierH\x00R\x03keyB\x13\n\nactive_key\x12\x05\xbaH\x02\x08\x01\"\x13\n\x11GetBaseKeyRequest\"E\n\x12GetBaseKeyResponse\x12/\n\x08\x62\x61se_key\x18\x01 \x01(\x0b\x32\x14.policy.SimpleKasKeyR\x07\x62\x61seKey\"\x8e\x01\n\x12SetBaseKeyResponse\x12\x36\n\x0cnew_base_key\x18\x01 \x01(\x0b\x32\x14.policy.SimpleKasKeyR\nnewBaseKey\x12@\n\x11previous_base_key\x18\x02 
\x01(\x0b\x32\x14.policy.SimpleKasKeyR\x0fpreviousBaseKey2\xc7\x0b\n\x1eKeyAccessServerRegistryService\x12\x99\x01\n\x14ListKeyAccessServers\x12/.policy.kasregistry.ListKeyAccessServersRequest\x1a\x30.policy.kasregistry.ListKeyAccessServersResponse\"\x1e\x90\x02\x01\x82\xd3\xe4\x93\x02\x15\x12\x13/key-access-servers\x12x\n\x12GetKeyAccessServer\x12-.policy.kasregistry.GetKeyAccessServerRequest\x1a..policy.kasregistry.GetKeyAccessServerResponse\"\x03\x90\x02\x01\x12~\n\x15\x43reateKeyAccessServer\x12\x30.policy.kasregistry.CreateKeyAccessServerRequest\x1a\x31.policy.kasregistry.CreateKeyAccessServerResponse\"\x00\x12~\n\x15UpdateKeyAccessServer\x12\x30.policy.kasregistry.UpdateKeyAccessServerRequest\x1a\x31.policy.kasregistry.UpdateKeyAccessServerResponse\"\x00\x12~\n\x15\x44\x65leteKeyAccessServer\x12\x30.policy.kasregistry.DeleteKeyAccessServerRequest\x1a\x31.policy.kasregistry.DeleteKeyAccessServerResponse\"\x00\x12\x90\x01\n\x19ListKeyAccessServerGrants\x12\x34.policy.kasregistry.ListKeyAccessServerGrantsRequest\x1a\x35.policy.kasregistry.ListKeyAccessServerGrantsResponse\"\x06\x88\x02\x01\x90\x02\x01\x12Z\n\tCreateKey\x12$.policy.kasregistry.CreateKeyRequest\x1a%.policy.kasregistry.CreateKeyResponse\"\x00\x12Q\n\x06GetKey\x12!.policy.kasregistry.GetKeyRequest\x1a\".policy.kasregistry.GetKeyResponse\"\x00\x12W\n\x08ListKeys\x12#.policy.kasregistry.ListKeysRequest\x1a$.policy.kasregistry.ListKeysResponse\"\x00\x12Z\n\tUpdateKey\x12$.policy.kasregistry.UpdateKeyRequest\x1a%.policy.kasregistry.UpdateKeyResponse\"\x00\x12Z\n\tRotateKey\x12$.policy.kasregistry.RotateKeyRequest\x1a%.policy.kasregistry.RotateKeyResponse\"\x00\x12]\n\nSetBaseKey\x12%.policy.kasregistry.SetBaseKeyRequest\x1a&.policy.kasregistry.SetBaseKeyResponse\"\x00\x12]\n\nGetBaseKey\x12%.policy.kasregistry.GetBaseKeyRequest\x1a&.policy.kasregistry.GetBaseKeyResponse\"\x00\x42\x9f\x01\n\x16\x63om.policy.kasregistryB\x1cKeyAccessServerRegistryProtoP\x01\xa2\x02\x03PKX\xaa\x02\x12Policy.Kasregistry\xca\x02\x12Policy\\Kasregistry\xe2\x02\x1ePolicy\\Kasregistry\\GPBMetadata\xea\x02\x13Policy::Kasregistryb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.kasregistry.key_access_server_registry_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\026com.policy.kasregistryB\034KeyAccessServerRegistryProtoP\001\242\002\003PKX\252\002\022Policy.Kasregistry\312\002\022Policy\\Kasregistry\342\002\036Policy\\Kasregistry\\GPBMetadata\352\002\023Policy::Kasregistry' + _globals['_GETKEYACCESSSERVERREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETKEYACCESSSERVERREQUEST'].fields_by_name['id']._serialized_options = b'\030\001\272H\010r\003\260\001\001\330\001\001' + _globals['_GETKEYACCESSSERVERREQUEST'].fields_by_name['kas_id']._loaded_options = None + _globals['_GETKEYACCESSSERVERREQUEST'].fields_by_name['kas_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_GETKEYACCESSSERVERREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_GETKEYACCESSSERVERREQUEST'].fields_by_name['name']._serialized_options = b'\272H\004r\002\020\001' + _globals['_GETKEYACCESSSERVERREQUEST'].fields_by_name['uri']._loaded_options = None + _globals['_GETKEYACCESSSERVERREQUEST'].fields_by_name['uri']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + 
_globals['_GETKEYACCESSSERVERREQUEST']._loaded_options = None + _globals['_GETKEYACCESSSERVERREQUEST']._serialized_options = b'\272H\263\002\032\250\001\n\020exclusive_fields\022JEither use deprecated \'id\' field or one of \'kas_id\' or \'uri\', but not both\032H!(has(this.id) && (has(this.kas_id) || has(this.uri) || has(this.name)))\032\205\001\n\017required_fields\022-Either id or one of kas_id or uri must be set\032Chas(this.id) || has(this.kas_id) || has(this.uri) || has(this.name)' + _globals['_CREATEKEYACCESSSERVERREQUEST'].fields_by_name['uri']._loaded_options = None + _globals['_CREATEKEYACCESSSERVERREQUEST'].fields_by_name['uri']._serialized_options = b'\272H\360\001\272\001\354\001\n\nuri_format\022\317\001URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\032\014this.isUri()' + _globals['_CREATEKEYACCESSSERVERREQUEST'].fields_by_name['source_type']._loaded_options = None + _globals['_CREATEKEYACCESSSERVERREQUEST'].fields_by_name['source_type']._serialized_options = b'\272H\010\202\001\002\020\001\310\001\000' + _globals['_CREATEKEYACCESSSERVERREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_CREATEKEYACCESSSERVERREQUEST'].fields_by_name['name']._serialized_options = b'\272H\250\002r\003\030\375\001\272\001\234\002\n\017kas_name_format\022\263\001Registered KAS name must be an alphanumeric string, allowing hyphens, and underscores but not as the first or last character. The stored KAS name will be normalized to lower case.\032Ssize(this) > 0 ? this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\310\001\000' + _globals['_UPDATEKEYACCESSSERVERREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATEKEYACCESSSERVERREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATEKEYACCESSSERVERREQUEST'].fields_by_name['uri']._loaded_options = None + _globals['_UPDATEKEYACCESSSERVERREQUEST'].fields_by_name['uri']._serialized_options = b'\272H\225\002\272\001\221\002\n\023optional_uri_format\022\330\001Optional URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\032\037size(this) == 0 || this.isUri()' + _globals['_UPDATEKEYACCESSSERVERREQUEST'].fields_by_name['source_type']._loaded_options = None + _globals['_UPDATEKEYACCESSSERVERREQUEST'].fields_by_name['source_type']._serialized_options = b'\272H\010\202\001\002\020\001\310\001\000' + _globals['_UPDATEKEYACCESSSERVERREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_UPDATEKEYACCESSSERVERREQUEST'].fields_by_name['name']._serialized_options = b'\272H\243\002r\003\030\375\001\272\001\227\002\n\017kas_name_format\022\263\001Registered KAS name must be an alphanumeric string, allowing hyphens, and underscores but not as the first or last character. 
The stored KAS name will be normalized to lower case.\032Nsize(this) == 0 || this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\310\001\000' + _globals['_DELETEKEYACCESSSERVERREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETEKEYACCESSSERVERREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATEPUBLICKEYREQUEST'].fields_by_name['kas_id']._loaded_options = None + _globals['_CREATEPUBLICKEYREQUEST'].fields_by_name['kas_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATEPUBLICKEYREQUEST'].fields_by_name['key']._loaded_options = None + _globals['_CREATEPUBLICKEYREQUEST'].fields_by_name['key']._serialized_options = b'\272H\003\310\001\001' + _globals['_GETPUBLICKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETPUBLICKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_LISTPUBLICKEYSREQUEST'].fields_by_name['kas_id']._loaded_options = None + _globals['_LISTPUBLICKEYSREQUEST'].fields_by_name['kas_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_LISTPUBLICKEYSREQUEST'].fields_by_name['kas_name']._loaded_options = None + _globals['_LISTPUBLICKEYSREQUEST'].fields_by_name['kas_name']._serialized_options = b'\272H\004r\002\020\001' + _globals['_LISTPUBLICKEYSREQUEST'].fields_by_name['kas_uri']._loaded_options = None + _globals['_LISTPUBLICKEYSREQUEST'].fields_by_name['kas_uri']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_LISTPUBLICKEYMAPPINGREQUEST'].fields_by_name['kas_id']._loaded_options = None + _globals['_LISTPUBLICKEYMAPPINGREQUEST'].fields_by_name['kas_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_LISTPUBLICKEYMAPPINGREQUEST'].fields_by_name['kas_name']._loaded_options = None + _globals['_LISTPUBLICKEYMAPPINGREQUEST'].fields_by_name['kas_name']._serialized_options = b'\272H\004r\002\020\001' + _globals['_LISTPUBLICKEYMAPPINGREQUEST'].fields_by_name['kas_uri']._loaded_options = None + _globals['_LISTPUBLICKEYMAPPINGREQUEST'].fields_by_name['kas_uri']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_LISTPUBLICKEYMAPPINGREQUEST'].fields_by_name['public_key_id']._loaded_options = None + _globals['_LISTPUBLICKEYMAPPINGREQUEST'].fields_by_name['public_key_id']._serialized_options = b'\272H\010r\003\260\001\001\330\001\001' + _globals['_UPDATEPUBLICKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATEPUBLICKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_DEACTIVATEPUBLICKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DEACTIVATEPUBLICKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_ACTIVATEPUBLICKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_ACTIVATEPUBLICKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST'].fields_by_name['kas_id']._loaded_options = None + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST'].fields_by_name['kas_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST'].fields_by_name['kas_uri']._loaded_options = None + 
_globals['_LISTKEYACCESSSERVERGRANTSREQUEST'].fields_by_name['kas_uri']._serialized_options = b'\272H\225\002\272\001\221\002\n\023optional_uri_format\022\330\001Optional URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\032\037size(this) == 0 || this.isUri()' + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST'].fields_by_name['kas_name']._loaded_options = None + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST'].fields_by_name['kas_name']._serialized_options = b'\272H\243\002r\003\030\375\001\272\001\227\002\n\017kas_name_format\022\263\001Registered KAS name must be an alphanumeric string, allowing hyphens, and underscores but not as the first or last character. The stored KAS name will be normalized to lower case.\032Nsize(this) == 0 || this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\310\001\000' + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST']._loaded_options = None + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST']._serialized_options = b'\030\001' + _globals['_LISTKEYACCESSSERVERGRANTSRESPONSE'].fields_by_name['grants']._loaded_options = None + _globals['_LISTKEYACCESSSERVERGRANTSRESPONSE'].fields_by_name['grants']._serialized_options = b'\030\001' + _globals['_LISTKEYACCESSSERVERGRANTSRESPONSE']._loaded_options = None + _globals['_LISTKEYACCESSSERVERGRANTSRESPONSE']._serialized_options = b'\030\001' + _globals['_CREATEKEYREQUEST'].fields_by_name['kas_id']._loaded_options = None + _globals['_CREATEKEYREQUEST'].fields_by_name['kas_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATEKEYREQUEST'].fields_by_name['key_id']._loaded_options = None + _globals['_CREATEKEYREQUEST'].fields_by_name['key_id']._serialized_options = b'\272H\004r\002\020\001' + _globals['_CREATEKEYREQUEST'].fields_by_name['key_algorithm']._loaded_options = None + _globals['_CREATEKEYREQUEST'].fields_by_name['key_algorithm']._serialized_options = b'\272Hf\272\001c\n\025key_algorithm_defined\0224The key_algorithm must be one of the defined values.\032\024this in [1, 2, 3, 4]' + _globals['_CREATEKEYREQUEST'].fields_by_name['key_mode']._loaded_options = None + _globals['_CREATEKEYREQUEST'].fields_by_name['key_mode']._serialized_options = b'\272Hd\272\001a\n\020key_mode_defined\0225The key_mode must be one of the defined values (1-4).\032\026this >= 1 && this <= 4' + _globals['_CREATEKEYREQUEST'].fields_by_name['public_key_ctx']._loaded_options = None + _globals['_CREATEKEYREQUEST'].fields_by_name['public_key_ctx']._serialized_options = b'\272H\003\310\001\001' + _globals['_CREATEKEYREQUEST']._loaded_options = None + _globals['_CREATEKEYREQUEST']._serialized_options = b'\272H\267\007\032\227\003\n#private_key_ctx_optionally_required\022\274\001The wrapped_key is required if key_mode is KEY_MODE_CONFIG_ROOT_KEY or KEY_MODE_PROVIDER_ROOT_KEY. The wrapped_key must be empty if key_mode is KEY_MODE_REMOTE or KEY_MODE_PUBLIC_KEY_ONLY.\032\260\001((this.key_mode == 1 || this.key_mode == 2) && this.private_key_ctx.wrapped_key != \'\') || ((this.key_mode == 3 || this.key_mode == 4) && this.private_key_ctx.wrapped_key == \'\')\032\364\002\n&provider_config_id_optionally_required\022\250\001Provider config id is required if key_mode is KEY_MODE_PROVIDER_ROOT_KEY or KEY_MODE_REMOTE. 
It must be empty for KEY_MODE_CONFIG_ROOT_KEY and KEY_MODE_PUBLIC_KEY_ONLY.\032\236\001((this.key_mode == 1 || this.key_mode == 4) && this.provider_config_id == \'\') || ((this.key_mode == 2 || this.key_mode == 3) && this.provider_config_id != \'\')\032\243\001\n#private_key_ctx_for_public_key_only\022Hprivate_key_ctx must not be set if key_mode is KEY_MODE_PUBLIC_KEY_ONLY.\0322!(this.key_mode == 4 && has(this.private_key_ctx))' + _globals['_GETKEYREQUEST'].oneofs_by_name['identifier']._loaded_options = None + _globals['_GETKEYREQUEST'].oneofs_by_name['identifier']._serialized_options = b'\272H\002\010\001' + _globals['_GETKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_LISTKEYSREQUEST'].fields_by_name['key_algorithm']._loaded_options = None + _globals['_LISTKEYSREQUEST'].fields_by_name['key_algorithm']._serialized_options = b'\272Hi\272\001f\n\025key_algorithm_defined\0224The key_algorithm must be one of the defined values.\032\027this in [0, 1, 2, 3, 4]' + _globals['_LISTKEYSREQUEST'].fields_by_name['kas_id']._loaded_options = None + _globals['_LISTKEYSREQUEST'].fields_by_name['kas_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_LISTKEYSREQUEST'].fields_by_name['kas_name']._loaded_options = None + _globals['_LISTKEYSREQUEST'].fields_by_name['kas_name']._serialized_options = b'\272H\004r\002\020\001' + _globals['_LISTKEYSREQUEST'].fields_by_name['kas_uri']._loaded_options = None + _globals['_LISTKEYSREQUEST'].fields_by_name['kas_uri']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_UPDATEKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATEKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATEKEYREQUEST']._loaded_options = None + _globals['_UPDATEKEYREQUEST']._serialized_options = b'\272H\310\001\032\305\001\n\030metadata_update_behavior\022RMetadata update behavior must be either APPEND or REPLACE, when updating metadata.\032U((!has(this.metadata)) || (has(this.metadata) && this.metadata_update_behavior != 0))' + _globals['_KASKEYIDENTIFIER'].oneofs_by_name['identifier']._loaded_options = None + _globals['_KASKEYIDENTIFIER'].oneofs_by_name['identifier']._serialized_options = b'\272H\002\010\001' + _globals['_KASKEYIDENTIFIER'].fields_by_name['kas_id']._loaded_options = None + _globals['_KASKEYIDENTIFIER'].fields_by_name['kas_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_KASKEYIDENTIFIER'].fields_by_name['name']._loaded_options = None + _globals['_KASKEYIDENTIFIER'].fields_by_name['name']._serialized_options = b'\272H\004r\002\020\001' + _globals['_KASKEYIDENTIFIER'].fields_by_name['uri']._loaded_options = None + _globals['_KASKEYIDENTIFIER'].fields_by_name['uri']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_KASKEYIDENTIFIER'].fields_by_name['kid']._loaded_options = None + _globals['_KASKEYIDENTIFIER'].fields_by_name['kid']._serialized_options = b'\272H\004r\002\020\001' + _globals['_ROTATEKEYREQUEST_NEWKEY'].fields_by_name['key_id']._loaded_options = None + _globals['_ROTATEKEYREQUEST_NEWKEY'].fields_by_name['key_id']._serialized_options = b'\272H\004r\002\020\001' + _globals['_ROTATEKEYREQUEST_NEWKEY'].fields_by_name['algorithm']._loaded_options = None + _globals['_ROTATEKEYREQUEST_NEWKEY'].fields_by_name['algorithm']._serialized_options = b'\272Hf\272\001c\n\025key_algorithm_defined\0224The 
key_algorithm must be one of the defined values.\032\024this in [1, 2, 3, 4]' + _globals['_ROTATEKEYREQUEST_NEWKEY'].fields_by_name['key_mode']._loaded_options = None + _globals['_ROTATEKEYREQUEST_NEWKEY'].fields_by_name['key_mode']._serialized_options = b'\272Ho\202\001\002\020\001\272\001g\n\024new_key_mode_defined\0229The new key_mode must be one of the defined values (1-4).\032\024this in [1, 2, 3, 4]' + _globals['_ROTATEKEYREQUEST_NEWKEY'].fields_by_name['public_key_ctx']._loaded_options = None + _globals['_ROTATEKEYREQUEST_NEWKEY'].fields_by_name['public_key_ctx']._serialized_options = b'\272H\003\310\001\001' + _globals['_ROTATEKEYREQUEST'].oneofs_by_name['active_key']._loaded_options = None + _globals['_ROTATEKEYREQUEST'].oneofs_by_name['active_key']._serialized_options = b'\272H\002\010\001' + _globals['_ROTATEKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_ROTATEKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_ROTATEKEYREQUEST']._loaded_options = None + _globals['_ROTATEKEYREQUEST']._serialized_options = b'\272H\311\010\032\330\003\n#private_key_ctx_optionally_required\022\315\001For the new key, the wrapped_key is required if key_mode is KEY_MODE_CONFIG_ROOT_KEY or KEY_MODE_PROVIDER_ROOT_KEY. The wrapped_key must be empty if key_mode is KEY_MODE_REMOTE or KEY_MODE_PUBLIC_KEY_ONLY.\032\340\001((this.new_key.key_mode == 1 || this.new_key.key_mode == 2) && this.new_key.private_key_ctx.wrapped_key != \'\') || ((this.new_key.key_mode == 3 || this.new_key.key_mode == 4) && this.new_key.private_key_ctx.wrapped_key == \'\')\032\265\003\n&provider_config_id_optionally_required\022\271\001For the new key, provider config id is required if key_mode is KEY_MODE_PROVIDER_ROOT_KEY or KEY_MODE_REMOTE. 
It must be empty for KEY_MODE_CONFIG_ROOT_KEY and KEY_MODE_PUBLIC_KEY_ONLY.\032\316\001((this.new_key.key_mode == 1 || this.new_key.key_mode == 4) && this.new_key.provider_config_id == \'\') || ((this.new_key.key_mode == 2 || this.new_key.key_mode == 3) && this.new_key.provider_config_id != \'\')\032\263\001\n#private_key_ctx_for_public_key_only\022Hprivate_key_ctx must not be set if key_mode is KEY_MODE_PUBLIC_KEY_ONLY.\032B!(this.new_key.key_mode == 4 && has(this.new_key.private_key_ctx))' + _globals['_SETBASEKEYREQUEST'].oneofs_by_name['active_key']._loaded_options = None + _globals['_SETBASEKEYREQUEST'].oneofs_by_name['active_key']._serialized_options = b'\272H\002\010\001' + _globals['_SETBASEKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_SETBASEKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_KEYACCESSSERVERREGISTRYSERVICE'].methods_by_name['ListKeyAccessServers']._loaded_options = None + _globals['_KEYACCESSSERVERREGISTRYSERVICE'].methods_by_name['ListKeyAccessServers']._serialized_options = b'\220\002\001\202\323\344\223\002\025\022\023/key-access-servers' + _globals['_KEYACCESSSERVERREGISTRYSERVICE'].methods_by_name['GetKeyAccessServer']._loaded_options = None + _globals['_KEYACCESSSERVERREGISTRYSERVICE'].methods_by_name['GetKeyAccessServer']._serialized_options = b'\220\002\001' + _globals['_KEYACCESSSERVERREGISTRYSERVICE'].methods_by_name['ListKeyAccessServerGrants']._loaded_options = None + _globals['_KEYACCESSSERVERREGISTRYSERVICE'].methods_by_name['ListKeyAccessServerGrants']._serialized_options = b'\210\002\001\220\002\001' + _globals['_GETKEYACCESSSERVERREQUEST']._serialized_start=202 + _globals['_GETKEYACCESSSERVERREQUEST']._serialized_end=686 + _globals['_GETKEYACCESSSERVERRESPONSE']._serialized_start=688 + _globals['_GETKEYACCESSSERVERRESPONSE']._serialized_end=785 + _globals['_LISTKEYACCESSSERVERSREQUEST']._serialized_start=787 + _globals['_LISTKEYACCESSSERVERSREQUEST']._serialized_end=869 + _globals['_LISTKEYACCESSSERVERSRESPONSE']._serialized_start=872 + _globals['_LISTKEYACCESSSERVERSRESPONSE']._serialized_end=1027 + _globals['_CREATEKEYACCESSSERVERREQUEST']._serialized_start=1030 + _globals['_CREATEKEYACCESSSERVERREQUEST']._serialized_end=1819 + _globals['_CREATEKEYACCESSSERVERRESPONSE']._serialized_start=1821 + _globals['_CREATEKEYACCESSSERVERRESPONSE']._serialized_end=1921 + _globals['_UPDATEKEYACCESSSERVERREQUEST']._serialized_start=1924 + _globals['_UPDATEKEYACCESSSERVERREQUEST']._serialized_end=2857 + _globals['_UPDATEKEYACCESSSERVERRESPONSE']._serialized_start=2859 + _globals['_UPDATEKEYACCESSSERVERRESPONSE']._serialized_end=2959 + _globals['_DELETEKEYACCESSSERVERREQUEST']._serialized_start=2961 + _globals['_DELETEKEYACCESSSERVERREQUEST']._serialized_end=3017 + _globals['_DELETEKEYACCESSSERVERRESPONSE']._serialized_start=3019 + _globals['_DELETEKEYACCESSSERVERRESPONSE']._serialized_end=3119 + _globals['_GRANTEDPOLICYOBJECT']._serialized_start=3121 + _globals['_GRANTEDPOLICYOBJECT']._serialized_end=3176 + _globals['_KEYACCESSSERVERGRANTS']._serialized_start=3179 + _globals['_KEYACCESSSERVERGRANTS']._serialized_end=3515 + _globals['_CREATEPUBLICKEYREQUEST']._serialized_start=3518 + _globals['_CREATEPUBLICKEYREQUEST']._serialized_end=3676 + _globals['_CREATEPUBLICKEYRESPONSE']._serialized_start=3678 + _globals['_CREATEPUBLICKEYRESPONSE']._serialized_end=3734 + _globals['_GETPUBLICKEYREQUEST']._serialized_start=3736 + _globals['_GETPUBLICKEYREQUEST']._serialized_end=3799 + 
_globals['_GETPUBLICKEYRESPONSE']._serialized_start=3801 + _globals['_GETPUBLICKEYRESPONSE']._serialized_end=3854 + _globals['_LISTPUBLICKEYSREQUEST']._serialized_start=3857 + _globals['_LISTPUBLICKEYSREQUEST']._serialized_end=4059 + _globals['_LISTPUBLICKEYSRESPONSE']._serialized_start=4061 + _globals['_LISTPUBLICKEYSRESPONSE']._serialized_end=4172 + _globals['_LISTPUBLICKEYMAPPINGREQUEST']._serialized_start=4175 + _globals['_LISTPUBLICKEYMAPPINGREQUEST']._serialized_end=4432 + _globals['_LISTPUBLICKEYMAPPINGRESPONSE']._serialized_start=4435 + _globals['_LISTPUBLICKEYMAPPINGRESPONSE']._serialized_end=5193 + _globals['_LISTPUBLICKEYMAPPINGRESPONSE_PUBLICKEYMAPPING']._serialized_start=4637 + _globals['_LISTPUBLICKEYMAPPINGRESPONSE_PUBLICKEYMAPPING']._serialized_end=4823 + _globals['_LISTPUBLICKEYMAPPINGRESPONSE_PUBLICKEY']._serialized_start=4826 + _globals['_LISTPUBLICKEYMAPPINGRESPONSE_PUBLICKEY']._serialized_end=5144 + _globals['_LISTPUBLICKEYMAPPINGRESPONSE_ASSOCIATION']._serialized_start=5146 + _globals['_LISTPUBLICKEYMAPPINGRESPONSE_ASSOCIATION']._serialized_end=5193 + _globals['_UPDATEPUBLICKEYREQUEST']._serialized_start=5196 + _globals['_UPDATEPUBLICKEYREQUEST']._serialized_end=5385 + _globals['_UPDATEPUBLICKEYRESPONSE']._serialized_start=5387 + _globals['_UPDATEPUBLICKEYRESPONSE']._serialized_end=5443 + _globals['_DEACTIVATEPUBLICKEYREQUEST']._serialized_start=5445 + _globals['_DEACTIVATEPUBLICKEYREQUEST']._serialized_end=5499 + _globals['_DEACTIVATEPUBLICKEYRESPONSE']._serialized_start=5501 + _globals['_DEACTIVATEPUBLICKEYRESPONSE']._serialized_end=5561 + _globals['_ACTIVATEPUBLICKEYREQUEST']._serialized_start=5563 + _globals['_ACTIVATEPUBLICKEYREQUEST']._serialized_end=5615 + _globals['_ACTIVATEPUBLICKEYRESPONSE']._serialized_start=5617 + _globals['_ACTIVATEPUBLICKEYRESPONSE']._serialized_end=5675 + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST']._serialized_start=5678 + _globals['_LISTKEYACCESSSERVERGRANTSREQUEST']._serialized_end=6611 + _globals['_LISTKEYACCESSSERVERGRANTSRESPONSE']._serialized_start=6614 + _globals['_LISTKEYACCESSSERVERGRANTSRESPONSE']._serialized_end=6778 + _globals['_CREATEKEYREQUEST']._serialized_start=6781 + _globals['_CREATEKEYREQUEST']._serialized_end=8366 + _globals['_CREATEKEYRESPONSE']._serialized_start=8368 + _globals['_CREATEKEYRESPONSE']._serialized_end=8428 + _globals['_GETKEYREQUEST']._serialized_start=8430 + _globals['_GETKEYREQUEST']._serialized_end=8552 + _globals['_GETKEYRESPONSE']._serialized_start=8554 + _globals['_GETKEYRESPONSE']._serialized_end=8611 + _globals['_LISTKEYSREQUEST']._serialized_start=8614 + _globals['_LISTKEYSREQUEST']._serialized_end=8977 + _globals['_LISTKEYSRESPONSE']._serialized_start=8979 + _globals['_LISTKEYSRESPONSE']._serialized_end=9094 + _globals['_UPDATEKEYREQUEST']._serialized_start=9097 + _globals['_UPDATEKEYREQUEST']._serialized_end=9487 + _globals['_UPDATEKEYRESPONSE']._serialized_start=9489 + _globals['_UPDATEKEYRESPONSE']._serialized_end=9549 + _globals['_KASKEYIDENTIFIER']._serialized_start=9552 + _globals['_KASKEYIDENTIFIER']._serialized_end=9716 + _globals['_ROTATEKEYREQUEST']._serialized_start=9719 + _globals['_ROTATEKEYREQUEST']._serialized_end=11609 + _globals['_ROTATEKEYREQUEST_NEWKEY']._serialized_start=9896 + _globals['_ROTATEKEYREQUEST_NEWKEY']._serialized_end=10484 + _globals['_CHANGEMAPPINGS']._serialized_start=11611 + _globals['_CHANGEMAPPINGS']._serialized_end=11661 + _globals['_ROTATEDRESOURCES']._serialized_start=11664 + _globals['_ROTATEDRESOURCES']._serialized_end=12019 + 
_globals['_ROTATEKEYRESPONSE']._serialized_start=12022 + _globals['_ROTATEKEYRESPONSE']._serialized_end=12165 + _globals['_SETBASEKEYREQUEST']._serialized_start=12167 + _globals['_SETBASEKEYREQUEST']._serialized_end=12293 + _globals['_GETBASEKEYREQUEST']._serialized_start=12295 + _globals['_GETBASEKEYREQUEST']._serialized_end=12314 + _globals['_GETBASEKEYRESPONSE']._serialized_start=12316 + _globals['_GETBASEKEYRESPONSE']._serialized_end=12385 + _globals['_SETBASEKEYRESPONSE']._serialized_start=12388 + _globals['_SETBASEKEYRESPONSE']._serialized_end=12530 + _globals['_KEYACCESSSERVERREGISTRYSERVICE']._serialized_start=12533 + _globals['_KEYACCESSSERVERREGISTRYSERVICE']._serialized_end=14012 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.pyi new file mode 100644 index 0000000..734a4cb --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.pyi @@ -0,0 +1,450 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from common import common_pb2 as _common_pb2 +from google.api import annotations_pb2 as _annotations_pb2 +from policy import objects_pb2 as _objects_pb2 +from policy import selectors_pb2 as _selectors_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class GetKeyAccessServerRequest(_message.Message): + __slots__ = ("id", "kas_id", "name", "uri") + ID_FIELD_NUMBER: _ClassVar[int] + KAS_ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + URI_FIELD_NUMBER: _ClassVar[int] + id: str + kas_id: str + name: str + uri: str + def __init__(self, id: _Optional[str] = ..., kas_id: _Optional[str] = ..., name: _Optional[str] = ..., uri: _Optional[str] = ...) -> None: ... + +class GetKeyAccessServerResponse(_message.Message): + __slots__ = ("key_access_server",) + KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + key_access_server: _objects_pb2.KeyAccessServer + def __init__(self, key_access_server: _Optional[_Union[_objects_pb2.KeyAccessServer, _Mapping]] = ...) -> None: ... + +class ListKeyAccessServersRequest(_message.Message): + __slots__ = ("pagination",) + PAGINATION_FIELD_NUMBER: _ClassVar[int] + pagination: _selectors_pb2.PageRequest + def __init__(self, pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListKeyAccessServersResponse(_message.Message): + __slots__ = ("key_access_servers", "pagination") + KEY_ACCESS_SERVERS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + key_access_servers: _containers.RepeatedCompositeFieldContainer[_objects_pb2.KeyAccessServer] + pagination: _selectors_pb2.PageResponse + def __init__(self, key_access_servers: _Optional[_Iterable[_Union[_objects_pb2.KeyAccessServer, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... 
+ +class CreateKeyAccessServerRequest(_message.Message): + __slots__ = ("uri", "public_key", "source_type", "name", "metadata") + URI_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + SOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + uri: str + public_key: _objects_pb2.PublicKey + source_type: _objects_pb2.SourceType + name: str + metadata: _common_pb2.MetadataMutable + def __init__(self, uri: _Optional[str] = ..., public_key: _Optional[_Union[_objects_pb2.PublicKey, _Mapping]] = ..., source_type: _Optional[_Union[_objects_pb2.SourceType, str]] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateKeyAccessServerResponse(_message.Message): + __slots__ = ("key_access_server",) + KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + key_access_server: _objects_pb2.KeyAccessServer + def __init__(self, key_access_server: _Optional[_Union[_objects_pb2.KeyAccessServer, _Mapping]] = ...) -> None: ... + +class UpdateKeyAccessServerRequest(_message.Message): + __slots__ = ("id", "uri", "public_key", "source_type", "name", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + URI_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + SOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + uri: str + public_key: _objects_pb2.PublicKey + source_type: _objects_pb2.SourceType + name: str + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., uri: _Optional[str] = ..., public_key: _Optional[_Union[_objects_pb2.PublicKey, _Mapping]] = ..., source_type: _Optional[_Union[_objects_pb2.SourceType, str]] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateKeyAccessServerResponse(_message.Message): + __slots__ = ("key_access_server",) + KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + key_access_server: _objects_pb2.KeyAccessServer + def __init__(self, key_access_server: _Optional[_Union[_objects_pb2.KeyAccessServer, _Mapping]] = ...) -> None: ... + +class DeleteKeyAccessServerRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeleteKeyAccessServerResponse(_message.Message): + __slots__ = ("key_access_server",) + KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + key_access_server: _objects_pb2.KeyAccessServer + def __init__(self, key_access_server: _Optional[_Union[_objects_pb2.KeyAccessServer, _Mapping]] = ...) -> None: ... + +class GrantedPolicyObject(_message.Message): + __slots__ = ("id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... 
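The request stubs above pair with the protovalidate rules serialized in the pb2 module: `CreateKeyAccessServerRequest.uri` must satisfy `this.isUri()`, and `name`, when set, must match `^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$` and is normalized to lower case server-side. A minimal construction sketch — assuming the `policy` package root is importable, as the generated `from policy import objects_pb2` imports require; the values are placeholders chosen only to satisfy those CEL rules:

from policy.kasregistry import key_access_server_registry_pb2 as kasr_pb2

# Placeholder values that satisfy the CEL rules quoted in the descriptor above.
req = kasr_pb2.CreateKeyAccessServerRequest(
    uri="https://kas.example.com",  # must pass this.isUri()
    name="example-kas",             # alphanumeric; inner hyphens/underscores only
)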
+ +class KeyAccessServerGrants(_message.Message): + __slots__ = ("key_access_server", "namespace_grants", "attribute_grants", "value_grants") + KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + NAMESPACE_GRANTS_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_GRANTS_FIELD_NUMBER: _ClassVar[int] + VALUE_GRANTS_FIELD_NUMBER: _ClassVar[int] + key_access_server: _objects_pb2.KeyAccessServer + namespace_grants: _containers.RepeatedCompositeFieldContainer[GrantedPolicyObject] + attribute_grants: _containers.RepeatedCompositeFieldContainer[GrantedPolicyObject] + value_grants: _containers.RepeatedCompositeFieldContainer[GrantedPolicyObject] + def __init__(self, key_access_server: _Optional[_Union[_objects_pb2.KeyAccessServer, _Mapping]] = ..., namespace_grants: _Optional[_Iterable[_Union[GrantedPolicyObject, _Mapping]]] = ..., attribute_grants: _Optional[_Iterable[_Union[GrantedPolicyObject, _Mapping]]] = ..., value_grants: _Optional[_Iterable[_Union[GrantedPolicyObject, _Mapping]]] = ...) -> None: ... + +class CreatePublicKeyRequest(_message.Message): + __slots__ = ("kas_id", "key", "metadata") + KAS_ID_FIELD_NUMBER: _ClassVar[int] + KEY_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + kas_id: str + key: _objects_pb2.KasPublicKey + metadata: _common_pb2.MetadataMutable + def __init__(self, kas_id: _Optional[str] = ..., key: _Optional[_Union[_objects_pb2.KasPublicKey, _Mapping]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreatePublicKeyResponse(_message.Message): + __slots__ = ("key",) + KEY_FIELD_NUMBER: _ClassVar[int] + key: _objects_pb2.Key + def __init__(self, key: _Optional[_Union[_objects_pb2.Key, _Mapping]] = ...) -> None: ... + +class GetPublicKeyRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class GetPublicKeyResponse(_message.Message): + __slots__ = ("key",) + KEY_FIELD_NUMBER: _ClassVar[int] + key: _objects_pb2.Key + def __init__(self, key: _Optional[_Union[_objects_pb2.Key, _Mapping]] = ...) -> None: ... + +class ListPublicKeysRequest(_message.Message): + __slots__ = ("kas_id", "kas_name", "kas_uri", "pagination") + KAS_ID_FIELD_NUMBER: _ClassVar[int] + KAS_NAME_FIELD_NUMBER: _ClassVar[int] + KAS_URI_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + kas_id: str + kas_name: str + kas_uri: str + pagination: _selectors_pb2.PageRequest + def __init__(self, kas_id: _Optional[str] = ..., kas_name: _Optional[str] = ..., kas_uri: _Optional[str] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListPublicKeysResponse(_message.Message): + __slots__ = ("keys", "pagination") + KEYS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + keys: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Key] + pagination: _selectors_pb2.PageResponse + def __init__(self, keys: _Optional[_Iterable[_Union[_objects_pb2.Key, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... 
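`ListPublicKeysRequest` (like `ListPublicKeyMappingRequest` below) scopes results through the `kas_filter` oneof declared in the descriptor, so at most one of `kas_id`, `kas_name`, or `kas_uri` may be set per request. Reusing the hypothetical `kasr_pb2` alias from the sketch above:

# Filter the listing by KAS URI; setting kas_id or kas_name instead would
# occupy the same kas_filter oneof slot.
by_uri = kasr_pb2.ListPublicKeysRequest(kas_uri="https://kas.example.com")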
+ +class ListPublicKeyMappingRequest(_message.Message): + __slots__ = ("kas_id", "kas_name", "kas_uri", "public_key_id", "pagination") + KAS_ID_FIELD_NUMBER: _ClassVar[int] + KAS_NAME_FIELD_NUMBER: _ClassVar[int] + KAS_URI_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_ID_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + kas_id: str + kas_name: str + kas_uri: str + public_key_id: str + pagination: _selectors_pb2.PageRequest + def __init__(self, kas_id: _Optional[str] = ..., kas_name: _Optional[str] = ..., kas_uri: _Optional[str] = ..., public_key_id: _Optional[str] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListPublicKeyMappingResponse(_message.Message): + __slots__ = ("public_key_mappings", "pagination") + class PublicKeyMapping(_message.Message): + __slots__ = ("kas_id", "kas_name", "kas_uri", "public_keys") + KAS_ID_FIELD_NUMBER: _ClassVar[int] + KAS_NAME_FIELD_NUMBER: _ClassVar[int] + KAS_URI_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEYS_FIELD_NUMBER: _ClassVar[int] + kas_id: str + kas_name: str + kas_uri: str + public_keys: _containers.RepeatedCompositeFieldContainer[ListPublicKeyMappingResponse.PublicKey] + def __init__(self, kas_id: _Optional[str] = ..., kas_name: _Optional[str] = ..., kas_uri: _Optional[str] = ..., public_keys: _Optional[_Iterable[_Union[ListPublicKeyMappingResponse.PublicKey, _Mapping]]] = ...) -> None: ... + class PublicKey(_message.Message): + __slots__ = ("key", "values", "definitions", "namespaces") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUES_FIELD_NUMBER: _ClassVar[int] + DEFINITIONS_FIELD_NUMBER: _ClassVar[int] + NAMESPACES_FIELD_NUMBER: _ClassVar[int] + key: _objects_pb2.Key + values: _containers.RepeatedCompositeFieldContainer[ListPublicKeyMappingResponse.Association] + definitions: _containers.RepeatedCompositeFieldContainer[ListPublicKeyMappingResponse.Association] + namespaces: _containers.RepeatedCompositeFieldContainer[ListPublicKeyMappingResponse.Association] + def __init__(self, key: _Optional[_Union[_objects_pb2.Key, _Mapping]] = ..., values: _Optional[_Iterable[_Union[ListPublicKeyMappingResponse.Association, _Mapping]]] = ..., definitions: _Optional[_Iterable[_Union[ListPublicKeyMappingResponse.Association, _Mapping]]] = ..., namespaces: _Optional[_Iterable[_Union[ListPublicKeyMappingResponse.Association, _Mapping]]] = ...) -> None: ... + class Association(_message.Message): + __slots__ = ("id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... + PUBLIC_KEY_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + public_key_mappings: _containers.RepeatedCompositeFieldContainer[ListPublicKeyMappingResponse.PublicKeyMapping] + pagination: _selectors_pb2.PageResponse + def __init__(self, public_key_mappings: _Optional[_Iterable[_Union[ListPublicKeyMappingResponse.PublicKeyMapping, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... 
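The nested `PublicKeyMapping`, `PublicKey`, and `Association` messages are addressed through the enclosing `ListPublicKeyMappingResponse` class, as the stub nesting above shows. A small traversal sketch, again reusing the `kasr_pb2` alias introduced earlier:

def print_mappings(resp: kasr_pb2.ListPublicKeyMappingResponse) -> None:
    # Walk KAS -> public keys -> associated value FQNs.
    for mapping in resp.public_key_mappings:
        for pk in mapping.public_keys:
            fqns = [a.fqn for a in pk.values]
            print(mapping.kas_uri, pk.key, fqns)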
+ +class UpdatePublicKeyRequest(_message.Message): + __slots__ = ("id", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdatePublicKeyResponse(_message.Message): + __slots__ = ("key",) + KEY_FIELD_NUMBER: _ClassVar[int] + key: _objects_pb2.Key + def __init__(self, key: _Optional[_Union[_objects_pb2.Key, _Mapping]] = ...) -> None: ... + +class DeactivatePublicKeyRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeactivatePublicKeyResponse(_message.Message): + __slots__ = ("key",) + KEY_FIELD_NUMBER: _ClassVar[int] + key: _objects_pb2.Key + def __init__(self, key: _Optional[_Union[_objects_pb2.Key, _Mapping]] = ...) -> None: ... + +class ActivatePublicKeyRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class ActivatePublicKeyResponse(_message.Message): + __slots__ = ("key",) + KEY_FIELD_NUMBER: _ClassVar[int] + key: _objects_pb2.Key + def __init__(self, key: _Optional[_Union[_objects_pb2.Key, _Mapping]] = ...) -> None: ... + +class ListKeyAccessServerGrantsRequest(_message.Message): + __slots__ = ("kas_id", "kas_uri", "kas_name", "pagination") + KAS_ID_FIELD_NUMBER: _ClassVar[int] + KAS_URI_FIELD_NUMBER: _ClassVar[int] + KAS_NAME_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + kas_id: str + kas_uri: str + kas_name: str + pagination: _selectors_pb2.PageRequest + def __init__(self, kas_id: _Optional[str] = ..., kas_uri: _Optional[str] = ..., kas_name: _Optional[str] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListKeyAccessServerGrantsResponse(_message.Message): + __slots__ = ("grants", "pagination") + GRANTS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + grants: _containers.RepeatedCompositeFieldContainer[KeyAccessServerGrants] + pagination: _selectors_pb2.PageResponse + def __init__(self, grants: _Optional[_Iterable[_Union[KeyAccessServerGrants, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... 
+ +class CreateKeyRequest(_message.Message): + __slots__ = ("kas_id", "key_id", "key_algorithm", "key_mode", "public_key_ctx", "private_key_ctx", "provider_config_id", "metadata") + KAS_ID_FIELD_NUMBER: _ClassVar[int] + KEY_ID_FIELD_NUMBER: _ClassVar[int] + KEY_ALGORITHM_FIELD_NUMBER: _ClassVar[int] + KEY_MODE_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_CTX_FIELD_NUMBER: _ClassVar[int] + PRIVATE_KEY_CTX_FIELD_NUMBER: _ClassVar[int] + PROVIDER_CONFIG_ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + kas_id: str + key_id: str + key_algorithm: _objects_pb2.Algorithm + key_mode: _objects_pb2.KeyMode + public_key_ctx: _objects_pb2.PublicKeyCtx + private_key_ctx: _objects_pb2.PrivateKeyCtx + provider_config_id: str + metadata: _common_pb2.MetadataMutable + def __init__(self, kas_id: _Optional[str] = ..., key_id: _Optional[str] = ..., key_algorithm: _Optional[_Union[_objects_pb2.Algorithm, str]] = ..., key_mode: _Optional[_Union[_objects_pb2.KeyMode, str]] = ..., public_key_ctx: _Optional[_Union[_objects_pb2.PublicKeyCtx, _Mapping]] = ..., private_key_ctx: _Optional[_Union[_objects_pb2.PrivateKeyCtx, _Mapping]] = ..., provider_config_id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateKeyResponse(_message.Message): + __slots__ = ("kas_key",) + KAS_KEY_FIELD_NUMBER: _ClassVar[int] + kas_key: _objects_pb2.KasKey + def __init__(self, kas_key: _Optional[_Union[_objects_pb2.KasKey, _Mapping]] = ...) -> None: ... + +class GetKeyRequest(_message.Message): + __slots__ = ("id", "key") + ID_FIELD_NUMBER: _ClassVar[int] + KEY_FIELD_NUMBER: _ClassVar[int] + id: str + key: KasKeyIdentifier + def __init__(self, id: _Optional[str] = ..., key: _Optional[_Union[KasKeyIdentifier, _Mapping]] = ...) -> None: ... + +class GetKeyResponse(_message.Message): + __slots__ = ("kas_key",) + KAS_KEY_FIELD_NUMBER: _ClassVar[int] + kas_key: _objects_pb2.KasKey + def __init__(self, kas_key: _Optional[_Union[_objects_pb2.KasKey, _Mapping]] = ...) -> None: ... + +class ListKeysRequest(_message.Message): + __slots__ = ("key_algorithm", "kas_id", "kas_name", "kas_uri", "pagination") + KEY_ALGORITHM_FIELD_NUMBER: _ClassVar[int] + KAS_ID_FIELD_NUMBER: _ClassVar[int] + KAS_NAME_FIELD_NUMBER: _ClassVar[int] + KAS_URI_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + key_algorithm: _objects_pb2.Algorithm + kas_id: str + kas_name: str + kas_uri: str + pagination: _selectors_pb2.PageRequest + def __init__(self, key_algorithm: _Optional[_Union[_objects_pb2.Algorithm, str]] = ..., kas_id: _Optional[str] = ..., kas_name: _Optional[str] = ..., kas_uri: _Optional[str] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListKeysResponse(_message.Message): + __slots__ = ("kas_keys", "pagination") + KAS_KEYS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + kas_keys: _containers.RepeatedCompositeFieldContainer[_objects_pb2.KasKey] + pagination: _selectors_pb2.PageResponse + def __init__(self, kas_keys: _Optional[_Iterable[_Union[_objects_pb2.KasKey, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... 
+ +class UpdateKeyRequest(_message.Message): + __slots__ = ("id", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateKeyResponse(_message.Message): + __slots__ = ("kas_key",) + KAS_KEY_FIELD_NUMBER: _ClassVar[int] + kas_key: _objects_pb2.KasKey + def __init__(self, kas_key: _Optional[_Union[_objects_pb2.KasKey, _Mapping]] = ...) -> None: ... + +class KasKeyIdentifier(_message.Message): + __slots__ = ("kas_id", "name", "uri", "kid") + KAS_ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + URI_FIELD_NUMBER: _ClassVar[int] + KID_FIELD_NUMBER: _ClassVar[int] + kas_id: str + name: str + uri: str + kid: str + def __init__(self, kas_id: _Optional[str] = ..., name: _Optional[str] = ..., uri: _Optional[str] = ..., kid: _Optional[str] = ...) -> None: ... + +class RotateKeyRequest(_message.Message): + __slots__ = ("id", "key", "new_key") + class NewKey(_message.Message): + __slots__ = ("key_id", "algorithm", "key_mode", "public_key_ctx", "private_key_ctx", "provider_config_id", "metadata") + KEY_ID_FIELD_NUMBER: _ClassVar[int] + ALGORITHM_FIELD_NUMBER: _ClassVar[int] + KEY_MODE_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_CTX_FIELD_NUMBER: _ClassVar[int] + PRIVATE_KEY_CTX_FIELD_NUMBER: _ClassVar[int] + PROVIDER_CONFIG_ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + key_id: str + algorithm: _objects_pb2.Algorithm + key_mode: _objects_pb2.KeyMode + public_key_ctx: _objects_pb2.PublicKeyCtx + private_key_ctx: _objects_pb2.PrivateKeyCtx + provider_config_id: str + metadata: _common_pb2.MetadataMutable + def __init__(self, key_id: _Optional[str] = ..., algorithm: _Optional[_Union[_objects_pb2.Algorithm, str]] = ..., key_mode: _Optional[_Union[_objects_pb2.KeyMode, str]] = ..., public_key_ctx: _Optional[_Union[_objects_pb2.PublicKeyCtx, _Mapping]] = ..., private_key_ctx: _Optional[_Union[_objects_pb2.PrivateKeyCtx, _Mapping]] = ..., provider_config_id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + ID_FIELD_NUMBER: _ClassVar[int] + KEY_FIELD_NUMBER: _ClassVar[int] + NEW_KEY_FIELD_NUMBER: _ClassVar[int] + id: str + key: KasKeyIdentifier + new_key: RotateKeyRequest.NewKey + def __init__(self, id: _Optional[str] = ..., key: _Optional[_Union[KasKeyIdentifier, _Mapping]] = ..., new_key: _Optional[_Union[RotateKeyRequest.NewKey, _Mapping]] = ...) -> None: ... + +class ChangeMappings(_message.Message): + __slots__ = ("id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... 
+ +class RotatedResources(_message.Message): + __slots__ = ("rotated_out_key", "attribute_definition_mappings", "attribute_value_mappings", "namespace_mappings") + ROTATED_OUT_KEY_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_DEFINITION_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUE_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + NAMESPACE_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + rotated_out_key: _objects_pb2.KasKey + attribute_definition_mappings: _containers.RepeatedCompositeFieldContainer[ChangeMappings] + attribute_value_mappings: _containers.RepeatedCompositeFieldContainer[ChangeMappings] + namespace_mappings: _containers.RepeatedCompositeFieldContainer[ChangeMappings] + def __init__(self, rotated_out_key: _Optional[_Union[_objects_pb2.KasKey, _Mapping]] = ..., attribute_definition_mappings: _Optional[_Iterable[_Union[ChangeMappings, _Mapping]]] = ..., attribute_value_mappings: _Optional[_Iterable[_Union[ChangeMappings, _Mapping]]] = ..., namespace_mappings: _Optional[_Iterable[_Union[ChangeMappings, _Mapping]]] = ...) -> None: ... + +class RotateKeyResponse(_message.Message): + __slots__ = ("kas_key", "rotated_resources") + KAS_KEY_FIELD_NUMBER: _ClassVar[int] + ROTATED_RESOURCES_FIELD_NUMBER: _ClassVar[int] + kas_key: _objects_pb2.KasKey + rotated_resources: RotatedResources + def __init__(self, kas_key: _Optional[_Union[_objects_pb2.KasKey, _Mapping]] = ..., rotated_resources: _Optional[_Union[RotatedResources, _Mapping]] = ...) -> None: ... + +class SetBaseKeyRequest(_message.Message): + __slots__ = ("id", "key") + ID_FIELD_NUMBER: _ClassVar[int] + KEY_FIELD_NUMBER: _ClassVar[int] + id: str + key: KasKeyIdentifier + def __init__(self, id: _Optional[str] = ..., key: _Optional[_Union[KasKeyIdentifier, _Mapping]] = ...) -> None: ... + +class GetBaseKeyRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class GetBaseKeyResponse(_message.Message): + __slots__ = ("base_key",) + BASE_KEY_FIELD_NUMBER: _ClassVar[int] + base_key: _objects_pb2.SimpleKasKey + def __init__(self, base_key: _Optional[_Union[_objects_pb2.SimpleKasKey, _Mapping]] = ...) -> None: ... + +class SetBaseKeyResponse(_message.Message): + __slots__ = ("new_base_key", "previous_base_key") + NEW_BASE_KEY_FIELD_NUMBER: _ClassVar[int] + PREVIOUS_BASE_KEY_FIELD_NUMBER: _ClassVar[int] + new_base_key: _objects_pb2.SimpleKasKey + previous_base_key: _objects_pb2.SimpleKasKey + def __init__(self, new_base_key: _Optional[_Union[_objects_pb2.SimpleKasKey, _Mapping]] = ..., previous_base_key: _Optional[_Union[_objects_pb2.SimpleKasKey, _Mapping]] = ...) -> None: ... 
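The message stubs above are plain protobuf types, so building requests is ordinary keyword construction. Below is a minimal sketch (not part of the diff) of assembling a key-rotation request from these stubs; the import path assumes the generated package root is on sys.path, as the generated modules themselves assume, and every identifier value is a placeholder:

    from policy.kasregistry import key_access_server_registry_pb2 as kasr_pb2

    # Address the current key by KAS URI plus key ID (kid) rather than by
    # its database ID; both values here are placeholders.
    current = kasr_pb2.KasKeyIdentifier(uri="https://kas.example.com", kid="r1")

    req = kasr_pb2.RotateKeyRequest(
        key=current,
        new_key=kasr_pb2.RotateKeyRequest.NewKey(
            # Placeholder key ID; the algorithm and key_mode enum values
            # come from policy/objects.proto, which is not shown in this diff.
            key_id="r2",
        ),
    )

Per the stubs, RotateKeyResponse.rotated_resources then reports every namespace, attribute-definition, and attribute-value mapping that was moved to the new key.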
diff --git a/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2_connect.py new file mode 100644 index 0000000..7bb54e2 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2_connect.py @@ -0,0 +1,611 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import policy.kasregistry.key_access_server_registry_pb2 + +class KeyAccessServerRegistryServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_list_key_access_servers( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersResponse]: + """Low-level method to call ListKeyAccessServers, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServers" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersResponse,extra_headers, timeout_seconds) + + + def list_key_access_servers( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersResponse: + response = self.call_list_key_access_servers(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerResponse]: + """Low-level method to call GetKeyAccessServer, granting access to errors and metadata""" + url = self.base_url + 
"/policy.kasregistry.KeyAccessServerRegistryService/GetKeyAccessServer" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerResponse,extra_headers, timeout_seconds) + + + def get_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerResponse: + response = self.call_get_key_access_server(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerResponse]: + """Low-level method to call CreateKeyAccessServer, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/CreateKeyAccessServer" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerResponse,extra_headers, timeout_seconds) + + + def create_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerResponse: + response = self.call_create_key_access_server(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerResponse]: + """Low-level method to call UpdateKeyAccessServer, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/UpdateKeyAccessServer" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerResponse,extra_headers, timeout_seconds) + + + def update_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerResponse: + response = self.call_update_key_access_server(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_delete_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerResponse]: + """Low-level method to call 
DeleteKeyAccessServer, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/DeleteKeyAccessServer" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerResponse,extra_headers, timeout_seconds) + + + def delete_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerResponse: + response = self.call_delete_key_access_server(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_key_access_server_grants( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsResponse]: + """Low-level method to call ListKeyAccessServerGrants, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServerGrants" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsResponse,extra_headers, timeout_seconds) + + + def list_key_access_server_grants( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsResponse: + response = self.call_list_key_access_server_grants(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.CreateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.CreateKeyResponse]: + """Low-level method to call CreateKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/CreateKey" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.CreateKeyResponse,extra_headers, timeout_seconds) + + + def create_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.CreateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.CreateKeyResponse: + response = self.call_create_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.GetKeyResponse]: + """Low-level method to call GetKey, granting access to errors and 
metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/GetKey" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.GetKeyResponse,extra_headers, timeout_seconds) + + + def get_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.GetKeyResponse: + response = self.call_get_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_keys( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeysRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.ListKeysResponse]: + """Low-level method to call ListKeys, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/ListKeys" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.ListKeysResponse,extra_headers, timeout_seconds) + + + def list_keys( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeysRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.ListKeysResponse: + response = self.call_list_keys(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.UpdateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.UpdateKeyResponse]: + """Low-level method to call UpdateKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/UpdateKey" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.UpdateKeyResponse,extra_headers, timeout_seconds) + + + def update_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.UpdateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.UpdateKeyResponse: + response = self.call_update_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_rotate_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.RotateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.RotateKeyResponse]: + """Low-level method to call RotateKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/RotateKey" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.RotateKeyResponse,extra_headers, timeout_seconds) + + + def rotate_key( + self, req: 
policy.kasregistry.key_access_server_registry_pb2.RotateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.RotateKeyResponse: + response = self.call_rotate_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_set_base_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyResponse]: + """Low-level method to call SetBaseKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/SetBaseKey" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyResponse,extra_headers, timeout_seconds) + + + def set_base_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyResponse: + response = self.call_set_base_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_base_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyResponse]: + """Low-level method to call GetBaseKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/GetBaseKey" + return self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyResponse,extra_headers, timeout_seconds) + + + def get_base_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyResponse: + response = self.call_get_base_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncKeyAccessServerRegistryServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_list_key_access_servers( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersResponse]: + """Low-level method to call ListKeyAccessServers, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServers" + return await self._connect_client.call_unary(url, req, 
policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersResponse,extra_headers, timeout_seconds) + + async def list_key_access_servers( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersResponse: + response = await self.call_list_key_access_servers(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerResponse]: + """Low-level method to call GetKeyAccessServer, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/GetKeyAccessServer" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerResponse,extra_headers, timeout_seconds) + + async def get_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerResponse: + response = await self.call_get_key_access_server(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerResponse]: + """Low-level method to call CreateKeyAccessServer, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/CreateKeyAccessServer" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerResponse,extra_headers, timeout_seconds) + + async def create_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerResponse: + response = await self.call_create_key_access_server(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerResponse]: + """Low-level method to call UpdateKeyAccessServer, granting access to errors and metadata""" + url = self.base_url + 
"/policy.kasregistry.KeyAccessServerRegistryService/UpdateKeyAccessServer" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerResponse,extra_headers, timeout_seconds) + + async def update_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerResponse: + response = await self.call_update_key_access_server(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_delete_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerResponse]: + """Low-level method to call DeleteKeyAccessServer, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/DeleteKeyAccessServer" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerResponse,extra_headers, timeout_seconds) + + async def delete_key_access_server( + self, req: policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerResponse: + response = await self.call_delete_key_access_server(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_key_access_server_grants( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsResponse]: + """Low-level method to call ListKeyAccessServerGrants, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServerGrants" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsResponse,extra_headers, timeout_seconds) + + async def list_key_access_server_grants( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsResponse: + response = await self.call_list_key_access_server_grants(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.CreateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> 
UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.CreateKeyResponse]: + """Low-level method to call CreateKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/CreateKey" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.CreateKeyResponse,extra_headers, timeout_seconds) + + async def create_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.CreateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.CreateKeyResponse: + response = await self.call_create_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.GetKeyResponse]: + """Low-level method to call GetKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/GetKey" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.GetKeyResponse,extra_headers, timeout_seconds) + + async def get_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.GetKeyResponse: + response = await self.call_get_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_keys( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeysRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.ListKeysResponse]: + """Low-level method to call ListKeys, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/ListKeys" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.ListKeysResponse,extra_headers, timeout_seconds) + + async def list_keys( + self, req: policy.kasregistry.key_access_server_registry_pb2.ListKeysRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.ListKeysResponse: + response = await self.call_list_keys(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.UpdateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.UpdateKeyResponse]: + """Low-level method to call UpdateKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/UpdateKey" + return 
await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.UpdateKeyResponse,extra_headers, timeout_seconds) + + async def update_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.UpdateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.UpdateKeyResponse: + response = await self.call_update_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_rotate_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.RotateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.RotateKeyResponse]: + """Low-level method to call RotateKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/RotateKey" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.RotateKeyResponse,extra_headers, timeout_seconds) + + async def rotate_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.RotateKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.RotateKeyResponse: + response = await self.call_rotate_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_set_base_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyResponse]: + """Low-level method to call SetBaseKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/SetBaseKey" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyResponse,extra_headers, timeout_seconds) + + async def set_base_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyResponse: + response = await self.call_set_base_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_base_key( + self, req: policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyResponse]: + """Low-level method to call GetBaseKey, granting access to errors and metadata""" + url = self.base_url + "/policy.kasregistry.KeyAccessServerRegistryService/GetBaseKey" + return await self._connect_client.call_unary(url, req, policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyResponse,extra_headers, timeout_seconds) + + async def get_base_key( + self, req: 
policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyResponse: + response = await self.call_get_base_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class KeyAccessServerRegistryServiceProtocol(typing.Protocol): + def list_key_access_servers(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersResponse]: + ... + def get_key_access_server(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerResponse]: + ... + def create_key_access_server(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerResponse]: + ... + def update_key_access_server(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerResponse]: + ... + def delete_key_access_server(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerResponse]: + ... + def list_key_access_server_grants(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsResponse]: + ... + def create_key(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.CreateKeyRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.CreateKeyResponse]: + ... + def get_key(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.GetKeyRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.GetKeyResponse]: + ... + def list_keys(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.ListKeysRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.ListKeysResponse]: + ... + def update_key(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.UpdateKeyRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.UpdateKeyResponse]: + ... + def rotate_key(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.RotateKeyRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.RotateKeyResponse]: + ... + def set_base_key(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyResponse]: + ... + def get_base_key(self, req: ClientRequest[policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyRequest]) -> ServerResponse[policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyResponse]: + ... 
+ +KEY_ACCESS_SERVER_REGISTRY_SERVICE_PATH_PREFIX = "/policy.kasregistry.KeyAccessServerRegistryService" + +def wsgi_key_access_server_registry_service(implementation: KeyAccessServerRegistryServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServers", implementation.list_key_access_servers, policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServersRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/GetKeyAccessServer", implementation.get_key_access_server, policy.kasregistry.key_access_server_registry_pb2.GetKeyAccessServerRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/CreateKeyAccessServer", implementation.create_key_access_server, policy.kasregistry.key_access_server_registry_pb2.CreateKeyAccessServerRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/UpdateKeyAccessServer", implementation.update_key_access_server, policy.kasregistry.key_access_server_registry_pb2.UpdateKeyAccessServerRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/DeleteKeyAccessServer", implementation.delete_key_access_server, policy.kasregistry.key_access_server_registry_pb2.DeleteKeyAccessServerRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/ListKeyAccessServerGrants", implementation.list_key_access_server_grants, policy.kasregistry.key_access_server_registry_pb2.ListKeyAccessServerGrantsRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/CreateKey", implementation.create_key, policy.kasregistry.key_access_server_registry_pb2.CreateKeyRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/GetKey", implementation.get_key, policy.kasregistry.key_access_server_registry_pb2.GetKeyRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/ListKeys", implementation.list_keys, policy.kasregistry.key_access_server_registry_pb2.ListKeysRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/UpdateKey", implementation.update_key, policy.kasregistry.key_access_server_registry_pb2.UpdateKeyRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/RotateKey", implementation.rotate_key, policy.kasregistry.key_access_server_registry_pb2.RotateKeyRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/SetBaseKey", implementation.set_base_key, policy.kasregistry.key_access_server_registry_pb2.SetBaseKeyRequest) + app.register_unary_rpc("/policy.kasregistry.KeyAccessServerRegistryService/GetBaseKey", implementation.get_base_key, policy.kasregistry.key_access_server_registry_pb2.GetBaseKeyRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2.py new file mode 100644 index 0000000..de3b629 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/keymanagement/key_management.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/keymanagement/key_management.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 +from policy import selectors_pb2 as policy_dot_selectors__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)policy/keymanagement/key_management.proto\x12\x14policy.keymanagement\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x14policy/objects.proto\x1a\x16policy/selectors.proto\"\x97\x01\n\x1b\x43reateProviderConfigRequest\x12\x1a\n\x04name\x18\x01 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x04name\x12\'\n\x0b\x63onfig_json\x18\x02 \x01(\x0c\x42\x06\xbaH\x03\xc8\x01\x01R\nconfigJson\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"b\n\x1c\x43reateProviderConfigResponse\x12\x42\n\x0fprovider_config\x18\x01 \x01(\x0b\x32\x19.policy.KeyProviderConfigR\x0eproviderConfig\"j\n\x18GetProviderConfigRequest\x12\x1a\n\x02id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x02id\x12\x1d\n\x04name\x18\x03 \x01(\tB\x07\xbaH\x04r\x02\x10\x01H\x00R\x04nameB\x13\n\nidentifier\x12\x05\xbaH\x02\x08\x01\"_\n\x19GetProviderConfigResponse\x12\x42\n\x0fprovider_config\x18\x01 \x01(\x0b\x32\x19.policy.KeyProviderConfigR\x0eproviderConfig\"Q\n\x1aListProviderConfigsRequest\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\x99\x01\n\x1bListProviderConfigsResponse\x12\x44\n\x10provider_configs\x18\x01 \x03(\x0b\x32\x19.policy.KeyProviderConfigR\x0fproviderConfigs\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\x87\x02\n\x1bUpdateProviderConfigRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x1a\n\x04name\x18\x02 \x01(\tB\x06\xbaH\x03\xc8\x01\x00R\x04name\x12\'\n\x0b\x63onfig_json\x18\x03 \x01(\x0c\x42\x06\xbaH\x03\xc8\x01\x00R\nconfigJson\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"b\n\x1cUpdateProviderConfigResponse\x12\x42\n\x0fprovider_config\x18\x01 \x01(\x0b\x32\x19.policy.KeyProviderConfigR\x0eproviderConfig\"7\n\x1b\x44\x65leteProviderConfigRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"b\n\x1c\x44\x65leteProviderConfigResponse\x12\x42\n\x0fprovider_config\x18\x01 
\x01(\x0b\x32\x19.policy.KeyProviderConfigR\x0eproviderConfig2\x8f\x05\n\x14KeyManagementService\x12\x7f\n\x14\x43reateProviderConfig\x12\x31.policy.keymanagement.CreateProviderConfigRequest\x1a\x32.policy.keymanagement.CreateProviderConfigResponse\"\x00\x12v\n\x11GetProviderConfig\x12..policy.keymanagement.GetProviderConfigRequest\x1a/.policy.keymanagement.GetProviderConfigResponse\"\x00\x12|\n\x13ListProviderConfigs\x12\x30.policy.keymanagement.ListProviderConfigsRequest\x1a\x31.policy.keymanagement.ListProviderConfigsResponse\"\x00\x12\x7f\n\x14UpdateProviderConfig\x12\x31.policy.keymanagement.UpdateProviderConfigRequest\x1a\x32.policy.keymanagement.UpdateProviderConfigResponse\"\x00\x12\x7f\n\x14\x44\x65leteProviderConfig\x12\x31.policy.keymanagement.DeleteProviderConfigRequest\x1a\x32.policy.keymanagement.DeleteProviderConfigResponse\"\x00\x42\x9f\x01\n\x18\x63om.policy.keymanagementB\x12KeyManagementProtoP\x01\xa2\x02\x03PKX\xaa\x02\x14Policy.Keymanagement\xca\x02\x14Policy\\Keymanagement\xe2\x02 Policy\\Keymanagement\\GPBMetadata\xea\x02\x15Policy::Keymanagementb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.keymanagement.key_management_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\030com.policy.keymanagementB\022KeyManagementProtoP\001\242\002\003PKX\252\002\024Policy.Keymanagement\312\002\024Policy\\Keymanagement\342\002 Policy\\Keymanagement\\GPBMetadata\352\002\025Policy::Keymanagement' + _globals['_CREATEPROVIDERCONFIGREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_CREATEPROVIDERCONFIGREQUEST'].fields_by_name['name']._serialized_options = b'\272H\003\310\001\001' + _globals['_CREATEPROVIDERCONFIGREQUEST'].fields_by_name['config_json']._loaded_options = None + _globals['_CREATEPROVIDERCONFIGREQUEST'].fields_by_name['config_json']._serialized_options = b'\272H\003\310\001\001' + _globals['_GETPROVIDERCONFIGREQUEST'].oneofs_by_name['identifier']._loaded_options = None + _globals['_GETPROVIDERCONFIGREQUEST'].oneofs_by_name['identifier']._serialized_options = b'\272H\002\010\001' + _globals['_GETPROVIDERCONFIGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETPROVIDERCONFIGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_GETPROVIDERCONFIGREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_GETPROVIDERCONFIGREQUEST'].fields_by_name['name']._serialized_options = b'\272H\004r\002\020\001' + _globals['_UPDATEPROVIDERCONFIGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATEPROVIDERCONFIGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATEPROVIDERCONFIGREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_UPDATEPROVIDERCONFIGREQUEST'].fields_by_name['name']._serialized_options = b'\272H\003\310\001\000' + _globals['_UPDATEPROVIDERCONFIGREQUEST'].fields_by_name['config_json']._loaded_options = None + _globals['_UPDATEPROVIDERCONFIGREQUEST'].fields_by_name['config_json']._serialized_options = b'\272H\003\310\001\000' + _globals['_DELETEPROVIDERCONFIGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETEPROVIDERCONFIGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATEPROVIDERCONFIGREQUEST']._serialized_start=164 + 
_globals['_CREATEPROVIDERCONFIGREQUEST']._serialized_end=315 + _globals['_CREATEPROVIDERCONFIGRESPONSE']._serialized_start=317 + _globals['_CREATEPROVIDERCONFIGRESPONSE']._serialized_end=415 + _globals['_GETPROVIDERCONFIGREQUEST']._serialized_start=417 + _globals['_GETPROVIDERCONFIGREQUEST']._serialized_end=523 + _globals['_GETPROVIDERCONFIGRESPONSE']._serialized_start=525 + _globals['_GETPROVIDERCONFIGRESPONSE']._serialized_end=620 + _globals['_LISTPROVIDERCONFIGSREQUEST']._serialized_start=622 + _globals['_LISTPROVIDERCONFIGSREQUEST']._serialized_end=703 + _globals['_LISTPROVIDERCONFIGSRESPONSE']._serialized_start=706 + _globals['_LISTPROVIDERCONFIGSRESPONSE']._serialized_end=859 + _globals['_UPDATEPROVIDERCONFIGREQUEST']._serialized_start=862 + _globals['_UPDATEPROVIDERCONFIGREQUEST']._serialized_end=1125 + _globals['_UPDATEPROVIDERCONFIGRESPONSE']._serialized_start=1127 + _globals['_UPDATEPROVIDERCONFIGRESPONSE']._serialized_end=1225 + _globals['_DELETEPROVIDERCONFIGREQUEST']._serialized_start=1227 + _globals['_DELETEPROVIDERCONFIGREQUEST']._serialized_end=1282 + _globals['_DELETEPROVIDERCONFIGRESPONSE']._serialized_start=1284 + _globals['_DELETEPROVIDERCONFIGRESPONSE']._serialized_end=1382 + _globals['_KEYMANAGEMENTSERVICE']._serialized_start=1385 + _globals['_KEYMANAGEMENTSERVICE']._serialized_end=2040 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2.pyi new file mode 100644 index 0000000..468d867 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2.pyi @@ -0,0 +1,87 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from common import common_pb2 as _common_pb2 +from policy import objects_pb2 as _objects_pb2 +from policy import selectors_pb2 as _selectors_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class CreateProviderConfigRequest(_message.Message): + __slots__ = ("name", "config_json", "metadata") + NAME_FIELD_NUMBER: _ClassVar[int] + CONFIG_JSON_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + name: str + config_json: bytes + metadata: _common_pb2.MetadataMutable + def __init__(self, name: _Optional[str] = ..., config_json: _Optional[bytes] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateProviderConfigResponse(_message.Message): + __slots__ = ("provider_config",) + PROVIDER_CONFIG_FIELD_NUMBER: _ClassVar[int] + provider_config: _objects_pb2.KeyProviderConfig + def __init__(self, provider_config: _Optional[_Union[_objects_pb2.KeyProviderConfig, _Mapping]] = ...) -> None: ... + +class GetProviderConfigRequest(_message.Message): + __slots__ = ("id", "name") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... 
+ +class GetProviderConfigResponse(_message.Message): + __slots__ = ("provider_config",) + PROVIDER_CONFIG_FIELD_NUMBER: _ClassVar[int] + provider_config: _objects_pb2.KeyProviderConfig + def __init__(self, provider_config: _Optional[_Union[_objects_pb2.KeyProviderConfig, _Mapping]] = ...) -> None: ... + +class ListProviderConfigsRequest(_message.Message): + __slots__ = ("pagination",) + PAGINATION_FIELD_NUMBER: _ClassVar[int] + pagination: _selectors_pb2.PageRequest + def __init__(self, pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListProviderConfigsResponse(_message.Message): + __slots__ = ("provider_configs", "pagination") + PROVIDER_CONFIGS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + provider_configs: _containers.RepeatedCompositeFieldContainer[_objects_pb2.KeyProviderConfig] + pagination: _selectors_pb2.PageResponse + def __init__(self, provider_configs: _Optional[_Iterable[_Union[_objects_pb2.KeyProviderConfig, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... + +class UpdateProviderConfigRequest(_message.Message): + __slots__ = ("id", "name", "config_json", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + CONFIG_JSON_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + config_json: bytes + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., config_json: _Optional[bytes] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateProviderConfigResponse(_message.Message): + __slots__ = ("provider_config",) + PROVIDER_CONFIG_FIELD_NUMBER: _ClassVar[int] + provider_config: _objects_pb2.KeyProviderConfig + def __init__(self, provider_config: _Optional[_Union[_objects_pb2.KeyProviderConfig, _Mapping]] = ...) -> None: ... + +class DeleteProviderConfigRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeleteProviderConfigResponse(_message.Message): + __slots__ = ("provider_config",) + PROVIDER_CONFIG_FIELD_NUMBER: _ClassVar[int] + provider_config: _objects_pb2.KeyProviderConfig + def __init__(self, provider_config: _Optional[_Union[_objects_pb2.KeyProviderConfig, _Mapping]] = ...) -> None: ... 
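The next file adds the generated Connect RPC clients for `KeyManagementService`. As a hedged orientation sketch (not part of the generated sources), the sync client can be driven as below; it assumes the generated `policy` package is importable, a platform at the placeholder URL, a placeholder bearer token, and that `extra_headers` accepts a plain dict:

```python
# Hedged usage sketch for the generated sync Connect client.
# Assumptions: the generated `policy` package is on sys.path, an OpenTDF
# platform listens at the placeholder URL, and "<token>" stands in for a
# real access token.
import urllib3

from policy.keymanagement import key_management_pb2
from policy.keymanagement.key_management_pb2_connect import (
    KeyManagementServiceClient,
)

client = KeyManagementServiceClient(
    "http://localhost:8080",            # placeholder platform URL
    http_client=urllib3.PoolManager(),
)

# The high-level wrappers raise on error and return the response message.
resp = client.list_provider_configs(
    key_management_pb2.ListProviderConfigsRequest(),
    extra_headers={"authorization": "Bearer <token>"},  # placeholder token
    timeout_seconds=10.0,
)
for pc in resp.provider_configs:
    print(pc.id, pc.name)

# The call_* variants return a UnaryOutput, exposing error() and message()
# instead of raising, which is useful when inspecting Connect error details.
out = client.call_get_provider_config(
    key_management_pb2.GetProviderConfigRequest(name="example-provider")
)
if out.error() is None and out.message() is not None:
    print(out.message().provider_config.config_json)
```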
diff --git a/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2_connect.py new file mode 100644 index 0000000..8db118f --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/keymanagement/key_management_pb2_connect.py @@ -0,0 +1,275 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import policy.keymanagement.key_management_pb2 + +class KeyManagementServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_create_provider_config( + self, req: policy.keymanagement.key_management_pb2.CreateProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.CreateProviderConfigResponse]: + """Low-level method to call CreateProviderConfig, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/CreateProviderConfig" + return self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.CreateProviderConfigResponse,extra_headers, timeout_seconds) + + + def create_provider_config( + self, req: policy.keymanagement.key_management_pb2.CreateProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.CreateProviderConfigResponse: + response = self.call_create_provider_config(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_provider_config( + self, req: policy.keymanagement.key_management_pb2.GetProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.GetProviderConfigResponse]: + """Low-level method to call GetProviderConfig, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/GetProviderConfig" + return self._connect_client.call_unary(url, req, 
policy.keymanagement.key_management_pb2.GetProviderConfigResponse,extra_headers, timeout_seconds) + + + def get_provider_config( + self, req: policy.keymanagement.key_management_pb2.GetProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.GetProviderConfigResponse: + response = self.call_get_provider_config(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_provider_configs( + self, req: policy.keymanagement.key_management_pb2.ListProviderConfigsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.ListProviderConfigsResponse]: + """Low-level method to call ListProviderConfigs, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/ListProviderConfigs" + return self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.ListProviderConfigsResponse,extra_headers, timeout_seconds) + + + def list_provider_configs( + self, req: policy.keymanagement.key_management_pb2.ListProviderConfigsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.ListProviderConfigsResponse: + response = self.call_list_provider_configs(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_provider_config( + self, req: policy.keymanagement.key_management_pb2.UpdateProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.UpdateProviderConfigResponse]: + """Low-level method to call UpdateProviderConfig, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/UpdateProviderConfig" + return self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.UpdateProviderConfigResponse,extra_headers, timeout_seconds) + + + def update_provider_config( + self, req: policy.keymanagement.key_management_pb2.UpdateProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.UpdateProviderConfigResponse: + response = self.call_update_provider_config(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_delete_provider_config( + self, req: policy.keymanagement.key_management_pb2.DeleteProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.DeleteProviderConfigResponse]: + """Low-level method to call DeleteProviderConfig, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/DeleteProviderConfig" + return self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.DeleteProviderConfigResponse,extra_headers, timeout_seconds) + + + def delete_provider_config( + self, req: 
policy.keymanagement.key_management_pb2.DeleteProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.DeleteProviderConfigResponse: + response = self.call_delete_provider_config(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncKeyManagementServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_create_provider_config( + self, req: policy.keymanagement.key_management_pb2.CreateProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.CreateProviderConfigResponse]: + """Low-level method to call CreateProviderConfig, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/CreateProviderConfig" + return await self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.CreateProviderConfigResponse,extra_headers, timeout_seconds) + + async def create_provider_config( + self, req: policy.keymanagement.key_management_pb2.CreateProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.CreateProviderConfigResponse: + response = await self.call_create_provider_config(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_provider_config( + self, req: policy.keymanagement.key_management_pb2.GetProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.GetProviderConfigResponse]: + """Low-level method to call GetProviderConfig, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/GetProviderConfig" + return await self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.GetProviderConfigResponse,extra_headers, timeout_seconds) + + async def get_provider_config( + self, req: policy.keymanagement.key_management_pb2.GetProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.GetProviderConfigResponse: + response = await self.call_get_provider_config(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_provider_configs( + self, req: policy.keymanagement.key_management_pb2.ListProviderConfigsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.ListProviderConfigsResponse]: + """Low-level method to call ListProviderConfigs, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/ListProviderConfigs" + return await 
self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.ListProviderConfigsResponse,extra_headers, timeout_seconds) + + async def list_provider_configs( + self, req: policy.keymanagement.key_management_pb2.ListProviderConfigsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.ListProviderConfigsResponse: + response = await self.call_list_provider_configs(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_provider_config( + self, req: policy.keymanagement.key_management_pb2.UpdateProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.UpdateProviderConfigResponse]: + """Low-level method to call UpdateProviderConfig, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/UpdateProviderConfig" + return await self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.UpdateProviderConfigResponse,extra_headers, timeout_seconds) + + async def update_provider_config( + self, req: policy.keymanagement.key_management_pb2.UpdateProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.UpdateProviderConfigResponse: + response = await self.call_update_provider_config(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_delete_provider_config( + self, req: policy.keymanagement.key_management_pb2.DeleteProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.keymanagement.key_management_pb2.DeleteProviderConfigResponse]: + """Low-level method to call DeleteProviderConfig, granting access to errors and metadata""" + url = self.base_url + "/policy.keymanagement.KeyManagementService/DeleteProviderConfig" + return await self._connect_client.call_unary(url, req, policy.keymanagement.key_management_pb2.DeleteProviderConfigResponse,extra_headers, timeout_seconds) + + async def delete_provider_config( + self, req: policy.keymanagement.key_management_pb2.DeleteProviderConfigRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.keymanagement.key_management_pb2.DeleteProviderConfigResponse: + response = await self.call_delete_provider_config(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class KeyManagementServiceProtocol(typing.Protocol): + def create_provider_config(self, req: ClientRequest[policy.keymanagement.key_management_pb2.CreateProviderConfigRequest]) -> ServerResponse[policy.keymanagement.key_management_pb2.CreateProviderConfigResponse]: + ... + def get_provider_config(self, req: ClientRequest[policy.keymanagement.key_management_pb2.GetProviderConfigRequest]) -> ServerResponse[policy.keymanagement.key_management_pb2.GetProviderConfigResponse]: + ... 
+ def list_provider_configs(self, req: ClientRequest[policy.keymanagement.key_management_pb2.ListProviderConfigsRequest]) -> ServerResponse[policy.keymanagement.key_management_pb2.ListProviderConfigsResponse]: + ... + def update_provider_config(self, req: ClientRequest[policy.keymanagement.key_management_pb2.UpdateProviderConfigRequest]) -> ServerResponse[policy.keymanagement.key_management_pb2.UpdateProviderConfigResponse]: + ... + def delete_provider_config(self, req: ClientRequest[policy.keymanagement.key_management_pb2.DeleteProviderConfigRequest]) -> ServerResponse[policy.keymanagement.key_management_pb2.DeleteProviderConfigResponse]: + ... + +KEY_MANAGEMENT_SERVICE_PATH_PREFIX = "/policy.keymanagement.KeyManagementService" + +def wsgi_key_management_service(implementation: KeyManagementServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.keymanagement.KeyManagementService/CreateProviderConfig", implementation.create_provider_config, policy.keymanagement.key_management_pb2.CreateProviderConfigRequest) + app.register_unary_rpc("/policy.keymanagement.KeyManagementService/GetProviderConfig", implementation.get_provider_config, policy.keymanagement.key_management_pb2.GetProviderConfigRequest) + app.register_unary_rpc("/policy.keymanagement.KeyManagementService/ListProviderConfigs", implementation.list_provider_configs, policy.keymanagement.key_management_pb2.ListProviderConfigsRequest) + app.register_unary_rpc("/policy.keymanagement.KeyManagementService/UpdateProviderConfig", implementation.update_provider_config, policy.keymanagement.key_management_pb2.UpdateProviderConfigRequest) + app.register_unary_rpc("/policy.keymanagement.KeyManagementService/DeleteProviderConfig", implementation.delete_provider_config, policy.keymanagement.key_management_pb2.DeleteProviderConfigRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/policy/namespaces/namespaces_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/namespaces/namespaces_pb2.py new file mode 100644 index 0000000..a35d741 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/namespaces/namespaces_pb2.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/namespaces/namespaces.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/namespaces/namespaces.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 +from policy import selectors_pb2 as policy_dot_selectors__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"policy/namespaces/namespaces.proto\x12\x11policy.namespaces\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x14policy/objects.proto\x1a\x16policy/selectors.proto\"\x86\x01\n\x18NamespaceKeyAccessServer\x12+\n\x0cnamespace_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x0bnamespaceId\x12\x39\n\x14key_access_server_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x11keyAccessServerId:\x02\x18\x01\"b\n\x0cNamespaceKey\x12.\n\x0cnamespace_id\x18\x01 \x01(\tB\x0b\xbaH\x08r\x03\xb0\x01\x01\xc8\x01\x01R\x0bnamespaceId\x12\"\n\x06key_id\x18\x02 \x01(\tB\x0b\xbaH\x08r\x03\xb0\x01\x01\xc8\x01\x01R\x05keyId\"\xbe\x03\n\x13GetNamespaceRequest\x12\x1d\n\x02id\x18\x01 \x01(\tB\r\x18\x01\xbaH\x08r\x03\xb0\x01\x01\xd8\x01\x01R\x02id\x12-\n\x0cnamespace_id\x18\x02 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x0bnamespaceId\x12\x1e\n\x03\x66qn\x18\x03 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x03\x66qn:\xaa\x02\xbaH\xa6\x02\x1a\xa2\x01\n\x10\x65xclusive_fields\x12PEither use deprecated \'id\' field or one of \'namespace_id\' or \'fqn\', but not both\x1a None: ... + +class NamespaceKey(_message.Message): + __slots__ = ("namespace_id", "key_id") + NAMESPACE_ID_FIELD_NUMBER: _ClassVar[int] + KEY_ID_FIELD_NUMBER: _ClassVar[int] + namespace_id: str + key_id: str + def __init__(self, namespace_id: _Optional[str] = ..., key_id: _Optional[str] = ...) -> None: ... + +class GetNamespaceRequest(_message.Message): + __slots__ = ("id", "namespace_id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + NAMESPACE_ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + namespace_id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., namespace_id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... + +class GetNamespaceResponse(_message.Message): + __slots__ = ("namespace",) + NAMESPACE_FIELD_NUMBER: _ClassVar[int] + namespace: _objects_pb2.Namespace + def __init__(self, namespace: _Optional[_Union[_objects_pb2.Namespace, _Mapping]] = ...) -> None: ... + +class ListNamespacesRequest(_message.Message): + __slots__ = ("state", "pagination") + STATE_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + state: _common_pb2.ActiveStateEnum + pagination: _selectors_pb2.PageRequest + def __init__(self, state: _Optional[_Union[_common_pb2.ActiveStateEnum, str]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... 
+ +class ListNamespacesResponse(_message.Message): + __slots__ = ("namespaces", "pagination") + NAMESPACES_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + namespaces: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Namespace] + pagination: _selectors_pb2.PageResponse + def __init__(self, namespaces: _Optional[_Iterable[_Union[_objects_pb2.Namespace, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... + +class CreateNamespaceRequest(_message.Message): + __slots__ = ("name", "metadata") + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + name: str + metadata: _common_pb2.MetadataMutable + def __init__(self, name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateNamespaceResponse(_message.Message): + __slots__ = ("namespace",) + NAMESPACE_FIELD_NUMBER: _ClassVar[int] + namespace: _objects_pb2.Namespace + def __init__(self, namespace: _Optional[_Union[_objects_pb2.Namespace, _Mapping]] = ...) -> None: ... + +class UpdateNamespaceRequest(_message.Message): + __slots__ = ("id", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateNamespaceResponse(_message.Message): + __slots__ = ("namespace",) + NAMESPACE_FIELD_NUMBER: _ClassVar[int] + namespace: _objects_pb2.Namespace + def __init__(self, namespace: _Optional[_Union[_objects_pb2.Namespace, _Mapping]] = ...) -> None: ... + +class DeactivateNamespaceRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeactivateNamespaceResponse(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class AssignKeyAccessServerToNamespaceRequest(_message.Message): + __slots__ = ("namespace_key_access_server",) + NAMESPACE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + namespace_key_access_server: NamespaceKeyAccessServer + def __init__(self, namespace_key_access_server: _Optional[_Union[NamespaceKeyAccessServer, _Mapping]] = ...) -> None: ... + +class AssignKeyAccessServerToNamespaceResponse(_message.Message): + __slots__ = ("namespace_key_access_server",) + NAMESPACE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + namespace_key_access_server: NamespaceKeyAccessServer + def __init__(self, namespace_key_access_server: _Optional[_Union[NamespaceKeyAccessServer, _Mapping]] = ...) -> None: ... + +class RemoveKeyAccessServerFromNamespaceRequest(_message.Message): + __slots__ = ("namespace_key_access_server",) + NAMESPACE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + namespace_key_access_server: NamespaceKeyAccessServer + def __init__(self, namespace_key_access_server: _Optional[_Union[NamespaceKeyAccessServer, _Mapping]] = ...) -> None: ... 
+ +class RemoveKeyAccessServerFromNamespaceResponse(_message.Message): + __slots__ = ("namespace_key_access_server",) + NAMESPACE_KEY_ACCESS_SERVER_FIELD_NUMBER: _ClassVar[int] + namespace_key_access_server: NamespaceKeyAccessServer + def __init__(self, namespace_key_access_server: _Optional[_Union[NamespaceKeyAccessServer, _Mapping]] = ...) -> None: ... + +class AssignPublicKeyToNamespaceRequest(_message.Message): + __slots__ = ("namespace_key",) + NAMESPACE_KEY_FIELD_NUMBER: _ClassVar[int] + namespace_key: NamespaceKey + def __init__(self, namespace_key: _Optional[_Union[NamespaceKey, _Mapping]] = ...) -> None: ... + +class AssignPublicKeyToNamespaceResponse(_message.Message): + __slots__ = ("namespace_key",) + NAMESPACE_KEY_FIELD_NUMBER: _ClassVar[int] + namespace_key: NamespaceKey + def __init__(self, namespace_key: _Optional[_Union[NamespaceKey, _Mapping]] = ...) -> None: ... + +class RemovePublicKeyFromNamespaceRequest(_message.Message): + __slots__ = ("namespace_key",) + NAMESPACE_KEY_FIELD_NUMBER: _ClassVar[int] + namespace_key: NamespaceKey + def __init__(self, namespace_key: _Optional[_Union[NamespaceKey, _Mapping]] = ...) -> None: ... + +class RemovePublicKeyFromNamespaceResponse(_message.Message): + __slots__ = ("namespace_key",) + NAMESPACE_KEY_FIELD_NUMBER: _ClassVar[int] + namespace_key: NamespaceKey + def __init__(self, namespace_key: _Optional[_Union[NamespaceKey, _Mapping]] = ...) -> None: ... diff --git a/otdf-python-proto/src/otdf_python_proto/policy/namespaces/namespaces_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/namespaces/namespaces_pb2_connect.py new file mode 100644 index 0000000..7ae3e05 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/namespaces/namespaces_pb2_connect.py @@ -0,0 +1,443 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. 
+ if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import policy.namespaces.namespaces_pb2 + +class NamespaceServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_get_namespace( + self, req: policy.namespaces.namespaces_pb2.GetNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.GetNamespaceResponse]: + """Low-level method to call GetNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/GetNamespace" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.GetNamespaceResponse,extra_headers, timeout_seconds) + + + def get_namespace( + self, req: policy.namespaces.namespaces_pb2.GetNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.GetNamespaceResponse: + response = self.call_get_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_namespaces( + self, req: policy.namespaces.namespaces_pb2.ListNamespacesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.ListNamespacesResponse]: + """Low-level method to call ListNamespaces, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/ListNamespaces" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.ListNamespacesResponse,extra_headers, timeout_seconds) + + + def list_namespaces( + self, req: policy.namespaces.namespaces_pb2.ListNamespacesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.ListNamespacesResponse: + response = self.call_list_namespaces(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_namespace( + self, req: policy.namespaces.namespaces_pb2.CreateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.CreateNamespaceResponse]: + """Low-level method to call CreateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/CreateNamespace" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.CreateNamespaceResponse,extra_headers, timeout_seconds) + + + def create_namespace( + self, req: policy.namespaces.namespaces_pb2.CreateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.CreateNamespaceResponse: + response = self.call_create_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return 
msg + + def call_update_namespace( + self, req: policy.namespaces.namespaces_pb2.UpdateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.UpdateNamespaceResponse]: + """Low-level method to call UpdateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/UpdateNamespace" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.UpdateNamespaceResponse,extra_headers, timeout_seconds) + + + def update_namespace( + self, req: policy.namespaces.namespaces_pb2.UpdateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.UpdateNamespaceResponse: + response = self.call_update_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_deactivate_namespace( + self, req: policy.namespaces.namespaces_pb2.DeactivateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.DeactivateNamespaceResponse]: + """Low-level method to call DeactivateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/DeactivateNamespace" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.DeactivateNamespaceResponse,extra_headers, timeout_seconds) + + + def deactivate_namespace( + self, req: policy.namespaces.namespaces_pb2.DeactivateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.DeactivateNamespaceResponse: + response = self.call_deactivate_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_assign_key_access_server_to_namespace( + self, req: policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceResponse]: + """Low-level method to call AssignKeyAccessServerToNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/AssignKeyAccessServerToNamespace" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceResponse,extra_headers, timeout_seconds) + + + def assign_key_access_server_to_namespace( + self, req: policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceResponse: + response = self.call_assign_key_access_server_to_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_remove_key_access_server_from_namespace( + self, req: policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceRequest,extra_headers: HeaderInput | None=None, 
timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceResponse]: + """Low-level method to call RemoveKeyAccessServerFromNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/RemoveKeyAccessServerFromNamespace" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceResponse,extra_headers, timeout_seconds) + + + def remove_key_access_server_from_namespace( + self, req: policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceResponse: + response = self.call_remove_key_access_server_from_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_assign_public_key_to_namespace( + self, req: policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceResponse]: + """Low-level method to call AssignPublicKeyToNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/AssignPublicKeyToNamespace" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceResponse,extra_headers, timeout_seconds) + + + def assign_public_key_to_namespace( + self, req: policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceResponse: + response = self.call_assign_public_key_to_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_remove_public_key_from_namespace( + self, req: policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceResponse]: + """Low-level method to call RemovePublicKeyFromNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/RemovePublicKeyFromNamespace" + return self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceResponse,extra_headers, timeout_seconds) + + + def remove_public_key_from_namespace( + self, req: policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceResponse: + response = self.call_remove_public_key_from_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncNamespaceServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: 
ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_get_namespace( + self, req: policy.namespaces.namespaces_pb2.GetNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.GetNamespaceResponse]: + """Low-level method to call GetNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/GetNamespace" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.GetNamespaceResponse,extra_headers, timeout_seconds) + + async def get_namespace( + self, req: policy.namespaces.namespaces_pb2.GetNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.GetNamespaceResponse: + response = await self.call_get_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_namespaces( + self, req: policy.namespaces.namespaces_pb2.ListNamespacesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.ListNamespacesResponse]: + """Low-level method to call ListNamespaces, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/ListNamespaces" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.ListNamespacesResponse,extra_headers, timeout_seconds) + + async def list_namespaces( + self, req: policy.namespaces.namespaces_pb2.ListNamespacesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.ListNamespacesResponse: + response = await self.call_list_namespaces(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_namespace( + self, req: policy.namespaces.namespaces_pb2.CreateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.CreateNamespaceResponse]: + """Low-level method to call CreateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/CreateNamespace" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.CreateNamespaceResponse,extra_headers, timeout_seconds) + + async def create_namespace( + self, req: policy.namespaces.namespaces_pb2.CreateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.CreateNamespaceResponse: + response = await self.call_create_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_namespace( + self, req: policy.namespaces.namespaces_pb2.UpdateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> 
UnaryOutput[policy.namespaces.namespaces_pb2.UpdateNamespaceResponse]: + """Low-level method to call UpdateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/UpdateNamespace" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.UpdateNamespaceResponse,extra_headers, timeout_seconds) + + async def update_namespace( + self, req: policy.namespaces.namespaces_pb2.UpdateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.UpdateNamespaceResponse: + response = await self.call_update_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_deactivate_namespace( + self, req: policy.namespaces.namespaces_pb2.DeactivateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.DeactivateNamespaceResponse]: + """Low-level method to call DeactivateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/DeactivateNamespace" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.DeactivateNamespaceResponse,extra_headers, timeout_seconds) + + async def deactivate_namespace( + self, req: policy.namespaces.namespaces_pb2.DeactivateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.DeactivateNamespaceResponse: + response = await self.call_deactivate_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_assign_key_access_server_to_namespace( + self, req: policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceResponse]: + """Low-level method to call AssignKeyAccessServerToNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/AssignKeyAccessServerToNamespace" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceResponse,extra_headers, timeout_seconds) + + async def assign_key_access_server_to_namespace( + self, req: policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceResponse: + response = await self.call_assign_key_access_server_to_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_remove_key_access_server_from_namespace( + self, req: policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> 
UnaryOutput[policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceResponse]: + """Low-level method to call RemoveKeyAccessServerFromNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/RemoveKeyAccessServerFromNamespace" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceResponse,extra_headers, timeout_seconds) + + async def remove_key_access_server_from_namespace( + self, req: policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceResponse: + response = await self.call_remove_key_access_server_from_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_assign_public_key_to_namespace( + self, req: policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceResponse]: + """Low-level method to call AssignPublicKeyToNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/AssignPublicKeyToNamespace" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceResponse,extra_headers, timeout_seconds) + + async def assign_public_key_to_namespace( + self, req: policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceResponse: + response = await self.call_assign_public_key_to_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_remove_public_key_from_namespace( + self, req: policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceResponse]: + """Low-level method to call RemovePublicKeyFromNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.namespaces.NamespaceService/RemovePublicKeyFromNamespace" + return await self._connect_client.call_unary(url, req, policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceResponse,extra_headers, timeout_seconds) + + async def remove_public_key_from_namespace( + self, req: policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceResponse: + response = await self.call_remove_public_key_from_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class NamespaceServiceProtocol(typing.Protocol): + def get_namespace(self, req: 
ClientRequest[policy.namespaces.namespaces_pb2.GetNamespaceRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.GetNamespaceResponse]: + ... + def list_namespaces(self, req: ClientRequest[policy.namespaces.namespaces_pb2.ListNamespacesRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.ListNamespacesResponse]: + ... + def create_namespace(self, req: ClientRequest[policy.namespaces.namespaces_pb2.CreateNamespaceRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.CreateNamespaceResponse]: + ... + def update_namespace(self, req: ClientRequest[policy.namespaces.namespaces_pb2.UpdateNamespaceRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.UpdateNamespaceResponse]: + ... + def deactivate_namespace(self, req: ClientRequest[policy.namespaces.namespaces_pb2.DeactivateNamespaceRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.DeactivateNamespaceResponse]: + ... + def assign_key_access_server_to_namespace(self, req: ClientRequest[policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceResponse]: + ... + def remove_key_access_server_from_namespace(self, req: ClientRequest[policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceResponse]: + ... + def assign_public_key_to_namespace(self, req: ClientRequest[policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceResponse]: + ... + def remove_public_key_from_namespace(self, req: ClientRequest[policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceRequest]) -> ServerResponse[policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceResponse]: + ... 
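Before the WSGI wiring that follows, a hedged sketch of the generated async namespace client (again not part of the generated sources; placeholder URL and token, and `extra_headers` assumed to accept a dict):

```python
# Hedged async usage sketch for AsyncNamespaceServiceClient.
# Assumptions: the generated `policy` package is importable and an OpenTDF
# platform listens at the placeholder URL; "<token>" is a placeholder.
import asyncio

import aiohttp

from policy.namespaces import namespaces_pb2
from policy.namespaces.namespaces_pb2_connect import AsyncNamespaceServiceClient


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        client = AsyncNamespaceServiceClient("http://localhost:8080", session)

        # Create a namespace, then list namespaces back.
        created = await client.create_namespace(
            namespaces_pb2.CreateNamespaceRequest(name="example.com"),
            extra_headers={"authorization": "Bearer <token>"},
        )
        print(created.namespace.fqn)

        listed = await client.list_namespaces(
            namespaces_pb2.ListNamespacesRequest(),
            extra_headers={"authorization": "Bearer <token>"},
        )
        for ns in listed.namespaces:
            print(ns.id, ns.name)


asyncio.run(main())
```

The `wsgi_namespace_service` factory below accepts any object satisfying `NamespaceServiceProtocol` and returns a WSGI application with each unary RPC registered at its Connect path.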
+ +NAMESPACE_SERVICE_PATH_PREFIX = "/policy.namespaces.NamespaceService" + +def wsgi_namespace_service(implementation: NamespaceServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.namespaces.NamespaceService/GetNamespace", implementation.get_namespace, policy.namespaces.namespaces_pb2.GetNamespaceRequest) + app.register_unary_rpc("/policy.namespaces.NamespaceService/ListNamespaces", implementation.list_namespaces, policy.namespaces.namespaces_pb2.ListNamespacesRequest) + app.register_unary_rpc("/policy.namespaces.NamespaceService/CreateNamespace", implementation.create_namespace, policy.namespaces.namespaces_pb2.CreateNamespaceRequest) + app.register_unary_rpc("/policy.namespaces.NamespaceService/UpdateNamespace", implementation.update_namespace, policy.namespaces.namespaces_pb2.UpdateNamespaceRequest) + app.register_unary_rpc("/policy.namespaces.NamespaceService/DeactivateNamespace", implementation.deactivate_namespace, policy.namespaces.namespaces_pb2.DeactivateNamespaceRequest) + app.register_unary_rpc("/policy.namespaces.NamespaceService/AssignKeyAccessServerToNamespace", implementation.assign_key_access_server_to_namespace, policy.namespaces.namespaces_pb2.AssignKeyAccessServerToNamespaceRequest) + app.register_unary_rpc("/policy.namespaces.NamespaceService/RemoveKeyAccessServerFromNamespace", implementation.remove_key_access_server_from_namespace, policy.namespaces.namespaces_pb2.RemoveKeyAccessServerFromNamespaceRequest) + app.register_unary_rpc("/policy.namespaces.NamespaceService/AssignPublicKeyToNamespace", implementation.assign_public_key_to_namespace, policy.namespaces.namespaces_pb2.AssignPublicKeyToNamespaceRequest) + app.register_unary_rpc("/policy.namespaces.NamespaceService/RemovePublicKeyFromNamespace", implementation.remove_public_key_from_namespace, policy.namespaces.namespaces_pb2.RemovePublicKeyFromNamespaceRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/policy/objects_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/objects_pb2.py new file mode 100644 index 0000000..c760902 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/objects_pb2.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/objects.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/objects.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14policy/objects.proto\x12\x06policy\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x1egoogle/protobuf/wrappers.proto\"i\n\x12SimpleKasPublicKey\x12/\n\talgorithm\x18\x01 \x01(\x0e\x32\x11.policy.AlgorithmR\talgorithm\x12\x10\n\x03kid\x18\x02 \x01(\tR\x03kid\x12\x10\n\x03pem\x18\x03 \x01(\tR\x03pem\"y\n\x0cSimpleKasKey\x12\x17\n\x07kas_uri\x18\x01 \x01(\tR\x06kasUri\x12\x39\n\npublic_key\x18\x02 \x01(\x0b\x32\x1a.policy.SimpleKasPublicKeyR\tpublicKey\x12\x15\n\x06kas_id\x18\x03 \x01(\tR\x05kasId\"\x86\x01\n\x11KeyProviderConfig\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1f\n\x0b\x63onfig_json\x18\x03 \x01(\x0cR\nconfigJson\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"\x85\x02\n\tNamespace\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x10\n\x03\x66qn\x18\x03 \x01(\tR\x03\x66qn\x12\x32\n\x06\x61\x63tive\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\x06\x61\x63tive\x12,\n\x08metadata\x18\x05 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\x12/\n\x06grants\x18\x06 \x03(\x0b\x32\x17.policy.KeyAccessServerR\x06grants\x12/\n\x08kas_keys\x18\x07 \x03(\x0b\x32\x14.policy.SimpleKasKeyR\x07kasKeys\"\x9d\x03\n\tAttribute\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12/\n\tnamespace\x18\x02 \x01(\x0b\x32\x11.policy.NamespaceR\tnamespace\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12>\n\x04rule\x18\x04 \x01(\x0e\x32\x1d.policy.AttributeRuleTypeEnumB\x0b\xbaH\x08\x82\x01\x02\x10\x01\xc8\x01\x01R\x04rule\x12%\n\x06values\x18\x05 \x03(\x0b\x32\r.policy.ValueR\x06values\x12/\n\x06grants\x18\x06 \x03(\x0b\x32\x17.policy.KeyAccessServerR\x06grants\x12\x10\n\x03\x66qn\x18\x07 \x01(\tR\x03\x66qn\x12\x32\n\x06\x61\x63tive\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\x06\x61\x63tive\x12/\n\x08kas_keys\x18\t \x03(\x0b\x32\x14.policy.SimpleKasKeyR\x07kasKeys\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"\xcc\x03\n\x05Value\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12/\n\tattribute\x18\x02 \x01(\x0b\x32\x11.policy.AttributeR\tattribute\x12\x14\n\x05value\x18\x03 \x01(\tR\x05value\x12/\n\x06grants\x18\x05 \x03(\x0b\x32\x17.policy.KeyAccessServerR\x06grants\x12\x10\n\x03\x66qn\x18\x06 \x01(\tR\x03\x66qn\x12\x32\n\x06\x61\x63tive\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\x06\x61\x63tive\x12\x41\n\x10subject_mappings\x18\x08 \x03(\x0b\x32\x16.policy.SubjectMappingR\x0fsubjectMappings\x12/\n\x08kas_keys\x18\t \x03(\x0b\x32\x14.policy.SimpleKasKeyR\x07kasKeys\x12\x44\n\x11resource_mappings\x18\n 
\x03(\x0b\x32\x17.policy.ResourceMappingR\x10resourceMappings\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadataJ\x04\x08\x04\x10\x05R\x07members\"\xa8\x02\n\x06\x41\x63tion\x12\x0e\n\x02id\x18\x03 \x01(\tR\x02id\x12;\n\x08standard\x18\x01 \x01(\x0e\x32\x1d.policy.Action.StandardActionH\x00R\x08standard\x12\x18\n\x06\x63ustom\x18\x02 \x01(\tH\x00R\x06\x63ustom\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"l\n\x0eStandardAction\x12\x1f\n\x1bSTANDARD_ACTION_UNSPECIFIED\x10\x00\x12\x1b\n\x17STANDARD_ACTION_DECRYPT\x10\x01\x12\x1c\n\x18STANDARD_ACTION_TRANSMIT\x10\x02\x42\x07\n\x05value\"\x81\x02\n\x0eSubjectMapping\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x36\n\x0f\x61ttribute_value\x18\x02 \x01(\x0b\x32\r.policy.ValueR\x0e\x61ttributeValue\x12O\n\x15subject_condition_set\x18\x03 \x01(\x0b\x32\x1b.policy.SubjectConditionSetR\x13subjectConditionSet\x12(\n\x07\x61\x63tions\x18\x04 \x03(\x0b\x32\x0e.policy.ActionR\x07\x61\x63tions\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"\xe9\x01\n\tCondition\x12M\n\x1fsubject_external_selector_value\x18\x01 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x1csubjectExternalSelectorValue\x12K\n\x08operator\x18\x02 \x01(\x0e\x32\".policy.SubjectMappingOperatorEnumB\x0b\xbaH\x08\x82\x01\x02\x10\x01\xc8\x01\x01R\x08operator\x12@\n\x17subject_external_values\x18\x03 \x03(\tB\x08\xbaH\x05\x92\x01\x02\x08\x01R\x15subjectExternalValues\"\xa7\x01\n\x0e\x43onditionGroup\x12;\n\nconditions\x18\x01 \x03(\x0b\x32\x11.policy.ConditionB\x08\xbaH\x05\x92\x01\x02\x08\x01R\nconditions\x12X\n\x10\x62oolean_operator\x18\x02 \x01(\x0e\x32 .policy.ConditionBooleanTypeEnumB\x0b\xbaH\x08\x82\x01\x02\x10\x01\xc8\x01\x01R\x0f\x62ooleanOperator\"Y\n\nSubjectSet\x12K\n\x10\x63ondition_groups\x18\x01 \x03(\x0b\x32\x16.policy.ConditionGroupB\x08\xbaH\x05\x92\x01\x02\x08\x01R\x0f\x63onditionGroups\"\x94\x01\n\x13SubjectConditionSet\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12?\n\x0csubject_sets\x18\x03 \x03(\x0b\x32\x12.policy.SubjectSetB\x08\xbaH\x05\x92\x01\x02\x08\x01R\x0bsubjectSets\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"|\n\x0fSubjectProperty\x12\x42\n\x17\x65xternal_selector_value\x18\x01 \x01(\tB\n\xbaH\x07r\x02\x10\x01\xc8\x01\x01R\x15\x65xternalSelectorValue\x12%\n\x0e\x65xternal_value\x18\x02 \x01(\tR\rexternalValue\"\x9b\x01\n\x14ResourceMappingGroup\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12)\n\x0cnamespace_id\x18\x02 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x0bnamespaceId\x12\x1a\n\x04name\x18\x03 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x04name\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"\xd9\x01\n\x0fResourceMapping\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12,\n\x08metadata\x18\x02 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\x12>\n\x0f\x61ttribute_value\x18\x03 \x01(\x0b\x32\r.policy.ValueB\x06\xbaH\x03\xc8\x01\x01R\x0e\x61ttributeValue\x12\x14\n\x05terms\x18\x04 \x03(\tR\x05terms\x12\x32\n\x05group\x18\x05 \x01(\x0b\x32\x1c.policy.ResourceMappingGroupR\x05group\"\x85\x05\n\x0fKeyAccessServer\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x87\x03\n\x03uri\x18\x02 \x01(\tB\xf4\x02\xbaH\xf0\x02\xba\x01\xec\x02\n\nuri_format\x12\xcf\x01URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. 
Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\x1a\x8b\x01this.matches(\'^https?://[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?(\\\\.[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?)*(:[0-9]+)?(/.*)?$\')R\x03uri\x12\x30\n\npublic_key\x18\x03 \x01(\x0b\x32\x11.policy.PublicKeyR\tpublicKey\x12\x33\n\x0bsource_type\x18\x04 \x01(\x0e\x32\x12.policy.SourceTypeR\nsourceType\x12/\n\x08kas_keys\x18\x05 \x03(\x0b\x32\x14.policy.SimpleKasKeyR\x07kasKeys\x12\x12\n\x04name\x18\x14 \x01(\tR\x04name\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"\x97\x02\n\x03Key\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x37\n\tis_active\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\x08isActive\x12\x39\n\nwas_mapped\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\twasMapped\x12\x33\n\npublic_key\x18\x04 \x01(\x0b\x32\x14.policy.KasPublicKeyR\tpublicKey\x12)\n\x03kas\x18\x05 \x01(\x0b\x32\x17.policy.KeyAccessServerR\x03kas\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"\x84\x01\n\x0cKasPublicKey\x12\x1c\n\x03pem\x18\x01 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x18\x80@R\x03pem\x12\x1b\n\x03kid\x18\x02 \x01(\tB\t\xbaH\x06r\x04\x10\x01\x18 R\x03kid\x12\x39\n\x03\x61lg\x18\x03 \x01(\x0e\x32\x1b.policy.KasPublicKeyAlgEnumB\n\xbaH\x07\x82\x01\x04\x10\x01 \x00R\x03\x61lg\";\n\x0fKasPublicKeySet\x12(\n\x04keys\x18\x01 \x03(\x0b\x32\x14.policy.KasPublicKeyR\x04keys\"\xe0\x03\n\tPublicKey\x12\x84\x03\n\x06remote\x18\x01 \x01(\tB\xe9\x02\xbaH\xe5\x02\xba\x01\xe1\x02\n\nuri_format\x12\xcf\x01URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\x1a\x80\x01this.matches(\'^https://[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?(\\\\.[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?)*(/.*)?$\')H\x00R\x06remote\x12\x31\n\x06\x63\x61\x63hed\x18\x03 \x01(\x0b\x32\x17.policy.KasPublicKeySetH\x00R\x06\x63\x61\x63hedB\x0c\n\npublic_keyJ\x04\x08\x02\x10\x03R\x05local\"\x9f\x01\n\x12RegisteredResource\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x37\n\x06values\x18\x03 \x03(\x0b\x32\x1f.policy.RegisteredResourceValueR\x06values\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"\xca\x03\n\x17RegisteredResourceValue\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\x12\x36\n\x08resource\x18\x03 \x01(\x0b\x32\x1a.policy.RegisteredResourceR\x08resource\x12l\n\x17\x61\x63tion_attribute_values\x18\x04 \x03(\x0b\x32\x34.policy.RegisteredResourceValue.ActionAttributeValueR\x15\x61\x63tionAttributeValues\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\x1a\xb4\x01\n\x14\x41\x63tionAttributeValue\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12&\n\x06\x61\x63tion\x18\x02 \x01(\x0b\x32\x0e.policy.ActionR\x06\x61\x63tion\x12\x36\n\x0f\x61ttribute_value\x18\x03 \x01(\x0b\x32\r.policy.ValueR\x0e\x61ttributeValue\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"a\n\x06KasKey\x12\x15\n\x06kas_id\x18\x01 \x01(\tR\x05kasId\x12\'\n\x03key\x18\x02 \x01(\x0b\x32\x15.policy.AsymmetricKeyR\x03key\x12\x17\n\x07kas_uri\x18\x03 \x01(\tR\x06kasUri\")\n\x0cPublicKeyCtx\x12\x19\n\x03pem\x18\x01 \x01(\tB\x07\xbaH\x04r\x02\x10\x01R\x03pem\"P\n\rPrivateKeyCtx\x12\x1e\n\x06key_id\x18\x01 
\x01(\tB\x07\xbaH\x04r\x02\x10\x01R\x05keyId\x12\x1f\n\x0bwrapped_key\x18\x02 \x01(\tR\nwrappedKey\"\xb9\x03\n\rAsymmetricKey\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x15\n\x06key_id\x18\x02 \x01(\tR\x05keyId\x12\x36\n\rkey_algorithm\x18\x03 \x01(\x0e\x32\x11.policy.AlgorithmR\x0ckeyAlgorithm\x12\x30\n\nkey_status\x18\x04 \x01(\x0e\x32\x11.policy.KeyStatusR\tkeyStatus\x12*\n\x08key_mode\x18\x05 \x01(\x0e\x32\x0f.policy.KeyModeR\x07keyMode\x12:\n\x0epublic_key_ctx\x18\x06 \x01(\x0b\x32\x14.policy.PublicKeyCtxR\x0cpublicKeyCtx\x12=\n\x0fprivate_key_ctx\x18\x07 \x01(\x0b\x32\x15.policy.PrivateKeyCtxR\rprivateKeyCtx\x12\x42\n\x0fprovider_config\x18\x08 \x01(\x0b\x32\x19.policy.KeyProviderConfigR\x0eproviderConfig\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata\"\x9e\x02\n\x0cSymmetricKey\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x15\n\x06key_id\x18\x02 \x01(\tR\x05keyId\x12\x30\n\nkey_status\x18\x03 \x01(\x0e\x32\x11.policy.KeyStatusR\tkeyStatus\x12*\n\x08key_mode\x18\x04 \x01(\x0e\x32\x0f.policy.KeyModeR\x07keyMode\x12\x17\n\x07key_ctx\x18\x05 \x01(\x0cR\x06keyCtx\x12\x42\n\x0fprovider_config\x18\x06 \x01(\x0b\x32\x19.policy.KeyProviderConfigR\x0eproviderConfig\x12,\n\x08metadata\x18\x64 \x01(\x0b\x32\x10.common.MetadataR\x08metadata*\xb3\x01\n\x15\x41ttributeRuleTypeEnum\x12(\n$ATTRIBUTE_RULE_TYPE_ENUM_UNSPECIFIED\x10\x00\x12#\n\x1f\x41TTRIBUTE_RULE_TYPE_ENUM_ALL_OF\x10\x01\x12#\n\x1f\x41TTRIBUTE_RULE_TYPE_ENUM_ANY_OF\x10\x02\x12&\n\"ATTRIBUTE_RULE_TYPE_ENUM_HIERARCHY\x10\x03*\xca\x01\n\x1aSubjectMappingOperatorEnum\x12-\n)SUBJECT_MAPPING_OPERATOR_ENUM_UNSPECIFIED\x10\x00\x12$\n SUBJECT_MAPPING_OPERATOR_ENUM_IN\x10\x01\x12(\n$SUBJECT_MAPPING_OPERATOR_ENUM_NOT_IN\x10\x02\x12-\n)SUBJECT_MAPPING_OPERATOR_ENUM_IN_CONTAINS\x10\x03*\x90\x01\n\x18\x43onditionBooleanTypeEnum\x12+\n\'CONDITION_BOOLEAN_TYPE_ENUM_UNSPECIFIED\x10\x00\x12#\n\x1f\x43ONDITION_BOOLEAN_TYPE_ENUM_AND\x10\x01\x12\"\n\x1e\x43ONDITION_BOOLEAN_TYPE_ENUM_OR\x10\x02*]\n\nSourceType\x12\x1b\n\x17SOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x18\n\x14SOURCE_TYPE_INTERNAL\x10\x01\x12\x18\n\x14SOURCE_TYPE_EXTERNAL\x10\x02*\x88\x02\n\x13KasPublicKeyAlgEnum\x12\'\n#KAS_PUBLIC_KEY_ALG_ENUM_UNSPECIFIED\x10\x00\x12$\n KAS_PUBLIC_KEY_ALG_ENUM_RSA_2048\x10\x01\x12$\n KAS_PUBLIC_KEY_ALG_ENUM_RSA_4096\x10\x02\x12(\n$KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP256R1\x10\x05\x12(\n$KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP384R1\x10\x06\x12(\n$KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP521R1\x10\x07*\x9b\x01\n\tAlgorithm\x12\x19\n\x15\x41LGORITHM_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41LGORITHM_RSA_2048\x10\x01\x12\x16\n\x12\x41LGORITHM_RSA_4096\x10\x02\x12\x15\n\x11\x41LGORITHM_EC_P256\x10\x03\x12\x15\n\x11\x41LGORITHM_EC_P384\x10\x04\x12\x15\n\x11\x41LGORITHM_EC_P521\x10\x05*V\n\tKeyStatus\x12\x1a\n\x16KEY_STATUS_UNSPECIFIED\x10\x00\x12\x15\n\x11KEY_STATUS_ACTIVE\x10\x01\x12\x16\n\x12KEY_STATUS_ROTATED\x10\x02*\x94\x01\n\x07KeyMode\x12\x18\n\x14KEY_MODE_UNSPECIFIED\x10\x00\x12\x1c\n\x18KEY_MODE_CONFIG_ROOT_KEY\x10\x01\x12\x1e\n\x1aKEY_MODE_PROVIDER_ROOT_KEY\x10\x02\x12\x13\n\x0fKEY_MODE_REMOTE\x10\x03\x12\x1c\n\x18KEY_MODE_PUBLIC_KEY_ONLY\x10\x04\x42R\n\ncom.policyB\x0cObjectsProtoP\x01\xa2\x02\x03PXX\xaa\x02\x06Policy\xca\x02\x06Policy\xe2\x02\x12Policy\\GPBMetadata\xea\x02\x06Policyb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.objects_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + 
_globals['DESCRIPTOR']._serialized_options = b'\n\ncom.policyB\014ObjectsProtoP\001\242\002\003PXX\252\002\006Policy\312\002\006Policy\342\002\022Policy\\GPBMetadata\352\002\006Policy' + _globals['_ATTRIBUTE'].fields_by_name['rule']._loaded_options = None + _globals['_ATTRIBUTE'].fields_by_name['rule']._serialized_options = b'\272H\010\202\001\002\020\001\310\001\001' + _globals['_CONDITION'].fields_by_name['subject_external_selector_value']._loaded_options = None + _globals['_CONDITION'].fields_by_name['subject_external_selector_value']._serialized_options = b'\272H\003\310\001\001' + _globals['_CONDITION'].fields_by_name['operator']._loaded_options = None + _globals['_CONDITION'].fields_by_name['operator']._serialized_options = b'\272H\010\202\001\002\020\001\310\001\001' + _globals['_CONDITION'].fields_by_name['subject_external_values']._loaded_options = None + _globals['_CONDITION'].fields_by_name['subject_external_values']._serialized_options = b'\272H\005\222\001\002\010\001' + _globals['_CONDITIONGROUP'].fields_by_name['conditions']._loaded_options = None + _globals['_CONDITIONGROUP'].fields_by_name['conditions']._serialized_options = b'\272H\005\222\001\002\010\001' + _globals['_CONDITIONGROUP'].fields_by_name['boolean_operator']._loaded_options = None + _globals['_CONDITIONGROUP'].fields_by_name['boolean_operator']._serialized_options = b'\272H\010\202\001\002\020\001\310\001\001' + _globals['_SUBJECTSET'].fields_by_name['condition_groups']._loaded_options = None + _globals['_SUBJECTSET'].fields_by_name['condition_groups']._serialized_options = b'\272H\005\222\001\002\010\001' + _globals['_SUBJECTCONDITIONSET'].fields_by_name['subject_sets']._loaded_options = None + _globals['_SUBJECTCONDITIONSET'].fields_by_name['subject_sets']._serialized_options = b'\272H\005\222\001\002\010\001' + _globals['_SUBJECTPROPERTY'].fields_by_name['external_selector_value']._loaded_options = None + _globals['_SUBJECTPROPERTY'].fields_by_name['external_selector_value']._serialized_options = b'\272H\007r\002\020\001\310\001\001' + _globals['_RESOURCEMAPPINGGROUP'].fields_by_name['namespace_id']._loaded_options = None + _globals['_RESOURCEMAPPINGGROUP'].fields_by_name['namespace_id']._serialized_options = b'\272H\003\310\001\001' + _globals['_RESOURCEMAPPINGGROUP'].fields_by_name['name']._loaded_options = None + _globals['_RESOURCEMAPPINGGROUP'].fields_by_name['name']._serialized_options = b'\272H\003\310\001\001' + _globals['_RESOURCEMAPPING'].fields_by_name['attribute_value']._loaded_options = None + _globals['_RESOURCEMAPPING'].fields_by_name['attribute_value']._serialized_options = b'\272H\003\310\001\001' + _globals['_KEYACCESSSERVER'].fields_by_name['uri']._loaded_options = None + _globals['_KEYACCESSSERVER'].fields_by_name['uri']._serialized_options = b'\272H\360\002\272\001\354\002\n\nuri_format\022\317\001URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. 
Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\032\213\001this.matches(\'^https?://[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?(\\\\.[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?)*(:[0-9]+)?(/.*)?$\')' + _globals['_KASPUBLICKEY'].fields_by_name['pem']._loaded_options = None + _globals['_KASPUBLICKEY'].fields_by_name['pem']._serialized_options = b'\272H\007r\005\020\001\030\200@' + _globals['_KASPUBLICKEY'].fields_by_name['kid']._loaded_options = None + _globals['_KASPUBLICKEY'].fields_by_name['kid']._serialized_options = b'\272H\006r\004\020\001\030 ' + _globals['_KASPUBLICKEY'].fields_by_name['alg']._loaded_options = None + _globals['_KASPUBLICKEY'].fields_by_name['alg']._serialized_options = b'\272H\007\202\001\004\020\001 \000' + _globals['_PUBLICKEY'].fields_by_name['remote']._loaded_options = None + _globals['_PUBLICKEY'].fields_by_name['remote']._serialized_options = b'\272H\345\002\272\001\341\002\n\nuri_format\022\317\001URI must be a valid URL (e.g., \'https://demo.com/\') followed by additional segments. Each segment must start and end with an alphanumeric character, can contain hyphens, alphanumeric characters, and slashes.\032\200\001this.matches(\'^https://[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?(\\\\.[a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?)*(/.*)?$\')' + _globals['_PUBLICKEYCTX'].fields_by_name['pem']._loaded_options = None + _globals['_PUBLICKEYCTX'].fields_by_name['pem']._serialized_options = b'\272H\004r\002\020\001' + _globals['_PRIVATEKEYCTX'].fields_by_name['key_id']._loaded_options = None + _globals['_PRIVATEKEYCTX'].fields_by_name['key_id']._serialized_options = b'\272H\004r\002\020\001' + _globals['_ATTRIBUTERULETYPEENUM']._serialized_start=6525 + _globals['_ATTRIBUTERULETYPEENUM']._serialized_end=6704 + _globals['_SUBJECTMAPPINGOPERATORENUM']._serialized_start=6707 + _globals['_SUBJECTMAPPINGOPERATORENUM']._serialized_end=6909 + _globals['_CONDITIONBOOLEANTYPEENUM']._serialized_start=6912 + _globals['_CONDITIONBOOLEANTYPEENUM']._serialized_end=7056 + _globals['_SOURCETYPE']._serialized_start=7058 + _globals['_SOURCETYPE']._serialized_end=7151 + _globals['_KASPUBLICKEYALGENUM']._serialized_start=7154 + _globals['_KASPUBLICKEYALGENUM']._serialized_end=7418 + _globals['_ALGORITHM']._serialized_start=7421 + _globals['_ALGORITHM']._serialized_end=7576 + _globals['_KEYSTATUS']._serialized_start=7578 + _globals['_KEYSTATUS']._serialized_end=7664 + _globals['_KEYMODE']._serialized_start=7667 + _globals['_KEYMODE']._serialized_end=7815 + _globals['_SIMPLEKASPUBLICKEY']._serialized_start=114 + _globals['_SIMPLEKASPUBLICKEY']._serialized_end=219 + _globals['_SIMPLEKASKEY']._serialized_start=221 + _globals['_SIMPLEKASKEY']._serialized_end=342 + _globals['_KEYPROVIDERCONFIG']._serialized_start=345 + _globals['_KEYPROVIDERCONFIG']._serialized_end=479 + _globals['_NAMESPACE']._serialized_start=482 + _globals['_NAMESPACE']._serialized_end=743 + _globals['_ATTRIBUTE']._serialized_start=746 + _globals['_ATTRIBUTE']._serialized_end=1159 + _globals['_VALUE']._serialized_start=1162 + _globals['_VALUE']._serialized_end=1622 + _globals['_ACTION']._serialized_start=1625 + _globals['_ACTION']._serialized_end=1921 + _globals['_ACTION_STANDARDACTION']._serialized_start=1804 + _globals['_ACTION_STANDARDACTION']._serialized_end=1912 + _globals['_SUBJECTMAPPING']._serialized_start=1924 + _globals['_SUBJECTMAPPING']._serialized_end=2181 + _globals['_CONDITION']._serialized_start=2184 + 
_globals['_CONDITION']._serialized_end=2417 + _globals['_CONDITIONGROUP']._serialized_start=2420 + _globals['_CONDITIONGROUP']._serialized_end=2587 + _globals['_SUBJECTSET']._serialized_start=2589 + _globals['_SUBJECTSET']._serialized_end=2678 + _globals['_SUBJECTCONDITIONSET']._serialized_start=2681 + _globals['_SUBJECTCONDITIONSET']._serialized_end=2829 + _globals['_SUBJECTPROPERTY']._serialized_start=2831 + _globals['_SUBJECTPROPERTY']._serialized_end=2955 + _globals['_RESOURCEMAPPINGGROUP']._serialized_start=2958 + _globals['_RESOURCEMAPPINGGROUP']._serialized_end=3113 + _globals['_RESOURCEMAPPING']._serialized_start=3116 + _globals['_RESOURCEMAPPING']._serialized_end=3333 + _globals['_KEYACCESSSERVER']._serialized_start=3336 + _globals['_KEYACCESSSERVER']._serialized_end=3981 + _globals['_KEY']._serialized_start=3984 + _globals['_KEY']._serialized_end=4263 + _globals['_KASPUBLICKEY']._serialized_start=4266 + _globals['_KASPUBLICKEY']._serialized_end=4398 + _globals['_KASPUBLICKEYSET']._serialized_start=4400 + _globals['_KASPUBLICKEYSET']._serialized_end=4459 + _globals['_PUBLICKEY']._serialized_start=4462 + _globals['_PUBLICKEY']._serialized_end=4942 + _globals['_REGISTEREDRESOURCE']._serialized_start=4945 + _globals['_REGISTEREDRESOURCE']._serialized_end=5104 + _globals['_REGISTEREDRESOURCEVALUE']._serialized_start=5107 + _globals['_REGISTEREDRESOURCEVALUE']._serialized_end=5565 + _globals['_REGISTEREDRESOURCEVALUE_ACTIONATTRIBUTEVALUE']._serialized_start=5385 + _globals['_REGISTEREDRESOURCEVALUE_ACTIONATTRIBUTEVALUE']._serialized_end=5565 + _globals['_KASKEY']._serialized_start=5567 + _globals['_KASKEY']._serialized_end=5664 + _globals['_PUBLICKEYCTX']._serialized_start=5666 + _globals['_PUBLICKEYCTX']._serialized_end=5707 + _globals['_PRIVATEKEYCTX']._serialized_start=5709 + _globals['_PRIVATEKEYCTX']._serialized_end=5789 + _globals['_ASYMMETRICKEY']._serialized_start=5792 + _globals['_ASYMMETRICKEY']._serialized_end=6233 + _globals['_SYMMETRICKEY']._serialized_start=6236 + _globals['_SYMMETRICKEY']._serialized_end=6522 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/objects_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/objects_pb2.pyi new file mode 100644 index 0000000..532000f --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/objects_pb2.pyi @@ -0,0 +1,464 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from common import common_pb2 as _common_pb2 +from google.protobuf import wrappers_pb2 as _wrappers_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class AttributeRuleTypeEnum(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + ATTRIBUTE_RULE_TYPE_ENUM_UNSPECIFIED: _ClassVar[AttributeRuleTypeEnum] + ATTRIBUTE_RULE_TYPE_ENUM_ALL_OF: _ClassVar[AttributeRuleTypeEnum] + ATTRIBUTE_RULE_TYPE_ENUM_ANY_OF: _ClassVar[AttributeRuleTypeEnum] + ATTRIBUTE_RULE_TYPE_ENUM_HIERARCHY: _ClassVar[AttributeRuleTypeEnum] + +class SubjectMappingOperatorEnum(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + SUBJECT_MAPPING_OPERATOR_ENUM_UNSPECIFIED: 
_ClassVar[SubjectMappingOperatorEnum] + SUBJECT_MAPPING_OPERATOR_ENUM_IN: _ClassVar[SubjectMappingOperatorEnum] + SUBJECT_MAPPING_OPERATOR_ENUM_NOT_IN: _ClassVar[SubjectMappingOperatorEnum] + SUBJECT_MAPPING_OPERATOR_ENUM_IN_CONTAINS: _ClassVar[SubjectMappingOperatorEnum] + +class ConditionBooleanTypeEnum(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + CONDITION_BOOLEAN_TYPE_ENUM_UNSPECIFIED: _ClassVar[ConditionBooleanTypeEnum] + CONDITION_BOOLEAN_TYPE_ENUM_AND: _ClassVar[ConditionBooleanTypeEnum] + CONDITION_BOOLEAN_TYPE_ENUM_OR: _ClassVar[ConditionBooleanTypeEnum] + +class SourceType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + SOURCE_TYPE_UNSPECIFIED: _ClassVar[SourceType] + SOURCE_TYPE_INTERNAL: _ClassVar[SourceType] + SOURCE_TYPE_EXTERNAL: _ClassVar[SourceType] + +class KasPublicKeyAlgEnum(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + KAS_PUBLIC_KEY_ALG_ENUM_UNSPECIFIED: _ClassVar[KasPublicKeyAlgEnum] + KAS_PUBLIC_KEY_ALG_ENUM_RSA_2048: _ClassVar[KasPublicKeyAlgEnum] + KAS_PUBLIC_KEY_ALG_ENUM_RSA_4096: _ClassVar[KasPublicKeyAlgEnum] + KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP256R1: _ClassVar[KasPublicKeyAlgEnum] + KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP384R1: _ClassVar[KasPublicKeyAlgEnum] + KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP521R1: _ClassVar[KasPublicKeyAlgEnum] + +class Algorithm(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + ALGORITHM_UNSPECIFIED: _ClassVar[Algorithm] + ALGORITHM_RSA_2048: _ClassVar[Algorithm] + ALGORITHM_RSA_4096: _ClassVar[Algorithm] + ALGORITHM_EC_P256: _ClassVar[Algorithm] + ALGORITHM_EC_P384: _ClassVar[Algorithm] + ALGORITHM_EC_P521: _ClassVar[Algorithm] + +class KeyStatus(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + KEY_STATUS_UNSPECIFIED: _ClassVar[KeyStatus] + KEY_STATUS_ACTIVE: _ClassVar[KeyStatus] + KEY_STATUS_ROTATED: _ClassVar[KeyStatus] + +class KeyMode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + KEY_MODE_UNSPECIFIED: _ClassVar[KeyMode] + KEY_MODE_CONFIG_ROOT_KEY: _ClassVar[KeyMode] + KEY_MODE_PROVIDER_ROOT_KEY: _ClassVar[KeyMode] + KEY_MODE_REMOTE: _ClassVar[KeyMode] + KEY_MODE_PUBLIC_KEY_ONLY: _ClassVar[KeyMode] +ATTRIBUTE_RULE_TYPE_ENUM_UNSPECIFIED: AttributeRuleTypeEnum +ATTRIBUTE_RULE_TYPE_ENUM_ALL_OF: AttributeRuleTypeEnum +ATTRIBUTE_RULE_TYPE_ENUM_ANY_OF: AttributeRuleTypeEnum +ATTRIBUTE_RULE_TYPE_ENUM_HIERARCHY: AttributeRuleTypeEnum +SUBJECT_MAPPING_OPERATOR_ENUM_UNSPECIFIED: SubjectMappingOperatorEnum +SUBJECT_MAPPING_OPERATOR_ENUM_IN: SubjectMappingOperatorEnum +SUBJECT_MAPPING_OPERATOR_ENUM_NOT_IN: SubjectMappingOperatorEnum +SUBJECT_MAPPING_OPERATOR_ENUM_IN_CONTAINS: SubjectMappingOperatorEnum +CONDITION_BOOLEAN_TYPE_ENUM_UNSPECIFIED: ConditionBooleanTypeEnum +CONDITION_BOOLEAN_TYPE_ENUM_AND: ConditionBooleanTypeEnum +CONDITION_BOOLEAN_TYPE_ENUM_OR: ConditionBooleanTypeEnum +SOURCE_TYPE_UNSPECIFIED: SourceType +SOURCE_TYPE_INTERNAL: SourceType +SOURCE_TYPE_EXTERNAL: SourceType +KAS_PUBLIC_KEY_ALG_ENUM_UNSPECIFIED: KasPublicKeyAlgEnum +KAS_PUBLIC_KEY_ALG_ENUM_RSA_2048: KasPublicKeyAlgEnum +KAS_PUBLIC_KEY_ALG_ENUM_RSA_4096: KasPublicKeyAlgEnum +KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP256R1: KasPublicKeyAlgEnum +KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP384R1: KasPublicKeyAlgEnum +KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP521R1: KasPublicKeyAlgEnum +ALGORITHM_UNSPECIFIED: Algorithm +ALGORITHM_RSA_2048: Algorithm +ALGORITHM_RSA_4096: Algorithm +ALGORITHM_EC_P256: Algorithm +ALGORITHM_EC_P384: Algorithm +ALGORITHM_EC_P521: Algorithm 
+KEY_STATUS_UNSPECIFIED: KeyStatus +KEY_STATUS_ACTIVE: KeyStatus +KEY_STATUS_ROTATED: KeyStatus +KEY_MODE_UNSPECIFIED: KeyMode +KEY_MODE_CONFIG_ROOT_KEY: KeyMode +KEY_MODE_PROVIDER_ROOT_KEY: KeyMode +KEY_MODE_REMOTE: KeyMode +KEY_MODE_PUBLIC_KEY_ONLY: KeyMode + +class SimpleKasPublicKey(_message.Message): + __slots__ = ("algorithm", "kid", "pem") + ALGORITHM_FIELD_NUMBER: _ClassVar[int] + KID_FIELD_NUMBER: _ClassVar[int] + PEM_FIELD_NUMBER: _ClassVar[int] + algorithm: Algorithm + kid: str + pem: str + def __init__(self, algorithm: _Optional[_Union[Algorithm, str]] = ..., kid: _Optional[str] = ..., pem: _Optional[str] = ...) -> None: ... + +class SimpleKasKey(_message.Message): + __slots__ = ("kas_uri", "public_key", "kas_id") + KAS_URI_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + KAS_ID_FIELD_NUMBER: _ClassVar[int] + kas_uri: str + public_key: SimpleKasPublicKey + kas_id: str + def __init__(self, kas_uri: _Optional[str] = ..., public_key: _Optional[_Union[SimpleKasPublicKey, _Mapping]] = ..., kas_id: _Optional[str] = ...) -> None: ... + +class KeyProviderConfig(_message.Message): + __slots__ = ("id", "name", "config_json", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + CONFIG_JSON_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + config_json: bytes + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., config_json: _Optional[bytes] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class Namespace(_message.Message): + __slots__ = ("id", "name", "fqn", "active", "metadata", "grants", "kas_keys") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + ACTIVE_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + GRANTS_FIELD_NUMBER: _ClassVar[int] + KAS_KEYS_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + fqn: str + active: _wrappers_pb2.BoolValue + metadata: _common_pb2.Metadata + grants: _containers.RepeatedCompositeFieldContainer[KeyAccessServer] + kas_keys: _containers.RepeatedCompositeFieldContainer[SimpleKasKey] + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., fqn: _Optional[str] = ..., active: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ..., grants: _Optional[_Iterable[_Union[KeyAccessServer, _Mapping]]] = ..., kas_keys: _Optional[_Iterable[_Union[SimpleKasKey, _Mapping]]] = ...) -> None: ... 
+ +class Attribute(_message.Message): + __slots__ = ("id", "namespace", "name", "rule", "values", "grants", "fqn", "active", "kas_keys", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + NAMESPACE_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + RULE_FIELD_NUMBER: _ClassVar[int] + VALUES_FIELD_NUMBER: _ClassVar[int] + GRANTS_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + ACTIVE_FIELD_NUMBER: _ClassVar[int] + KAS_KEYS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + namespace: Namespace + name: str + rule: AttributeRuleTypeEnum + values: _containers.RepeatedCompositeFieldContainer[Value] + grants: _containers.RepeatedCompositeFieldContainer[KeyAccessServer] + fqn: str + active: _wrappers_pb2.BoolValue + kas_keys: _containers.RepeatedCompositeFieldContainer[SimpleKasKey] + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., namespace: _Optional[_Union[Namespace, _Mapping]] = ..., name: _Optional[str] = ..., rule: _Optional[_Union[AttributeRuleTypeEnum, str]] = ..., values: _Optional[_Iterable[_Union[Value, _Mapping]]] = ..., grants: _Optional[_Iterable[_Union[KeyAccessServer, _Mapping]]] = ..., fqn: _Optional[str] = ..., active: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., kas_keys: _Optional[_Iterable[_Union[SimpleKasKey, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class Value(_message.Message): + __slots__ = ("id", "attribute", "value", "grants", "fqn", "active", "subject_mappings", "kas_keys", "resource_mappings", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + GRANTS_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + ACTIVE_FIELD_NUMBER: _ClassVar[int] + SUBJECT_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + KAS_KEYS_FIELD_NUMBER: _ClassVar[int] + RESOURCE_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + attribute: Attribute + value: str + grants: _containers.RepeatedCompositeFieldContainer[KeyAccessServer] + fqn: str + active: _wrappers_pb2.BoolValue + subject_mappings: _containers.RepeatedCompositeFieldContainer[SubjectMapping] + kas_keys: _containers.RepeatedCompositeFieldContainer[SimpleKasKey] + resource_mappings: _containers.RepeatedCompositeFieldContainer[ResourceMapping] + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., attribute: _Optional[_Union[Attribute, _Mapping]] = ..., value: _Optional[str] = ..., grants: _Optional[_Iterable[_Union[KeyAccessServer, _Mapping]]] = ..., fqn: _Optional[str] = ..., active: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., subject_mappings: _Optional[_Iterable[_Union[SubjectMapping, _Mapping]]] = ..., kas_keys: _Optional[_Iterable[_Union[SimpleKasKey, _Mapping]]] = ..., resource_mappings: _Optional[_Iterable[_Union[ResourceMapping, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... 
+ +class Action(_message.Message): + __slots__ = ("id", "standard", "custom", "name", "metadata") + class StandardAction(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + STANDARD_ACTION_UNSPECIFIED: _ClassVar[Action.StandardAction] + STANDARD_ACTION_DECRYPT: _ClassVar[Action.StandardAction] + STANDARD_ACTION_TRANSMIT: _ClassVar[Action.StandardAction] + STANDARD_ACTION_UNSPECIFIED: Action.StandardAction + STANDARD_ACTION_DECRYPT: Action.StandardAction + STANDARD_ACTION_TRANSMIT: Action.StandardAction + ID_FIELD_NUMBER: _ClassVar[int] + STANDARD_FIELD_NUMBER: _ClassVar[int] + CUSTOM_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + standard: Action.StandardAction + custom: str + name: str + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., standard: _Optional[_Union[Action.StandardAction, str]] = ..., custom: _Optional[str] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class SubjectMapping(_message.Message): + __slots__ = ("id", "attribute_value", "subject_condition_set", "actions", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUE_FIELD_NUMBER: _ClassVar[int] + SUBJECT_CONDITION_SET_FIELD_NUMBER: _ClassVar[int] + ACTIONS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + attribute_value: Value + subject_condition_set: SubjectConditionSet + actions: _containers.RepeatedCompositeFieldContainer[Action] + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., attribute_value: _Optional[_Union[Value, _Mapping]] = ..., subject_condition_set: _Optional[_Union[SubjectConditionSet, _Mapping]] = ..., actions: _Optional[_Iterable[_Union[Action, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class Condition(_message.Message): + __slots__ = ("subject_external_selector_value", "operator", "subject_external_values") + SUBJECT_EXTERNAL_SELECTOR_VALUE_FIELD_NUMBER: _ClassVar[int] + OPERATOR_FIELD_NUMBER: _ClassVar[int] + SUBJECT_EXTERNAL_VALUES_FIELD_NUMBER: _ClassVar[int] + subject_external_selector_value: str + operator: SubjectMappingOperatorEnum + subject_external_values: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, subject_external_selector_value: _Optional[str] = ..., operator: _Optional[_Union[SubjectMappingOperatorEnum, str]] = ..., subject_external_values: _Optional[_Iterable[str]] = ...) -> None: ... + +class ConditionGroup(_message.Message): + __slots__ = ("conditions", "boolean_operator") + CONDITIONS_FIELD_NUMBER: _ClassVar[int] + BOOLEAN_OPERATOR_FIELD_NUMBER: _ClassVar[int] + conditions: _containers.RepeatedCompositeFieldContainer[Condition] + boolean_operator: ConditionBooleanTypeEnum + def __init__(self, conditions: _Optional[_Iterable[_Union[Condition, _Mapping]]] = ..., boolean_operator: _Optional[_Union[ConditionBooleanTypeEnum, str]] = ...) -> None: ... + +class SubjectSet(_message.Message): + __slots__ = ("condition_groups",) + CONDITION_GROUPS_FIELD_NUMBER: _ClassVar[int] + condition_groups: _containers.RepeatedCompositeFieldContainer[ConditionGroup] + def __init__(self, condition_groups: _Optional[_Iterable[_Union[ConditionGroup, _Mapping]]] = ...) -> None: ... 
+ +class SubjectConditionSet(_message.Message): + __slots__ = ("id", "subject_sets", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + SUBJECT_SETS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + subject_sets: _containers.RepeatedCompositeFieldContainer[SubjectSet] + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., subject_sets: _Optional[_Iterable[_Union[SubjectSet, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class SubjectProperty(_message.Message): + __slots__ = ("external_selector_value", "external_value") + EXTERNAL_SELECTOR_VALUE_FIELD_NUMBER: _ClassVar[int] + EXTERNAL_VALUE_FIELD_NUMBER: _ClassVar[int] + external_selector_value: str + external_value: str + def __init__(self, external_selector_value: _Optional[str] = ..., external_value: _Optional[str] = ...) -> None: ... + +class ResourceMappingGroup(_message.Message): + __slots__ = ("id", "namespace_id", "name", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + NAMESPACE_ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + namespace_id: str + name: str + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., namespace_id: _Optional[str] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class ResourceMapping(_message.Message): + __slots__ = ("id", "metadata", "attribute_value", "terms", "group") + ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUE_FIELD_NUMBER: _ClassVar[int] + TERMS_FIELD_NUMBER: _ClassVar[int] + GROUP_FIELD_NUMBER: _ClassVar[int] + id: str + metadata: _common_pb2.Metadata + attribute_value: Value + terms: _containers.RepeatedScalarFieldContainer[str] + group: ResourceMappingGroup + def __init__(self, id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ..., attribute_value: _Optional[_Union[Value, _Mapping]] = ..., terms: _Optional[_Iterable[str]] = ..., group: _Optional[_Union[ResourceMappingGroup, _Mapping]] = ...) -> None: ... + +class KeyAccessServer(_message.Message): + __slots__ = ("id", "uri", "public_key", "source_type", "kas_keys", "name", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + URI_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + SOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + KAS_KEYS_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + uri: str + public_key: PublicKey + source_type: SourceType + kas_keys: _containers.RepeatedCompositeFieldContainer[SimpleKasKey] + name: str + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., uri: _Optional[str] = ..., public_key: _Optional[_Union[PublicKey, _Mapping]] = ..., source_type: _Optional[_Union[SourceType, str]] = ..., kas_keys: _Optional[_Iterable[_Union[SimpleKasKey, _Mapping]]] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... 
+ +class Key(_message.Message): + __slots__ = ("id", "is_active", "was_mapped", "public_key", "kas", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + IS_ACTIVE_FIELD_NUMBER: _ClassVar[int] + WAS_MAPPED_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int] + KAS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + is_active: _wrappers_pb2.BoolValue + was_mapped: _wrappers_pb2.BoolValue + public_key: KasPublicKey + kas: KeyAccessServer + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., is_active: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., was_mapped: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., public_key: _Optional[_Union[KasPublicKey, _Mapping]] = ..., kas: _Optional[_Union[KeyAccessServer, _Mapping]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class KasPublicKey(_message.Message): + __slots__ = ("pem", "kid", "alg") + PEM_FIELD_NUMBER: _ClassVar[int] + KID_FIELD_NUMBER: _ClassVar[int] + ALG_FIELD_NUMBER: _ClassVar[int] + pem: str + kid: str + alg: KasPublicKeyAlgEnum + def __init__(self, pem: _Optional[str] = ..., kid: _Optional[str] = ..., alg: _Optional[_Union[KasPublicKeyAlgEnum, str]] = ...) -> None: ... + +class KasPublicKeySet(_message.Message): + __slots__ = ("keys",) + KEYS_FIELD_NUMBER: _ClassVar[int] + keys: _containers.RepeatedCompositeFieldContainer[KasPublicKey] + def __init__(self, keys: _Optional[_Iterable[_Union[KasPublicKey, _Mapping]]] = ...) -> None: ... + +class PublicKey(_message.Message): + __slots__ = ("remote", "cached") + REMOTE_FIELD_NUMBER: _ClassVar[int] + CACHED_FIELD_NUMBER: _ClassVar[int] + remote: str + cached: KasPublicKeySet + def __init__(self, remote: _Optional[str] = ..., cached: _Optional[_Union[KasPublicKeySet, _Mapping]] = ...) -> None: ... + +class RegisteredResource(_message.Message): + __slots__ = ("id", "name", "values", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + VALUES_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + values: _containers.RepeatedCompositeFieldContainer[RegisteredResourceValue] + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., values: _Optional[_Iterable[_Union[RegisteredResourceValue, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class RegisteredResourceValue(_message.Message): + __slots__ = ("id", "value", "resource", "action_attribute_values", "metadata") + class ActionAttributeValue(_message.Message): + __slots__ = ("id", "action", "attribute_value", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + ACTION_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUE_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + action: Action + attribute_value: Value + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., action: _Optional[_Union[Action, _Mapping]] = ..., attribute_value: _Optional[_Union[Value, _Mapping]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... 
+ ID_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + RESOURCE_FIELD_NUMBER: _ClassVar[int] + ACTION_ATTRIBUTE_VALUES_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + value: str + resource: RegisteredResource + action_attribute_values: _containers.RepeatedCompositeFieldContainer[RegisteredResourceValue.ActionAttributeValue] + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., value: _Optional[str] = ..., resource: _Optional[_Union[RegisteredResource, _Mapping]] = ..., action_attribute_values: _Optional[_Iterable[_Union[RegisteredResourceValue.ActionAttributeValue, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... + +class KasKey(_message.Message): + __slots__ = ("kas_id", "key", "kas_uri") + KAS_ID_FIELD_NUMBER: _ClassVar[int] + KEY_FIELD_NUMBER: _ClassVar[int] + KAS_URI_FIELD_NUMBER: _ClassVar[int] + kas_id: str + key: AsymmetricKey + kas_uri: str + def __init__(self, kas_id: _Optional[str] = ..., key: _Optional[_Union[AsymmetricKey, _Mapping]] = ..., kas_uri: _Optional[str] = ...) -> None: ... + +class PublicKeyCtx(_message.Message): + __slots__ = ("pem",) + PEM_FIELD_NUMBER: _ClassVar[int] + pem: str + def __init__(self, pem: _Optional[str] = ...) -> None: ... + +class PrivateKeyCtx(_message.Message): + __slots__ = ("key_id", "wrapped_key") + KEY_ID_FIELD_NUMBER: _ClassVar[int] + WRAPPED_KEY_FIELD_NUMBER: _ClassVar[int] + key_id: str + wrapped_key: str + def __init__(self, key_id: _Optional[str] = ..., wrapped_key: _Optional[str] = ...) -> None: ... + +class AsymmetricKey(_message.Message): + __slots__ = ("id", "key_id", "key_algorithm", "key_status", "key_mode", "public_key_ctx", "private_key_ctx", "provider_config", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + KEY_ID_FIELD_NUMBER: _ClassVar[int] + KEY_ALGORITHM_FIELD_NUMBER: _ClassVar[int] + KEY_STATUS_FIELD_NUMBER: _ClassVar[int] + KEY_MODE_FIELD_NUMBER: _ClassVar[int] + PUBLIC_KEY_CTX_FIELD_NUMBER: _ClassVar[int] + PRIVATE_KEY_CTX_FIELD_NUMBER: _ClassVar[int] + PROVIDER_CONFIG_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + key_id: str + key_algorithm: Algorithm + key_status: KeyStatus + key_mode: KeyMode + public_key_ctx: PublicKeyCtx + private_key_ctx: PrivateKeyCtx + provider_config: KeyProviderConfig + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., key_id: _Optional[str] = ..., key_algorithm: _Optional[_Union[Algorithm, str]] = ..., key_status: _Optional[_Union[KeyStatus, str]] = ..., key_mode: _Optional[_Union[KeyMode, str]] = ..., public_key_ctx: _Optional[_Union[PublicKeyCtx, _Mapping]] = ..., private_key_ctx: _Optional[_Union[PrivateKeyCtx, _Mapping]] = ..., provider_config: _Optional[_Union[KeyProviderConfig, _Mapping]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... 
+ +class SymmetricKey(_message.Message): + __slots__ = ("id", "key_id", "key_status", "key_mode", "key_ctx", "provider_config", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + KEY_ID_FIELD_NUMBER: _ClassVar[int] + KEY_STATUS_FIELD_NUMBER: _ClassVar[int] + KEY_MODE_FIELD_NUMBER: _ClassVar[int] + KEY_CTX_FIELD_NUMBER: _ClassVar[int] + PROVIDER_CONFIG_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + key_id: str + key_status: KeyStatus + key_mode: KeyMode + key_ctx: bytes + provider_config: KeyProviderConfig + metadata: _common_pb2.Metadata + def __init__(self, id: _Optional[str] = ..., key_id: _Optional[str] = ..., key_status: _Optional[_Union[KeyStatus, str]] = ..., key_mode: _Optional[_Union[KeyMode, str]] = ..., key_ctx: _Optional[bytes] = ..., provider_config: _Optional[_Union[KeyProviderConfig, _Mapping]] = ..., metadata: _Optional[_Union[_common_pb2.Metadata, _Mapping]] = ...) -> None: ... diff --git a/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2.py new file mode 100644 index 0000000..ab32eea --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/registeredresources/registered_resources.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/registeredresources/registered_resources.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 +from policy import selectors_pb2 as policy_dot_selectors__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5policy/registeredresources/registered_resources.proto\x12\x1apolicy.registeredresources\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x14policy/objects.proto\x1a\x16policy/selectors.proto\"\xd9\x03\n\x1f\x43reateRegisteredResourceRequest\x12\xa8\x02\n\x04name\x18\x01 \x01(\tB\x93\x02\xbaH\x8f\x02r\x03\x18\xfd\x01\xba\x01\x83\x02\n\x0err_name_format\x12\xb3\x01Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. 
The stored name will be normalized to lower case.\x1a;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\xc8\x01\x01R\x04name\x12V\n\x06values\x18\x02 \x03(\tB>\xbaH;\x92\x01\x38\x08\x00\x18\x01\"2r0\x18\xfd\x01\x32+^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$R\x06values\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"Z\n CreateRegisteredResourceResponse\x12\x36\n\x08resource\x18\x01 \x01(\x0b\x32\x1a.policy.RegisteredResourceR\x08resource\"\x94\x03\n\x1cGetRegisteredResourceRequest\x12\x1a\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x02id\x12\xc2\x02\n\x04name\x18\x02 \x01(\tB\xab\x02\xbaH\xa7\x02r\x03\x18\xfd\x01\xba\x01\x9b\x02\n\x0err_name_format\x12\xb3\x01Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored name will be normalized to lower case.\x1aSsize(this) > 0 ? this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\xc8\x01\x00H\x00R\x04nameB\x13\n\nidentifier\x12\x05\xbaH\x02\x08\x01\"W\n\x1dGetRegisteredResourceResponse\x12\x36\n\x08resource\x18\x01 \x01(\x0b\x32\x1a.policy.RegisteredResourceR\x08resource\"U\n\x1eListRegisteredResourcesRequest\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\x91\x01\n\x1fListRegisteredResourcesResponse\x12\x38\n\tresources\x18\x01 \x03(\x0b\x32\x1a.policy.RegisteredResourceR\tresources\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\x89\x04\n\x1fUpdateRegisteredResourceRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xc0\x02\n\x04name\x18\x02 \x01(\tB\xab\x02\xbaH\xa7\x02r\x03\x18\xfd\x01\xba\x01\x9b\x02\n\x0err_name_format\x12\xb3\x01Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored name will be normalized to lower case.\x1aSsize(this) > 0 ? this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\xc8\x01\x00R\x04name\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"Z\n UpdateRegisteredResourceResponse\x12\x36\n\x08resource\x18\x01 \x01(\x0b\x32\x1a.policy.RegisteredResourceR\x08resource\";\n\x1f\x44\x65leteRegisteredResourceRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"Z\n DeleteRegisteredResourceResponse\x12\x36\n\x08resource\x18\x01 \x01(\x0b\x32\x1a.policy.RegisteredResourceR\x08resource\"\xad\x04\n\x14\x41\x63tionAttributeValue\x12\'\n\taction_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x08\x61\x63tionId\x12\xb2\x02\n\x0b\x61\x63tion_name\x18\x02 \x01(\tB\x8e\x02\xbaH\x8a\x02r\x03\x18\xfd\x01\xba\x01\x81\x02\n\x12\x61\x63tion_name_format\x12\xad\x01\x41\x63tion name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. 
The stored action name will be normalized to lower case.\x1a;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')H\x00R\nactionName\x12\x38\n\x12\x61ttribute_value_id\x18\x03 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x01R\x10\x61ttributeValueId\x12<\n\x13\x61ttribute_value_fqn\x18\x04 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x01R\x11\x61ttributeValueFqnB\x1a\n\x11\x61\x63tion_identifier\x12\x05\xbaH\x02\x08\x01\x42#\n\x1a\x61ttribute_value_identifier\x12\x05\xbaH\x02\x08\x01\"\xa0\x04\n$CreateRegisteredResourceValueRequest\x12)\n\x0bresource_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\nresourceId\x12\xad\x02\n\x05value\x18\x02 \x01(\tB\x96\x02\xbaH\x92\x02r\x03\x18\xfd\x01\xba\x01\x86\x02\n\x0frr_value_format\x12\xb5\x01Registered Resource Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored value will be normalized to lower case.\x1a;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\xc8\x01\x01R\x05value\x12h\n\x17\x61\x63tion_attribute_values\x18\x03 \x03(\x0b\x32\x30.policy.registeredresources.ActionAttributeValueR\x15\x61\x63tionAttributeValues\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"^\n%CreateRegisteredResourceValueResponse\x12\x35\n\x05value\x18\x01 \x01(\x0b\x32\x1f.policy.RegisteredResourceValueR\x05value\"t\n!GetRegisteredResourceValueRequest\x12\x1a\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01H\x00R\x02id\x12\x1e\n\x03\x66qn\x18\x02 \x01(\tB\n\xbaH\x07r\x05\x10\x01\x88\x01\x01H\x00R\x03\x66qnB\x13\n\nidentifier\x12\x05\xbaH\x02\x08\x01\"[\n\"GetRegisteredResourceValueResponse\x12\x35\n\x05value\x18\x01 \x01(\x0b\x32\x1f.policy.RegisteredResourceValueR\x05value\"S\n(GetRegisteredResourceValuesByFQNsRequest\x12\'\n\x04\x66qns\x18\x01 \x03(\tB\x13\xbaH\x10\x92\x01\r\x08\x01\x18\x01\"\x07r\x05\x10\x01\x88\x01\x01R\x04\x66qns\"\x88\x02\n)GetRegisteredResourceValuesByFQNsResponse\x12z\n\rfqn_value_map\x18\x01 \x03(\x0b\x32V.policy.registeredresources.GetRegisteredResourceValuesByFQNsResponse.FqnValueMapEntryR\x0b\x66qnValueMap\x1a_\n\x10\x46qnValueMapEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32\x1f.policy.RegisteredResourceValueR\x05value:\x02\x38\x01\"\xb2\x02\n#ListRegisteredResourceValuesRequest\x12\xd5\x01\n\x0bresource_id\x18\x01 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\nresourceId\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\x95\x01\n$ListRegisteredResourceValuesResponse\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\x1f.policy.RegisteredResourceValueR\x06values\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\xfd\x04\n$UpdateRegisteredResourceValueRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xc5\x02\n\x05value\x18\x02 \x01(\tB\xae\x02\xbaH\xaa\x02r\x03\x18\xfd\x01\xba\x01\x9e\x02\n\x0frr_value_format\x12\xb5\x01Registered Resource Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored value will be normalized to lower case.\x1aSsize(this) > 0 ? 
this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\xc8\x01\x00R\x05value\x12h\n\x17\x61\x63tion_attribute_values\x18\x03 \x03(\x0b\x32\x30.policy.registeredresources.ActionAttributeValueR\x15\x61\x63tionAttributeValues\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"^\n%UpdateRegisteredResourceValueResponse\x12\x35\n\x05value\x18\x01 \x01(\x0b\x32\x1f.policy.RegisteredResourceValueR\x05value\"@\n$DeleteRegisteredResourceValueRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"^\n%DeleteRegisteredResourceValueResponse\x12\x35\n\x05value\x18\x01 \x01(\x0b\x32\x1f.policy.RegisteredResourceValueR\x05value2\x88\x0e\n\x1aRegisteredResourcesService\x12\x97\x01\n\x18\x43reateRegisteredResource\x12;.policy.registeredresources.CreateRegisteredResourceRequest\x1a<.policy.registeredresources.CreateRegisteredResourceResponse\"\x00\x12\x8e\x01\n\x15GetRegisteredResource\x12\x38.policy.registeredresources.GetRegisteredResourceRequest\x1a\x39.policy.registeredresources.GetRegisteredResourceResponse\"\x00\x12\x94\x01\n\x17ListRegisteredResources\x12:.policy.registeredresources.ListRegisteredResourcesRequest\x1a;.policy.registeredresources.ListRegisteredResourcesResponse\"\x00\x12\x97\x01\n\x18UpdateRegisteredResource\x12;.policy.registeredresources.UpdateRegisteredResourceRequest\x1a<.policy.registeredresources.UpdateRegisteredResourceResponse\"\x00\x12\x97\x01\n\x18\x44\x65leteRegisteredResource\x12;.policy.registeredresources.DeleteRegisteredResourceRequest\x1a<.policy.registeredresources.DeleteRegisteredResourceResponse\"\x00\x12\xa6\x01\n\x1d\x43reateRegisteredResourceValue\x12@.policy.registeredresources.CreateRegisteredResourceValueRequest\x1a\x41.policy.registeredresources.CreateRegisteredResourceValueResponse\"\x00\x12\x9d\x01\n\x1aGetRegisteredResourceValue\x12=.policy.registeredresources.GetRegisteredResourceValueRequest\x1a>.policy.registeredresources.GetRegisteredResourceValueResponse\"\x00\x12\xb2\x01\n!GetRegisteredResourceValuesByFQNs\x12\x44.policy.registeredresources.GetRegisteredResourceValuesByFQNsRequest\x1a\x45.policy.registeredresources.GetRegisteredResourceValuesByFQNsResponse\"\x00\x12\xa3\x01\n\x1cListRegisteredResourceValues\x12?.policy.registeredresources.ListRegisteredResourceValuesRequest\x1a@.policy.registeredresources.ListRegisteredResourceValuesResponse\"\x00\x12\xa6\x01\n\x1dUpdateRegisteredResourceValue\x12@.policy.registeredresources.UpdateRegisteredResourceValueRequest\x1a\x41.policy.registeredresources.UpdateRegisteredResourceValueResponse\"\x00\x12\xa6\x01\n\x1d\x44\x65leteRegisteredResourceValue\x12@.policy.registeredresources.DeleteRegisteredResourceValueRequest\x1a\x41.policy.registeredresources.DeleteRegisteredResourceValueResponse\"\x00\x42\xc3\x01\n\x1e\x63om.policy.registeredresourcesB\x18RegisteredResourcesProtoP\x01\xa2\x02\x03PRX\xaa\x02\x1aPolicy.Registeredresources\xca\x02\x1aPolicy\\Registeredresources\xe2\x02&Policy\\Registeredresources\\GPBMetadata\xea\x02\x1bPolicy::Registeredresourcesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.registeredresources.registered_resources_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = 
b'\n\036com.policy.registeredresourcesB\030RegisteredResourcesProtoP\001\242\002\003PRX\252\002\032Policy.Registeredresources\312\002\032Policy\\Registeredresources\342\002&Policy\\Registeredresources\\GPBMetadata\352\002\033Policy::Registeredresources' + _globals['_CREATEREGISTEREDRESOURCEREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_CREATEREGISTEREDRESOURCEREQUEST'].fields_by_name['name']._serialized_options = b'\272H\217\002r\003\030\375\001\272\001\203\002\n\016rr_name_format\022\263\001Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored name will be normalized to lower case.\032;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\310\001\001' + _globals['_CREATEREGISTEREDRESOURCEREQUEST'].fields_by_name['values']._loaded_options = None + _globals['_CREATEREGISTEREDRESOURCEREQUEST'].fields_by_name['values']._serialized_options = b'\272H;\222\0018\010\000\030\001\"2r0\030\375\0012+^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$' + _globals['_GETREGISTEREDRESOURCEREQUEST'].oneofs_by_name['identifier']._loaded_options = None + _globals['_GETREGISTEREDRESOURCEREQUEST'].oneofs_by_name['identifier']._serialized_options = b'\272H\002\010\001' + _globals['_GETREGISTEREDRESOURCEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETREGISTEREDRESOURCEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_GETREGISTEREDRESOURCEREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_GETREGISTEREDRESOURCEREQUEST'].fields_by_name['name']._serialized_options = b'\272H\247\002r\003\030\375\001\272\001\233\002\n\016rr_name_format\022\263\001Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored name will be normalized to lower case.\032Ssize(this) > 0 ? this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\310\001\000' + _globals['_UPDATEREGISTEREDRESOURCEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATEREGISTEREDRESOURCEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATEREGISTEREDRESOURCEREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_UPDATEREGISTEREDRESOURCEREQUEST'].fields_by_name['name']._serialized_options = b'\272H\247\002r\003\030\375\001\272\001\233\002\n\016rr_name_format\022\263\001Registered Resource Name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored name will be normalized to lower case.\032Ssize(this) > 0 ? 
this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\310\001\000' + _globals['_DELETEREGISTEREDRESOURCEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETEREGISTEREDRESOURCEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_ACTIONATTRIBUTEVALUE'].oneofs_by_name['action_identifier']._loaded_options = None + _globals['_ACTIONATTRIBUTEVALUE'].oneofs_by_name['action_identifier']._serialized_options = b'\272H\002\010\001' + _globals['_ACTIONATTRIBUTEVALUE'].oneofs_by_name['attribute_value_identifier']._loaded_options = None + _globals['_ACTIONATTRIBUTEVALUE'].oneofs_by_name['attribute_value_identifier']._serialized_options = b'\272H\002\010\001' + _globals['_ACTIONATTRIBUTEVALUE'].fields_by_name['action_id']._loaded_options = None + _globals['_ACTIONATTRIBUTEVALUE'].fields_by_name['action_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_ACTIONATTRIBUTEVALUE'].fields_by_name['action_name']._loaded_options = None + _globals['_ACTIONATTRIBUTEVALUE'].fields_by_name['action_name']._serialized_options = b'\272H\212\002r\003\030\375\001\272\001\201\002\n\022action_name_format\022\255\001Action name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored action name will be normalized to lower case.\032;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')' + _globals['_ACTIONATTRIBUTEVALUE'].fields_by_name['attribute_value_id']._loaded_options = None + _globals['_ACTIONATTRIBUTEVALUE'].fields_by_name['attribute_value_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_ACTIONATTRIBUTEVALUE'].fields_by_name['attribute_value_fqn']._loaded_options = None + _globals['_ACTIONATTRIBUTEVALUE'].fields_by_name['attribute_value_fqn']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_CREATEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['resource_id']._loaded_options = None + _globals['_CREATEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['resource_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['value']._loaded_options = None + _globals['_CREATEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['value']._serialized_options = b'\272H\222\002r\003\030\375\001\272\001\206\002\n\017rr_value_format\022\265\001Registered Resource Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. 
The stored value will be normalized to lower case.\032;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')\310\001\001' + _globals['_GETREGISTEREDRESOURCEVALUEREQUEST'].oneofs_by_name['identifier']._loaded_options = None + _globals['_GETREGISTEREDRESOURCEVALUEREQUEST'].oneofs_by_name['identifier']._serialized_options = b'\272H\002\010\001' + _globals['_GETREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_GETREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['fqn']._loaded_options = None + _globals['_GETREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['fqn']._serialized_options = b'\272H\007r\005\020\001\210\001\001' + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSREQUEST'].fields_by_name['fqns']._loaded_options = None + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSREQUEST'].fields_by_name['fqns']._serialized_options = b'\272H\020\222\001\r\010\001\030\001\"\007r\005\020\001\210\001\001' + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSRESPONSE_FQNVALUEMAPENTRY']._loaded_options = None + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSRESPONSE_FQNVALUEMAPENTRY']._serialized_options = b'8\001' + _globals['_LISTREGISTEREDRESOURCEVALUESREQUEST'].fields_by_name['resource_id']._loaded_options = None + _globals['_LISTREGISTEREDRESOURCEVALUESREQUEST'].fields_by_name['resource_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_UPDATEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['value']._loaded_options = None + _globals['_UPDATEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['value']._serialized_options = b'\272H\252\002r\003\030\375\001\272\001\236\002\n\017rr_value_format\022\265\001Registered Resource Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored value will be normalized to lower case.\032Ssize(this) > 0 ? 
this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\310\001\000' + _globals['_DELETEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETEREGISTEREDRESOURCEVALUEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATEREGISTEREDRESOURCEREQUEST']._serialized_start=182 + _globals['_CREATEREGISTEREDRESOURCEREQUEST']._serialized_end=655 + _globals['_CREATEREGISTEREDRESOURCERESPONSE']._serialized_start=657 + _globals['_CREATEREGISTEREDRESOURCERESPONSE']._serialized_end=747 + _globals['_GETREGISTEREDRESOURCEREQUEST']._serialized_start=750 + _globals['_GETREGISTEREDRESOURCEREQUEST']._serialized_end=1154 + _globals['_GETREGISTEREDRESOURCERESPONSE']._serialized_start=1156 + _globals['_GETREGISTEREDRESOURCERESPONSE']._serialized_end=1243 + _globals['_LISTREGISTEREDRESOURCESREQUEST']._serialized_start=1245 + _globals['_LISTREGISTEREDRESOURCESREQUEST']._serialized_end=1330 + _globals['_LISTREGISTEREDRESOURCESRESPONSE']._serialized_start=1333 + _globals['_LISTREGISTEREDRESOURCESRESPONSE']._serialized_end=1478 + _globals['_UPDATEREGISTEREDRESOURCEREQUEST']._serialized_start=1481 + _globals['_UPDATEREGISTEREDRESOURCEREQUEST']._serialized_end=2002 + _globals['_UPDATEREGISTEREDRESOURCERESPONSE']._serialized_start=2004 + _globals['_UPDATEREGISTEREDRESOURCERESPONSE']._serialized_end=2094 + _globals['_DELETEREGISTEREDRESOURCEREQUEST']._serialized_start=2096 + _globals['_DELETEREGISTEREDRESOURCEREQUEST']._serialized_end=2155 + _globals['_DELETEREGISTEREDRESOURCERESPONSE']._serialized_start=2157 + _globals['_DELETEREGISTEREDRESOURCERESPONSE']._serialized_end=2247 + _globals['_ACTIONATTRIBUTEVALUE']._serialized_start=2250 + _globals['_ACTIONATTRIBUTEVALUE']._serialized_end=2807 + _globals['_CREATEREGISTEREDRESOURCEVALUEREQUEST']._serialized_start=2810 + _globals['_CREATEREGISTEREDRESOURCEVALUEREQUEST']._serialized_end=3354 + _globals['_CREATEREGISTEREDRESOURCEVALUERESPONSE']._serialized_start=3356 + _globals['_CREATEREGISTEREDRESOURCEVALUERESPONSE']._serialized_end=3450 + _globals['_GETREGISTEREDRESOURCEVALUEREQUEST']._serialized_start=3452 + _globals['_GETREGISTEREDRESOURCEVALUEREQUEST']._serialized_end=3568 + _globals['_GETREGISTEREDRESOURCEVALUERESPONSE']._serialized_start=3570 + _globals['_GETREGISTEREDRESOURCEVALUERESPONSE']._serialized_end=3661 + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSREQUEST']._serialized_start=3663 + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSREQUEST']._serialized_end=3746 + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSRESPONSE']._serialized_start=3749 + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSRESPONSE']._serialized_end=4013 + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSRESPONSE_FQNVALUEMAPENTRY']._serialized_start=3918 + _globals['_GETREGISTEREDRESOURCEVALUESBYFQNSRESPONSE_FQNVALUEMAPENTRY']._serialized_end=4013 + _globals['_LISTREGISTEREDRESOURCEVALUESREQUEST']._serialized_start=4016 + _globals['_LISTREGISTEREDRESOURCEVALUESREQUEST']._serialized_end=4322 + _globals['_LISTREGISTEREDRESOURCEVALUESRESPONSE']._serialized_start=4325 + _globals['_LISTREGISTEREDRESOURCEVALUESRESPONSE']._serialized_end=4474 + _globals['_UPDATEREGISTEREDRESOURCEVALUEREQUEST']._serialized_start=4477 + _globals['_UPDATEREGISTEREDRESOURCEVALUEREQUEST']._serialized_end=5114 + _globals['_UPDATEREGISTEREDRESOURCEVALUERESPONSE']._serialized_start=5116 + _globals['_UPDATEREGISTEREDRESOURCEVALUERESPONSE']._serialized_end=5210 + _globals['_DELETEREGISTEREDRESOURCEVALUEREQUEST']._serialized_start=5212 + 
_globals['_DELETEREGISTEREDRESOURCEVALUEREQUEST']._serialized_end=5276 + _globals['_DELETEREGISTEREDRESOURCEVALUERESPONSE']._serialized_start=5278 + _globals['_DELETEREGISTEREDRESOURCEVALUERESPONSE']._serialized_end=5372 + _globals['_REGISTEREDRESOURCESSERVICE']._serialized_start=5375 + _globals['_REGISTEREDRESOURCESSERVICE']._serialized_end=7175 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2.pyi new file mode 100644 index 0000000..7d41bbb --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2.pyi @@ -0,0 +1,196 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from common import common_pb2 as _common_pb2 +from policy import objects_pb2 as _objects_pb2 +from policy import selectors_pb2 as _selectors_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class CreateRegisteredResourceRequest(_message.Message): + __slots__ = ("name", "values", "metadata") + NAME_FIELD_NUMBER: _ClassVar[int] + VALUES_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + name: str + values: _containers.RepeatedScalarFieldContainer[str] + metadata: _common_pb2.MetadataMutable + def __init__(self, name: _Optional[str] = ..., values: _Optional[_Iterable[str]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateRegisteredResourceResponse(_message.Message): + __slots__ = ("resource",) + RESOURCE_FIELD_NUMBER: _ClassVar[int] + resource: _objects_pb2.RegisteredResource + def __init__(self, resource: _Optional[_Union[_objects_pb2.RegisteredResource, _Mapping]] = ...) -> None: ... + +class GetRegisteredResourceRequest(_message.Message): + __slots__ = ("id", "name") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... + +class GetRegisteredResourceResponse(_message.Message): + __slots__ = ("resource",) + RESOURCE_FIELD_NUMBER: _ClassVar[int] + resource: _objects_pb2.RegisteredResource + def __init__(self, resource: _Optional[_Union[_objects_pb2.RegisteredResource, _Mapping]] = ...) -> None: ... + +class ListRegisteredResourcesRequest(_message.Message): + __slots__ = ("pagination",) + PAGINATION_FIELD_NUMBER: _ClassVar[int] + pagination: _selectors_pb2.PageRequest + def __init__(self, pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListRegisteredResourcesResponse(_message.Message): + __slots__ = ("resources", "pagination") + RESOURCES_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + resources: _containers.RepeatedCompositeFieldContainer[_objects_pb2.RegisteredResource] + pagination: _selectors_pb2.PageResponse + def __init__(self, resources: _Optional[_Iterable[_Union[_objects_pb2.RegisteredResource, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... 
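The stubs above give typed keyword constructors for every request message. A minimal sketch of building a paginated list request, assuming the generated packages are importable under the `policy.*` layout used by this file's own imports, and assuming `PageRequest` exposes `limit`/`offset` fields as in policy/selectors.proto:

from policy import selectors_pb2
from policy.registeredresources import registered_resources_pb2 as rr_pb2

# Page through registered resources 50 at a time. The limit/offset field
# names on PageRequest are an assumption taken from policy/selectors.proto.
list_req = rr_pb2.ListRegisteredResourcesRequest(
    pagination=selectors_pb2.PageRequest(limit=50, offset=0)
)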
+ +class UpdateRegisteredResourceRequest(_message.Message): + __slots__ = ("id", "name", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateRegisteredResourceResponse(_message.Message): + __slots__ = ("resource",) + RESOURCE_FIELD_NUMBER: _ClassVar[int] + resource: _objects_pb2.RegisteredResource + def __init__(self, resource: _Optional[_Union[_objects_pb2.RegisteredResource, _Mapping]] = ...) -> None: ... + +class DeleteRegisteredResourceRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeleteRegisteredResourceResponse(_message.Message): + __slots__ = ("resource",) + RESOURCE_FIELD_NUMBER: _ClassVar[int] + resource: _objects_pb2.RegisteredResource + def __init__(self, resource: _Optional[_Union[_objects_pb2.RegisteredResource, _Mapping]] = ...) -> None: ... + +class ActionAttributeValue(_message.Message): + __slots__ = ("action_id", "action_name", "attribute_value_id", "attribute_value_fqn") + ACTION_ID_FIELD_NUMBER: _ClassVar[int] + ACTION_NAME_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUE_ID_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTE_VALUE_FQN_FIELD_NUMBER: _ClassVar[int] + action_id: str + action_name: str + attribute_value_id: str + attribute_value_fqn: str + def __init__(self, action_id: _Optional[str] = ..., action_name: _Optional[str] = ..., attribute_value_id: _Optional[str] = ..., attribute_value_fqn: _Optional[str] = ...) -> None: ... + +class CreateRegisteredResourceValueRequest(_message.Message): + __slots__ = ("resource_id", "value", "action_attribute_values", "metadata") + RESOURCE_ID_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + ACTION_ATTRIBUTE_VALUES_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + resource_id: str + value: str + action_attribute_values: _containers.RepeatedCompositeFieldContainer[ActionAttributeValue] + metadata: _common_pb2.MetadataMutable + def __init__(self, resource_id: _Optional[str] = ..., value: _Optional[str] = ..., action_attribute_values: _Optional[_Iterable[_Union[ActionAttributeValue, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateRegisteredResourceValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.RegisteredResourceValue + def __init__(self, value: _Optional[_Union[_objects_pb2.RegisteredResourceValue, _Mapping]] = ...) -> None: ... + +class GetRegisteredResourceValueRequest(_message.Message): + __slots__ = ("id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... 
+ +class GetRegisteredResourceValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.RegisteredResourceValue + def __init__(self, value: _Optional[_Union[_objects_pb2.RegisteredResourceValue, _Mapping]] = ...) -> None: ... + +class GetRegisteredResourceValuesByFQNsRequest(_message.Message): + __slots__ = ("fqns",) + FQNS_FIELD_NUMBER: _ClassVar[int] + fqns: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, fqns: _Optional[_Iterable[str]] = ...) -> None: ... + +class GetRegisteredResourceValuesByFQNsResponse(_message.Message): + __slots__ = ("fqn_value_map",) + class FqnValueMapEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _objects_pb2.RegisteredResourceValue + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_objects_pb2.RegisteredResourceValue, _Mapping]] = ...) -> None: ... + FQN_VALUE_MAP_FIELD_NUMBER: _ClassVar[int] + fqn_value_map: _containers.MessageMap[str, _objects_pb2.RegisteredResourceValue] + def __init__(self, fqn_value_map: _Optional[_Mapping[str, _objects_pb2.RegisteredResourceValue]] = ...) -> None: ... + +class ListRegisteredResourceValuesRequest(_message.Message): + __slots__ = ("resource_id", "pagination") + RESOURCE_ID_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + resource_id: str + pagination: _selectors_pb2.PageRequest + def __init__(self, resource_id: _Optional[str] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListRegisteredResourceValuesResponse(_message.Message): + __slots__ = ("values", "pagination") + VALUES_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + values: _containers.RepeatedCompositeFieldContainer[_objects_pb2.RegisteredResourceValue] + pagination: _selectors_pb2.PageResponse + def __init__(self, values: _Optional[_Iterable[_Union[_objects_pb2.RegisteredResourceValue, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... + +class UpdateRegisteredResourceValueRequest(_message.Message): + __slots__ = ("id", "value", "action_attribute_values", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + ACTION_ATTRIBUTE_VALUES_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + value: str + action_attribute_values: _containers.RepeatedCompositeFieldContainer[ActionAttributeValue] + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., value: _Optional[str] = ..., action_attribute_values: _Optional[_Iterable[_Union[ActionAttributeValue, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateRegisteredResourceValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.RegisteredResourceValue + def __init__(self, value: _Optional[_Union[_objects_pb2.RegisteredResourceValue, _Mapping]] = ...) -> None: ... 
+ +class DeleteRegisteredResourceValueRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeleteRegisteredResourceValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.RegisteredResourceValue + def __init__(self, value: _Optional[_Union[_objects_pb2.RegisteredResourceValue, _Mapping]] = ...) -> None: ... diff --git a/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2_connect.py new file mode 100644 index 0000000..96922e5 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/registeredresources/registered_resources_pb2_connect.py @@ -0,0 +1,527 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. 
+ if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import policy.registeredresources.registered_resources_pb2 + +class RegisteredResourcesServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_create_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceResponse]: + """Low-level method to call CreateRegisteredResource, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResource" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceResponse,extra_headers, timeout_seconds) + + + def create_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceResponse: + response = self.call_create_registered_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceResponse]: + """Low-level method to call GetRegisteredResource, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/GetRegisteredResource" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceResponse,extra_headers, timeout_seconds) + + + def get_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.GetRegisteredResourceResponse: + response = self.call_get_registered_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_registered_resources( + self, req: policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesResponse]: + """Low-level method to call ListRegisteredResources, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/ListRegisteredResources" + return self._connect_client.call_unary(url, req, 
policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesResponse,extra_headers, timeout_seconds) + + + def list_registered_resources( + self, req: policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesResponse: + response = self.call_list_registered_resources(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceResponse]: + """Low-level method to call UpdateRegisteredResource, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResource" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceResponse,extra_headers, timeout_seconds) + + + def update_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceResponse: + response = self.call_update_registered_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_delete_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceResponse]: + """Low-level method to call DeleteRegisteredResource, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResource" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceResponse,extra_headers, timeout_seconds) + + + def delete_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceResponse: + response = self.call_delete_registered_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueResponse]: + """Low-level method to call 
CreateRegisteredResourceValue, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResourceValue" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueResponse,extra_headers, timeout_seconds) + + + def create_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueResponse: + response = self.call_create_registered_resource_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueResponse]: + """Low-level method to call GetRegisteredResourceValue, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValue" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueResponse,extra_headers, timeout_seconds) + + + def get_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueResponse: + response = self.call_get_registered_resource_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_registered_resource_values_by_fq_ns( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsResponse]: + """Low-level method to call GetRegisteredResourceValuesByFQNs, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValuesByFQNs" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsResponse,extra_headers, timeout_seconds) + + + def get_registered_resource_values_by_fq_ns( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsResponse: + response = self.call_get_registered_resource_values_by_fq_ns(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response 
message') + return msg + + def call_list_registered_resource_values( + self, req: policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesResponse]: + """Low-level method to call ListRegisteredResourceValues, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/ListRegisteredResourceValues" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesResponse,extra_headers, timeout_seconds) + + + def list_registered_resource_values( + self, req: policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesResponse: + response = self.call_list_registered_resource_values(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueResponse]: + """Low-level method to call UpdateRegisteredResourceValue, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResourceValue" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueResponse,extra_headers, timeout_seconds) + + + def update_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueResponse: + response = self.call_update_registered_resource_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_delete_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueResponse]: + """Low-level method to call DeleteRegisteredResourceValue, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResourceValue" + return self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueResponse,extra_headers, timeout_seconds) + + + def delete_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: 
float | None=None + ) -> policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueResponse: + response = self.call_delete_registered_resource_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncRegisteredResourcesServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_create_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceResponse]: + """Low-level method to call CreateRegisteredResource, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResource" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceResponse,extra_headers, timeout_seconds) + + async def create_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceResponse: + response = await self.call_create_registered_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceResponse]: + """Low-level method to call GetRegisteredResource, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/GetRegisteredResource" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceResponse,extra_headers, timeout_seconds) + + async def get_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.GetRegisteredResourceResponse: + response = await self.call_get_registered_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_registered_resources( + self, req: policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesResponse]: + """Low-level method to call 
ListRegisteredResources, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/ListRegisteredResources" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesResponse,extra_headers, timeout_seconds) + + async def list_registered_resources( + self, req: policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesResponse: + response = await self.call_list_registered_resources(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceResponse]: + """Low-level method to call UpdateRegisteredResource, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResource" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceResponse,extra_headers, timeout_seconds) + + async def update_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceResponse: + response = await self.call_update_registered_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_delete_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceResponse]: + """Low-level method to call DeleteRegisteredResource, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResource" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceResponse,extra_headers, timeout_seconds) + + async def delete_registered_resource( + self, req: policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceResponse: + response = await self.call_delete_registered_resource(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_registered_resource_value( + self, req: 
policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueResponse]: + """Low-level method to call CreateRegisteredResourceValue, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResourceValue" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueResponse,extra_headers, timeout_seconds) + + async def create_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueResponse: + response = await self.call_create_registered_resource_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueResponse]: + """Low-level method to call GetRegisteredResourceValue, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValue" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueResponse,extra_headers, timeout_seconds) + + async def get_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueResponse: + response = await self.call_get_registered_resource_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_registered_resource_values_by_fq_ns( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsResponse]: + """Low-level method to call GetRegisteredResourceValuesByFQNs, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValuesByFQNs" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsResponse,extra_headers, timeout_seconds) + + async def get_registered_resource_values_by_fq_ns( + self, req: policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | 
None=None + ) -> policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsResponse: + response = await self.call_get_registered_resource_values_by_fq_ns(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_registered_resource_values( + self, req: policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesResponse]: + """Low-level method to call ListRegisteredResourceValues, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/ListRegisteredResourceValues" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesResponse,extra_headers, timeout_seconds) + + async def list_registered_resource_values( + self, req: policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesResponse: + response = await self.call_list_registered_resource_values(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueResponse]: + """Low-level method to call UpdateRegisteredResourceValue, granting access to errors and metadata""" + url = self.base_url + "/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResourceValue" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueResponse,extra_headers, timeout_seconds) + + async def update_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueResponse: + response = await self.call_update_registered_resource_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_delete_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueResponse]: + """Low-level method to call DeleteRegisteredResourceValue, granting access to errors and metadata""" + url = self.base_url + 
"/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResourceValue" + return await self._connect_client.call_unary(url, req, policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueResponse,extra_headers, timeout_seconds) + + async def delete_registered_resource_value( + self, req: policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueResponse: + response = await self.call_delete_registered_resource_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class RegisteredResourcesServiceProtocol(typing.Protocol): + def create_registered_resource(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceResponse]: + ... + def get_registered_resource(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceResponse]: + ... + def list_registered_resources(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesResponse]: + ... + def update_registered_resource(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceResponse]: + ... + def delete_registered_resource(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceResponse]: + ... + def create_registered_resource_value(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueResponse]: + ... + def get_registered_resource_value(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueResponse]: + ... + def get_registered_resource_values_by_fq_ns(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsResponse]: + ... + def list_registered_resource_values(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesResponse]: + ... 
+ def update_registered_resource_value(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueResponse]: + ... + def delete_registered_resource_value(self, req: ClientRequest[policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueRequest]) -> ServerResponse[policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueResponse]: + ... + +REGISTERED_RESOURCES_SERVICE_PATH_PREFIX = "/policy.registeredresources.RegisteredResourcesService" + +def wsgi_registered_resources_service(implementation: RegisteredResourcesServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResource", implementation.create_registered_resource, policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/GetRegisteredResource", implementation.get_registered_resource, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/ListRegisteredResources", implementation.list_registered_resources, policy.registeredresources.registered_resources_pb2.ListRegisteredResourcesRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResource", implementation.update_registered_resource, policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResource", implementation.delete_registered_resource, policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/CreateRegisteredResourceValue", implementation.create_registered_resource_value, policy.registeredresources.registered_resources_pb2.CreateRegisteredResourceValueRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValue", implementation.get_registered_resource_value, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValueRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/GetRegisteredResourceValuesByFQNs", implementation.get_registered_resource_values_by_fq_ns, policy.registeredresources.registered_resources_pb2.GetRegisteredResourceValuesByFQNsRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/ListRegisteredResourceValues", implementation.list_registered_resource_values, policy.registeredresources.registered_resources_pb2.ListRegisteredResourceValuesRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/UpdateRegisteredResourceValue", implementation.update_registered_resource_value, policy.registeredresources.registered_resources_pb2.UpdateRegisteredResourceValueRequest) + app.register_unary_rpc("/policy.registeredresources.RegisteredResourcesService/DeleteRegisteredResourceValue", implementation.delete_registered_resource_value, policy.registeredresources.registered_resources_pb2.DeleteRegisteredResourceValueRequest) + return app diff --git 
a/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.py new file mode 100644 index 0000000..f52bbe6 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/resourcemapping/resource_mapping.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/resourcemapping/resource_mapping.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 +from policy import selectors_pb2 as policy_dot_selectors__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-policy/resourcemapping/resource_mapping.proto\x12\x16policy.resourcemapping\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x14policy/objects.proto\x1a\x16policy/selectors.proto\"\xb1\x02\n ListResourceMappingGroupsRequest\x12\xd7\x01\n\x0cnamespace_id\x18\x01 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x0bnamespaceId\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\xaf\x01\n!ListResourceMappingGroupsResponse\x12T\n\x17resource_mapping_groups\x18\x01 \x03(\x0b\x32\x1c.policy.ResourceMappingGroupR\x15resourceMappingGroups\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\":\n\x1eGetResourceMappingGroupRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"u\n\x1fGetResourceMappingGroupResponse\x12R\n\x16resource_mapping_group\x18\x01 \x01(\x0b\x32\x1c.policy.ResourceMappingGroupR\x14resourceMappingGroup\"\xa1\x01\n!CreateResourceMappingGroupRequest\x12+\n\x0cnamespace_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x0bnamespaceId\x12\x1a\n\x04name\x18\x02 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x04name\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"x\n\"CreateResourceMappingGroupResponse\x12R\n\x16resource_mapping_group\x18\x01 \x01(\x0b\x32\x1c.policy.ResourceMappingGroupR\x14resourceMappingGroup\"\xdf\x05\n!UpdateResourceMappingGroupRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xd7\x01\n\x0cnamespace_id\x18\x02 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x0bnamespaceId\x12\xba\x02\n\x04name\x18\x03 \x01(\tB\xa5\x02\xbaH\xa1\x02r\x03\x18\xfd\x01\xba\x01\x98\x02\n\x14optional_name_format\x12\xaf\x01Optional 
field must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored group name will be normalized to lower case.\x1aNsize(this) == 0 || this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')R\x04name\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"x\n\"UpdateResourceMappingGroupResponse\x12R\n\x16resource_mapping_group\x18\x01 \x01(\x0b\x32\x1c.policy.ResourceMappingGroupR\x14resourceMappingGroup\"=\n!DeleteResourceMappingGroupRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"x\n\"DeleteResourceMappingGroupResponse\x12R\n\x16resource_mapping_group\x18\x01 \x01(\x0b\x32\x1c.policy.ResourceMappingGroupR\x14resourceMappingGroup\"\xa4\x02\n\x1bListResourceMappingsRequest\x12\xcf\x01\n\x08group_id\x18\x01 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x07groupId\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\x9a\x01\n\x1cListResourceMappingsResponse\x12\x44\n\x11resource_mappings\x18\x01 \x03(\x0b\x32\x17.policy.ResourceMappingR\x10resourceMappings\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\xd0\x02\n&ListResourceMappingsByGroupFqnsRequest\x12\xa5\x02\n\x04\x66qns\x18\x01 \x03(\tB\x90\x02\xbaH\x8c\x02\x92\x01\x88\x02\x08\x01\"\x83\x02\xba\x01\xff\x01\n\x18resourcemappinggroup_fqn\x12XResource Mapping Group FQN must be in the format \'https:///resm/\'\x1a\x88\x01this.matches(\'^https://([a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?\\\\.)+[a-zA-Z]{2,}/resm/[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')R\x04\x66qns\"\x82\x01\n\x17ResourceMappingsByGroup\x12\x32\n\x05group\x18\x01 \x01(\x0b\x32\x1c.policy.ResourceMappingGroupR\x05group\x12\x33\n\x08mappings\x18\x02 \x03(\x0b\x32\x17.policy.ResourceMappingR\x08mappings\"\xc6\x02\n\'ListResourceMappingsByGroupFqnsResponse\x12\x9c\x01\n\x1b\x66qn_resource_mapping_groups\x18\x01 \x03(\x0b\x32].policy.resourcemapping.ListResourceMappingsByGroupFqnsResponse.FqnResourceMappingGroupsEntryR\x18\x66qnResourceMappingGroups\x1a|\n\x1d\x46qnResourceMappingGroupsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x45\n\x05value\x18\x02 \x01(\x0b\x32/.policy.resourcemapping.ResourceMappingsByGroupR\x05value:\x02\x38\x01\"5\n\x19GetResourceMappingRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"`\n\x1aGetResourceMappingResponse\x12\x42\n\x10resource_mapping\x18\x01 \x01(\x0b\x32\x17.policy.ResourceMappingR\x0fresourceMapping\"\x80\x03\n\x1c\x43reateResourceMappingRequest\x12\x36\n\x12\x61ttribute_value_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x10\x61ttributeValueId\x12!\n\x05terms\x18\x02 \x03(\tB\x0b\xbaH\x08\x92\x01\x05\x08\x01\x10\xe8\x07R\x05terms\x12\xcf\x01\n\x08group_id\x18\x03 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x07groupId\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"c\n\x1d\x43reateResourceMappingResponse\x12\x42\n\x10resource_mapping\x18\x01 
\x01(\x0b\x32\x17.policy.ResourceMappingR\x0fresourceMapping\"\x9b\x05\n\x1cUpdateResourceMappingRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xe2\x01\n\x12\x61ttribute_value_id\x18\x04 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x10\x61ttributeValueId\x12\x1f\n\x05terms\x18\x05 \x03(\tB\t\xbaH\x06\x92\x01\x03\x10\xe8\x07R\x05terms\x12\xcf\x01\n\x08group_id\x18\x06 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x07groupId\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"c\n\x1dUpdateResourceMappingResponse\x12\x42\n\x10resource_mapping\x18\x01 \x01(\x0b\x32\x17.policy.ResourceMappingR\x0fresourceMapping\"8\n\x1c\x44\x65leteResourceMappingRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"c\n\x1d\x44\x65leteResourceMappingResponse\x12\x42\n\x10resource_mapping\x18\x01 \x01(\x0b\x32\x17.policy.ResourceMappingR\x0fresourceMapping2\xdb\x0c\n\x16ResourceMappingService\x12\x95\x01\n\x19ListResourceMappingGroups\x12\x38.policy.resourcemapping.ListResourceMappingGroupsRequest\x1a\x39.policy.resourcemapping.ListResourceMappingGroupsResponse\"\x03\x90\x02\x01\x12\x8f\x01\n\x17GetResourceMappingGroup\x12\x36.policy.resourcemapping.GetResourceMappingGroupRequest\x1a\x37.policy.resourcemapping.GetResourceMappingGroupResponse\"\x03\x90\x02\x01\x12\x95\x01\n\x1a\x43reateResourceMappingGroup\x12\x39.policy.resourcemapping.CreateResourceMappingGroupRequest\x1a:.policy.resourcemapping.CreateResourceMappingGroupResponse\"\x00\x12\x95\x01\n\x1aUpdateResourceMappingGroup\x12\x39.policy.resourcemapping.UpdateResourceMappingGroupRequest\x1a:.policy.resourcemapping.UpdateResourceMappingGroupResponse\"\x00\x12\x95\x01\n\x1a\x44\x65leteResourceMappingGroup\x12\x39.policy.resourcemapping.DeleteResourceMappingGroupRequest\x1a:.policy.resourcemapping.DeleteResourceMappingGroupResponse\"\x00\x12\x86\x01\n\x14ListResourceMappings\x12\x33.policy.resourcemapping.ListResourceMappingsRequest\x1a\x34.policy.resourcemapping.ListResourceMappingsResponse\"\x03\x90\x02\x01\x12\xa7\x01\n\x1fListResourceMappingsByGroupFqns\x12>.policy.resourcemapping.ListResourceMappingsByGroupFqnsRequest\x1a?.policy.resourcemapping.ListResourceMappingsByGroupFqnsResponse\"\x03\x90\x02\x01\x12\x80\x01\n\x12GetResourceMapping\x12\x31.policy.resourcemapping.GetResourceMappingRequest\x1a\x32.policy.resourcemapping.GetResourceMappingResponse\"\x03\x90\x02\x01\x12\x86\x01\n\x15\x43reateResourceMapping\x12\x34.policy.resourcemapping.CreateResourceMappingRequest\x1a\x35.policy.resourcemapping.CreateResourceMappingResponse\"\x00\x12\x86\x01\n\x15UpdateResourceMapping\x12\x34.policy.resourcemapping.UpdateResourceMappingRequest\x1a\x35.policy.resourcemapping.UpdateResourceMappingResponse\"\x00\x12\x86\x01\n\x15\x44\x65leteResourceMapping\x12\x34.policy.resourcemapping.DeleteResourceMappingRequest\x1a\x35.policy.resourcemapping.DeleteResourceMappingResponse\"\x00\x42\xab\x01\n\x1a\x63om.policy.resourcemappingB\x14ResourceMappingProtoP\x01\xa2\x02\x03PRX\xaa\x02\x16Policy.Resourcemapping\
xca\x02\x16Policy\\Resourcemapping\xe2\x02\"Policy\\Resourcemapping\\GPBMetadata\xea\x02\x17Policy::Resourcemappingb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.resourcemapping.resource_mapping_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\032com.policy.resourcemappingB\024ResourceMappingProtoP\001\242\002\003PRX\252\002\026Policy.Resourcemapping\312\002\026Policy\\Resourcemapping\342\002\"Policy\\Resourcemapping\\GPBMetadata\352\002\027Policy::Resourcemapping' + _globals['_LISTRESOURCEMAPPINGGROUPSREQUEST'].fields_by_name['namespace_id']._loaded_options = None + _globals['_LISTRESOURCEMAPPINGGROUPSREQUEST'].fields_by_name['namespace_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_GETRESOURCEMAPPINGGROUPREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETRESOURCEMAPPINGGROUPREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['namespace_id']._loaded_options = None + _globals['_CREATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['namespace_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_CREATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['name']._serialized_options = b'\272H\003\310\001\001' + _globals['_UPDATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['namespace_id']._loaded_options = None + _globals['_UPDATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['namespace_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_UPDATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_UPDATERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['name']._serialized_options = b'\272H\241\002r\003\030\375\001\272\001\230\002\n\024optional_name_format\022\257\001Optional field must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. 
The stored group name will be normalized to lower case.\032Nsize(this) == 0 || this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')' + _globals['_DELETERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETERESOURCEMAPPINGGROUPREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_LISTRESOURCEMAPPINGSREQUEST'].fields_by_name['group_id']._loaded_options = None + _globals['_LISTRESOURCEMAPPINGSREQUEST'].fields_by_name['group_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSREQUEST'].fields_by_name['fqns']._loaded_options = None + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSREQUEST'].fields_by_name['fqns']._serialized_options = b'\272H\214\002\222\001\210\002\010\001\"\203\002\272\001\377\001\n\030resourcemappinggroup_fqn\022XResource Mapping Group FQN must be in the format \'https:///resm/\'\032\210\001this.matches(\'^https://([a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?\\\\.)+[a-zA-Z]{2,}/resm/[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')' + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSRESPONSE_FQNRESOURCEMAPPINGGROUPSENTRY']._loaded_options = None + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSRESPONSE_FQNRESOURCEMAPPINGGROUPSENTRY']._serialized_options = b'8\001' + _globals['_GETRESOURCEMAPPINGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETRESOURCEMAPPINGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATERESOURCEMAPPINGREQUEST'].fields_by_name['attribute_value_id']._loaded_options = None + _globals['_CREATERESOURCEMAPPINGREQUEST'].fields_by_name['attribute_value_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATERESOURCEMAPPINGREQUEST'].fields_by_name['terms']._loaded_options = None + _globals['_CREATERESOURCEMAPPINGREQUEST'].fields_by_name['terms']._serialized_options = b'\272H\010\222\001\005\010\001\020\350\007' + _globals['_CREATERESOURCEMAPPINGREQUEST'].fields_by_name['group_id']._loaded_options = None + _globals['_CREATERESOURCEMAPPINGREQUEST'].fields_by_name['group_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_UPDATERESOURCEMAPPINGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATERESOURCEMAPPINGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATERESOURCEMAPPINGREQUEST'].fields_by_name['attribute_value_id']._loaded_options = None + _globals['_UPDATERESOURCEMAPPINGREQUEST'].fields_by_name['attribute_value_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_UPDATERESOURCEMAPPINGREQUEST'].fields_by_name['terms']._loaded_options = None + _globals['_UPDATERESOURCEMAPPINGREQUEST'].fields_by_name['terms']._serialized_options = b'\272H\006\222\001\003\020\350\007' + _globals['_UPDATERESOURCEMAPPINGREQUEST'].fields_by_name['group_id']._loaded_options = None + 
_globals['_UPDATERESOURCEMAPPINGREQUEST'].fields_by_name['group_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_DELETERESOURCEMAPPINGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETERESOURCEMAPPINGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['ListResourceMappingGroups']._loaded_options = None + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['ListResourceMappingGroups']._serialized_options = b'\220\002\001' + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['GetResourceMappingGroup']._loaded_options = None + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['GetResourceMappingGroup']._serialized_options = b'\220\002\001' + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['ListResourceMappings']._loaded_options = None + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['ListResourceMappings']._serialized_options = b'\220\002\001' + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['ListResourceMappingsByGroupFqns']._loaded_options = None + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['ListResourceMappingsByGroupFqns']._serialized_options = b'\220\002\001' + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['GetResourceMapping']._loaded_options = None + _globals['_RESOURCEMAPPINGSERVICE'].methods_by_name['GetResourceMapping']._serialized_options = b'\220\002\001' + _globals['_LISTRESOURCEMAPPINGGROUPSREQUEST']._serialized_start=170 + _globals['_LISTRESOURCEMAPPINGGROUPSREQUEST']._serialized_end=475 + _globals['_LISTRESOURCEMAPPINGGROUPSRESPONSE']._serialized_start=478 + _globals['_LISTRESOURCEMAPPINGGROUPSRESPONSE']._serialized_end=653 + _globals['_GETRESOURCEMAPPINGGROUPREQUEST']._serialized_start=655 + _globals['_GETRESOURCEMAPPINGGROUPREQUEST']._serialized_end=713 + _globals['_GETRESOURCEMAPPINGGROUPRESPONSE']._serialized_start=715 + _globals['_GETRESOURCEMAPPINGGROUPRESPONSE']._serialized_end=832 + _globals['_CREATERESOURCEMAPPINGGROUPREQUEST']._serialized_start=835 + _globals['_CREATERESOURCEMAPPINGGROUPREQUEST']._serialized_end=996 + _globals['_CREATERESOURCEMAPPINGGROUPRESPONSE']._serialized_start=998 + _globals['_CREATERESOURCEMAPPINGGROUPRESPONSE']._serialized_end=1118 + _globals['_UPDATERESOURCEMAPPINGGROUPREQUEST']._serialized_start=1121 + _globals['_UPDATERESOURCEMAPPINGGROUPREQUEST']._serialized_end=1856 + _globals['_UPDATERESOURCEMAPPINGGROUPRESPONSE']._serialized_start=1858 + _globals['_UPDATERESOURCEMAPPINGGROUPRESPONSE']._serialized_end=1978 + _globals['_DELETERESOURCEMAPPINGGROUPREQUEST']._serialized_start=1980 + _globals['_DELETERESOURCEMAPPINGGROUPREQUEST']._serialized_end=2041 + _globals['_DELETERESOURCEMAPPINGGROUPRESPONSE']._serialized_start=2043 + _globals['_DELETERESOURCEMAPPINGGROUPRESPONSE']._serialized_end=2163 + _globals['_LISTRESOURCEMAPPINGSREQUEST']._serialized_start=2166 + _globals['_LISTRESOURCEMAPPINGSREQUEST']._serialized_end=2458 + _globals['_LISTRESOURCEMAPPINGSRESPONSE']._serialized_start=2461 + _globals['_LISTRESOURCEMAPPINGSRESPONSE']._serialized_end=2615 + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSREQUEST']._serialized_start=2618 + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSREQUEST']._serialized_end=2954 + _globals['_RESOURCEMAPPINGSBYGROUP']._serialized_start=2957 + 
_globals['_RESOURCEMAPPINGSBYGROUP']._serialized_end=3087 + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSRESPONSE']._serialized_start=3090 + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSRESPONSE']._serialized_end=3416 + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSRESPONSE_FQNRESOURCEMAPPINGGROUPSENTRY']._serialized_start=3292 + _globals['_LISTRESOURCEMAPPINGSBYGROUPFQNSRESPONSE_FQNRESOURCEMAPPINGGROUPSENTRY']._serialized_end=3416 + _globals['_GETRESOURCEMAPPINGREQUEST']._serialized_start=3418 + _globals['_GETRESOURCEMAPPINGREQUEST']._serialized_end=3471 + _globals['_GETRESOURCEMAPPINGRESPONSE']._serialized_start=3473 + _globals['_GETRESOURCEMAPPINGRESPONSE']._serialized_end=3569 + _globals['_CREATERESOURCEMAPPINGREQUEST']._serialized_start=3572 + _globals['_CREATERESOURCEMAPPINGREQUEST']._serialized_end=3956 + _globals['_CREATERESOURCEMAPPINGRESPONSE']._serialized_start=3958 + _globals['_CREATERESOURCEMAPPINGRESPONSE']._serialized_end=4057 + _globals['_UPDATERESOURCEMAPPINGREQUEST']._serialized_start=4060 + _globals['_UPDATERESOURCEMAPPINGREQUEST']._serialized_end=4727 + _globals['_UPDATERESOURCEMAPPINGRESPONSE']._serialized_start=4729 + _globals['_UPDATERESOURCEMAPPINGRESPONSE']._serialized_end=4828 + _globals['_DELETERESOURCEMAPPINGREQUEST']._serialized_start=4830 + _globals['_DELETERESOURCEMAPPINGREQUEST']._serialized_end=4886 + _globals['_DELETERESOURCEMAPPINGRESPONSE']._serialized_start=4888 + _globals['_DELETERESOURCEMAPPINGRESPONSE']._serialized_end=4987 + _globals['_RESOURCEMAPPINGSERVICE']._serialized_start=4990 + _globals['_RESOURCEMAPPINGSERVICE']._serialized_end=6617 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.pyi new file mode 100644 index 0000000..f76805d --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.pyi @@ -0,0 +1,194 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from common import common_pb2 as _common_pb2 +from policy import objects_pb2 as _objects_pb2 +from policy import selectors_pb2 as _selectors_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class ListResourceMappingGroupsRequest(_message.Message): + __slots__ = ("namespace_id", "pagination") + NAMESPACE_ID_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + namespace_id: str + pagination: _selectors_pb2.PageRequest + def __init__(self, namespace_id: _Optional[str] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListResourceMappingGroupsResponse(_message.Message): + __slots__ = ("resource_mapping_groups", "pagination") + RESOURCE_MAPPING_GROUPS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + resource_mapping_groups: _containers.RepeatedCompositeFieldContainer[_objects_pb2.ResourceMappingGroup] + pagination: _selectors_pb2.PageResponse + def __init__(self, resource_mapping_groups: _Optional[_Iterable[_Union[_objects_pb2.ResourceMappingGroup, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... 
+
+class GetResourceMappingGroupRequest(_message.Message):
+    __slots__ = ("id",)
+    ID_FIELD_NUMBER: _ClassVar[int]
+    id: str
+    def __init__(self, id: _Optional[str] = ...) -> None: ...
+
+class GetResourceMappingGroupResponse(_message.Message):
+    __slots__ = ("resource_mapping_group",)
+    RESOURCE_MAPPING_GROUP_FIELD_NUMBER: _ClassVar[int]
+    resource_mapping_group: _objects_pb2.ResourceMappingGroup
+    def __init__(self, resource_mapping_group: _Optional[_Union[_objects_pb2.ResourceMappingGroup, _Mapping]] = ...) -> None: ...
+
+class CreateResourceMappingGroupRequest(_message.Message):
+    __slots__ = ("namespace_id", "name", "metadata")
+    NAMESPACE_ID_FIELD_NUMBER: _ClassVar[int]
+    NAME_FIELD_NUMBER: _ClassVar[int]
+    METADATA_FIELD_NUMBER: _ClassVar[int]
+    namespace_id: str
+    name: str
+    metadata: _common_pb2.MetadataMutable
+    def __init__(self, namespace_id: _Optional[str] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ...
+
+class CreateResourceMappingGroupResponse(_message.Message):
+    __slots__ = ("resource_mapping_group",)
+    RESOURCE_MAPPING_GROUP_FIELD_NUMBER: _ClassVar[int]
+    resource_mapping_group: _objects_pb2.ResourceMappingGroup
+    def __init__(self, resource_mapping_group: _Optional[_Union[_objects_pb2.ResourceMappingGroup, _Mapping]] = ...) -> None: ...
+
+class UpdateResourceMappingGroupRequest(_message.Message):
+    __slots__ = ("id", "namespace_id", "name", "metadata", "metadata_update_behavior")
+    ID_FIELD_NUMBER: _ClassVar[int]
+    NAMESPACE_ID_FIELD_NUMBER: _ClassVar[int]
+    NAME_FIELD_NUMBER: _ClassVar[int]
+    METADATA_FIELD_NUMBER: _ClassVar[int]
+    METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int]
+    id: str
+    namespace_id: str
+    name: str
+    metadata: _common_pb2.MetadataMutable
+    metadata_update_behavior: _common_pb2.MetadataUpdateEnum
+    def __init__(self, id: _Optional[str] = ..., namespace_id: _Optional[str] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ...
+
+class UpdateResourceMappingGroupResponse(_message.Message):
+    __slots__ = ("resource_mapping_group",)
+    RESOURCE_MAPPING_GROUP_FIELD_NUMBER: _ClassVar[int]
+    resource_mapping_group: _objects_pb2.ResourceMappingGroup
+    def __init__(self, resource_mapping_group: _Optional[_Union[_objects_pb2.ResourceMappingGroup, _Mapping]] = ...) -> None: ...
+
+class DeleteResourceMappingGroupRequest(_message.Message):
+    __slots__ = ("id",)
+    ID_FIELD_NUMBER: _ClassVar[int]
+    id: str
+    def __init__(self, id: _Optional[str] = ...) -> None: ...
+
+class DeleteResourceMappingGroupResponse(_message.Message):
+    __slots__ = ("resource_mapping_group",)
+    RESOURCE_MAPPING_GROUP_FIELD_NUMBER: _ClassVar[int]
+    resource_mapping_group: _objects_pb2.ResourceMappingGroup
+    def __init__(self, resource_mapping_group: _Optional[_Union[_objects_pb2.ResourceMappingGroup, _Mapping]] = ...) -> None: ...
+
+class ListResourceMappingsRequest(_message.Message):
+    __slots__ = ("group_id", "pagination")
+    GROUP_ID_FIELD_NUMBER: _ClassVar[int]
+    PAGINATION_FIELD_NUMBER: _ClassVar[int]
+    group_id: str
+    pagination: _selectors_pb2.PageRequest
+    def __init__(self, group_id: _Optional[str] = ..., pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ...
+
+class ListResourceMappingsResponse(_message.Message):
+    __slots__ = ("resource_mappings", "pagination")
+    RESOURCE_MAPPINGS_FIELD_NUMBER: _ClassVar[int]
+    PAGINATION_FIELD_NUMBER: _ClassVar[int]
+    resource_mappings: _containers.RepeatedCompositeFieldContainer[_objects_pb2.ResourceMapping]
+    pagination: _selectors_pb2.PageResponse
+    def __init__(self, resource_mappings: _Optional[_Iterable[_Union[_objects_pb2.ResourceMapping, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ...
+
+class ListResourceMappingsByGroupFqnsRequest(_message.Message):
+    __slots__ = ("fqns",)
+    FQNS_FIELD_NUMBER: _ClassVar[int]
+    fqns: _containers.RepeatedScalarFieldContainer[str]
+    def __init__(self, fqns: _Optional[_Iterable[str]] = ...) -> None: ...
+
+class ResourceMappingsByGroup(_message.Message):
+    __slots__ = ("group", "mappings")
+    GROUP_FIELD_NUMBER: _ClassVar[int]
+    MAPPINGS_FIELD_NUMBER: _ClassVar[int]
+    group: _objects_pb2.ResourceMappingGroup
+    mappings: _containers.RepeatedCompositeFieldContainer[_objects_pb2.ResourceMapping]
+    def __init__(self, group: _Optional[_Union[_objects_pb2.ResourceMappingGroup, _Mapping]] = ..., mappings: _Optional[_Iterable[_Union[_objects_pb2.ResourceMapping, _Mapping]]] = ...) -> None: ...
+
+class ListResourceMappingsByGroupFqnsResponse(_message.Message):
+    __slots__ = ("fqn_resource_mapping_groups",)
+    class FqnResourceMappingGroupsEntry(_message.Message):
+        __slots__ = ("key", "value")
+        KEY_FIELD_NUMBER: _ClassVar[int]
+        VALUE_FIELD_NUMBER: _ClassVar[int]
+        key: str
+        value: ResourceMappingsByGroup
+        def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[ResourceMappingsByGroup, _Mapping]] = ...) -> None: ...
+    FQN_RESOURCE_MAPPING_GROUPS_FIELD_NUMBER: _ClassVar[int]
+    fqn_resource_mapping_groups: _containers.MessageMap[str, ResourceMappingsByGroup]
+    def __init__(self, fqn_resource_mapping_groups: _Optional[_Mapping[str, ResourceMappingsByGroup]] = ...) -> None: ...
+
+class GetResourceMappingRequest(_message.Message):
+    __slots__ = ("id",)
+    ID_FIELD_NUMBER: _ClassVar[int]
+    id: str
+    def __init__(self, id: _Optional[str] = ...) -> None: ...
+
+class GetResourceMappingResponse(_message.Message):
+    __slots__ = ("resource_mapping",)
+    RESOURCE_MAPPING_FIELD_NUMBER: _ClassVar[int]
+    resource_mapping: _objects_pb2.ResourceMapping
+    def __init__(self, resource_mapping: _Optional[_Union[_objects_pb2.ResourceMapping, _Mapping]] = ...) -> None: ...
+
+class CreateResourceMappingRequest(_message.Message):
+    __slots__ = ("attribute_value_id", "terms", "group_id", "metadata")
+    ATTRIBUTE_VALUE_ID_FIELD_NUMBER: _ClassVar[int]
+    TERMS_FIELD_NUMBER: _ClassVar[int]
+    GROUP_ID_FIELD_NUMBER: _ClassVar[int]
+    METADATA_FIELD_NUMBER: _ClassVar[int]
+    attribute_value_id: str
+    terms: _containers.RepeatedScalarFieldContainer[str]
+    group_id: str
+    metadata: _common_pb2.MetadataMutable
+    def __init__(self, attribute_value_id: _Optional[str] = ..., terms: _Optional[_Iterable[str]] = ..., group_id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ...
+
+class CreateResourceMappingResponse(_message.Message):
+    __slots__ = ("resource_mapping",)
+    RESOURCE_MAPPING_FIELD_NUMBER: _ClassVar[int]
+    resource_mapping: _objects_pb2.ResourceMapping
+    def __init__(self, resource_mapping: _Optional[_Union[_objects_pb2.ResourceMapping, _Mapping]] = ...) -> None: ...
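
For orientation (not part of the patch): per the stubs above, every message type accepts optional keyword arguments matching its fields. A small sketch of building a create request; the UUID and terms below are placeholder values only:

    # Placeholder values only; attribute_value_id must be a real attribute value UUID.
    from policy.resourcemapping import resource_mapping_pb2

    req = resource_mapping_pb2.CreateResourceMappingRequest(
        attribute_value_id="00000000-0000-0000-0000-000000000000",
        terms=["TOPSECRET", "TS"],
    )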
+
+class UpdateResourceMappingRequest(_message.Message):
+    __slots__ = ("id", "attribute_value_id", "terms", "group_id", "metadata", "metadata_update_behavior")
+    ID_FIELD_NUMBER: _ClassVar[int]
+    ATTRIBUTE_VALUE_ID_FIELD_NUMBER: _ClassVar[int]
+    TERMS_FIELD_NUMBER: _ClassVar[int]
+    GROUP_ID_FIELD_NUMBER: _ClassVar[int]
+    METADATA_FIELD_NUMBER: _ClassVar[int]
+    METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int]
+    id: str
+    attribute_value_id: str
+    terms: _containers.RepeatedScalarFieldContainer[str]
+    group_id: str
+    metadata: _common_pb2.MetadataMutable
+    metadata_update_behavior: _common_pb2.MetadataUpdateEnum
+    def __init__(self, id: _Optional[str] = ..., attribute_value_id: _Optional[str] = ..., terms: _Optional[_Iterable[str]] = ..., group_id: _Optional[str] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ...
+
+class UpdateResourceMappingResponse(_message.Message):
+    __slots__ = ("resource_mapping",)
+    RESOURCE_MAPPING_FIELD_NUMBER: _ClassVar[int]
+    resource_mapping: _objects_pb2.ResourceMapping
+    def __init__(self, resource_mapping: _Optional[_Union[_objects_pb2.ResourceMapping, _Mapping]] = ...) -> None: ...
+
+class DeleteResourceMappingRequest(_message.Message):
+    __slots__ = ("id",)
+    ID_FIELD_NUMBER: _ClassVar[int]
+    id: str
+    def __init__(self, id: _Optional[str] = ...) -> None: ...
+
+class DeleteResourceMappingResponse(_message.Message):
+    __slots__ = ("resource_mapping",)
+    RESOURCE_MAPPING_FIELD_NUMBER: _ClassVar[int]
+    resource_mapping: _objects_pb2.ResourceMapping
+    def __init__(self, resource_mapping: _Optional[_Union[_objects_pb2.ResourceMapping, _Mapping]] = ...) -> None: ...
diff --git a/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2_connect.py
new file mode 100644
index 0000000..8576975
--- /dev/null
+++ b/otdf-python-proto/src/otdf_python_proto/policy/resourcemapping/resource_mapping_pb2_connect.py
@@ -0,0 +1,527 @@
+# Generated Connect client code
+
+from __future__ import annotations
+from collections.abc import AsyncIterator
+from collections.abc import Iterator
+from collections.abc import Iterable
+import aiohttp
+import urllib3
+import typing
+import sys
+
+from connectrpc.client_async import AsyncConnectClient
+from connectrpc.client_sync import ConnectClient
+from connectrpc.client_protocol import ConnectProtocol
+from connectrpc.client_connect import ConnectProtocolError
+from connectrpc.headers import HeaderInput
+from connectrpc.server import ClientRequest
+from connectrpc.server import ClientStream
+from connectrpc.server import ServerResponse
+from connectrpc.server import ServerStream
+from connectrpc.server_sync import ConnectWSGI
+from connectrpc.streams import StreamInput
+from connectrpc.streams import AsyncStreamOutput
+from connectrpc.streams import StreamOutput
+from connectrpc.unary import UnaryOutput
+from connectrpc.unary import ClientStreamingOutput
+
+if typing.TYPE_CHECKING:
+    # wsgiref.types was added in Python 3.11.
+    if sys.version_info >= (3, 11):
+        from wsgiref.types import WSGIApplication
+    else:
+        from _typeshed.wsgi import WSGIApplication
+
+import policy.resourcemapping.resource_mapping_pb2
+
+class ResourceMappingServiceClient:
+    def __init__(
+        self,
+        base_url: str,
+        http_client: urllib3.PoolManager | None = None,
+        protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF,
+    ):
+        self.base_url = base_url
+        self._connect_client = ConnectClient(http_client, protocol)
+    def call_list_resource_mapping_groups(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsResponse]:
+        """Low-level method to call ListResourceMappingGroups, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/ListResourceMappingGroups"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsResponse,extra_headers, timeout_seconds)
+
+
+    def list_resource_mapping_groups(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsResponse:
+        response = self.call_list_resource_mapping_groups(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_get_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupResponse]:
+        """Low-level method to call GetResourceMappingGroup, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/GetResourceMappingGroup"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupResponse,extra_headers, timeout_seconds)
+
+
+    def get_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupResponse:
+        response = self.call_get_resource_mapping_group(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_create_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupResponse]:
+        """Low-level method to call CreateResourceMappingGroup, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/CreateResourceMappingGroup"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupResponse,extra_headers, timeout_seconds)
+
+
+    def create_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupResponse:
+        response = self.call_create_resource_mapping_group(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_update_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupResponse]:
+        """Low-level method to call UpdateResourceMappingGroup, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/UpdateResourceMappingGroup"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupResponse,extra_headers, timeout_seconds)
+
+
+    def update_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupResponse:
+        response = self.call_update_resource_mapping_group(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_delete_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupResponse]:
+        """Low-level method to call DeleteResourceMappingGroup, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/DeleteResourceMappingGroup"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupResponse,extra_headers, timeout_seconds)
+
+
+    def delete_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupResponse:
+        response = self.call_delete_resource_mapping_group(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_list_resource_mappings(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsResponse]:
+        """Low-level method to call ListResourceMappings, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/ListResourceMappings"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsResponse,extra_headers, timeout_seconds)
+
+
+    def list_resource_mappings(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsResponse:
+        response = self.call_list_resource_mappings(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_list_resource_mappings_by_group_fqns(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsResponse]:
+        """Low-level method to call ListResourceMappingsByGroupFqns, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/ListResourceMappingsByGroupFqns"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsResponse,extra_headers, timeout_seconds)
+
+
+    def list_resource_mappings_by_group_fqns(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsResponse:
+        response = self.call_list_resource_mappings_by_group_fqns(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_get_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.GetResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.GetResourceMappingResponse]:
+        """Low-level method to call GetResourceMapping, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/GetResourceMapping"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.GetResourceMappingResponse,extra_headers, timeout_seconds)
+
+
+    def get_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.GetResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.GetResourceMappingResponse:
+        response = self.call_get_resource_mapping(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_create_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingResponse]:
+        """Low-level method to call CreateResourceMapping, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/CreateResourceMapping"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingResponse,extra_headers, timeout_seconds)
+
+
+    def create_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingResponse:
+        response = self.call_create_resource_mapping(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_update_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingResponse]:
+        """Low-level method to call UpdateResourceMapping, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/UpdateResourceMapping"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingResponse,extra_headers, timeout_seconds)
+
+
+    def update_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingResponse:
+        response = self.call_update_resource_mapping(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    def call_delete_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingResponse]:
+        """Low-level method to call DeleteResourceMapping, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/DeleteResourceMapping"
+        return self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingResponse,extra_headers, timeout_seconds)
+
+
+    def delete_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingResponse:
+        response = self.call_delete_resource_mapping(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+
+class AsyncResourceMappingServiceClient:
+    def __init__(
+        self,
+        base_url: str,
+        http_client: aiohttp.ClientSession,
+        protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF,
+    ):
+        self.base_url = base_url
+        self._connect_client = AsyncConnectClient(http_client, protocol)
+
+    async def call_list_resource_mapping_groups(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsResponse]:
+        """Low-level method to call ListResourceMappingGroups, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/ListResourceMappingGroups"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsResponse,extra_headers, timeout_seconds)
+
+    async def list_resource_mapping_groups(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsResponse:
+        response = await self.call_list_resource_mapping_groups(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_get_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupResponse]:
+        """Low-level method to call GetResourceMappingGroup, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/GetResourceMappingGroup"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupResponse,extra_headers, timeout_seconds)
+
+    async def get_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupResponse:
+        response = await self.call_get_resource_mapping_group(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_create_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupResponse]:
+        """Low-level method to call CreateResourceMappingGroup, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/CreateResourceMappingGroup"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupResponse,extra_headers, timeout_seconds)
+
+    async def create_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupResponse:
+        response = await self.call_create_resource_mapping_group(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_update_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupResponse]:
+        """Low-level method to call UpdateResourceMappingGroup, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/UpdateResourceMappingGroup"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupResponse,extra_headers, timeout_seconds)
+
+    async def update_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupResponse:
+        response = await self.call_update_resource_mapping_group(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_delete_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupResponse]:
+        """Low-level method to call DeleteResourceMappingGroup, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/DeleteResourceMappingGroup"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupResponse,extra_headers, timeout_seconds)
+
+    async def delete_resource_mapping_group(
+        self, req: policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupResponse:
+        response = await self.call_delete_resource_mapping_group(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_list_resource_mappings(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsResponse]:
+        """Low-level method to call ListResourceMappings, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/ListResourceMappings"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsResponse,extra_headers, timeout_seconds)
+
+    async def list_resource_mappings(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsResponse:
+        response = await self.call_list_resource_mappings(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_list_resource_mappings_by_group_fqns(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsResponse]:
+        """Low-level method to call ListResourceMappingsByGroupFqns, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/ListResourceMappingsByGroupFqns"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsResponse,extra_headers, timeout_seconds)
+
+    async def list_resource_mappings_by_group_fqns(
+        self, req: policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsResponse:
+        response = await self.call_list_resource_mappings_by_group_fqns(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_get_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.GetResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.GetResourceMappingResponse]:
+        """Low-level method to call GetResourceMapping, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/GetResourceMapping"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.GetResourceMappingResponse,extra_headers, timeout_seconds)
+
+    async def get_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.GetResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.GetResourceMappingResponse:
+        response = await self.call_get_resource_mapping(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_create_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingResponse]:
+        """Low-level method to call CreateResourceMapping, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/CreateResourceMapping"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingResponse,extra_headers, timeout_seconds)
+
+    async def create_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingResponse:
+        response = await self.call_create_resource_mapping(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_update_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingResponse]:
+        """Low-level method to call UpdateResourceMapping, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/UpdateResourceMapping"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingResponse,extra_headers, timeout_seconds)
+
+    async def update_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingResponse:
+        response = await self.call_update_resource_mapping(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+    async def call_delete_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> UnaryOutput[policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingResponse]:
+        """Low-level method to call DeleteResourceMapping, granting access to errors and metadata"""
+        url = self.base_url + "/policy.resourcemapping.ResourceMappingService/DeleteResourceMapping"
+        return await self._connect_client.call_unary(url, req, policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingResponse,extra_headers, timeout_seconds)
+
+    async def delete_resource_mapping(
+        self, req: policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None
+    ) -> policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingResponse:
+        response = await self.call_delete_resource_mapping(req, extra_headers, timeout_seconds)
+        err = response.error()
+        if err is not None:
+            raise err
+        msg = response.message()
+        if msg is None:
+            raise ConnectProtocolError('missing response message')
+        return msg
+
+
+@typing.runtime_checkable
+class ResourceMappingServiceProtocol(typing.Protocol):
+    def list_resource_mapping_groups(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsResponse]:
+        ...
+    def get_resource_mapping_group(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupResponse]:
+        ...
+ def create_resource_mapping_group(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupResponse]: + ... + def update_resource_mapping_group(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupResponse]: + ... + def delete_resource_mapping_group(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupResponse]: + ... + def list_resource_mappings(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsResponse]: + ... + def list_resource_mappings_by_group_fqns(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsResponse]: + ... + def get_resource_mapping(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.GetResourceMappingRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.GetResourceMappingResponse]: + ... + def create_resource_mapping(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingResponse]: + ... + def update_resource_mapping(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingResponse]: + ... + def delete_resource_mapping(self, req: ClientRequest[policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingRequest]) -> ServerResponse[policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingResponse]: + ... 
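+
+# --- Illustrative usage sketch (not generated code) ---------------------------
+# A minimal server-side wiring of ResourceMappingServiceProtocol through the
+# wsgi_resource_mapping_service factory defined just below. Only one handler is
+# shown, and the ServerResponse construction is an assumption about the
+# connectrpc API; a real implementation must supply every protocol method,
+# since the factory looks each one up at registration time.
+#
+# from wsgiref.simple_server import make_server
+#
+# class InMemoryResourceMappings:
+#     def list_resource_mappings(self, req):
+#         # Empty response; a real service would populate resource_mappings.
+#         resp = policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsResponse()
+#         return ServerResponse(resp)
+#     # ... remaining ResourceMappingServiceProtocol methods elided ...
+#
+# app = wsgi_resource_mapping_service(InMemoryResourceMappings())
+# make_server("localhost", 8000, app).serve_forever()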
+ +RESOURCE_MAPPING_SERVICE_PATH_PREFIX = "/policy.resourcemapping.ResourceMappingService" + +def wsgi_resource_mapping_service(implementation: ResourceMappingServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/ListResourceMappingGroups", implementation.list_resource_mapping_groups, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingGroupsRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/GetResourceMappingGroup", implementation.get_resource_mapping_group, policy.resourcemapping.resource_mapping_pb2.GetResourceMappingGroupRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/CreateResourceMappingGroup", implementation.create_resource_mapping_group, policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingGroupRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/UpdateResourceMappingGroup", implementation.update_resource_mapping_group, policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingGroupRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/DeleteResourceMappingGroup", implementation.delete_resource_mapping_group, policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingGroupRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/ListResourceMappings", implementation.list_resource_mappings, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/ListResourceMappingsByGroupFqns", implementation.list_resource_mappings_by_group_fqns, policy.resourcemapping.resource_mapping_pb2.ListResourceMappingsByGroupFqnsRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/GetResourceMapping", implementation.get_resource_mapping, policy.resourcemapping.resource_mapping_pb2.GetResourceMappingRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/CreateResourceMapping", implementation.create_resource_mapping, policy.resourcemapping.resource_mapping_pb2.CreateResourceMappingRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/UpdateResourceMapping", implementation.update_resource_mapping, policy.resourcemapping.resource_mapping_pb2.UpdateResourceMappingRequest) + app.register_unary_rpc("/policy.resourcemapping.ResourceMappingService/DeleteResourceMapping", implementation.delete_resource_mapping, policy.resourcemapping.resource_mapping_pb2.DeleteResourceMappingRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/policy/selectors_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/selectors_pb2.py new file mode 100644 index 0000000..f443e09 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/selectors_pb2.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/selectors.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/selectors.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16policy/selectors.proto\x12\x06policy\"\xcc\x03\n\x1a\x41ttributeNamespaceSelector\x12]\n\x0fwith_attributes\x18\n \x01(\x0b\x32\x34.policy.AttributeNamespaceSelector.AttributeSelectorR\x0ewithAttributes\x1a\xce\x02\n\x11\x41ttributeSelector\x12\x33\n\x16with_key_access_grants\x18\x01 \x01(\x08R\x13withKeyAccessGrants\x12\x63\n\x0bwith_values\x18\n \x01(\x0b\x32\x42.policy.AttributeNamespaceSelector.AttributeSelector.ValueSelectorR\nwithValues\x1a\x9e\x01\n\rValueSelector\x12\x33\n\x16with_key_access_grants\x18\x01 \x01(\x08R\x13withKeyAccessGrants\x12*\n\x11with_subject_maps\x18\x02 \x01(\x08R\x0fwithSubjectMaps\x12,\n\x12with_resource_maps\x18\x03 \x01(\x08R\x10withResourceMaps\"\xba\x03\n\x1b\x41ttributeDefinitionSelector\x12\x33\n\x16with_key_access_grants\x18\x01 \x01(\x08R\x13withKeyAccessGrants\x12\\\n\x0ewith_namespace\x18\n \x01(\x0b\x32\x35.policy.AttributeDefinitionSelector.NamespaceSelectorR\rwithNamespace\x12R\n\x0bwith_values\x18\x0b \x01(\x0b\x32\x31.policy.AttributeDefinitionSelector.ValueSelectorR\nwithValues\x1a\x13\n\x11NamespaceSelector\x1a\x9e\x01\n\rValueSelector\x12\x33\n\x16with_key_access_grants\x18\x01 \x01(\x08R\x13withKeyAccessGrants\x12*\n\x11with_subject_maps\x18\x02 \x01(\x08R\x0fwithSubjectMaps\x12,\n\x12with_resource_maps\x18\x03 \x01(\x08R\x10withResourceMaps\"\xcb\x03\n\x16\x41ttributeValueSelector\x12\x33\n\x16with_key_access_grants\x18\x01 \x01(\x08R\x13withKeyAccessGrants\x12*\n\x11with_subject_maps\x18\x02 \x01(\x08R\x0fwithSubjectMaps\x12,\n\x12with_resource_maps\x18\x03 \x01(\x08R\x10withResourceMaps\x12W\n\x0ewith_attribute\x18\n \x01(\x0b\x32\x30.policy.AttributeValueSelector.AttributeSelectorR\rwithAttribute\x1a\xc8\x01\n\x11\x41ttributeSelector\x12\x33\n\x16with_key_access_grants\x18\x01 \x01(\x08R\x13withKeyAccessGrants\x12i\n\x0ewith_namespace\x18\n \x01(\x0b\x32\x42.policy.AttributeValueSelector.AttributeSelector.NamespaceSelectorR\rwithNamespace\x1a\x13\n\x11NamespaceSelector\";\n\x0bPageRequest\x12\x14\n\x05limit\x18\x01 \x01(\x05R\x05limit\x12\x16\n\x06offset\x18\x02 \x01(\x05R\x06offset\"l\n\x0cPageResponse\x12%\n\x0e\x63urrent_offset\x18\x01 \x01(\x05R\rcurrentOffset\x12\x1f\n\x0bnext_offset\x18\x02 \x01(\x05R\nnextOffset\x12\x14\n\x05total\x18\x03 \x01(\x05R\x05totalBT\n\ncom.policyB\x0eSelectorsProtoP\x01\xa2\x02\x03PXX\xaa\x02\x06Policy\xca\x02\x06Policy\xe2\x02\x12Policy\\GPBMetadata\xea\x02\x06Policyb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.selectors_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = 
b'\n\ncom.policyB\016SelectorsProtoP\001\242\002\003PXX\252\002\006Policy\312\002\006Policy\342\002\022Policy\\GPBMetadata\352\002\006Policy' + _globals['_ATTRIBUTENAMESPACESELECTOR']._serialized_start=35 + _globals['_ATTRIBUTENAMESPACESELECTOR']._serialized_end=495 + _globals['_ATTRIBUTENAMESPACESELECTOR_ATTRIBUTESELECTOR']._serialized_start=161 + _globals['_ATTRIBUTENAMESPACESELECTOR_ATTRIBUTESELECTOR']._serialized_end=495 + _globals['_ATTRIBUTENAMESPACESELECTOR_ATTRIBUTESELECTOR_VALUESELECTOR']._serialized_start=337 + _globals['_ATTRIBUTENAMESPACESELECTOR_ATTRIBUTESELECTOR_VALUESELECTOR']._serialized_end=495 + _globals['_ATTRIBUTEDEFINITIONSELECTOR']._serialized_start=498 + _globals['_ATTRIBUTEDEFINITIONSELECTOR']._serialized_end=940 + _globals['_ATTRIBUTEDEFINITIONSELECTOR_NAMESPACESELECTOR']._serialized_start=760 + _globals['_ATTRIBUTEDEFINITIONSELECTOR_NAMESPACESELECTOR']._serialized_end=779 + _globals['_ATTRIBUTEDEFINITIONSELECTOR_VALUESELECTOR']._serialized_start=337 + _globals['_ATTRIBUTEDEFINITIONSELECTOR_VALUESELECTOR']._serialized_end=495 + _globals['_ATTRIBUTEVALUESELECTOR']._serialized_start=943 + _globals['_ATTRIBUTEVALUESELECTOR']._serialized_end=1402 + _globals['_ATTRIBUTEVALUESELECTOR_ATTRIBUTESELECTOR']._serialized_start=1202 + _globals['_ATTRIBUTEVALUESELECTOR_ATTRIBUTESELECTOR']._serialized_end=1402 + _globals['_ATTRIBUTEVALUESELECTOR_ATTRIBUTESELECTOR_NAMESPACESELECTOR']._serialized_start=760 + _globals['_ATTRIBUTEVALUESELECTOR_ATTRIBUTESELECTOR_NAMESPACESELECTOR']._serialized_end=779 + _globals['_PAGEREQUEST']._serialized_start=1404 + _globals['_PAGEREQUEST']._serialized_end=1463 + _globals['_PAGERESPONSE']._serialized_start=1465 + _globals['_PAGERESPONSE']._serialized_end=1573 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/selectors_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/selectors_pb2.pyi new file mode 100644 index 0000000..4bcaca0 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/selectors_pb2.pyi @@ -0,0 +1,90 @@ +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class AttributeNamespaceSelector(_message.Message): + __slots__ = ("with_attributes",) + class AttributeSelector(_message.Message): + __slots__ = ("with_key_access_grants", "with_values") + class ValueSelector(_message.Message): + __slots__ = ("with_key_access_grants", "with_subject_maps", "with_resource_maps") + WITH_KEY_ACCESS_GRANTS_FIELD_NUMBER: _ClassVar[int] + WITH_SUBJECT_MAPS_FIELD_NUMBER: _ClassVar[int] + WITH_RESOURCE_MAPS_FIELD_NUMBER: _ClassVar[int] + with_key_access_grants: bool + with_subject_maps: bool + with_resource_maps: bool + def __init__(self, with_key_access_grants: bool = ..., with_subject_maps: bool = ..., with_resource_maps: bool = ...) -> None: ... + WITH_KEY_ACCESS_GRANTS_FIELD_NUMBER: _ClassVar[int] + WITH_VALUES_FIELD_NUMBER: _ClassVar[int] + with_key_access_grants: bool + with_values: AttributeNamespaceSelector.AttributeSelector.ValueSelector + def __init__(self, with_key_access_grants: bool = ..., with_values: _Optional[_Union[AttributeNamespaceSelector.AttributeSelector.ValueSelector, _Mapping]] = ...) -> None: ... 
+ WITH_ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + with_attributes: AttributeNamespaceSelector.AttributeSelector + def __init__(self, with_attributes: _Optional[_Union[AttributeNamespaceSelector.AttributeSelector, _Mapping]] = ...) -> None: ... + +class AttributeDefinitionSelector(_message.Message): + __slots__ = ("with_key_access_grants", "with_namespace", "with_values") + class NamespaceSelector(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + class ValueSelector(_message.Message): + __slots__ = ("with_key_access_grants", "with_subject_maps", "with_resource_maps") + WITH_KEY_ACCESS_GRANTS_FIELD_NUMBER: _ClassVar[int] + WITH_SUBJECT_MAPS_FIELD_NUMBER: _ClassVar[int] + WITH_RESOURCE_MAPS_FIELD_NUMBER: _ClassVar[int] + with_key_access_grants: bool + with_subject_maps: bool + with_resource_maps: bool + def __init__(self, with_key_access_grants: bool = ..., with_subject_maps: bool = ..., with_resource_maps: bool = ...) -> None: ... + WITH_KEY_ACCESS_GRANTS_FIELD_NUMBER: _ClassVar[int] + WITH_NAMESPACE_FIELD_NUMBER: _ClassVar[int] + WITH_VALUES_FIELD_NUMBER: _ClassVar[int] + with_key_access_grants: bool + with_namespace: AttributeDefinitionSelector.NamespaceSelector + with_values: AttributeDefinitionSelector.ValueSelector + def __init__(self, with_key_access_grants: bool = ..., with_namespace: _Optional[_Union[AttributeDefinitionSelector.NamespaceSelector, _Mapping]] = ..., with_values: _Optional[_Union[AttributeDefinitionSelector.ValueSelector, _Mapping]] = ...) -> None: ... + +class AttributeValueSelector(_message.Message): + __slots__ = ("with_key_access_grants", "with_subject_maps", "with_resource_maps", "with_attribute") + class AttributeSelector(_message.Message): + __slots__ = ("with_key_access_grants", "with_namespace") + class NamespaceSelector(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + WITH_KEY_ACCESS_GRANTS_FIELD_NUMBER: _ClassVar[int] + WITH_NAMESPACE_FIELD_NUMBER: _ClassVar[int] + with_key_access_grants: bool + with_namespace: AttributeValueSelector.AttributeSelector.NamespaceSelector + def __init__(self, with_key_access_grants: bool = ..., with_namespace: _Optional[_Union[AttributeValueSelector.AttributeSelector.NamespaceSelector, _Mapping]] = ...) -> None: ... + WITH_KEY_ACCESS_GRANTS_FIELD_NUMBER: _ClassVar[int] + WITH_SUBJECT_MAPS_FIELD_NUMBER: _ClassVar[int] + WITH_RESOURCE_MAPS_FIELD_NUMBER: _ClassVar[int] + WITH_ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + with_key_access_grants: bool + with_subject_maps: bool + with_resource_maps: bool + with_attribute: AttributeValueSelector.AttributeSelector + def __init__(self, with_key_access_grants: bool = ..., with_subject_maps: bool = ..., with_resource_maps: bool = ..., with_attribute: _Optional[_Union[AttributeValueSelector.AttributeSelector, _Mapping]] = ...) -> None: ... + +class PageRequest(_message.Message): + __slots__ = ("limit", "offset") + LIMIT_FIELD_NUMBER: _ClassVar[int] + OFFSET_FIELD_NUMBER: _ClassVar[int] + limit: int + offset: int + def __init__(self, limit: _Optional[int] = ..., offset: _Optional[int] = ...) -> None: ... + +class PageResponse(_message.Message): + __slots__ = ("current_offset", "next_offset", "total") + CURRENT_OFFSET_FIELD_NUMBER: _ClassVar[int] + NEXT_OFFSET_FIELD_NUMBER: _ClassVar[int] + TOTAL_FIELD_NUMBER: _ClassVar[int] + current_offset: int + next_offset: int + total: int + def __init__(self, current_offset: _Optional[int] = ..., next_offset: _Optional[int] = ..., total: _Optional[int] = ...) -> None: ... 
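+
+# --- Illustrative usage sketch (not generated code) ---------------------------
+# PageRequest/PageResponse drive pagination for the policy list RPCs. A minimal
+# paging loop; `fetch_page` and `handle` are hypothetical helpers, and the
+# assumption that next_offset is 0 on the final page is illustrative only.
+#
+# from policy import selectors_pb2
+#
+# offset = 0
+# while True:
+#     page = selectors_pb2.PageRequest(limit=100, offset=offset)
+#     resp = fetch_page(page)   # hypothetical: wraps a list_* client call
+#     handle(resp)              # hypothetical: process one page of results
+#     offset = resp.pagination.next_offset
+#     if offset == 0:
+#         break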
diff --git a/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.py new file mode 100644 index 0000000..7e28ad1 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/subjectmapping/subject_mapping.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/subjectmapping/subject_mapping.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from common import common_pb2 as common_dot_common__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 +from policy import selectors_pb2 as policy_dot_selectors__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+policy/subjectmapping/subject_mapping.proto\x12\x15policy.subjectmapping\x1a\x1b\x62uf/validate/validate.proto\x1a\x13\x63ommon/common.proto\x1a\x14policy/objects.proto\x1a\x16policy/selectors.proto\"o\n\x1bMatchSubjectMappingsRequest\x12P\n\x12subject_properties\x18\x01 \x03(\x0b\x32\x17.policy.SubjectPropertyB\x08\xbaH\x05\x92\x01\x02\x08\x01R\x11subjectProperties\"a\n\x1cMatchSubjectMappingsResponse\x12\x41\n\x10subject_mappings\x18\x01 \x03(\x0b\x32\x16.policy.SubjectMappingR\x0fsubjectMappings\"4\n\x18GetSubjectMappingRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"\\\n\x19GetSubjectMappingResponse\x12?\n\x0fsubject_mapping\x18\x01 \x01(\x0b\x32\x16.policy.SubjectMappingR\x0esubjectMapping\"Q\n\x1aListSubjectMappingsRequest\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\x96\x01\n\x1bListSubjectMappingsResponse\x12\x41\n\x10subject_mappings\x18\x01 \x03(\x0b\x32\x16.policy.SubjectMappingR\x0fsubjectMappings\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\xb3\x05\n\x1b\x43reateSubjectMappingRequest\x12\x36\n\x12\x61ttribute_value_id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x10\x61ttributeValueId\x12\xb8\x01\n\x07\x61\x63tions\x18\x02 \x03(\x0b\x32\x0e.policy.ActionB\x8d\x01\xbaH\x89\x01\x92\x01\x02\x08\x01\xba\x01\x80\x01\n\x1b\x61\x63tion_name_or_id_not_empty\x12/Action name or ID must not be empty if provided\x1a\x30this.all(item, item.name != \'\' || item.id != \'\')R\x07\x61\x63tions\x12\xfe\x01\n!existing_subject_condition_set_id\x18\x03 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x1d\x65xistingSubjectConditionSetId\x12k\n\x19new_subject_condition_set\x18\x04 \x01(\x0b\x32\x30.policy.subjectmapping.SubjectConditionSetCreateR\x16newSubjectConditionSet\x12\x33\n\x08metadata\x18\x64 
\x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"_\n\x1c\x43reateSubjectMappingResponse\x12?\n\x0fsubject_mapping\x18\x01 \x01(\x0b\x32\x16.policy.SubjectMappingR\x0esubjectMapping\"\xfc\x04\n\x1bUpdateSubjectMappingRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xed\x01\n\x18subject_condition_set_id\x18\x02 \x01(\tB\xb3\x01\xbaH\xaf\x01\xba\x01\xab\x01\n\x14optional_uuid_format\x12#Optional field must be a valid UUID\x1ansize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')R\x15subjectConditionSetId\x12\xc7\x01\n\x07\x61\x63tions\x18\x03 \x03(\x0b\x32\x0e.policy.ActionB\x9c\x01\xbaH\x98\x01\xba\x01\x94\x01\n\x1b\x61\x63tion_name_or_id_not_empty\x12/Action name or ID must not be empty if provided\x1a\x44this.size() == 0 || this.all(item, item.name != \'\' || item.id != \'\')R\x07\x61\x63tions\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"_\n\x1cUpdateSubjectMappingResponse\x12?\n\x0fsubject_mapping\x18\x01 \x01(\x0b\x32\x16.policy.SubjectMappingR\x0esubjectMapping\"7\n\x1b\x44\x65leteSubjectMappingRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"_\n\x1c\x44\x65leteSubjectMappingResponse\x12?\n\x0fsubject_mapping\x18\x01 \x01(\x0b\x32\x16.policy.SubjectMappingR\x0esubjectMapping\"9\n\x1dGetSubjectConditionSetRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"\xc9\x01\n\x1eGetSubjectConditionSetResponse\x12O\n\x15subject_condition_set\x18\x01 \x01(\x0b\x32\x1b.policy.SubjectConditionSetR\x13subjectConditionSet\x12V\n\x1b\x61ssociated_subject_mappings\x18\x02 \x03(\x0b\x32\x16.policy.SubjectMappingR\x19\x61ssociatedSubjectMappings\"V\n\x1fListSubjectConditionSetsRequest\x12\x33\n\npagination\x18\n \x01(\x0b\x32\x13.policy.PageRequestR\npagination\"\xab\x01\n ListSubjectConditionSetsResponse\x12Q\n\x16subject_condition_sets\x18\x01 \x03(\x0b\x32\x1b.policy.SubjectConditionSetR\x14subjectConditionSets\x12\x34\n\npagination\x18\n \x01(\x0b\x32\x14.policy.PageResponseR\npagination\"\x91\x01\n\x19SubjectConditionSetCreate\x12?\n\x0csubject_sets\x18\x01 \x03(\x0b\x32\x12.policy.SubjectSetB\x08\xbaH\x05\x92\x01\x02\x08\x01R\x0bsubjectSets\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\"\x90\x01\n CreateSubjectConditionSetRequest\x12l\n\x15subject_condition_set\x18\x01 \x01(\x0b\x32\x30.policy.subjectmapping.SubjectConditionSetCreateB\x06\xbaH\x03\xc8\x01\x01R\x13subjectConditionSet\"t\n!CreateSubjectConditionSetResponse\x12O\n\x15subject_condition_set\x18\x01 \x01(\x0b\x32\x1b.policy.SubjectConditionSetR\x13subjectConditionSet\"\xfe\x01\n UpdateSubjectConditionSetRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x35\n\x0csubject_sets\x18\x02 \x03(\x0b\x32\x12.policy.SubjectSetR\x0bsubjectSets\x12\x33\n\x08metadata\x18\x64 \x01(\x0b\x32\x17.common.MetadataMutableR\x08metadata\x12T\n\x18metadata_update_behavior\x18\x65 \x01(\x0e\x32\x1a.common.MetadataUpdateEnumR\x16metadataUpdateBehavior\"t\n!UpdateSubjectConditionSetResponse\x12O\n\x15subject_condition_set\x18\x01 \x01(\x0b\x32\x1b.policy.SubjectConditionSetR\x13subjectConditionSet\"<\n DeleteSubjectConditionSetRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"t\n!DeleteSubjectConditionSetResponse\x12O\n\x15subject_condition_set\x18\x01 
\x01(\x0b\x32\x1b.policy.SubjectConditionSetR\x13subjectConditionSet\".\n,DeleteAllUnmappedSubjectConditionSetsRequest\"\x82\x01\n-DeleteAllUnmappedSubjectConditionSetsResponse\x12Q\n\x16subject_condition_sets\x18\x01 \x03(\x0b\x32\x1b.policy.SubjectConditionSetR\x14subjectConditionSets2\xb8\r\n\x15SubjectMappingService\x12\x81\x01\n\x14MatchSubjectMappings\x12\x32.policy.subjectmapping.MatchSubjectMappingsRequest\x1a\x33.policy.subjectmapping.MatchSubjectMappingsResponse\"\x00\x12\x81\x01\n\x13ListSubjectMappings\x12\x31.policy.subjectmapping.ListSubjectMappingsRequest\x1a\x32.policy.subjectmapping.ListSubjectMappingsResponse\"\x03\x90\x02\x01\x12{\n\x11GetSubjectMapping\x12/.policy.subjectmapping.GetSubjectMappingRequest\x1a\x30.policy.subjectmapping.GetSubjectMappingResponse\"\x03\x90\x02\x01\x12\x81\x01\n\x14\x43reateSubjectMapping\x12\x32.policy.subjectmapping.CreateSubjectMappingRequest\x1a\x33.policy.subjectmapping.CreateSubjectMappingResponse\"\x00\x12\x81\x01\n\x14UpdateSubjectMapping\x12\x32.policy.subjectmapping.UpdateSubjectMappingRequest\x1a\x33.policy.subjectmapping.UpdateSubjectMappingResponse\"\x00\x12\x81\x01\n\x14\x44\x65leteSubjectMapping\x12\x32.policy.subjectmapping.DeleteSubjectMappingRequest\x1a\x33.policy.subjectmapping.DeleteSubjectMappingResponse\"\x00\x12\x90\x01\n\x18ListSubjectConditionSets\x12\x36.policy.subjectmapping.ListSubjectConditionSetsRequest\x1a\x37.policy.subjectmapping.ListSubjectConditionSetsResponse\"\x03\x90\x02\x01\x12\x8a\x01\n\x16GetSubjectConditionSet\x12\x34.policy.subjectmapping.GetSubjectConditionSetRequest\x1a\x35.policy.subjectmapping.GetSubjectConditionSetResponse\"\x03\x90\x02\x01\x12\x90\x01\n\x19\x43reateSubjectConditionSet\x12\x37.policy.subjectmapping.CreateSubjectConditionSetRequest\x1a\x38.policy.subjectmapping.CreateSubjectConditionSetResponse\"\x00\x12\x90\x01\n\x19UpdateSubjectConditionSet\x12\x37.policy.subjectmapping.UpdateSubjectConditionSetRequest\x1a\x38.policy.subjectmapping.UpdateSubjectConditionSetResponse\"\x00\x12\x90\x01\n\x19\x44\x65leteSubjectConditionSet\x12\x37.policy.subjectmapping.DeleteSubjectConditionSetRequest\x1a\x38.policy.subjectmapping.DeleteSubjectConditionSetResponse\"\x00\x12\xb4\x01\n%DeleteAllUnmappedSubjectConditionSets\x12\x43.policy.subjectmapping.DeleteAllUnmappedSubjectConditionSetsRequest\x1a\x44.policy.subjectmapping.DeleteAllUnmappedSubjectConditionSetsResponse\"\x00\x42\xa5\x01\n\x19\x63om.policy.subjectmappingB\x13SubjectMappingProtoP\x01\xa2\x02\x03PSX\xaa\x02\x15Policy.Subjectmapping\xca\x02\x15Policy\\Subjectmapping\xe2\x02!Policy\\Subjectmapping\\GPBMetadata\xea\x02\x16Policy::Subjectmappingb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.subjectmapping.subject_mapping_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\031com.policy.subjectmappingB\023SubjectMappingProtoP\001\242\002\003PSX\252\002\025Policy.Subjectmapping\312\002\025Policy\\Subjectmapping\342\002!Policy\\Subjectmapping\\GPBMetadata\352\002\026Policy::Subjectmapping' + _globals['_MATCHSUBJECTMAPPINGSREQUEST'].fields_by_name['subject_properties']._loaded_options = None + _globals['_MATCHSUBJECTMAPPINGSREQUEST'].fields_by_name['subject_properties']._serialized_options = b'\272H\005\222\001\002\010\001' + _globals['_GETSUBJECTMAPPINGREQUEST'].fields_by_name['id']._loaded_options = None + 
_globals['_GETSUBJECTMAPPINGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATESUBJECTMAPPINGREQUEST'].fields_by_name['attribute_value_id']._loaded_options = None + _globals['_CREATESUBJECTMAPPINGREQUEST'].fields_by_name['attribute_value_id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_CREATESUBJECTMAPPINGREQUEST'].fields_by_name['actions']._loaded_options = None + _globals['_CREATESUBJECTMAPPINGREQUEST'].fields_by_name['actions']._serialized_options = b'\272H\211\001\222\001\002\010\001\272\001\200\001\n\033action_name_or_id_not_empty\022/Action name or ID must not be empty if provided\0320this.all(item, item.name != \'\' || item.id != \'\')' + _globals['_CREATESUBJECTMAPPINGREQUEST'].fields_by_name['existing_subject_condition_set_id']._loaded_options = None + _globals['_CREATESUBJECTMAPPINGREQUEST'].fields_by_name['existing_subject_condition_set_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_UPDATESUBJECTMAPPINGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATESUBJECTMAPPINGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UPDATESUBJECTMAPPINGREQUEST'].fields_by_name['subject_condition_set_id']._loaded_options = None + _globals['_UPDATESUBJECTMAPPINGREQUEST'].fields_by_name['subject_condition_set_id']._serialized_options = b'\272H\257\001\272\001\253\001\n\024optional_uuid_format\022#Optional field must be a valid UUID\032nsize(this) == 0 || this.matches(\'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\')' + _globals['_UPDATESUBJECTMAPPINGREQUEST'].fields_by_name['actions']._loaded_options = None + _globals['_UPDATESUBJECTMAPPINGREQUEST'].fields_by_name['actions']._serialized_options = b'\272H\230\001\272\001\224\001\n\033action_name_or_id_not_empty\022/Action name or ID must not be empty if provided\032Dthis.size() == 0 || this.all(item, item.name != \'\' || item.id != \'\')' + _globals['_DELETESUBJECTMAPPINGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETESUBJECTMAPPINGREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_GETSUBJECTCONDITIONSETREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETSUBJECTCONDITIONSETREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_SUBJECTCONDITIONSETCREATE'].fields_by_name['subject_sets']._loaded_options = None + _globals['_SUBJECTCONDITIONSETCREATE'].fields_by_name['subject_sets']._serialized_options = b'\272H\005\222\001\002\010\001' + _globals['_CREATESUBJECTCONDITIONSETREQUEST'].fields_by_name['subject_condition_set']._loaded_options = None + _globals['_CREATESUBJECTCONDITIONSETREQUEST'].fields_by_name['subject_condition_set']._serialized_options = b'\272H\003\310\001\001' + _globals['_UPDATESUBJECTCONDITIONSETREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UPDATESUBJECTCONDITIONSETREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_DELETESUBJECTCONDITIONSETREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETESUBJECTCONDITIONSETREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + 
_globals['_SUBJECTMAPPINGSERVICE'].methods_by_name['ListSubjectMappings']._loaded_options = None + _globals['_SUBJECTMAPPINGSERVICE'].methods_by_name['ListSubjectMappings']._serialized_options = b'\220\002\001' + _globals['_SUBJECTMAPPINGSERVICE'].methods_by_name['GetSubjectMapping']._loaded_options = None + _globals['_SUBJECTMAPPINGSERVICE'].methods_by_name['GetSubjectMapping']._serialized_options = b'\220\002\001' + _globals['_SUBJECTMAPPINGSERVICE'].methods_by_name['ListSubjectConditionSets']._loaded_options = None + _globals['_SUBJECTMAPPINGSERVICE'].methods_by_name['ListSubjectConditionSets']._serialized_options = b'\220\002\001' + _globals['_SUBJECTMAPPINGSERVICE'].methods_by_name['GetSubjectConditionSet']._loaded_options = None + _globals['_SUBJECTMAPPINGSERVICE'].methods_by_name['GetSubjectConditionSet']._serialized_options = b'\220\002\001' + _globals['_MATCHSUBJECTMAPPINGSREQUEST']._serialized_start=166 + _globals['_MATCHSUBJECTMAPPINGSREQUEST']._serialized_end=277 + _globals['_MATCHSUBJECTMAPPINGSRESPONSE']._serialized_start=279 + _globals['_MATCHSUBJECTMAPPINGSRESPONSE']._serialized_end=376 + _globals['_GETSUBJECTMAPPINGREQUEST']._serialized_start=378 + _globals['_GETSUBJECTMAPPINGREQUEST']._serialized_end=430 + _globals['_GETSUBJECTMAPPINGRESPONSE']._serialized_start=432 + _globals['_GETSUBJECTMAPPINGRESPONSE']._serialized_end=524 + _globals['_LISTSUBJECTMAPPINGSREQUEST']._serialized_start=526 + _globals['_LISTSUBJECTMAPPINGSREQUEST']._serialized_end=607 + _globals['_LISTSUBJECTMAPPINGSRESPONSE']._serialized_start=610 + _globals['_LISTSUBJECTMAPPINGSRESPONSE']._serialized_end=760 + _globals['_CREATESUBJECTMAPPINGREQUEST']._serialized_start=763 + _globals['_CREATESUBJECTMAPPINGREQUEST']._serialized_end=1454 + _globals['_CREATESUBJECTMAPPINGRESPONSE']._serialized_start=1456 + _globals['_CREATESUBJECTMAPPINGRESPONSE']._serialized_end=1551 + _globals['_UPDATESUBJECTMAPPINGREQUEST']._serialized_start=1554 + _globals['_UPDATESUBJECTMAPPINGREQUEST']._serialized_end=2190 + _globals['_UPDATESUBJECTMAPPINGRESPONSE']._serialized_start=2192 + _globals['_UPDATESUBJECTMAPPINGRESPONSE']._serialized_end=2287 + _globals['_DELETESUBJECTMAPPINGREQUEST']._serialized_start=2289 + _globals['_DELETESUBJECTMAPPINGREQUEST']._serialized_end=2344 + _globals['_DELETESUBJECTMAPPINGRESPONSE']._serialized_start=2346 + _globals['_DELETESUBJECTMAPPINGRESPONSE']._serialized_end=2441 + _globals['_GETSUBJECTCONDITIONSETREQUEST']._serialized_start=2443 + _globals['_GETSUBJECTCONDITIONSETREQUEST']._serialized_end=2500 + _globals['_GETSUBJECTCONDITIONSETRESPONSE']._serialized_start=2503 + _globals['_GETSUBJECTCONDITIONSETRESPONSE']._serialized_end=2704 + _globals['_LISTSUBJECTCONDITIONSETSREQUEST']._serialized_start=2706 + _globals['_LISTSUBJECTCONDITIONSETSREQUEST']._serialized_end=2792 + _globals['_LISTSUBJECTCONDITIONSETSRESPONSE']._serialized_start=2795 + _globals['_LISTSUBJECTCONDITIONSETSRESPONSE']._serialized_end=2966 + _globals['_SUBJECTCONDITIONSETCREATE']._serialized_start=2969 + _globals['_SUBJECTCONDITIONSETCREATE']._serialized_end=3114 + _globals['_CREATESUBJECTCONDITIONSETREQUEST']._serialized_start=3117 + _globals['_CREATESUBJECTCONDITIONSETREQUEST']._serialized_end=3261 + _globals['_CREATESUBJECTCONDITIONSETRESPONSE']._serialized_start=3263 + _globals['_CREATESUBJECTCONDITIONSETRESPONSE']._serialized_end=3379 + _globals['_UPDATESUBJECTCONDITIONSETREQUEST']._serialized_start=3382 + _globals['_UPDATESUBJECTCONDITIONSETREQUEST']._serialized_end=3636 + 
_globals['_UPDATESUBJECTCONDITIONSETRESPONSE']._serialized_start=3638 + _globals['_UPDATESUBJECTCONDITIONSETRESPONSE']._serialized_end=3754 + _globals['_DELETESUBJECTCONDITIONSETREQUEST']._serialized_start=3756 + _globals['_DELETESUBJECTCONDITIONSETREQUEST']._serialized_end=3816 + _globals['_DELETESUBJECTCONDITIONSETRESPONSE']._serialized_start=3818 + _globals['_DELETESUBJECTCONDITIONSETRESPONSE']._serialized_end=3934 + _globals['_DELETEALLUNMAPPEDSUBJECTCONDITIONSETSREQUEST']._serialized_start=3936 + _globals['_DELETEALLUNMAPPEDSUBJECTCONDITIONSETSREQUEST']._serialized_end=3982 + _globals['_DELETEALLUNMAPPEDSUBJECTCONDITIONSETSRESPONSE']._serialized_start=3985 + _globals['_DELETEALLUNMAPPEDSUBJECTCONDITIONSETSRESPONSE']._serialized_end=4115 + _globals['_SUBJECTMAPPINGSERVICE']._serialized_start=4118 + _globals['_SUBJECTMAPPINGSERVICE']._serialized_end=5838 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.pyi new file mode 100644 index 0000000..60eb36e --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.pyi @@ -0,0 +1,189 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from common import common_pb2 as _common_pb2 +from policy import objects_pb2 as _objects_pb2 +from policy import selectors_pb2 as _selectors_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class MatchSubjectMappingsRequest(_message.Message): + __slots__ = ("subject_properties",) + SUBJECT_PROPERTIES_FIELD_NUMBER: _ClassVar[int] + subject_properties: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectProperty] + def __init__(self, subject_properties: _Optional[_Iterable[_Union[_objects_pb2.SubjectProperty, _Mapping]]] = ...) -> None: ... + +class MatchSubjectMappingsResponse(_message.Message): + __slots__ = ("subject_mappings",) + SUBJECT_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + subject_mappings: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectMapping] + def __init__(self, subject_mappings: _Optional[_Iterable[_Union[_objects_pb2.SubjectMapping, _Mapping]]] = ...) -> None: ... + +class GetSubjectMappingRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class GetSubjectMappingResponse(_message.Message): + __slots__ = ("subject_mapping",) + SUBJECT_MAPPING_FIELD_NUMBER: _ClassVar[int] + subject_mapping: _objects_pb2.SubjectMapping + def __init__(self, subject_mapping: _Optional[_Union[_objects_pb2.SubjectMapping, _Mapping]] = ...) -> None: ... + +class ListSubjectMappingsRequest(_message.Message): + __slots__ = ("pagination",) + PAGINATION_FIELD_NUMBER: _ClassVar[int] + pagination: _selectors_pb2.PageRequest + def __init__(self, pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... 
+ +class ListSubjectMappingsResponse(_message.Message): + __slots__ = ("subject_mappings", "pagination") + SUBJECT_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + subject_mappings: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectMapping] + pagination: _selectors_pb2.PageResponse + def __init__(self, subject_mappings: _Optional[_Iterable[_Union[_objects_pb2.SubjectMapping, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... + +class CreateSubjectMappingRequest(_message.Message): + __slots__ = ("attribute_value_id", "actions", "existing_subject_condition_set_id", "new_subject_condition_set", "metadata") + ATTRIBUTE_VALUE_ID_FIELD_NUMBER: _ClassVar[int] + ACTIONS_FIELD_NUMBER: _ClassVar[int] + EXISTING_SUBJECT_CONDITION_SET_ID_FIELD_NUMBER: _ClassVar[int] + NEW_SUBJECT_CONDITION_SET_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + attribute_value_id: str + actions: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Action] + existing_subject_condition_set_id: str + new_subject_condition_set: SubjectConditionSetCreate + metadata: _common_pb2.MetadataMutable + def __init__(self, attribute_value_id: _Optional[str] = ..., actions: _Optional[_Iterable[_Union[_objects_pb2.Action, _Mapping]]] = ..., existing_subject_condition_set_id: _Optional[str] = ..., new_subject_condition_set: _Optional[_Union[SubjectConditionSetCreate, _Mapping]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateSubjectMappingResponse(_message.Message): + __slots__ = ("subject_mapping",) + SUBJECT_MAPPING_FIELD_NUMBER: _ClassVar[int] + subject_mapping: _objects_pb2.SubjectMapping + def __init__(self, subject_mapping: _Optional[_Union[_objects_pb2.SubjectMapping, _Mapping]] = ...) -> None: ... + +class UpdateSubjectMappingRequest(_message.Message): + __slots__ = ("id", "subject_condition_set_id", "actions", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + SUBJECT_CONDITION_SET_ID_FIELD_NUMBER: _ClassVar[int] + ACTIONS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + subject_condition_set_id: str + actions: _containers.RepeatedCompositeFieldContainer[_objects_pb2.Action] + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., subject_condition_set_id: _Optional[str] = ..., actions: _Optional[_Iterable[_Union[_objects_pb2.Action, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateSubjectMappingResponse(_message.Message): + __slots__ = ("subject_mapping",) + SUBJECT_MAPPING_FIELD_NUMBER: _ClassVar[int] + subject_mapping: _objects_pb2.SubjectMapping + def __init__(self, subject_mapping: _Optional[_Union[_objects_pb2.SubjectMapping, _Mapping]] = ...) -> None: ... + +class DeleteSubjectMappingRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... 
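+
+# --- Illustrative usage sketch (not generated code) ---------------------------
+# Replacing a mapping's metadata via UpdateSubjectMappingRequest above. The id
+# is a placeholder, and the labels map plus the enum value name
+# METADATA_UPDATE_ENUM_REPLACE are assumptions about the platform's
+# common.proto.
+#
+# from common import common_pb2
+#
+# req = UpdateSubjectMappingRequest(
+#     id="00000000-0000-0000-0000-000000000000",  # placeholder UUID
+#     metadata=common_pb2.MetadataMutable(labels={"owner": "team-a"}),
+#     metadata_update_behavior=common_pb2.METADATA_UPDATE_ENUM_REPLACE,
+# )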
+ +class DeleteSubjectMappingResponse(_message.Message): + __slots__ = ("subject_mapping",) + SUBJECT_MAPPING_FIELD_NUMBER: _ClassVar[int] + subject_mapping: _objects_pb2.SubjectMapping + def __init__(self, subject_mapping: _Optional[_Union[_objects_pb2.SubjectMapping, _Mapping]] = ...) -> None: ... + +class GetSubjectConditionSetRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class GetSubjectConditionSetResponse(_message.Message): + __slots__ = ("subject_condition_set", "associated_subject_mappings") + SUBJECT_CONDITION_SET_FIELD_NUMBER: _ClassVar[int] + ASSOCIATED_SUBJECT_MAPPINGS_FIELD_NUMBER: _ClassVar[int] + subject_condition_set: _objects_pb2.SubjectConditionSet + associated_subject_mappings: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectMapping] + def __init__(self, subject_condition_set: _Optional[_Union[_objects_pb2.SubjectConditionSet, _Mapping]] = ..., associated_subject_mappings: _Optional[_Iterable[_Union[_objects_pb2.SubjectMapping, _Mapping]]] = ...) -> None: ... + +class ListSubjectConditionSetsRequest(_message.Message): + __slots__ = ("pagination",) + PAGINATION_FIELD_NUMBER: _ClassVar[int] + pagination: _selectors_pb2.PageRequest + def __init__(self, pagination: _Optional[_Union[_selectors_pb2.PageRequest, _Mapping]] = ...) -> None: ... + +class ListSubjectConditionSetsResponse(_message.Message): + __slots__ = ("subject_condition_sets", "pagination") + SUBJECT_CONDITION_SETS_FIELD_NUMBER: _ClassVar[int] + PAGINATION_FIELD_NUMBER: _ClassVar[int] + subject_condition_sets: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectConditionSet] + pagination: _selectors_pb2.PageResponse + def __init__(self, subject_condition_sets: _Optional[_Iterable[_Union[_objects_pb2.SubjectConditionSet, _Mapping]]] = ..., pagination: _Optional[_Union[_selectors_pb2.PageResponse, _Mapping]] = ...) -> None: ... + +class SubjectConditionSetCreate(_message.Message): + __slots__ = ("subject_sets", "metadata") + SUBJECT_SETS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + subject_sets: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectSet] + metadata: _common_pb2.MetadataMutable + def __init__(self, subject_sets: _Optional[_Iterable[_Union[_objects_pb2.SubjectSet, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ...) -> None: ... + +class CreateSubjectConditionSetRequest(_message.Message): + __slots__ = ("subject_condition_set",) + SUBJECT_CONDITION_SET_FIELD_NUMBER: _ClassVar[int] + subject_condition_set: SubjectConditionSetCreate + def __init__(self, subject_condition_set: _Optional[_Union[SubjectConditionSetCreate, _Mapping]] = ...) -> None: ... + +class CreateSubjectConditionSetResponse(_message.Message): + __slots__ = ("subject_condition_set",) + SUBJECT_CONDITION_SET_FIELD_NUMBER: _ClassVar[int] + subject_condition_set: _objects_pb2.SubjectConditionSet + def __init__(self, subject_condition_set: _Optional[_Union[_objects_pb2.SubjectConditionSet, _Mapping]] = ...) -> None: ... 
+ +class UpdateSubjectConditionSetRequest(_message.Message): + __slots__ = ("id", "subject_sets", "metadata", "metadata_update_behavior") + ID_FIELD_NUMBER: _ClassVar[int] + SUBJECT_SETS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_UPDATE_BEHAVIOR_FIELD_NUMBER: _ClassVar[int] + id: str + subject_sets: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectSet] + metadata: _common_pb2.MetadataMutable + metadata_update_behavior: _common_pb2.MetadataUpdateEnum + def __init__(self, id: _Optional[str] = ..., subject_sets: _Optional[_Iterable[_Union[_objects_pb2.SubjectSet, _Mapping]]] = ..., metadata: _Optional[_Union[_common_pb2.MetadataMutable, _Mapping]] = ..., metadata_update_behavior: _Optional[_Union[_common_pb2.MetadataUpdateEnum, str]] = ...) -> None: ... + +class UpdateSubjectConditionSetResponse(_message.Message): + __slots__ = ("subject_condition_set",) + SUBJECT_CONDITION_SET_FIELD_NUMBER: _ClassVar[int] + subject_condition_set: _objects_pb2.SubjectConditionSet + def __init__(self, subject_condition_set: _Optional[_Union[_objects_pb2.SubjectConditionSet, _Mapping]] = ...) -> None: ... + +class DeleteSubjectConditionSetRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class DeleteSubjectConditionSetResponse(_message.Message): + __slots__ = ("subject_condition_set",) + SUBJECT_CONDITION_SET_FIELD_NUMBER: _ClassVar[int] + subject_condition_set: _objects_pb2.SubjectConditionSet + def __init__(self, subject_condition_set: _Optional[_Union[_objects_pb2.SubjectConditionSet, _Mapping]] = ...) -> None: ... + +class DeleteAllUnmappedSubjectConditionSetsRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class DeleteAllUnmappedSubjectConditionSetsResponse(_message.Message): + __slots__ = ("subject_condition_sets",) + SUBJECT_CONDITION_SETS_FIELD_NUMBER: _ClassVar[int] + subject_condition_sets: _containers.RepeatedCompositeFieldContainer[_objects_pb2.SubjectConditionSet] + def __init__(self, subject_condition_sets: _Optional[_Iterable[_Union[_objects_pb2.SubjectConditionSet, _Mapping]]] = ...) -> None: ... 
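+
+# --- Illustrative usage sketch (not generated code) ---------------------------
+# Building a CreateSubjectMappingRequest for the synchronous
+# SubjectMappingServiceClient generated in the next file. The UUID, action
+# name, base URL, and empty SubjectSet are placeholder values; the buf.validate
+# rules in the serialized descriptor above require a UUID-shaped
+# attribute_value_id, a non-empty action name or id, and at least one
+# subject_set.
+#
+# from policy import objects_pb2
+# from policy.subjectmapping import subject_mapping_pb2
+#
+# req = subject_mapping_pb2.CreateSubjectMappingRequest(
+#     attribute_value_id="00000000-0000-0000-0000-000000000000",  # placeholder
+#     actions=[objects_pb2.Action(name="read")],
+#     new_subject_condition_set=subject_mapping_pb2.SubjectConditionSetCreate(
+#         subject_sets=[objects_pb2.SubjectSet()],
+#     ),
+# )
+# client = SubjectMappingServiceClient("http://localhost:8080")
+# resp = client.create_subject_mapping(req)  # raises on RPC error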
diff --git a/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2_connect.py new file mode 100644 index 0000000..b5c8ef8 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/subjectmapping/subject_mapping_pb2_connect.py @@ -0,0 +1,569 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import policy.subjectmapping.subject_mapping_pb2 + +class SubjectMappingServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_match_subject_mappings( + self, req: policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsResponse]: + """Low-level method to call MatchSubjectMappings, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/MatchSubjectMappings" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsResponse,extra_headers, timeout_seconds) + + + def match_subject_mappings( + self, req: policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsResponse: + response = self.call_match_subject_mappings(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_subject_mappings( + self, req: policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsResponse]: + """Low-level method to call ListSubjectMappings, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/ListSubjectMappings" + return 
self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsResponse,extra_headers, timeout_seconds) + + + def list_subject_mappings( + self, req: policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsResponse: + response = self.call_list_subject_mappings(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingResponse]: + """Low-level method to call GetSubjectMapping, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/GetSubjectMapping" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingResponse,extra_headers, timeout_seconds) + + + def get_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingResponse: + response = self.call_get_subject_mapping(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingResponse]: + """Low-level method to call CreateSubjectMapping, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/CreateSubjectMapping" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingResponse,extra_headers, timeout_seconds) + + + def create_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingResponse: + response = self.call_create_subject_mapping(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingResponse]: + """Low-level method to call UpdateSubjectMapping, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/UpdateSubjectMapping" + return self._connect_client.call_unary(url, req, 
policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingResponse,extra_headers, timeout_seconds) + + + def update_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingResponse: + response = self.call_update_subject_mapping(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_delete_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingResponse]: + """Low-level method to call DeleteSubjectMapping, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/DeleteSubjectMapping" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingResponse,extra_headers, timeout_seconds) + + + def delete_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingResponse: + response = self.call_delete_subject_mapping(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_list_subject_condition_sets( + self, req: policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsResponse]: + """Low-level method to call ListSubjectConditionSets, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/ListSubjectConditionSets" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsResponse,extra_headers, timeout_seconds) + + + def list_subject_condition_sets( + self, req: policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsResponse: + response = self.call_list_subject_condition_sets(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_get_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetResponse]: + """Low-level method to call GetSubjectConditionSet, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/GetSubjectConditionSet" + return self._connect_client.call_unary(url, req, 
policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetResponse,extra_headers, timeout_seconds) + + + def get_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetResponse: + response = self.call_get_subject_condition_set(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_create_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetResponse]: + """Low-level method to call CreateSubjectConditionSet, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/CreateSubjectConditionSet" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetResponse,extra_headers, timeout_seconds) + + + def create_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetResponse: + response = self.call_create_subject_condition_set(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_update_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetResponse]: + """Low-level method to call UpdateSubjectConditionSet, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/UpdateSubjectConditionSet" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetResponse,extra_headers, timeout_seconds) + + + def update_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetResponse: + response = self.call_update_subject_condition_set(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_delete_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetResponse]: + """Low-level method to call DeleteSubjectConditionSet, granting access to errors and metadata""" + url = self.base_url + 
"/policy.subjectmapping.SubjectMappingService/DeleteSubjectConditionSet" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetResponse,extra_headers, timeout_seconds) + + + def delete_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetResponse: + response = self.call_delete_subject_condition_set(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_delete_all_unmapped_subject_condition_sets( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsResponse]: + """Low-level method to call DeleteAllUnmappedSubjectConditionSets, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/DeleteAllUnmappedSubjectConditionSets" + return self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsResponse,extra_headers, timeout_seconds) + + + def delete_all_unmapped_subject_condition_sets( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsResponse: + response = self.call_delete_all_unmapped_subject_condition_sets(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncSubjectMappingServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_match_subject_mappings( + self, req: policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsResponse]: + """Low-level method to call MatchSubjectMappings, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/MatchSubjectMappings" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsResponse,extra_headers, timeout_seconds) + + async def match_subject_mappings( + self, req: policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsResponse: + response = await self.call_match_subject_mappings(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing 
response message') + return msg + + async def call_list_subject_mappings( + self, req: policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsResponse]: + """Low-level method to call ListSubjectMappings, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/ListSubjectMappings" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsResponse,extra_headers, timeout_seconds) + + async def list_subject_mappings( + self, req: policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsResponse: + response = await self.call_list_subject_mappings(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_get_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingResponse]: + """Low-level method to call GetSubjectMapping, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/GetSubjectMapping" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingResponse,extra_headers, timeout_seconds) + + async def get_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingResponse: + response = await self.call_get_subject_mapping(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingResponse]: + """Low-level method to call CreateSubjectMapping, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/CreateSubjectMapping" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingResponse,extra_headers, timeout_seconds) + + async def create_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingResponse: + response = await self.call_create_subject_mapping(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def 
call_update_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingResponse]: + """Low-level method to call UpdateSubjectMapping, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/UpdateSubjectMapping" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingResponse,extra_headers, timeout_seconds) + + async def update_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingResponse: + response = await self.call_update_subject_mapping(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_delete_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingResponse]: + """Low-level method to call DeleteSubjectMapping, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/DeleteSubjectMapping" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingResponse,extra_headers, timeout_seconds) + + async def delete_subject_mapping( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingResponse: + response = await self.call_delete_subject_mapping(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_list_subject_condition_sets( + self, req: policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsResponse]: + """Low-level method to call ListSubjectConditionSets, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/ListSubjectConditionSets" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsResponse,extra_headers, timeout_seconds) + + async def list_subject_condition_sets( + self, req: policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsResponse: + response = await self.call_list_subject_condition_sets(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + 
return msg + + async def call_get_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetResponse]: + """Low-level method to call GetSubjectConditionSet, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/GetSubjectConditionSet" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetResponse,extra_headers, timeout_seconds) + + async def get_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetResponse: + response = await self.call_get_subject_condition_set(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_create_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetResponse]: + """Low-level method to call CreateSubjectConditionSet, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/CreateSubjectConditionSet" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetResponse,extra_headers, timeout_seconds) + + async def create_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetResponse: + response = await self.call_create_subject_condition_set(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_update_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetResponse]: + """Low-level method to call UpdateSubjectConditionSet, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/UpdateSubjectConditionSet" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetResponse,extra_headers, timeout_seconds) + + async def update_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetResponse: + response = await self.call_update_subject_condition_set(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise 
err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_delete_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetResponse]: + """Low-level method to call DeleteSubjectConditionSet, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/DeleteSubjectConditionSet" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetResponse,extra_headers, timeout_seconds) + + async def delete_subject_condition_set( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetResponse: + response = await self.call_delete_subject_condition_set(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_delete_all_unmapped_subject_condition_sets( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsResponse]: + """Low-level method to call DeleteAllUnmappedSubjectConditionSets, granting access to errors and metadata""" + url = self.base_url + "/policy.subjectmapping.SubjectMappingService/DeleteAllUnmappedSubjectConditionSets" + return await self._connect_client.call_unary(url, req, policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsResponse,extra_headers, timeout_seconds) + + async def delete_all_unmapped_subject_condition_sets( + self, req: policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsResponse: + response = await self.call_delete_all_unmapped_subject_condition_sets(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class SubjectMappingServiceProtocol(typing.Protocol): + def match_subject_mappings(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsResponse]: + ... + def list_subject_mappings(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsResponse]: + ... + def get_subject_mapping(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingResponse]: + ... 
+ def create_subject_mapping(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingResponse]: + ... + def update_subject_mapping(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingResponse]: + ... + def delete_subject_mapping(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingResponse]: + ... + def list_subject_condition_sets(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsResponse]: + ... + def get_subject_condition_set(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetResponse]: + ... + def create_subject_condition_set(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetResponse]: + ... + def update_subject_condition_set(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetResponse]: + ... + def delete_subject_condition_set(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetResponse]: + ... + def delete_all_unmapped_subject_condition_sets(self, req: ClientRequest[policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsRequest]) -> ServerResponse[policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsResponse]: + ... 
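
Taken together, the generated module above exposes three surfaces: a blocking client, an asyncio client, and the SubjectMappingServiceProtocol that the WSGI helper just below wires to concrete handlers. A minimal usage sketch of the blocking client follows; the base URL and bearer token are illustrative assumptions (token acquisition is out of scope here), the import paths mirror the module's own imports and may differ depending on how the package is installed, and the headers dict presumes HeaderInput accepts a plain mapping:

from policy.subjectmapping import subject_mapping_pb2
from policy.subjectmapping.subject_mapping_pb2_connect import SubjectMappingServiceClient

# Placeholder endpoint and token; supply real values for your deployment.
client = SubjectMappingServiceClient("http://localhost:8080")
headers = {"authorization": "Bearer <token>"}

# High-level wrapper: raises the Connect error if the RPC fails,
# returns the response message otherwise.
resp = client.list_subject_mappings(
    subject_mapping_pb2.ListSubjectMappingsRequest(), extra_headers=headers
)

# Low-level variant: inspect the error and metadata without exceptions.
out = client.call_list_subject_mappings(
    subject_mapping_pb2.ListSubjectMappingsRequest(), extra_headers=headers
)
if out.error() is None:
    resp = out.message()

The async client is used the same way, substituting an aiohttp.ClientSession for the http_client and awaiting each call.
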
+ +SUBJECT_MAPPING_SERVICE_PATH_PREFIX = "/policy.subjectmapping.SubjectMappingService" + +def wsgi_subject_mapping_service(implementation: SubjectMappingServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/MatchSubjectMappings", implementation.match_subject_mappings, policy.subjectmapping.subject_mapping_pb2.MatchSubjectMappingsRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/ListSubjectMappings", implementation.list_subject_mappings, policy.subjectmapping.subject_mapping_pb2.ListSubjectMappingsRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/GetSubjectMapping", implementation.get_subject_mapping, policy.subjectmapping.subject_mapping_pb2.GetSubjectMappingRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/CreateSubjectMapping", implementation.create_subject_mapping, policy.subjectmapping.subject_mapping_pb2.CreateSubjectMappingRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/UpdateSubjectMapping", implementation.update_subject_mapping, policy.subjectmapping.subject_mapping_pb2.UpdateSubjectMappingRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/DeleteSubjectMapping", implementation.delete_subject_mapping, policy.subjectmapping.subject_mapping_pb2.DeleteSubjectMappingRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/ListSubjectConditionSets", implementation.list_subject_condition_sets, policy.subjectmapping.subject_mapping_pb2.ListSubjectConditionSetsRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/GetSubjectConditionSet", implementation.get_subject_condition_set, policy.subjectmapping.subject_mapping_pb2.GetSubjectConditionSetRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/CreateSubjectConditionSet", implementation.create_subject_condition_set, policy.subjectmapping.subject_mapping_pb2.CreateSubjectConditionSetRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/UpdateSubjectConditionSet", implementation.update_subject_condition_set, policy.subjectmapping.subject_mapping_pb2.UpdateSubjectConditionSetRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/DeleteSubjectConditionSet", implementation.delete_subject_condition_set, policy.subjectmapping.subject_mapping_pb2.DeleteSubjectConditionSetRequest) + app.register_unary_rpc("/policy.subjectmapping.SubjectMappingService/DeleteAllUnmappedSubjectConditionSets", implementation.delete_all_unmapped_subject_condition_sets, policy.subjectmapping.subject_mapping_pb2.DeleteAllUnmappedSubjectConditionSetsRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2.py b/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2.py new file mode 100644 index 0000000..839cea2 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: policy/unsafe/unsafe.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'policy/unsafe/unsafe.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from buf.validate import validate_pb2 as buf_dot_validate_dot_validate__pb2 +from policy import objects_pb2 as policy_dot_objects__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1apolicy/unsafe/unsafe.proto\x12\rpolicy.unsafe\x1a\x1b\x62uf/validate/validate.proto\x1a\x14policy/objects.proto\"\xeb\x04\n\x1cUnsafeUpdateNamespaceRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xb0\x04\n\x04name\x18\x02 \x01(\tB\x9b\x04\xbaH\x97\x04r\x03\x18\xfd\x01\xba\x01\x8e\x04\n\x15namespace_name_format\x12\xa1\x03Namespace must be a valid hostname. It should include at least one dot, with each segment (label) starting and ending with an alphanumeric character. Each label must be 1 to 63 characters long, allowing hyphens but not as the first or last character. The top-level domain (the last segment after the final dot) must consist of at least two alphabetic characters. The stored namespace will be normalized to lower case.\x1aQthis.matches(\'^([a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?\\\\.)+[a-zA-Z]{2,}$\')R\x04name\"P\n\x1dUnsafeUpdateNamespaceResponse\x12/\n\tnamespace\x18\x01 \x01(\x0b\x32\x11.policy.NamespaceR\tnamespace\"<\n UnsafeReactivateNamespaceRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"T\n!UnsafeReactivateNamespaceResponse\x12/\n\tnamespace\x18\x01 \x01(\x0b\x32\x11.policy.NamespaceR\tnamespace\"R\n\x1cUnsafeDeleteNamespaceRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x18\n\x03\x66qn\x18\x02 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x03\x66qn\"P\n\x1dUnsafeDeleteNamespaceResponse\x12/\n\tnamespace\x18\x01 \x01(\x0b\x32\x11.policy.NamespaceR\tnamespace\"\xe2\x03\n\x1cUnsafeUpdateAttributeRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xc7\x02\n\x04name\x18\x02 \x01(\tB\xb2\x02\xbaH\xae\x02r\x03\x18\xfd\x01\xba\x01\xa2\x02\n\x15\x61ttribute_name_format\x12\xb3\x01\x41ttribute name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored attribute name will be normalized to lower case.\x1aSsize(this) > 0 ? 
this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\xc8\x01\x00R\x04name\x12;\n\x04rule\x18\x03 \x01(\x0e\x32\x1d.policy.AttributeRuleTypeEnumB\x08\xbaH\x05\x82\x01\x02\x10\x01R\x04rule\x12!\n\x0cvalues_order\x18\x04 \x03(\tR\x0bvaluesOrder\"P\n\x1dUnsafeUpdateAttributeResponse\x12/\n\tattribute\x18\x01 \x01(\x0b\x32\x11.policy.AttributeR\tattribute\"<\n UnsafeReactivateAttributeRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"T\n!UnsafeReactivateAttributeResponse\x12/\n\tattribute\x18\x01 \x01(\x0b\x32\x11.policy.AttributeR\tattribute\"R\n\x1cUnsafeDeleteAttributeRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x18\n\x03\x66qn\x18\x02 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x03\x66qn\"P\n\x1dUnsafeDeleteAttributeResponse\x12/\n\tattribute\x18\x01 \x01(\x0b\x32\x11.policy.AttributeR\tattribute\"\xe7\x02\n!UnsafeUpdateAttributeValueRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\xa7\x02\n\x05value\x18\x02 \x01(\tB\x90\x02\xbaH\x8c\x02r\x03\x18\xfd\x01\xba\x01\x83\x02\n\x0cvalue_format\x12\xb5\x01\x41ttribute Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored attribute value will be normalized to lower case.\x1a;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')R\x05value\"I\n\"UnsafeUpdateAttributeValueResponse\x12#\n\x05value\x18\x01 \x01(\x0b\x32\r.policy.ValueR\x05value\"A\n%UnsafeReactivateAttributeValueRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\"M\n&UnsafeReactivateAttributeValueResponse\x12#\n\x05value\x18\x01 \x01(\x0b\x32\r.policy.ValueR\x05value\"W\n!UnsafeDeleteAttributeValueRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\x12\x18\n\x03\x66qn\x18\x02 \x01(\tB\x06\xbaH\x03\xc8\x01\x01R\x03\x66qn\"I\n\"UnsafeDeleteAttributeValueResponse\x12#\n\x05value\x18\x01 \x01(\x0b\x32\r.policy.ValueR\x05value\"5\n\x19UnsafeDeleteKasKeyRequest\x12\x18\n\x02id\x18\x01 \x01(\tB\x08\xbaH\x05r\x03\xb0\x01\x01R\x02id\";\n\x1aUnsafeDeleteKasKeyResponse\x12\x1d\n\x03key\x18\x01 
\x01(\x0b\x32\x0b.policy.KeyR\x03key2\xf8\t\n\rUnsafeService\x12t\n\x15UnsafeUpdateNamespace\x12+.policy.unsafe.UnsafeUpdateNamespaceRequest\x1a,.policy.unsafe.UnsafeUpdateNamespaceResponse\"\x00\x12\x80\x01\n\x19UnsafeReactivateNamespace\x12/.policy.unsafe.UnsafeReactivateNamespaceRequest\x1a\x30.policy.unsafe.UnsafeReactivateNamespaceResponse\"\x00\x12t\n\x15UnsafeDeleteNamespace\x12+.policy.unsafe.UnsafeDeleteNamespaceRequest\x1a,.policy.unsafe.UnsafeDeleteNamespaceResponse\"\x00\x12t\n\x15UnsafeUpdateAttribute\x12+.policy.unsafe.UnsafeUpdateAttributeRequest\x1a,.policy.unsafe.UnsafeUpdateAttributeResponse\"\x00\x12\x80\x01\n\x19UnsafeReactivateAttribute\x12/.policy.unsafe.UnsafeReactivateAttributeRequest\x1a\x30.policy.unsafe.UnsafeReactivateAttributeResponse\"\x00\x12t\n\x15UnsafeDeleteAttribute\x12+.policy.unsafe.UnsafeDeleteAttributeRequest\x1a,.policy.unsafe.UnsafeDeleteAttributeResponse\"\x00\x12\x83\x01\n\x1aUnsafeUpdateAttributeValue\x12\x30.policy.unsafe.UnsafeUpdateAttributeValueRequest\x1a\x31.policy.unsafe.UnsafeUpdateAttributeValueResponse\"\x00\x12\x8f\x01\n\x1eUnsafeReactivateAttributeValue\x12\x34.policy.unsafe.UnsafeReactivateAttributeValueRequest\x1a\x35.policy.unsafe.UnsafeReactivateAttributeValueResponse\"\x00\x12\x83\x01\n\x1aUnsafeDeleteAttributeValue\x12\x30.policy.unsafe.UnsafeDeleteAttributeValueRequest\x1a\x31.policy.unsafe.UnsafeDeleteAttributeValueResponse\"\x00\x12k\n\x12UnsafeDeleteKasKey\x12(.policy.unsafe.UnsafeDeleteKasKeyRequest\x1a).policy.unsafe.UnsafeDeleteKasKeyResponse\"\x00\x42u\n\x11\x63om.policy.unsafeB\x0bUnsafeProtoP\x01\xa2\x02\x03PUX\xaa\x02\rPolicy.Unsafe\xca\x02\rPolicy\\Unsafe\xe2\x02\x19Policy\\Unsafe\\GPBMetadata\xea\x02\x0ePolicy::Unsafeb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'policy.unsafe.unsafe_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\021com.policy.unsafeB\013UnsafeProtoP\001\242\002\003PUX\252\002\rPolicy.Unsafe\312\002\rPolicy\\Unsafe\342\002\031Policy\\Unsafe\\GPBMetadata\352\002\016Policy::Unsafe' + _globals['_UNSAFEUPDATENAMESPACEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEUPDATENAMESPACEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEUPDATENAMESPACEREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_UNSAFEUPDATENAMESPACEREQUEST'].fields_by_name['name']._serialized_options = b'\272H\227\004r\003\030\375\001\272\001\216\004\n\025namespace_name_format\022\241\003Namespace must be a valid hostname. It should include at least one dot, with each segment (label) starting and ending with an alphanumeric character. Each label must be 1 to 63 characters long, allowing hyphens but not as the first or last character. The top-level domain (the last segment after the final dot) must consist of at least two alphabetic characters. 
The stored namespace will be normalized to lower case.\032Qthis.matches(\'^([a-zA-Z0-9]([a-zA-Z0-9\\\\-]{0,61}[a-zA-Z0-9])?\\\\.)+[a-zA-Z]{2,}$\')' + _globals['_UNSAFEREACTIVATENAMESPACEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEREACTIVATENAMESPACEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEDELETENAMESPACEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEDELETENAMESPACEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEDELETENAMESPACEREQUEST'].fields_by_name['fqn']._loaded_options = None + _globals['_UNSAFEDELETENAMESPACEREQUEST'].fields_by_name['fqn']._serialized_options = b'\272H\003\310\001\001' + _globals['_UNSAFEUPDATEATTRIBUTEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEUPDATEATTRIBUTEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEUPDATEATTRIBUTEREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_UNSAFEUPDATEATTRIBUTEREQUEST'].fields_by_name['name']._serialized_options = b'\272H\256\002r\003\030\375\001\272\001\242\002\n\025attribute_name_format\022\263\001Attribute name must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. The stored attribute name will be normalized to lower case.\032Ssize(this) > 0 ? this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\') : true\310\001\000' + _globals['_UNSAFEUPDATEATTRIBUTEREQUEST'].fields_by_name['rule']._loaded_options = None + _globals['_UNSAFEUPDATEATTRIBUTEREQUEST'].fields_by_name['rule']._serialized_options = b'\272H\005\202\001\002\020\001' + _globals['_UNSAFEREACTIVATEATTRIBUTEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEREACTIVATEATTRIBUTEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEDELETEATTRIBUTEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEDELETEATTRIBUTEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEDELETEATTRIBUTEREQUEST'].fields_by_name['fqn']._loaded_options = None + _globals['_UNSAFEDELETEATTRIBUTEREQUEST'].fields_by_name['fqn']._serialized_options = b'\272H\003\310\001\001' + _globals['_UNSAFEUPDATEATTRIBUTEVALUEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEUPDATEATTRIBUTEVALUEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEUPDATEATTRIBUTEVALUEREQUEST'].fields_by_name['value']._loaded_options = None + _globals['_UNSAFEUPDATEATTRIBUTEVALUEREQUEST'].fields_by_name['value']._serialized_options = b'\272H\214\002r\003\030\375\001\272\001\203\002\n\014value_format\022\265\001Attribute Value must be an alphanumeric string, allowing hyphens and underscores but not as the first or last character. 
The stored attribute value will be normalized to lower case.\032;this.matches(\'^[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?$\')' + _globals['_UNSAFEREACTIVATEATTRIBUTEVALUEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEREACTIVATEATTRIBUTEVALUEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEDELETEATTRIBUTEVALUEREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEDELETEATTRIBUTEVALUEREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEDELETEATTRIBUTEVALUEREQUEST'].fields_by_name['fqn']._loaded_options = None + _globals['_UNSAFEDELETEATTRIBUTEVALUEREQUEST'].fields_by_name['fqn']._serialized_options = b'\272H\003\310\001\001' + _globals['_UNSAFEDELETEKASKEYREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_UNSAFEDELETEKASKEYREQUEST'].fields_by_name['id']._serialized_options = b'\272H\005r\003\260\001\001' + _globals['_UNSAFEUPDATENAMESPACEREQUEST']._serialized_start=97 + _globals['_UNSAFEUPDATENAMESPACEREQUEST']._serialized_end=716 + _globals['_UNSAFEUPDATENAMESPACERESPONSE']._serialized_start=718 + _globals['_UNSAFEUPDATENAMESPACERESPONSE']._serialized_end=798 + _globals['_UNSAFEREACTIVATENAMESPACEREQUEST']._serialized_start=800 + _globals['_UNSAFEREACTIVATENAMESPACEREQUEST']._serialized_end=860 + _globals['_UNSAFEREACTIVATENAMESPACERESPONSE']._serialized_start=862 + _globals['_UNSAFEREACTIVATENAMESPACERESPONSE']._serialized_end=946 + _globals['_UNSAFEDELETENAMESPACEREQUEST']._serialized_start=948 + _globals['_UNSAFEDELETENAMESPACEREQUEST']._serialized_end=1030 + _globals['_UNSAFEDELETENAMESPACERESPONSE']._serialized_start=1032 + _globals['_UNSAFEDELETENAMESPACERESPONSE']._serialized_end=1112 + _globals['_UNSAFEUPDATEATTRIBUTEREQUEST']._serialized_start=1115 + _globals['_UNSAFEUPDATEATTRIBUTEREQUEST']._serialized_end=1597 + _globals['_UNSAFEUPDATEATTRIBUTERESPONSE']._serialized_start=1599 + _globals['_UNSAFEUPDATEATTRIBUTERESPONSE']._serialized_end=1679 + _globals['_UNSAFEREACTIVATEATTRIBUTEREQUEST']._serialized_start=1681 + _globals['_UNSAFEREACTIVATEATTRIBUTEREQUEST']._serialized_end=1741 + _globals['_UNSAFEREACTIVATEATTRIBUTERESPONSE']._serialized_start=1743 + _globals['_UNSAFEREACTIVATEATTRIBUTERESPONSE']._serialized_end=1827 + _globals['_UNSAFEDELETEATTRIBUTEREQUEST']._serialized_start=1829 + _globals['_UNSAFEDELETEATTRIBUTEREQUEST']._serialized_end=1911 + _globals['_UNSAFEDELETEATTRIBUTERESPONSE']._serialized_start=1913 + _globals['_UNSAFEDELETEATTRIBUTERESPONSE']._serialized_end=1993 + _globals['_UNSAFEUPDATEATTRIBUTEVALUEREQUEST']._serialized_start=1996 + _globals['_UNSAFEUPDATEATTRIBUTEVALUEREQUEST']._serialized_end=2355 + _globals['_UNSAFEUPDATEATTRIBUTEVALUERESPONSE']._serialized_start=2357 + _globals['_UNSAFEUPDATEATTRIBUTEVALUERESPONSE']._serialized_end=2430 + _globals['_UNSAFEREACTIVATEATTRIBUTEVALUEREQUEST']._serialized_start=2432 + _globals['_UNSAFEREACTIVATEATTRIBUTEVALUEREQUEST']._serialized_end=2497 + _globals['_UNSAFEREACTIVATEATTRIBUTEVALUERESPONSE']._serialized_start=2499 + _globals['_UNSAFEREACTIVATEATTRIBUTEVALUERESPONSE']._serialized_end=2576 + _globals['_UNSAFEDELETEATTRIBUTEVALUEREQUEST']._serialized_start=2578 + _globals['_UNSAFEDELETEATTRIBUTEVALUEREQUEST']._serialized_end=2665 + _globals['_UNSAFEDELETEATTRIBUTEVALUERESPONSE']._serialized_start=2667 + _globals['_UNSAFEDELETEATTRIBUTEVALUERESPONSE']._serialized_end=2740 + _globals['_UNSAFEDELETEKASKEYREQUEST']._serialized_start=2742 + 
_globals['_UNSAFEDELETEKASKEYREQUEST']._serialized_end=2795 + _globals['_UNSAFEDELETEKASKEYRESPONSE']._serialized_start=2797 + _globals['_UNSAFEDELETEKASKEYRESPONSE']._serialized_end=2856 + _globals['_UNSAFESERVICE']._serialized_start=2859 + _globals['_UNSAFESERVICE']._serialized_end=4131 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2.pyi new file mode 100644 index 0000000..f16a43e --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2.pyi @@ -0,0 +1,145 @@ +from buf.validate import validate_pb2 as _validate_pb2 +from policy import objects_pb2 as _objects_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class UnsafeUpdateNamespaceRequest(_message.Message): + __slots__ = ("id", "name") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... + +class UnsafeUpdateNamespaceResponse(_message.Message): + __slots__ = ("namespace",) + NAMESPACE_FIELD_NUMBER: _ClassVar[int] + namespace: _objects_pb2.Namespace + def __init__(self, namespace: _Optional[_Union[_objects_pb2.Namespace, _Mapping]] = ...) -> None: ... + +class UnsafeReactivateNamespaceRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class UnsafeReactivateNamespaceResponse(_message.Message): + __slots__ = ("namespace",) + NAMESPACE_FIELD_NUMBER: _ClassVar[int] + namespace: _objects_pb2.Namespace + def __init__(self, namespace: _Optional[_Union[_objects_pb2.Namespace, _Mapping]] = ...) -> None: ... + +class UnsafeDeleteNamespaceRequest(_message.Message): + __slots__ = ("id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... + +class UnsafeDeleteNamespaceResponse(_message.Message): + __slots__ = ("namespace",) + NAMESPACE_FIELD_NUMBER: _ClassVar[int] + namespace: _objects_pb2.Namespace + def __init__(self, namespace: _Optional[_Union[_objects_pb2.Namespace, _Mapping]] = ...) -> None: ... + +class UnsafeUpdateAttributeRequest(_message.Message): + __slots__ = ("id", "name", "rule", "values_order") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + RULE_FIELD_NUMBER: _ClassVar[int] + VALUES_ORDER_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + rule: _objects_pb2.AttributeRuleTypeEnum + values_order: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., rule: _Optional[_Union[_objects_pb2.AttributeRuleTypeEnum, str]] = ..., values_order: _Optional[_Iterable[str]] = ...) -> None: ... + +class UnsafeUpdateAttributeResponse(_message.Message): + __slots__ = ("attribute",) + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + attribute: _objects_pb2.Attribute + def __init__(self, attribute: _Optional[_Union[_objects_pb2.Attribute, _Mapping]] = ...) -> None: ... 
+ +class UnsafeReactivateAttributeRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class UnsafeReactivateAttributeResponse(_message.Message): + __slots__ = ("attribute",) + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + attribute: _objects_pb2.Attribute + def __init__(self, attribute: _Optional[_Union[_objects_pb2.Attribute, _Mapping]] = ...) -> None: ... + +class UnsafeDeleteAttributeRequest(_message.Message): + __slots__ = ("id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... + +class UnsafeDeleteAttributeResponse(_message.Message): + __slots__ = ("attribute",) + ATTRIBUTE_FIELD_NUMBER: _ClassVar[int] + attribute: _objects_pb2.Attribute + def __init__(self, attribute: _Optional[_Union[_objects_pb2.Attribute, _Mapping]] = ...) -> None: ... + +class UnsafeUpdateAttributeValueRequest(_message.Message): + __slots__ = ("id", "value") + ID_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + id: str + value: str + def __init__(self, id: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + +class UnsafeUpdateAttributeValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.Value + def __init__(self, value: _Optional[_Union[_objects_pb2.Value, _Mapping]] = ...) -> None: ... + +class UnsafeReactivateAttributeValueRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class UnsafeReactivateAttributeValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.Value + def __init__(self, value: _Optional[_Union[_objects_pb2.Value, _Mapping]] = ...) -> None: ... + +class UnsafeDeleteAttributeValueRequest(_message.Message): + __slots__ = ("id", "fqn") + ID_FIELD_NUMBER: _ClassVar[int] + FQN_FIELD_NUMBER: _ClassVar[int] + id: str + fqn: str + def __init__(self, id: _Optional[str] = ..., fqn: _Optional[str] = ...) -> None: ... + +class UnsafeDeleteAttributeValueResponse(_message.Message): + __slots__ = ("value",) + VALUE_FIELD_NUMBER: _ClassVar[int] + value: _objects_pb2.Value + def __init__(self, value: _Optional[_Union[_objects_pb2.Value, _Mapping]] = ...) -> None: ... + +class UnsafeDeleteKasKeyRequest(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class UnsafeDeleteKasKeyResponse(_message.Message): + __slots__ = ("key",) + KEY_FIELD_NUMBER: _ClassVar[int] + key: _objects_pb2.Key + def __init__(self, key: _Optional[_Union[_objects_pb2.Key, _Mapping]] = ...) -> None: ... 
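
The stub file above pins down the message shapes for UnsafeService before its Connect client appears next. A small sketch of building and round-tripping one of these requests; both field values are placeholders, and per the buf.validate options serialized in unsafe_pb2.py, id must be a UUID and name a valid hostname (normalized to lower case when stored). Note the constraints are descriptor annotations only; enforcement happens wherever protovalidate runs, typically the server:

from policy.unsafe import unsafe_pb2

# Placeholder values: id must be a UUID and name a valid hostname per the
# validation rules embedded in the descriptor above.
req = unsafe_pb2.UnsafeUpdateNamespaceRequest(
    id="00000000-0000-0000-0000-000000000000",
    name="example.com",
)

# Standard protobuf round-trip; the generated Python code itself performs
# no validation of the field constraints.
data = req.SerializeToString()
decoded = unsafe_pb2.UnsafeUpdateNamespaceRequest.FromString(data)
assert decoded.name == "example.com"
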
diff --git a/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2_connect.py new file mode 100644 index 0000000..8fb44e6 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/policy/unsafe/unsafe_pb2_connect.py @@ -0,0 +1,485 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import policy.unsafe.unsafe_pb2 + +class UnsafeServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_unsafe_update_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceResponse]: + """Low-level method to call UnsafeUpdateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeUpdateNamespace" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceResponse,extra_headers, timeout_seconds) + + + def unsafe_update_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceResponse: + response = self.call_unsafe_update_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_reactivate_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceResponse]: + """Low-level method to call UnsafeReactivateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeReactivateNamespace" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceResponse,extra_headers, timeout_seconds) + + + def unsafe_reactivate_namespace( + self, req: 
policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceResponse: + response = self.call_unsafe_reactivate_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_delete_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceResponse]: + """Low-level method to call UnsafeDeleteNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeDeleteNamespace" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceResponse,extra_headers, timeout_seconds) + + + def unsafe_delete_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceResponse: + response = self.call_unsafe_delete_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_update_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeResponse]: + """Low-level method to call UnsafeUpdateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeUpdateAttribute" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeResponse,extra_headers, timeout_seconds) + + + def unsafe_update_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeResponse: + response = self.call_unsafe_update_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_reactivate_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeResponse]: + """Low-level method to call UnsafeReactivateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeReactivateAttribute" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeResponse,extra_headers, timeout_seconds) + + + def unsafe_reactivate_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeResponse: + response = self.call_unsafe_reactivate_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err 
is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_delete_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeResponse]: + """Low-level method to call UnsafeDeleteAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeDeleteAttribute" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeResponse,extra_headers, timeout_seconds) + + + def unsafe_delete_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeResponse: + response = self.call_unsafe_delete_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_update_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueResponse]: + """Low-level method to call UnsafeUpdateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeUpdateAttributeValue" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueResponse,extra_headers, timeout_seconds) + + + def unsafe_update_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueResponse: + response = self.call_unsafe_update_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_reactivate_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueResponse]: + """Low-level method to call UnsafeReactivateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeReactivateAttributeValue" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueResponse,extra_headers, timeout_seconds) + + + def unsafe_reactivate_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueResponse: + response = self.call_unsafe_reactivate_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_delete_attribute_value( + self, req: 
policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueResponse]: + """Low-level method to call UnsafeDeleteAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeDeleteAttributeValue" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueResponse,extra_headers, timeout_seconds) + + + def unsafe_delete_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueResponse: + response = self.call_unsafe_delete_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + def call_unsafe_delete_kas_key( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyResponse]: + """Low-level method to call UnsafeDeleteKasKey, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeDeleteKasKey" + return self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyResponse,extra_headers, timeout_seconds) + + + def unsafe_delete_kas_key( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyResponse: + response = self.call_unsafe_delete_kas_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncUnsafeServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_unsafe_update_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceResponse]: + """Low-level method to call UnsafeUpdateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeUpdateNamespace" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceResponse,extra_headers, timeout_seconds) + + async def unsafe_update_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceResponse: + response = await self.call_unsafe_update_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_reactivate_namespace( + self, req: 
policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceResponse]: + """Low-level method to call UnsafeReactivateNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeReactivateNamespace" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceResponse,extra_headers, timeout_seconds) + + async def unsafe_reactivate_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceResponse: + response = await self.call_unsafe_reactivate_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_delete_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceResponse]: + """Low-level method to call UnsafeDeleteNamespace, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeDeleteNamespace" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceResponse,extra_headers, timeout_seconds) + + async def unsafe_delete_namespace( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceResponse: + response = await self.call_unsafe_delete_namespace(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_update_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeResponse]: + """Low-level method to call UnsafeUpdateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeUpdateAttribute" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeResponse,extra_headers, timeout_seconds) + + async def unsafe_update_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeResponse: + response = await self.call_unsafe_update_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_reactivate_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeResponse]: + """Low-level method to call 
UnsafeReactivateAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeReactivateAttribute" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeResponse,extra_headers, timeout_seconds) + + async def unsafe_reactivate_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeResponse: + response = await self.call_unsafe_reactivate_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_delete_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeResponse]: + """Low-level method to call UnsafeDeleteAttribute, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeDeleteAttribute" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeResponse,extra_headers, timeout_seconds) + + async def unsafe_delete_attribute( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeResponse: + response = await self.call_unsafe_delete_attribute(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_update_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueResponse]: + """Low-level method to call UnsafeUpdateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeUpdateAttributeValue" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueResponse,extra_headers, timeout_seconds) + + async def unsafe_update_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueResponse: + response = await self.call_unsafe_update_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_reactivate_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueResponse]: + """Low-level method to call UnsafeReactivateAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeReactivateAttributeValue" + return await 
self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueResponse,extra_headers, timeout_seconds) + + async def unsafe_reactivate_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueResponse: + response = await self.call_unsafe_reactivate_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_delete_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueResponse]: + """Low-level method to call UnsafeDeleteAttributeValue, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeDeleteAttributeValue" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueResponse,extra_headers, timeout_seconds) + + async def unsafe_delete_attribute_value( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueResponse: + response = await self.call_unsafe_delete_attribute_value(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + async def call_unsafe_delete_kas_key( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyResponse]: + """Low-level method to call UnsafeDeleteKasKey, granting access to errors and metadata""" + url = self.base_url + "/policy.unsafe.UnsafeService/UnsafeDeleteKasKey" + return await self._connect_client.call_unary(url, req, policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyResponse,extra_headers, timeout_seconds) + + async def unsafe_delete_kas_key( + self, req: policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyResponse: + response = await self.call_unsafe_delete_kas_key(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class UnsafeServiceProtocol(typing.Protocol): + def unsafe_update_namespace(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceResponse]: + ... + def unsafe_reactivate_namespace(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceResponse]: + ... + def unsafe_delete_namespace(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceResponse]: + ... 
+ def unsafe_update_attribute(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeResponse]: + ... + def unsafe_reactivate_attribute(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeResponse]: + ... + def unsafe_delete_attribute(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeResponse]: + ... + def unsafe_update_attribute_value(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueResponse]: + ... + def unsafe_reactivate_attribute_value(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueResponse]: + ... + def unsafe_delete_attribute_value(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueResponse]: + ... + def unsafe_delete_kas_key(self, req: ClientRequest[policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyRequest]) -> ServerResponse[policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyResponse]: + ... + +UNSAFE_SERVICE_PATH_PREFIX = "/policy.unsafe.UnsafeService" + +def wsgi_unsafe_service(implementation: UnsafeServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeUpdateNamespace", implementation.unsafe_update_namespace, policy.unsafe.unsafe_pb2.UnsafeUpdateNamespaceRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeReactivateNamespace", implementation.unsafe_reactivate_namespace, policy.unsafe.unsafe_pb2.UnsafeReactivateNamespaceRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeDeleteNamespace", implementation.unsafe_delete_namespace, policy.unsafe.unsafe_pb2.UnsafeDeleteNamespaceRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeUpdateAttribute", implementation.unsafe_update_attribute, policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeReactivateAttribute", implementation.unsafe_reactivate_attribute, policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeDeleteAttribute", implementation.unsafe_delete_attribute, policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeUpdateAttributeValue", implementation.unsafe_update_attribute_value, policy.unsafe.unsafe_pb2.UnsafeUpdateAttributeValueRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeReactivateAttributeValue", implementation.unsafe_reactivate_attribute_value, policy.unsafe.unsafe_pb2.UnsafeReactivateAttributeValueRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeDeleteAttributeValue", implementation.unsafe_delete_attribute_value, policy.unsafe.unsafe_pb2.UnsafeDeleteAttributeValueRequest) + app.register_unary_rpc("/policy.unsafe.UnsafeService/UnsafeDeleteKasKey", implementation.unsafe_delete_kas_key, policy.unsafe.unsafe_pb2.UnsafeDeleteKasKeyRequest) + return app diff --git a/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/__init__.py 
b/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/__init__.py new file mode 100644 index 0000000..5a45106 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/__init__.py @@ -0,0 +1 @@ +"""wellknownconfiguration protobuf definitions.""" diff --git a/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.py b/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.py new file mode 100644 index 0000000..da4fb82 --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: wellknownconfiguration/wellknown_configuration.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'wellknownconfiguration/wellknown_configuration.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n4wellknownconfiguration/wellknown_configuration.proto\x12\x16wellknownconfiguration\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xce\x01\n\x0fWellKnownConfig\x12`\n\rconfiguration\x18\x01 \x03(\x0b\x32:.wellknownconfiguration.WellKnownConfig.ConfigurationEntryR\rconfiguration\x1aY\n\x12\x43onfigurationEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12-\n\x05value\x18\x02 \x01(\x0b\x32\x17.google.protobuf.StructR\x05value:\x02\x38\x01\"\"\n GetWellKnownConfigurationRequest\"b\n!GetWellKnownConfigurationResponse\x12=\n\rconfiguration\x18\x01 \x01(\x0b\x32\x17.google.protobuf.StructR\rconfiguration2\xd4\x01\n\x10WellKnownService\x12\xbf\x01\n\x19GetWellKnownConfiguration\x12\x38.wellknownconfiguration.GetWellKnownConfigurationRequest\x1a\x39.wellknownconfiguration.GetWellKnownConfigurationResponse\"-\x90\x02\x01\x82\xd3\xe4\x93\x02$\x12\"/.well-known/opentdf-configurationB\xb1\x01\n\x1a\x63om.wellknownconfigurationB\x1bWellknownConfigurationProtoP\x01\xa2\x02\x03WXX\xaa\x02\x16Wellknownconfiguration\xca\x02\x16Wellknownconfiguration\xe2\x02\"Wellknownconfiguration\\GPBMetadata\xea\x02\x16Wellknownconfigurationb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'wellknownconfiguration.wellknown_configuration_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\032com.wellknownconfigurationB\033WellknownConfigurationProtoP\001\242\002\003WXX\252\002\026Wellknownconfiguration\312\002\026Wellknownconfiguration\342\002\"Wellknownconfiguration\\GPBMetadata\352\002\026Wellknownconfiguration' + _globals['_WELLKNOWNCONFIG_CONFIGURATIONENTRY']._loaded_options = None + 
_globals['_WELLKNOWNCONFIG_CONFIGURATIONENTRY']._serialized_options = b'8\001' + _globals['_WELLKNOWNSERVICE'].methods_by_name['GetWellKnownConfiguration']._loaded_options = None + _globals['_WELLKNOWNSERVICE'].methods_by_name['GetWellKnownConfiguration']._serialized_options = b'\220\002\001\202\323\344\223\002$\022\"/.well-known/opentdf-configuration' + _globals['_WELLKNOWNCONFIG']._serialized_start=141 + _globals['_WELLKNOWNCONFIG']._serialized_end=347 + _globals['_WELLKNOWNCONFIG_CONFIGURATIONENTRY']._serialized_start=258 + _globals['_WELLKNOWNCONFIG_CONFIGURATIONENTRY']._serialized_end=347 + _globals['_GETWELLKNOWNCONFIGURATIONREQUEST']._serialized_start=349 + _globals['_GETWELLKNOWNCONFIGURATIONREQUEST']._serialized_end=383 + _globals['_GETWELLKNOWNCONFIGURATIONRESPONSE']._serialized_start=385 + _globals['_GETWELLKNOWNCONFIGURATIONRESPONSE']._serialized_end=483 + _globals['_WELLKNOWNSERVICE']._serialized_start=486 + _globals['_WELLKNOWNSERVICE']._serialized_end=698 +# @@protoc_insertion_point(module_scope) diff --git a/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.pyi b/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.pyi new file mode 100644 index 0000000..f8c974b --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.pyi @@ -0,0 +1,32 @@ +from google.api import annotations_pb2 as _annotations_pb2 +from google.protobuf import struct_pb2 as _struct_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class WellKnownConfig(_message.Message): + __slots__ = ("configuration",) + class ConfigurationEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _struct_pb2.Struct + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + CONFIGURATION_FIELD_NUMBER: _ClassVar[int] + configuration: _containers.MessageMap[str, _struct_pb2.Struct] + def __init__(self, configuration: _Optional[_Mapping[str, _struct_pb2.Struct]] = ...) -> None: ... + +class GetWellKnownConfigurationRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class GetWellKnownConfigurationResponse(_message.Message): + __slots__ = ("configuration",) + CONFIGURATION_FIELD_NUMBER: _ClassVar[int] + configuration: _struct_pb2.Struct + def __init__(self, configuration: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... 
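A minimal usage sketch of the generated wellknownconfiguration API — the message stubs above and the WellKnownServiceClient defined in the next file. The import path for the client module, the base URL, the timeout, and the configuration key probed at the end are assumptions for illustration, not values taken from the generated code.

    # Hedged sketch only. Import paths follow the generated module's own
    # "import wellknownconfiguration..." convention; adjust to your package layout.
    from wellknownconfiguration import wellknown_configuration_pb2 as wk_pb2
    from wellknownconfiguration.wellknown_configuration_pb2_connect import (
        WellKnownServiceClient,
    )

    # base_url is an assumed value; http_client defaults to None per the
    # generated __init__ signature.
    client = WellKnownServiceClient(base_url="http://localhost:8080")
    req = wk_pb2.GetWellKnownConfigurationRequest()

    # Low-level form: returns a UnaryOutput, so errors and metadata can be
    # inspected without an exception being raised.
    out = client.call_get_well_known_configuration(req, timeout_seconds=5.0)
    err = out.error()
    if err is not None:
        raise err

    # High-level form: raises on error and returns the response message directly.
    resp = client.get_well_known_configuration(req)
    cfg = resp.configuration  # google.protobuf.Struct; supports dict-like access
    if "idp" in cfg:  # this key name is an assumption, not part of the schema
        print(cfg["idp"])
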
diff --git a/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2_connect.py b/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2_connect.py new file mode 100644 index 0000000..351d54a --- /dev/null +++ b/otdf-python-proto/src/otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2_connect.py @@ -0,0 +1,107 @@ +# Generated Connect client code + +from __future__ import annotations +from collections.abc import AsyncIterator +from collections.abc import Iterator +from collections.abc import Iterable +import aiohttp +import urllib3 +import typing +import sys + +from connectrpc.client_async import AsyncConnectClient +from connectrpc.client_sync import ConnectClient +from connectrpc.client_protocol import ConnectProtocol +from connectrpc.client_connect import ConnectProtocolError +from connectrpc.headers import HeaderInput +from connectrpc.server import ClientRequest +from connectrpc.server import ClientStream +from connectrpc.server import ServerResponse +from connectrpc.server import ServerStream +from connectrpc.server_sync import ConnectWSGI +from connectrpc.streams import StreamInput +from connectrpc.streams import AsyncStreamOutput +from connectrpc.streams import StreamOutput +from connectrpc.unary import UnaryOutput +from connectrpc.unary import ClientStreamingOutput + +if typing.TYPE_CHECKING: + # wsgiref.types was added in Python 3.11. + if sys.version_info >= (3, 11): + from wsgiref.types import WSGIApplication + else: + from _typeshed.wsgi import WSGIApplication + +import wellknownconfiguration.wellknown_configuration_pb2 + +class WellKnownServiceClient: + def __init__( + self, + base_url: str, + http_client: urllib3.PoolManager | None = None, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = ConnectClient(http_client, protocol) + def call_get_well_known_configuration( + self, req: wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationResponse]: + """Low-level method to call GetWellKnownConfiguration, granting access to errors and metadata""" + url = self.base_url + "/wellknownconfiguration.WellKnownService/GetWellKnownConfiguration" + return self._connect_client.call_unary(url, req, wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationResponse,extra_headers, timeout_seconds) + + + def get_well_known_configuration( + self, req: wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationResponse: + response = self.call_get_well_known_configuration(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +class AsyncWellKnownServiceClient: + def __init__( + self, + base_url: str, + http_client: aiohttp.ClientSession, + protocol: ConnectProtocol = ConnectProtocol.CONNECT_PROTOBUF, + ): + self.base_url = base_url + self._connect_client = AsyncConnectClient(http_client, protocol) + + async def call_get_well_known_configuration( + self, req: 
wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> UnaryOutput[wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationResponse]: + """Low-level method to call GetWellKnownConfiguration, granting access to errors and metadata""" + url = self.base_url + "/wellknownconfiguration.WellKnownService/GetWellKnownConfiguration" + return await self._connect_client.call_unary(url, req, wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationResponse,extra_headers, timeout_seconds) + + async def get_well_known_configuration( + self, req: wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationRequest,extra_headers: HeaderInput | None=None, timeout_seconds: float | None=None + ) -> wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationResponse: + response = await self.call_get_well_known_configuration(req, extra_headers, timeout_seconds) + err = response.error() + if err is not None: + raise err + msg = response.message() + if msg is None: + raise ConnectProtocolError('missing response message') + return msg + + +@typing.runtime_checkable +class WellKnownServiceProtocol(typing.Protocol): + def get_well_known_configuration(self, req: ClientRequest[wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationRequest]) -> ServerResponse[wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationResponse]: + ... + +WELL_KNOWN_SERVICE_PATH_PREFIX = "/wellknownconfiguration.WellKnownService" + +def wsgi_well_known_service(implementation: WellKnownServiceProtocol) -> WSGIApplication: + app = ConnectWSGI() + app.register_unary_rpc("/wellknownconfiguration.WellKnownService/GetWellKnownConfiguration", implementation.get_well_known_configuration, wellknownconfiguration.wellknown_configuration_pb2.GetWellKnownConfigurationRequest) + return app diff --git a/otdf-python-proto/uv.lock b/otdf-python-proto/uv.lock new file mode 100644 index 0000000..179ca3b --- /dev/null +++ b/otdf-python-proto/uv.lock @@ -0,0 +1,759 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.12.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 
7823716, upload-time = "2025-07-29T05:52:32.215Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/dc/ef9394bde9080128ad401ac7ede185267ed637df03b51f05d14d1c99ad67/aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc", size = 703921, upload-time = "2025-07-29T05:49:43.584Z" }, + { url = "https://files.pythonhosted.org/packages/8f/42/63fccfc3a7ed97eb6e1a71722396f409c46b60a0552d8a56d7aad74e0df5/aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af", size = 480288, upload-time = "2025-07-29T05:49:47.851Z" }, + { url = "https://files.pythonhosted.org/packages/9c/a2/7b8a020549f66ea2a68129db6960a762d2393248f1994499f8ba9728bbed/aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421", size = 468063, upload-time = "2025-07-29T05:49:49.789Z" }, + { url = "https://files.pythonhosted.org/packages/8f/f5/d11e088da9176e2ad8220338ae0000ed5429a15f3c9dfd983f39105399cd/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79", size = 1650122, upload-time = "2025-07-29T05:49:51.874Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6b/b60ce2757e2faed3d70ed45dafee48cee7bfb878785a9423f7e883f0639c/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77", size = 1624176, upload-time = "2025-07-29T05:49:53.805Z" }, + { url = "https://files.pythonhosted.org/packages/dd/de/8c9fde2072a1b72c4fadecf4f7d4be7a85b1d9a4ab333d8245694057b4c6/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c", size = 1696583, upload-time = "2025-07-29T05:49:55.338Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ad/07f863ca3d895a1ad958a54006c6dafb4f9310f8c2fdb5f961b8529029d3/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4", size = 1738896, upload-time = "2025-07-29T05:49:57.045Z" }, + { url = "https://files.pythonhosted.org/packages/20/43/2bd482ebe2b126533e8755a49b128ec4e58f1a3af56879a3abdb7b42c54f/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6", size = 1643561, upload-time = "2025-07-29T05:49:58.762Z" }, + { url = "https://files.pythonhosted.org/packages/23/40/2fa9f514c4cf4cbae8d7911927f81a1901838baf5e09a8b2c299de1acfe5/aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2", size = 1583685, upload-time = "2025-07-29T05:50:00.375Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c3/94dc7357bc421f4fb978ca72a201a6c604ee90148f1181790c129396ceeb/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d", size = 1627533, upload-time = "2025-07-29T05:50:02.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/3f/1f8911fe1844a07001e26593b5c255a685318943864b27b4e0267e840f95/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb", size = 1638319, upload-time = "2025-07-29T05:50:04.282Z" }, + { url = "https://files.pythonhosted.org/packages/4e/46/27bf57a99168c4e145ffee6b63d0458b9c66e58bb70687c23ad3d2f0bd17/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5", size = 1613776, upload-time = "2025-07-29T05:50:05.863Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7e/1d2d9061a574584bb4ad3dbdba0da90a27fdc795bc227def3a46186a8bc1/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b", size = 1693359, upload-time = "2025-07-29T05:50:07.563Z" }, + { url = "https://files.pythonhosted.org/packages/08/98/bee429b52233c4a391980a5b3b196b060872a13eadd41c3a34be9b1469ed/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065", size = 1716598, upload-time = "2025-07-29T05:50:09.33Z" }, + { url = "https://files.pythonhosted.org/packages/57/39/b0314c1ea774df3392751b686104a3938c63ece2b7ce0ba1ed7c0b4a934f/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1", size = 1644940, upload-time = "2025-07-29T05:50:11.334Z" }, + { url = "https://files.pythonhosted.org/packages/1b/83/3dacb8d3f8f512c8ca43e3fa8a68b20583bd25636ffa4e56ee841ffd79ae/aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a", size = 429239, upload-time = "2025-07-29T05:50:12.803Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f9/470b5daba04d558c9673ca2034f28d067f3202a40e17804425f0c331c89f/aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830", size = 452297, upload-time = "2025-07-29T05:50:14.266Z" }, + { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" }, + { url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, + { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, + { url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, + { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, + { url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, + { url = "https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, + { url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" }, + { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, + { url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, + { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, + { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" }, + { url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, + { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, + { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, + { url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, + { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, + { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, + { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, + { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, + { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, + { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, + { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = "https://files.pythonhosted.org/packages/f2/33/918091abcf102e39d15aba2476ad9e7bd35ddb190dcdd43a854000d3da0d/aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315", size = 696741, upload-time = "2025-07-29T05:51:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/7495a81e39a998e400f3ecdd44a62107254803d1681d9189be5c2e4530cd/aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd", size = 474407, upload-time = "2025-07-29T05:51:21.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/fc/a9576ab4be2dcbd0f73ee8675d16c707cfc12d5ee80ccf4015ba543480c9/aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4", size = 466703, upload-time = "2025-07-29T05:51:22.948Z" }, + { url = "https://files.pythonhosted.org/packages/09/2f/d4bcc8448cf536b2b54eed48f19682031ad182faa3a3fee54ebe5b156387/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7", size = 1705532, upload-time = "2025-07-29T05:51:25.211Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f3/59406396083f8b489261e3c011aa8aee9df360a96ac8fa5c2e7e1b8f0466/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d", size = 1686794, upload-time = "2025-07-29T05:51:27.145Z" }, + { url = "https://files.pythonhosted.org/packages/dc/71/164d194993a8d114ee5656c3b7ae9c12ceee7040d076bf7b32fb98a8c5c6/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b", size = 1738865, upload-time = "2025-07-29T05:51:29.366Z" }, + { url = "https://files.pythonhosted.org/packages/1c/00/d198461b699188a93ead39cb458554d9f0f69879b95078dce416d3209b54/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d", size = 1788238, upload-time = "2025-07-29T05:51:31.285Z" }, + { url = "https://files.pythonhosted.org/packages/85/b8/9e7175e1fa0ac8e56baa83bf3c214823ce250d0028955dfb23f43d5e61fd/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d", size = 1710566, upload-time = "2025-07-29T05:51:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/59/e4/16a8eac9df39b48ae102ec030fa9f726d3570732e46ba0c592aeeb507b93/aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645", size = 1624270, upload-time = "2025-07-29T05:51:35.195Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f8/cd84dee7b6ace0740908fd0af170f9fab50c2a41ccbc3806aabcb1050141/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461", size = 1677294, upload-time = "2025-07-29T05:51:37.215Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/d0f1f85e50d401eccd12bf85c46ba84f947a84839c8a1c2c5f6e8ab1eb50/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9", size = 1708958, upload-time = "2025-07-29T05:51:39.328Z" }, + { url = "https://files.pythonhosted.org/packages/d5/6b/f6fa6c5790fb602538483aa5a1b86fcbad66244997e5230d88f9412ef24c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d", size = 1651553, upload-time = "2025-07-29T05:51:41.356Z" }, + { url = "https://files.pythonhosted.org/packages/04/36/a6d36ad545fa12e61d11d1932eef273928b0495e6a576eb2af04297fdd3c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693", size = 1727688, upload-time = "2025-07-29T05:51:43.452Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c8/f195e5e06608a97a4e52c5d41c7927301bf757a8e8bb5bbf8cef6c314961/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64", size = 1761157, upload-time = "2025-07-29T05:51:45.643Z" }, + { url = "https://files.pythonhosted.org/packages/05/6a/ea199e61b67f25ba688d3ce93f63b49b0a4e3b3d380f03971b4646412fc6/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51", size = 1710050, upload-time = "2025-07-29T05:51:48.203Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2e/ffeb7f6256b33635c29dbed29a22a723ff2dd7401fff42ea60cf2060abfb/aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0", size = 422647, upload-time = "2025-07-29T05:51:50.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, 
upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "connect-python" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "multidict" }, + { name = "protobuf" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/3c/3ab5feddb807e2c6cab38e2b81dd3f82d853c4ba1b0dd66b1fd3d29f664e/connect_python-0.4.2.tar.gz", hash = "sha256:8b0a49b9c5caf82776e8577fbb2a3acae1d0b6b9ab925dace43418ad1fb6002d", size = 178600, upload-time = "2025-07-01T06:44:49.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/af/ec2756b81573441fe8c706e27188cbebce437912d3bdc650e8d5d3856c1a/connect_python-0.4.2-py3-none-any.whl", hash = "sha256:8d76089f4e2bf97513eccf1f594d10fb2275bae3042f6f945ad8dea884a4f2c4", size = 42033, upload-time = "2025-07-01T06:44:47.657Z" }, +] + +[package.optional-dependencies] +compiler = [ + { name = "protogen" }, +] + +[[package]] +name = "frozenlist" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/36/0da0a49409f6b47cc2d060dc8c9040b897b5902a8a4e37d9bc1deb11f680/frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a", size = 81304, upload-time = "2025-06-09T22:59:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/77/f0/77c11d13d39513b298e267b22eb6cb559c103d56f155aa9a49097221f0b6/frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61", size = 47735, upload-time = "2025-06-09T22:59:48.133Z" }, + { url = "https://files.pythonhosted.org/packages/37/12/9d07fa18971a44150593de56b2f2947c46604819976784bcf6ea0d5db43b/frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d", size = 46775, upload-time = "2025-06-09T22:59:49.564Z" }, + { url = "https://files.pythonhosted.org/packages/70/34/f73539227e06288fcd1f8a76853e755b2b48bca6747e99e283111c18bcd4/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e", size = 224644, upload-time = "2025-06-09T22:59:51.35Z" }, + { url = "https://files.pythonhosted.org/packages/fb/68/c1d9c2f4a6e438e14613bad0f2973567586610cc22dcb1e1241da71de9d3/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9", size = 222125, upload-time = "2025-06-09T22:59:52.884Z" }, + { url = "https://files.pythonhosted.org/packages/b9/d0/98e8f9a515228d708344d7c6986752be3e3192d1795f748c24bcf154ad99/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c", size = 233455, upload-time = "2025-06-09T22:59:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/79/df/8a11bcec5600557f40338407d3e5bea80376ed1c01a6c0910fcfdc4b8993/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981", size = 227339, upload-time = "2025-06-09T22:59:56.187Z" }, + { url = "https://files.pythonhosted.org/packages/50/82/41cb97d9c9a5ff94438c63cc343eb7980dac4187eb625a51bdfdb7707314/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615", size = 212969, upload-time = "2025-06-09T22:59:57.604Z" }, + { url = "https://files.pythonhosted.org/packages/13/47/f9179ee5ee4f55629e4f28c660b3fdf2775c8bfde8f9c53f2de2d93f52a9/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50", size = 222862, upload-time = "2025-06-09T22:59:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/df81e41ec6b953902c8b7e3a83bee48b195cb0e5ec2eabae5d8330c78038/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa", size = 222492, upload-time = "2025-06-09T23:00:01.026Z" }, + { url = "https://files.pythonhosted.org/packages/84/17/30d6ea87fa95a9408245a948604b82c1a4b8b3e153cea596421a2aef2754/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577", size = 238250, upload-time = "2025-06-09T23:00:03.401Z" }, + { url = "https://files.pythonhosted.org/packages/8f/00/ecbeb51669e3c3df76cf2ddd66ae3e48345ec213a55e3887d216eb4fbab3/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59", size = 218720, upload-time = "2025-06-09T23:00:05.282Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c0/c224ce0e0eb31cc57f67742071bb470ba8246623c1823a7530be0e76164c/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e", size = 232585, upload-time = "2025-06-09T23:00:07.962Z" }, + { url = "https://files.pythonhosted.org/packages/55/3c/34cb694abf532f31f365106deebdeac9e45c19304d83cf7d51ebbb4ca4d1/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd", size = 234248, upload-time = "2025-06-09T23:00:09.428Z" }, + { url = "https://files.pythonhosted.org/packages/98/c0/2052d8b6cecda2e70bd81299e3512fa332abb6dcd2969b9c80dfcdddbf75/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718", size = 221621, upload-time = "2025-06-09T23:00:11.32Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bf/7dcebae315436903b1d98ffb791a09d674c88480c158aa171958a3ac07f0/frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e", size = 39578, upload-time = "2025-06-09T23:00:13.526Z" }, + { url = "https://files.pythonhosted.org/packages/8f/5f/f69818f017fa9a3d24d1ae39763e29b7f60a59e46d5f91b9c6b21622f4cd/frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464", size = 43830, upload-time = "2025-06-09T23:00:14.98Z" }, + { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, + { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, + { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, + { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, + { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, + { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, + { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, + { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, + { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, + { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, + { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, 
upload-time = "2025-06-09T23:01:06.54Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, + { url = "https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, + { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, + { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, upload-time = "2025-06-09T23:01:19.649Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, + { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, + { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, + { url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = "2025-06-09T23:01:35.503Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, + { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, + { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, + { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, + { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, + { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = "2025-06-09T23:02:00.493Z" }, + { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" }, +] + +[[package]] +name = "grpcio" +version = "1.74.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/54/68e51a90797ad7afc5b0a7881426c337f6a9168ebab73c3210b76aa7c90d/grpcio-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907", size = 5481935, upload-time = "2025-07-24T18:52:43.756Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/af817c7e9843929e93e54d09c9aee2555c2e8d81b93102a9426b36e91833/grpcio-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb", size = 10986796, upload-time = "2025-07-24T18:52:47.219Z" }, + { url = "https://files.pythonhosted.org/packages/d5/94/d67756638d7bb07750b07d0826c68e414124574b53840ba1ff777abcd388/grpcio-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486", size = 5983663, upload-time = "2025-07-24T18:52:49.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/f5/c5e4853bf42148fea8532d49e919426585b73eafcf379a712934652a8de9/grpcio-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11", size = 6653765, upload-time = "2025-07-24T18:52:51.094Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/a1991dd64b331d199935e096cc9daa3415ee5ccbe9f909aa48eded7bba34/grpcio-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9", size = 6215172, upload-time = "2025-07-24T18:52:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/7cef3dbb3b073d0ce34fd507efc44ac4c9442a0ef9fba4fb3f5c551efef5/grpcio-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc", size = 6329142, upload-time = "2025-07-24T18:52:54.927Z" }, + { url = "https://files.pythonhosted.org/packages/bf/d3/587920f882b46e835ad96014087054655312400e2f1f1446419e5179a383/grpcio-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e", size = 7018632, upload-time = "2025-07-24T18:52:56.523Z" }, + { url = "https://files.pythonhosted.org/packages/1f/95/c70a3b15a0bc83334b507e3d2ae20ee8fa38d419b8758a4d838f5c2a7d32/grpcio-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82", size = 6509641, upload-time = "2025-07-24T18:52:58.495Z" }, + { url = "https://files.pythonhosted.org/packages/4b/06/2e7042d06247d668ae69ea6998eca33f475fd4e2855f94dcb2aa5daef334/grpcio-1.74.0-cp310-cp310-win32.whl", hash = "sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7", size = 3817478, upload-time = "2025-07-24T18:53:00.128Z" }, + { url = "https://files.pythonhosted.org/packages/93/20/e02b9dcca3ee91124060b65bbf5b8e1af80b3b76a30f694b44b964ab4d71/grpcio-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5", size = 4493971, upload-time = "2025-07-24T18:53:02.068Z" }, + { url = "https://files.pythonhosted.org/packages/e7/77/b2f06db9f240a5abeddd23a0e49eae2b6ac54d85f0e5267784ce02269c3b/grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31", size = 5487368, upload-time = "2025-07-24T18:53:03.548Z" }, + { url = "https://files.pythonhosted.org/packages/48/99/0ac8678a819c28d9a370a663007581744a9f2a844e32f0fa95e1ddda5b9e/grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4", size = 10999804, upload-time = "2025-07-24T18:53:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/45/c6/a2d586300d9e14ad72e8dc211c7aecb45fe9846a51e558c5bca0c9102c7f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce", size = 5987667, upload-time = "2025-07-24T18:53:07.157Z" }, + { url = "https://files.pythonhosted.org/packages/c9/57/5f338bf56a7f22584e68d669632e521f0de460bb3749d54533fc3d0fca4f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3", size = 6655612, upload-time = "2025-07-24T18:53:09.244Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/ea/a4820c4c44c8b35b1903a6c72a5bdccec92d0840cf5c858c498c66786ba5/grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182", size = 6219544, upload-time = "2025-07-24T18:53:11.221Z" }, + { url = "https://files.pythonhosted.org/packages/a4/17/0537630a921365928f5abb6d14c79ba4dcb3e662e0dbeede8af4138d9dcf/grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d", size = 6334863, upload-time = "2025-07-24T18:53:12.925Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a6/85ca6cb9af3f13e1320d0a806658dca432ff88149d5972df1f7b51e87127/grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f", size = 7019320, upload-time = "2025-07-24T18:53:15.002Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a7/fe2beab970a1e25d2eff108b3cf4f7d9a53c185106377a3d1989216eba45/grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4", size = 6514228, upload-time = "2025-07-24T18:53:16.999Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c2/2f9c945c8a248cebc3ccda1b7a1bf1775b9d7d59e444dbb18c0014e23da6/grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b", size = 3817216, upload-time = "2025-07-24T18:53:20.564Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d1/a9cf9c94b55becda2199299a12b9feef0c79946b0d9d34c989de6d12d05d/grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11", size = 4495380, upload-time = "2025-07-24T18:53:22.058Z" }, + { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" }, + { url = "https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" }, + { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 6621763, upload-time = "2025-07-24T18:53:29.193Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" }, + { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" }, + { url = "https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" }, + { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d8/1004a5f468715221450e66b051c839c2ce9a985aa3ee427422061fcbb6aa/grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89", size = 5449488, upload-time = "2025-07-24T18:53:41.174Z" }, + { url = "https://files.pythonhosted.org/packages/94/0e/33731a03f63740d7743dced423846c831d8e6da808fcd02821a4416df7fa/grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01", size = 10974059, upload-time = "2025-07-24T18:53:43.066Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c6/3d2c14d87771a421205bdca991467cfe473ee4c6a1231c1ede5248c62ab8/grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e", size = 5945647, upload-time = "2025-07-24T18:53:45.269Z" }, + { url = "https://files.pythonhosted.org/packages/c5/83/5a354c8aaff58594eef7fffebae41a0f8995a6258bbc6809b800c33d4c13/grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91", size = 6626101, upload-time = "2025-07-24T18:53:47.015Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ca/4fdc7bf59bf6994aa45cbd4ef1055cd65e2884de6113dbd49f75498ddb08/grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249", size = 6182562, upload-time = "2025-07-24T18:53:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/fd/48/2869e5b2c1922583686f7ae674937986807c2f676d08be70d0a541316270/grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362", size = 6303425, upload-time = "2025-07-24T18:53:50.847Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/0e/bac93147b9a164f759497bc6913e74af1cb632c733c7af62c0336782bd38/grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f", size = 6996533, upload-time = "2025-07-24T18:53:52.747Z" }, + { url = "https://files.pythonhosted.org/packages/84/35/9f6b2503c1fd86d068b46818bbd7329db26a87cdd8c01e0d1a9abea1104c/grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20", size = 6491489, upload-time = "2025-07-24T18:53:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/75/33/a04e99be2a82c4cbc4039eb3a76f6c3632932b9d5d295221389d10ac9ca7/grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa", size = 3805811, upload-time = "2025-07-24T18:53:56.798Z" }, + { url = "https://files.pythonhosted.org/packages/34/80/de3eb55eb581815342d097214bed4c59e806b05f1b3110df03b2280d6dfd/grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24", size = 4489214, upload-time = "2025-07-24T18:53:59.771Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.74.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/c8/bca79cb8c14bb63027831039919c801db9f593c7504c09433934f5dff6a4/grpcio_tools-1.74.0.tar.gz", hash = "sha256:88ab9eb18b6ac1b4872add6b394073bd8d44eee7c32e4dc60a022e25ffaffb95", size = 5390007, upload-time = "2025-07-24T18:57:23.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/9e/8bbf4670f079d584b6f59a66b992791dc1ff08228e9b1256e72edb5196ff/grpcio_tools-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:796796b4d7e83a9cdd03bb95c6774fca060fd209d83fb9af5f043e9c6f06a1fa", size = 2545411, upload-time = "2025-07-24T18:55:54.457Z" }, + { url = "https://files.pythonhosted.org/packages/86/00/b483ade4e5a939c7890b8bd4041554172ad5cc2987b435e73f438086ffa0/grpcio_tools-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d576b7786207359b63c2c2e3c387639b4177cf53b1e43d020b005deead32049e", size = 5841662, upload-time = "2025-07-24T18:55:57.363Z" }, + { url = "https://files.pythonhosted.org/packages/43/70/e6d306bd3e885a0c417da27b40bb6ccdec6b2fd3081cb78f31ab4f13a73f/grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d73686934bfdd868be0dbfbfcba2a5f50a8b0b71362e86a133e8efcbdc5cad5d", size = 2516224, upload-time = "2025-07-24T18:55:58.763Z" }, + { url = "https://files.pythonhosted.org/packages/bd/99/42092932ce8802d481d41d4294b611f4269eafb2c016833f5115d804aeba/grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:187f99fd22de6e63fbf4f30b2e054a2e3c4fb80beec73b1f4716ea86192050f5", size = 2904894, upload-time = "2025-07-24T18:56:00.138Z" }, + { url = "https://files.pythonhosted.org/packages/63/04/2c2f5b933a717ff8b9da24d852f224ed4031f39fd75f182fbf36df267040/grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bef8a16c34e68aaa2d246cd358629f8103730cb96cfc521f720378995f218282", size = 2656144, upload-time = "2025-07-24T18:56:01.589Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f6/fe326c5e009541fe5e6d285c7f8c17f444990ce94d0722c22d590d919e52/grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:e41084adbae7176097aa9d08a13d98c189895ec8c967f5461975750d3537625a", size = 3052117, upload-time = "2025-07-24T18:56:03.303Z" }, + { url = "https://files.pythonhosted.org/packages/d9/4d/0ced9b543bbd2df39c8b66116ac7a15faff37be4466580329e917ed12bf0/grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6b61337b47d981b4d270e3caa83607a900169617478c034e6f6baf16ab22d333", size = 3501738, upload-time = "2025-07-24T18:56:05.993Z" }, + { url = "https://files.pythonhosted.org/packages/22/b8/b81de7f416aa386f0c6a39301af5efb65f8fa74ab83d5f622914262a65db/grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7e920982b4eaab253affbd45ec6d5ec12d895f5c143374ef4c3eadef49162373", size = 3125555, upload-time = "2025-07-24T18:56:07.64Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cf695ebd5562a8b633114d0ca5084b908b17a528c4fa844a752c1fddf6a7/grpcio_tools-1.74.0-cp310-cp310-win32.whl", hash = "sha256:b966f3b93f9d24151591d096ecf9c3fdb419a50d486761f7d28a9a69b028b627", size = 992982, upload-time = "2025-07-24T18:56:09.391Z" }, + { url = "https://files.pythonhosted.org/packages/f3/01/e315fc3941e7f48d29aa4d0335081de4b9ac909c5092dab1d3263a191c0f/grpcio_tools-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:03787990b56f5c3b3f72c722a7e74fbc5a3b769bbc31ad426e2c6f6a28a9d7c8", size = 1157424, upload-time = "2025-07-24T18:56:10.781Z" }, + { url = "https://files.pythonhosted.org/packages/43/50/7bafe168b4b3494e7b96d4838b0d35eab62e5c74bf9c91e8f14233c94f60/grpcio_tools-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:9d9e28fbbab9b9e923c3d286949e8ff81ebbb402458698f0a2b1183b539779db", size = 2545457, upload-time = "2025-07-24T18:56:12.589Z" }, + { url = "https://files.pythonhosted.org/packages/8b/1c/8a0eb4e101f2fe8edc12851ddfccf4f2498d5f23d444ea73d09c94202b46/grpcio_tools-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:41040eb1b5d1e582687f6f19cf2efc4c191b6eab56b16f6fba50ac085c5ca4dd", size = 5842973, upload-time = "2025-07-24T18:56:14.063Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f2/eb1bac2dd6397f5ca271e6cb2566b61d4a4bf8df07db0988bc55200f254d/grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:1fdc013118e4e9054b6e1a64d16a0d4a17a4071042e674ada8673406ddb26e59", size = 2515918, upload-time = "2025-07-24T18:56:15.572Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fe/d270fd30ccd04d5faa9c3f2796ce56a0597eddf327a0fc746ccbb273cdd9/grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f037414c527a2c4a3af15451d9e58d7856d0a62b3f6dd3f5b969ecba82f5e843", size = 2904944, upload-time = "2025-07-24T18:56:17.091Z" }, + { url = "https://files.pythonhosted.org/packages/91/9f/3adb6e1ae826d9097745f4ad38a84c8c2edb4d768871222c95aa541f8e54/grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536f53a6a8d1ba1c469d085066cfa0dd3bb51f07013b71857bc3ad1eabe3ab49", size = 2656300, upload-time = "2025-07-24T18:56:18.51Z" }, + { url = "https://files.pythonhosted.org/packages/3f/15/e532439218674c9e451e7f965a0a6bcd53344c4178c62dc1acd66ed93797/grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1e23ff54dea7f6e9543dcebd2c0f4b7c9af39812966c05e1c5289477cb2bf2f7", size = 3051857, upload-time = "2025-07-24T18:56:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/ca/06/a63aeb1a16ab1508f2ed349faafb4e2e1fb2b048168a033e7392adab14c7/grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:76072dee9fa99b33eb0c334a16e70d694df762df705c7a2481f702af33d81a28", size = 3501682, upload-time = "2025-07-24T18:56:21.65Z" }, + { url = "https://files.pythonhosted.org/packages/47/1f/81da8c39874d9152fba5fa2bf3b6708c29ea3621fde30667509b9124ef06/grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bdf91eb722f2990085b1342c277e212ec392e37bd493a2a21d9eb9238f28c3e", size = 3125364, upload-time = "2025-07-24T18:56:23.095Z" }, + { url = "https://files.pythonhosted.org/packages/a3/64/a23256ecd34ceebe8aac8adedd4f65ed240572662899acb779cfcf5e0277/grpcio_tools-1.74.0-cp311-cp311-win32.whl", hash = "sha256:a036cd2a4223901e7a9f6a9b394326a9352a4ad70bdd3f1d893f1b231fcfdf7e", size = 993385, upload-time = "2025-07-24T18:56:25.054Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b8/a0d7359d93f0a2bbaf3b0d43eb8fa3e9f315e03ef4a4ebe05b4315a64644/grpcio_tools-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:d1fdf245178158a92a2dc78e3545b6d13b6c917d9b80931fc85cfb3e9534a07d", size = 1157908, upload-time = "2025-07-24T18:56:27.042Z" }, + { url = "https://files.pythonhosted.org/packages/5e/9c/08a4018e19c937af14bfa052ad3d7826a1687da984992d31d15139c7c8d3/grpcio_tools-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:61d84f6050d7170712600f7ee1dac8849f5dc0bfe0044dd71132ee1e7aa2b373", size = 2546097, upload-time = "2025-07-24T18:56:28.565Z" }, + { url = "https://files.pythonhosted.org/packages/0a/7b/b2985b1b8aa295d745b2e105c99401ad674fcdc2f5a9c8eb3ec0f57ad397/grpcio_tools-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:f0129a62711dbc1f1efd51d069d2ce0631d69e033bf3a046606c623acf935e08", size = 5839819, upload-time = "2025-07-24T18:56:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/de/40/de0fe696d50732c8b1f0f9271b05a3082f2a91e77e28d70dd3ffc1e4aaa5/grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:5ec661f3bb41f0d2a30125ea382f4d5c874bf4f26d4d8e3839bb7e3b3c037b3e", size = 2517611, upload-time = "2025-07-24T18:56:32.371Z" }, + { url = "https://files.pythonhosted.org/packages/a0/6d/949d3b339c3ff3c631168b355ce7be937f10feb894fdabe66c48ebd82394/grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7970a9cf3002bec2eff5a449ac7398b77e5d171cbb534c47258c72409d0aea74", size = 2905274, upload-time = "2025-07-24T18:56:33.872Z" }, + { url = "https://files.pythonhosted.org/packages/06/6b/f9b2e7b15c147ad6164e9ac7b20ee208435ca3243bcc97feb1ab74dcb902/grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f56d67b04790f84e216353341c6b298f1aeb591e1797fe955f606516c640936", size = 2656414, upload-time = "2025-07-24T18:56:35.47Z" }, + { url = "https://files.pythonhosted.org/packages/bd/de/621dde431314f49668c25b26a12f624c3da8748ac29df9db7d0a2596e575/grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3d0c33cc984d21525f190cb1af479f8da46370df5f2ced1a4e50769ababd0c0", size = 3052690, upload-time = "2025-07-24T18:56:37.799Z" }, + { url = "https://files.pythonhosted.org/packages/40/82/d43c9484174feea5a153371a011e06eabe508b97519a1e9a338b7ebdf43b/grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:88e535c1cf349e57e371529ea9918f811c5eff88161f322bbc06d6222bad6d50", size = 3501214, upload-time = "2025-07-24T18:56:39.493Z" }, + { url = "https://files.pythonhosted.org/packages/30/fc/195b90e4571f6c70665a25c7b748e13c2087025660d6d5aead9093f28b18/grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:c3cf9401ce72bc49582c2d80e0a2ee0e573e1c3c998c8bc5f739db8845e8e148", size = 3125689, upload-time = "2025-07-24T18:56:41.555Z" }, + { url = "https://files.pythonhosted.org/packages/cb/81/fe8980e5fb768090ffc531902ec1b7e5bf1d92108ecf8b7305405b297475/grpcio_tools-1.74.0-cp312-cp312-win32.whl", hash = "sha256:b63e250da44b15c67b9a34c5c30c81059bde528fc8af092d7f43194469f7c719", size = 993069, upload-time = "2025-07-24T18:56:43.088Z" }, + { url = "https://files.pythonhosted.org/packages/63/a9/7b081924d655787d56d2b409f703f0bf457b3dac10a67ad04dc7338e9aae/grpcio_tools-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:519d7cae085ae6695a8031bb990bf7766a922332b0a531e51342abc5431b78b5", size = 1157502, upload-time = "2025-07-24T18:56:44.814Z" }, + { url = "https://files.pythonhosted.org/packages/2f/65/307a72cf4bfa553a25e284bd1f27b94a53816ac01ddf432c398117b91b2a/grpcio_tools-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e2e22460355adbd0f25fdd7ed8b9ae53afb3875b9d5f34cdf1cf12559418245e", size = 2545750, upload-time = "2025-07-24T18:56:46.386Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8e/9b2217c15baadc7cfca3eba9f980e147452ca82f41767490f619edea3489/grpcio_tools-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:0cab5a2c6ae75b555fee8a1a9a9b575205171e1de392fe2d4139a29e67d8f5bb", size = 5838169, upload-time = "2025-07-24T18:56:48.057Z" }, + { url = "https://files.pythonhosted.org/packages/ea/42/a6a158b7e91c0a358cddf3f9088b004c2bfa42d1f96154b9b8eb17e16d73/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:9b18afca48b55832402a716ea4634ef2b68927a8a17ddf4038f51812299255c9", size = 2517140, upload-time = "2025-07-24T18:56:49.696Z" }, + { url = "https://files.pythonhosted.org/packages/05/db/d4576a07b2d1211822a070f76a99a9f4f4cb63496a02964ce77c88df8a28/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85f442a9e89e276bf89a0c9c76ea71647a927d967759333c1fa40300c27f7bd", size = 2905214, upload-time = "2025-07-24T18:56:51.768Z" }, + { url = "https://files.pythonhosted.org/packages/77/dc/3713e75751f862d8c84f823ba935d486c0aac0b6f789fa61fbde04ad5019/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051ce925b0b99ae2daf61b3cba19962b8655cc2a72758ce4081b89272206f5a3", size = 2656245, upload-time = "2025-07-24T18:56:53.877Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e4/01f9e8e0401d8e11a70ae8aff6899eb8c16536f69a0a9ffb25873588721c/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:98c7b8eb0de6984cd7fa7335ce3383b3bb9a1559edc238c811df88008d5d3593", size = 3052327, upload-time = "2025-07-24T18:56:55.535Z" }, + { url = "https://files.pythonhosted.org/packages/28/c2/264b4e705375a834c9c7462847ae435c0be1644f03a705d3d7464af07bd5/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f8f7d17b7573b9a2a6b4183fa4a56a2ab17370c8d0541e1424cf0c9c6f863434", size = 3500706, upload-time = "2025-07-24T18:56:57.245Z" }, + { url = "https://files.pythonhosted.org/packages/ee/c0/cc034cec5871a1918e7888e8ce700e06fab5bbb328f998a2f2750cd603b5/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:db08b91ea0cd66dc4b1b929100e7aa84c9c10c51573c8282ec1ba05b41f887ef", size = 3125098, upload-time = "2025-07-24T18:56:59.02Z" }, + { url = "https://files.pythonhosted.org/packages/69/55/5792b681af82b3ff1e50ce0ccfbb6d52fc68a13932ed3da57e58d7dfb67b/grpcio_tools-1.74.0-cp313-cp313-win32.whl", hash = 
"sha256:4b6c5efb331ae9e5f614437f4a5938459a8a5a1ab3dfe133d2bbdeaba39b894d", size = 992431, upload-time = "2025-07-24T18:57:00.618Z" }, + { url = "https://files.pythonhosted.org/packages/94/9f/626f0fe6bfc1c6917785c6a5ee2eb8c07b5a30771e4bf4cff3c1ab5b431b/grpcio_tools-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8324cd67f61f7900d227b36913ee5f0302ba3ba8777c8bc705afa8174098d28", size = 1157064, upload-time = "2025-07-24T18:57:02.579Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "multidict" +version = "6.6.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/6b/86f353088c1358e76fd30b0146947fddecee812703b604ee901e85cd2a80/multidict-6.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f", size = 77054, upload-time = "2025-08-11T12:06:02.99Z" }, + { url = "https://files.pythonhosted.org/packages/19/5d/c01dc3d3788bb877bd7f5753ea6eb23c1beeca8044902a8f5bfb54430f63/multidict-6.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb", size = 44914, upload-time = "2025-08-11T12:06:05.264Z" }, + { url = "https://files.pythonhosted.org/packages/46/44/964dae19ea42f7d3e166474d8205f14bb811020e28bc423d46123ddda763/multidict-6.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495", size = 44601, upload-time = "2025-08-11T12:06:06.627Z" }, + { url = "https://files.pythonhosted.org/packages/31/20/0616348a1dfb36cb2ab33fc9521de1f27235a397bf3f59338e583afadd17/multidict-6.6.4-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8", size = 224821, upload-time = "2025-08-11T12:06:08.06Z" }, + { url = "https://files.pythonhosted.org/packages/14/26/5d8923c69c110ff51861af05bd27ca6783011b96725d59ccae6d9daeb627/multidict-6.6.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7", size = 242608, upload-time = "2025-08-11T12:06:09.697Z" }, + { url = "https://files.pythonhosted.org/packages/5c/cc/e2ad3ba9459aa34fa65cf1f82a5c4a820a2ce615aacfb5143b8817f76504/multidict-6.6.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796", size = 222324, upload-time = "2025-08-11T12:06:10.905Z" }, + { url = "https://files.pythonhosted.org/packages/19/db/4ed0f65701afbc2cb0c140d2d02928bb0fe38dd044af76e58ad7c54fd21f/multidict-6.6.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db", size = 253234, upload-time = "2025-08-11T12:06:12.658Z" }, + { url = "https://files.pythonhosted.org/packages/94/c1/5160c9813269e39ae14b73debb907bfaaa1beee1762da8c4fb95df4764ed/multidict-6.6.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0", size = 251613, upload-time = "2025-08-11T12:06:13.97Z" }, + { url = "https://files.pythonhosted.org/packages/05/a9/48d1bd111fc2f8fb98b2ed7f9a115c55a9355358432a19f53c0b74d8425d/multidict-6.6.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877", size = 241649, upload-time = "2025-08-11T12:06:15.204Z" }, + { url = "https://files.pythonhosted.org/packages/85/2a/f7d743df0019408768af8a70d2037546a2be7b81fbb65f040d76caafd4c5/multidict-6.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace", size = 239238, upload-time = "2025-08-11T12:06:16.467Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b8/4f4bb13323c2d647323f7919201493cf48ebe7ded971717bfb0f1a79b6bf/multidict-6.6.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6", size = 233517, upload-time = "2025-08-11T12:06:18.107Z" }, + { url = "https://files.pythonhosted.org/packages/33/29/4293c26029ebfbba4f574febd2ed01b6f619cfa0d2e344217d53eef34192/multidict-6.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb", size = 243122, upload-time = "2025-08-11T12:06:19.361Z" }, + { url = "https://files.pythonhosted.org/packages/20/60/a1c53628168aa22447bfde3a8730096ac28086704a0d8c590f3b63388d0c/multidict-6.6.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb", size = 248992, upload-time = "2025-08-11T12:06:20.661Z" }, + { url = "https://files.pythonhosted.org/packages/a3/3b/55443a0c372f33cae5d9ec37a6a973802884fa0ab3586659b197cf8cc5e9/multidict-6.6.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987", size = 243708, upload-time = "2025-08-11T12:06:21.891Z" }, + { url = "https://files.pythonhosted.org/packages/7c/60/a18c6900086769312560b2626b18e8cca22d9e85b1186ba77f4755b11266/multidict-6.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f", size = 237498, upload-time = "2025-08-11T12:06:23.206Z" }, + { url = "https://files.pythonhosted.org/packages/11/3d/8bdd8bcaff2951ce2affccca107a404925a2beafedd5aef0b5e4a71120a6/multidict-6.6.4-cp310-cp310-win32.whl", hash = "sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f", size = 41415, upload-time = "2025-08-11T12:06:24.77Z" }, + { url = "https://files.pythonhosted.org/packages/c0/53/cab1ad80356a4cd1b685a254b680167059b433b573e53872fab245e9fc95/multidict-6.6.4-cp310-cp310-win_amd64.whl", hash 
= "sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0", size = 46046, upload-time = "2025-08-11T12:06:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/cf/9a/874212b6f5c1c2d870d0a7adc5bb4cfe9b0624fa15cdf5cf757c0f5087ae/multidict-6.6.4-cp310-cp310-win_arm64.whl", hash = "sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729", size = 43147, upload-time = "2025-08-11T12:06:27.534Z" }, + { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, + { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, + { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, + { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, + { url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, + { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, + { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, + { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, + { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, + { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, + { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" }, + { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, + { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, + { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, + { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, + { url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, + { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, + { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, + { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, + { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, + { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, + { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, + { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, + { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848, upload-time = "2025-08-11T12:07:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060, upload-time = "2025-08-11T12:07:21.163Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269, upload-time = "2025-08-11T12:07:22.392Z" }, + { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158, upload-time = "2025-08-11T12:07:23.636Z" }, + { url = "https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076, upload-time = "2025-08-11T12:07:25.049Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694, upload-time = "2025-08-11T12:07:26.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350, upload-time = "2025-08-11T12:07:27.94Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250, upload-time = "2025-08-11T12:07:29.303Z" }, + { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900, upload-time = "2025-08-11T12:07:30.764Z" }, + { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355, upload-time = "2025-08-11T12:07:32.205Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061, upload-time = "2025-08-11T12:07:33.623Z" }, + { url = "https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675, upload-time = "2025-08-11T12:07:34.958Z" }, + { url = "https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247, upload-time = "2025-08-11T12:07:36.588Z" }, + { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960, upload-time = "2025-08-11T12:07:39.735Z" }, + { url = "https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078, upload-time = "2025-08-11T12:07:41.525Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 
41708, upload-time = "2025-08-11T12:07:43.405Z" }, + { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912, upload-time = "2025-08-11T12:07:45.082Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076, upload-time = "2025-08-11T12:07:46.746Z" }, + { url = "https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812, upload-time = "2025-08-11T12:07:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313, upload-time = "2025-08-11T12:07:49.679Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777, upload-time = "2025-08-11T12:07:51.318Z" }, + { url = "https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321, upload-time = "2025-08-11T12:07:52.965Z" }, + { url = "https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954, upload-time = "2025-08-11T12:07:54.423Z" }, + { url = "https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612, upload-time = "2025-08-11T12:07:55.914Z" }, + { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528, upload-time = "2025-08-11T12:07:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329, upload-time = "2025-08-11T12:07:58.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928, upload-time = "2025-08-11T12:08:01.037Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228, upload-time = "2025-08-11T12:08:02.96Z" }, + { url = "https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869, upload-time = "2025-08-11T12:08:04.746Z" }, + { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446, upload-time = "2025-08-11T12:08:06.332Z" }, + { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299, upload-time = "2025-08-11T12:08:07.931Z" }, + { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926, upload-time = "2025-08-11T12:08:09.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383, upload-time = "2025-08-11T12:08:10.981Z" }, + { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", size = 47775, upload-time = "2025-08-11T12:08:12.439Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100, upload-time = "2025-08-11T12:08:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501, upload-time = "2025-08-11T12:08:15.173Z" }, + { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, +] + +[[package]] +name = "mypy-protobuf" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "protobuf" }, + { name = "types-protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/6f/282d64d66bf48ce60e38a6560753f784e0f88ab245ac2fb5e93f701a36cd/mypy-protobuf-3.6.0.tar.gz", hash = "sha256:02f242eb3409f66889f2b1a3aa58356ec4d909cdd0f93115622e9e70366eca3c", size = 24445, upload-time = "2024-04-01T20:24:42.837Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/73/d6b999782ae22f16971cc05378b3b33f6a89ede3b9619e8366aa23484bca/mypy_protobuf-3.6.0-py3-none-any.whl", hash = "sha256:56176e4d569070e7350ea620262478b49b7efceba4103d468448f1d21492fd6c", size = 16434, upload-time = "2024-04-01T20:24:40.583Z" }, +] + +[[package]] +name = "otdf-python-proto" +version = "0.3.1" +source = { editable = "." } +dependencies = [ + { name = "connect-python", extra = ["compiler"] }, + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "grpcio-tools" }, + { name = "protobuf" }, +] + +[package.dev-dependencies] +dev = [ + { name = "mypy-protobuf" }, +] + +[package.metadata] +requires-dist = [ + { name = "connect-python", extras = ["compiler"], specifier = ">=0.4.2" }, + { name = "googleapis-common-protos", specifier = ">=1.66.0" }, + { name = "grpcio", specifier = ">=1.74.0" }, + { name = "grpcio-tools", specifier = ">=1.74.0" }, + { name = "protobuf", specifier = ">=6.31.1" }, +] + +[package.metadata.requires-dev] +dev = [{ name = "mypy-protobuf", specifier = ">=3.6.0" }] + +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770", size = 73178, upload-time = "2025-06-09T22:53:40.126Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4e/ad52a7925ff01c1325653a730c7ec3175a23f948f08626a534133427dcff/propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3", size = 43133, upload-time = "2025-06-09T22:53:41.965Z" }, + { url = "https://files.pythonhosted.org/packages/63/7c/e9399ba5da7780871db4eac178e9c2e204c23dd3e7d32df202092a1ed400/propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3", size = 43039, upload-time = "2025-06-09T22:53:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/22/e1/58da211eb8fdc6fc854002387d38f415a6ca5f5c67c1315b204a5d3e9d7a/propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e", size = 201903, upload-time = "2025-06-09T22:53:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0a/550ea0f52aac455cb90111c8bab995208443e46d925e51e2f6ebdf869525/propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220", size = 213362, upload-time = "2025-06-09T22:53:46.707Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/af/9893b7d878deda9bb69fcf54600b247fba7317761b7db11fede6e0f28bd0/propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb", size = 210525, upload-time = "2025-06-09T22:53:48.547Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614", size = 198283, upload-time = "2025-06-09T22:53:50.067Z" }, + { url = "https://files.pythonhosted.org/packages/78/8c/9fe55bd01d362bafb413dfe508c48753111a1e269737fa143ba85693592c/propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50", size = 191872, upload-time = "2025-06-09T22:53:51.438Z" }, + { url = "https://files.pythonhosted.org/packages/54/14/4701c33852937a22584e08abb531d654c8bcf7948a8f87ad0a4822394147/propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339", size = 199452, upload-time = "2025-06-09T22:53:53.229Z" }, + { url = "https://files.pythonhosted.org/packages/16/44/447f2253d859602095356007657ee535e0093215ea0b3d1d6a41d16e5201/propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0", size = 191567, upload-time = "2025-06-09T22:53:54.541Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b3/e4756258749bb2d3b46defcff606a2f47410bab82be5824a67e84015b267/propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2", size = 193015, upload-time = "2025-06-09T22:53:56.44Z" }, + { url = "https://files.pythonhosted.org/packages/1e/df/e6d3c7574233164b6330b9fd697beeac402afd367280e6dc377bb99b43d9/propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7", size = 204660, upload-time = "2025-06-09T22:53:57.839Z" }, + { url = "https://files.pythonhosted.org/packages/b2/53/e4d31dd5170b4a0e2e6b730f2385a96410633b4833dc25fe5dffd1f73294/propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b", size = 206105, upload-time = "2025-06-09T22:53:59.638Z" }, + { url = "https://files.pythonhosted.org/packages/7f/fe/74d54cf9fbe2a20ff786e5f7afcfde446588f0cf15fb2daacfbc267b866c/propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c", size = 196980, upload-time = "2025-06-09T22:54:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70", size = 37679, upload-time = "2025-06-09T22:54:03.003Z" }, + { url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9", size = 41459, upload-time = "2025-06-09T22:54:04.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, + { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = 
"2025-06-09T22:54:21.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = 
"2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = "2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +] + +[[package]] +name = "protobuf" +version = "6.32.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/df/fb4a8eeea482eca989b51cffd274aac2ee24e825f0bf3cbce5281fa1567b/protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2", size = 440614, upload-time = "2025-08-14T21:21:25.015Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/33/18/df8c87da2e47f4f1dcc5153a81cd6bca4e429803f4069a299e236e4dd510/protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741", size = 424409, upload-time = "2025-08-14T21:21:12.366Z" }, + { url = "https://files.pythonhosted.org/packages/e1/59/0a820b7310f8139bd8d5a9388e6a38e1786d179d6f33998448609296c229/protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e", size = 435735, upload-time = "2025-08-14T21:21:15.046Z" }, + { url = "https://files.pythonhosted.org/packages/cc/5b/0d421533c59c789e9c9894683efac582c06246bf24bb26b753b149bd88e4/protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0", size = 426449, upload-time = "2025-08-14T21:21:16.687Z" }, + { url = "https://files.pythonhosted.org/packages/ec/7b/607764ebe6c7a23dcee06e054fd1de3d5841b7648a90fd6def9a3bb58c5e/protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1", size = 322869, upload-time = "2025-08-14T21:21:18.282Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/2e730bd1c25392fc32e3268e02446f0d77cb51a2c3a8486b1798e34d5805/protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c", size = 322009, upload-time = "2025-08-14T21:21:19.893Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f2/80ffc4677aac1bc3519b26bc7f7f5de7fce0ee2f7e36e59e27d8beb32dd1/protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783", size = 169287, upload-time = "2025-08-14T21:21:23.515Z" }, +] + +[[package]] +name = "protogen" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/fc/2e68784a06e46fe799dd375b732c13f99559a2c3b2164100607ec8b5cccf/protogen-0.3.1.tar.gz", hash = "sha256:1e55405f6c94476c45c400b069dbdb0274f065e3109fee28122e96dbba075dcd", size = 23018, upload-time = "2023-11-20T15:34:48.288Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/a0/c3f3a2e2fa866547d82190ec5c0cd55580bc29c7894221bd793003a578a1/protogen-0.3.1-py3-none-any.whl", hash = "sha256:65b60b284d20ee4899d515b1959882d8c7504b271552de36f4ebfe77f6b07331", size = 21425, upload-time = "2023-11-20T15:34:45.958Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "types-protobuf" +version = "6.30.2.20250822" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/61/68/0c7144be5c6dc16538e79458839fc914ea494481c7e64566de4ecc0c3682/types_protobuf-6.30.2.20250822.tar.gz", hash = "sha256:faacbbe87bd8cba4472361c0bd86f49296bd36f7761e25d8ada4f64767c1bde9", size = 62379, upload-time = "2025-08-22T03:01:56.572Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/64/b926a6355993f712d7828772e42b9ae942f2d306d25072329805c374e729/types_protobuf-6.30.2.20250822-py3-none-any.whl", hash = "sha256:5584c39f7e36104b5f8bdfd31815fa1d5b7b3455a79ddddc097b62320f4b1841", size = 76523, upload-time = "2025-08-22T03:01:55.157Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/65/7fed0d774abf47487c64be14e9223749468922817b5e8792b8a64792a1bb/yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4", size = 132910, upload-time = "2025-06-10T00:42:31.108Z" }, + { url = "https://files.pythonhosted.org/packages/8a/7b/988f55a52da99df9e56dc733b8e4e5a6ae2090081dc2754fc8fd34e60aa0/yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a", size = 90644, upload-time = "2025-06-10T00:42:33.851Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/30d98f03e95d30c7e3cc093759982d038c8833ec2451001d45ef4854edc1/yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed", size = 89322, upload-time = "2025-06-10T00:42:35.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/7a/f2f314f5ebfe9200724b0b748de2186b927acb334cf964fd312eb86fc286/yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e", size = 323786, upload-time = "2025-06-10T00:42:37.817Z" }, + { url = "https://files.pythonhosted.org/packages/15/3f/718d26f189db96d993d14b984ce91de52e76309d0fd1d4296f34039856aa/yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73", size = 319627, upload-time = "2025-06-10T00:42:39.937Z" }, + { url = "https://files.pythonhosted.org/packages/a5/76/8fcfbf5fa2369157b9898962a4a7d96764b287b085b5b3d9ffae69cdefd1/yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e", size = 339149, upload-time = "2025-06-10T00:42:42.627Z" }, + { url = "https://files.pythonhosted.org/packages/3c/95/d7fc301cc4661785967acc04f54a4a42d5124905e27db27bb578aac49b5c/yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8", size = 333327, upload-time = "2025-06-10T00:42:44.842Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23", size = 326054, upload-time = "2025-06-10T00:42:47.149Z" }, + { url = "https://files.pythonhosted.org/packages/32/ae/8616d1f07853704523519f6131d21f092e567c5af93de7e3e94b38d7f065/yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70", size = 315035, upload-time = "2025-06-10T00:42:48.852Z" }, + { url = "https://files.pythonhosted.org/packages/48/aa/0ace06280861ef055855333707db5e49c6e3a08840a7ce62682259d0a6c0/yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb", size = 338962, upload-time = "2025-06-10T00:42:51.024Z" }, + { url = "https://files.pythonhosted.org/packages/20/52/1e9d0e6916f45a8fb50e6844f01cb34692455f1acd548606cbda8134cd1e/yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2", size = 335399, upload-time = "2025-06-10T00:42:53.007Z" }, + { url = "https://files.pythonhosted.org/packages/f2/65/60452df742952c630e82f394cd409de10610481d9043aa14c61bf846b7b1/yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30", size = 338649, upload-time = "2025-06-10T00:42:54.964Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f5/6cd4ff38dcde57a70f23719a838665ee17079640c77087404c3d34da6727/yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309", size = 358563, upload-time = "2025-06-10T00:42:57.28Z" }, + { url = "https://files.pythonhosted.org/packages/d1/90/c42eefd79d0d8222cb3227bdd51b640c0c1d0aa33fe4cc86c36eccba77d3/yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24", size = 357609, upload-time = 
"2025-06-10T00:42:59.055Z" }, + { url = "https://files.pythonhosted.org/packages/03/c8/cea6b232cb4617514232e0f8a718153a95b5d82b5290711b201545825532/yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13", size = 350224, upload-time = "2025-06-10T00:43:01.248Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8", size = 81753, upload-time = "2025-06-10T00:43:03.486Z" }, + { url = "https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16", size = 86817, upload-time = "2025-06-10T00:43:05.231Z" }, + { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, + { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = "2025-06-10T00:43:09.538Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, + { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, + { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = "2025-06-10T00:43:28.96Z" }, + { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = "2025-06-10T00:43:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, upload-time = "2025-06-10T00:43:36.489Z" }, + { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, + { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, + { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size = 86686, upload-time = "2025-06-10T00:43:42.692Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = 
"2025-06-10T00:44:08.379Z" }, + { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, 
upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +] diff --git a/otdf_python_test.go b/otdf_python_test.go deleted file mode 100644 index 7d041d1..0000000 --- a/otdf_python_test.go +++ /dev/null @@ -1,555 +0,0 @@ -package gotdf_python - -import ( - "crypto/tls" - "encoding/json" - "errors" - "fmt" - "io" - "log" - "net/http" - "net/url" - "os" - "strings" - "testing" - "time" -) - -var defaultAuthScopes = []string{"email"} - -type TestConfiguration struct { - platformEndpoint string - tokenEndpoint string - kasEndpoint string - npeClientId string - npeClientSecret string - peUsername string - pePassword string - testAttribute1 string - testAttribute2 string - insecureSkipVerify bool -} - -var config = TestConfiguration{ - platformEndpoint: os.Getenv("OPENTDF_HOSTNAME"), - tokenEndpoint: os.Getenv("OIDC_TOKEN_ENDPOINT"), - kasEndpoint: os.Getenv("OPENTDF_KAS_URL"), - npeClientId: os.Getenv("OPENTDF_CLIENT_ID"), - npeClientSecret: os.Getenv("OPENTDF_CLIENT_SECRET"), - peUsername: os.Getenv("TEST_OPENTDF_SECRET_USER_ID"), - pePassword: os.Getenv("TEST_OPENTDF_SECRET_USER_PASSWORD"), - // For default values, we added a helper function - testAttribute1: getEnv("TEST_OPENTDF_ATTRIBUTE_1", "https://example.com/attr/attr1/value/value1"), - testAttribute2: getEnv("TEST_OPENTDF_ATTRIBUTE_2", "https://example.com/attr/attr1/value/value2"), - insecureSkipVerify: getEnv("INSECURE_SKIP_VERIFY", "FALSE") == "TRUE", -} - -/* -Parse a JSON string into a map - -Based on: https://stackoverflow.com/a/72873915 -*/ -func jsonToMap(jsonStr string) map[string]interface{} { - result := make(map[string]interface{}) - json.Unmarshal([]byte(jsonStr), &result) - return result -} - -/* -A basic HTTP request - -Based on: 
-https://stackoverflow.com/q/24493116 -*/ -func authHelper(form url.Values, isPEAuth bool) (TokenAuth, error) { - tr := &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: config.insecureSkipVerify}, - } - // FIXME: Use a client with TLS verification - // client := http.Client{} - client := &http.Client{Transport: tr} - - resp, err := client.PostForm(config.tokenEndpoint, form) - - //okay, moving on... - if err != nil { - log.Fatal(`Server`+config.tokenEndpoint+` returned an error.`, err) - } - - defer resp.Body.Close() - body, err := io.ReadAll(resp.Body) - - if err != nil { - log.Fatal(`Unable to read server response body`, err) - } - - jsonMap := jsonToMap(string(body)) - - val, ok := jsonMap["access_token"].(string) - // If the key exists - if !ok { - return TokenAuth{}, errors.New("Unable to obtain 'access_token', cannot continue") - } - - if isPEAuth { - fmt.Println("Successfully auth'd PE", config.peUsername) - - return TokenAuth{ - AccessToken: val, - NpeClientId: config.npeClientId, - }, nil - } else { - fmt.Println("Successfully auth'd NPE", config.npeClientId) - - return TokenAuth{ - AccessToken: val, - }, nil - } -} - -func AuthenticatePE() (TokenAuth, error) { - form := url.Values{} - form.Add("grant_type", "password") - form.Add("client_id", config.npeClientId) - form.Add("client_secret", config.npeClientSecret) - form.Add("username", config.peUsername) - form.Add("password", config.pePassword) - return authHelper(form, true) -} - -func AuthenticateNPE() (TokenAuth, error) { - form := url.Values{} - form.Add("grant_type", "client_credentials") - form.Add("client_id", config.npeClientId) - form.Add("client_secret", config.npeClientSecret) - return authHelper(form, false) -} - -func getSingleDataAttribute(config TestConfiguration) []string { - return []string{config.testAttribute1} -} -func getMultiDataAttribute(config TestConfiguration) []string { - return []string{config.testAttribute1, config.testAttribute2} -} - -func doEncryptString(t *testing.T, dataAttributes []string) { - - got, err := EncryptString("Hello, world", OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, dataAttributes, defaultAuthScopes) - if err != nil { - t.Fatal(err) - } - - if got == "" { - t.Fatal("EncryptString returned empty value, but didn't error!") - } else { - if len(got) < 1000 { - // NOTE: Testing the size of the stringified JSON is not - // necessarily a good test. 
However, it is one way to ensure - // that we received something AND that the thing is - // JSON-seriazable - t.Error("Unexpected value") - } - fmt.Println("Got a TDF manifest") - } -} - -func Test_NPE_Encrypt_String_Nil_Attributes(t *testing.T) { - doEncryptString(t, nil) -} - -func Test_NPE_Encrypt_String_Single_Attributes(t *testing.T) { - attrValues := getSingleDataAttribute(config) - doEncryptString(t, attrValues) - -} - -func Test_NPE_Encrypt_String_Multiple_Attributes(t *testing.T) { - attrValues := getMultiDataAttribute(config) - - doEncryptString(t, attrValues) - -} - -func encrypt_file_NPE(t *testing.T, dataAttributes []string) string { - tmpInputFile, err := os.CreateTemp(t.TempDir(), "input-file.txt") - if err != nil { - log.Fatal("Could not create input file", err) - } - defer tmpInputFile.Close() - - fmt.Println("Created input file: ", tmpInputFile.Name()) - - fmt.Println("Writing some data to the input file") - if _, err = tmpInputFile.WriteString("test data"); err != nil { - log.Fatal("Unable to write to temporary file", err) - } else { - fmt.Println("Data should have been written") - } - - tmpOutputFile, err := os.CreateTemp("", "output-file-*.txt") - - if err != nil { - log.Fatal("Could not create output file", err) - } - defer tmpOutputFile.Close() - - got, err := EncryptFile(tmpInputFile.Name(), tmpOutputFile.Name(), OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, dataAttributes, defaultAuthScopes) - if err != nil { - t.Error("Failed to EncryptFile()!") - } - - if got == "" { - t.Error("Unexpected value") - } - - if !strings.HasSuffix(got, ".tdf") { - t.Error("All output files should have the .tdf extension") - } - - fmt.Println("Got a TDF manifest") - return got -} - -func encrypt_file_PE(t *testing.T, dataAttributes []string, tokenAuth TokenAuth) string { - tmpInputFile, err := os.CreateTemp("", "input-file-*.txt") - if err != nil { - log.Fatal("Could not create input file", err) - } - defer tmpInputFile.Close() - - fmt.Println("Created input file: ", tmpInputFile.Name()) - - fmt.Println("Writing some data to the input file") - if _, err = tmpInputFile.WriteString("PE test data"); err != nil { - log.Fatal("Unable to write to temporary file", err) - } else { - fmt.Println("Data should have been written") - } - - tmpOutputFile, err := os.CreateTemp("", "*.tdf") - - if err != nil { - log.Fatal("Could not create output file", err) - } - defer tmpOutputFile.Close() - - got, err := EncryptFilePE(tmpInputFile.Name(), tmpOutputFile.Name(), OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, tokenAuth, dataAttributes, defaultAuthScopes) - if err != nil { - t.Fatal("Failed to EncryptFilePE()!") - } - - if got == "" { - t.Fatal("Unexpected value") - } - - if !strings.HasSuffix(got, ".tdf") { - t.Fatal("All output files should have the .tdf extension") - } - - fmt.Println("TDF file written to disk") - return got -} - -func Test_NPE_Encrypt_File_Nil_Attributes(t *testing.T) { - encrypt_file_NPE(t, nil) -} - -func Test_NPE_Encrypt_File_Single_Attributes(t *testing.T) { - attrValues := getSingleDataAttribute(config) - encrypt_file_NPE(t, attrValues) -} - -func 
Test_NPE_Encrypt_File_Multi_Attributes(t *testing.T) { - attrValues := getMultiDataAttribute(config) - - encrypt_file_NPE(t, attrValues) -} - -func e2e_test_as_PE(t *testing.T, dataAttributes []string) { - token_for_encrypt, err := AuthenticatePE() - if err != nil { - t.Error(err) - } - - input_TDF_path := encrypt_file_PE(t, dataAttributes, token_for_encrypt) - - time.Sleep(4000 * time.Millisecond) - - plaintext_output_path, err := os.CreateTemp("", "output-file-*.txt") - if err != nil { - t.Fatal(err) - } - - token_for_decrypt, err := AuthenticatePE() - if err != nil { - t.Error(err) - } - got, err := DecryptFilePE(input_TDF_path, plaintext_output_path.Name(), OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, token_for_decrypt, defaultAuthScopes) - if err != nil { - t.Fatal(err) - } - if got == "" { - t.Error("Unexpected value") - } else { - fmt.Println("Successfully decrypted TDF") - } -} - -func Test_PE_E2E_File_Nil_Attributes(t *testing.T) { - e2e_test_as_PE(t, nil) -} - -func Test_PE_E2E_File_Single_Attributes(t *testing.T) { - attrValues := getSingleDataAttribute(config) - e2e_test_as_PE(t, attrValues) -} - -func Test_PE_E2E_File_Multi_Attributes(t *testing.T) { - attrValues := getMultiDataAttribute(config) - e2e_test_as_PE(t, attrValues) -} - -func Test_Multifile_NPE_Encrypt_Files_In_Dir_Nil_Attributes(t *testing.T) { - // Create a temporary directory - tmpDir, err := os.MkdirTemp("", "input-dir") - if err != nil { - t.Fatal("Could not create temporary directory", err) - } - defer os.RemoveAll(tmpDir) - - // Create a temporary file in the directory - tmpFile1, err := os.CreateTemp(tmpDir, "input-file1-*.txt") - if err != nil { - t.Fatal("Could not create input file", err) - } - defer tmpFile1.Close() - - // Write some data to the file - if _, err = tmpFile1.WriteString("test data"); err != nil { - t.Fatal("Unable to write to temporary file", err) - } - - // Create a temporary file in the directory - tmpFile2, err := os.CreateTemp(tmpDir, "input-file2-*.txt") - if err != nil { - t.Fatal("Could not create input file", err) - } - defer tmpFile2.Close() - - // Write some data to the file - if _, err = tmpFile2.WriteString("test data"); err != nil { - t.Fatal("Unable to write to temporary file", err) - } - - // Create a temporary file in the directory - tmpFile3, err := os.CreateTemp(tmpDir, "input-file3-*.csv") - if err != nil { - t.Fatal("Could not create input file", err) - } - defer tmpFile3.Close() - - // Write some data to the file - if _, err = tmpFile3.WriteString("test data"); err != nil { - t.Fatal("Unable to write to temporary file", err) - } - - cfg := OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - } - - got, err := EncryptFilesWithExtensionsNPE(tmpDir, []string{".txt", ".csv"}, cfg, nil, defaultAuthScopes) - if err != nil { - t.Fatal("Failed to EncryptFilesWithExtensionsNPE()!", err) - } - - if len(got) != 3 { - t.Fatal("EncryptFilesWithExtensionsNPE returned incorrect got value, but didn't error!") - } - - fmt.Println("Successfully encrypted files using file extensions") -} - -// A new test of a new 'EncryptFilesWithExtensions' function -func 
Test_Multifile_NPE_Encrypt_Files_With_Extensions_Nil_Attributes(t *testing.T) { - // Create a temporary directory - tmpDir, err := os.MkdirTemp("", "input-dir") - if err != nil { - t.Fatal("Could not create temporary directory", err) - } - defer os.RemoveAll(tmpDir) - - // Create test files - numFiles := createTestFiles(t, tmpDir) - - // Call the EncryptFilesWithExtensionsNPE function - got, err := EncryptFilesWithExtensionsNPE(tmpDir, []string{".txt", ".csv", ".pdf"}, OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, nil, defaultAuthScopes) - if err != nil { - t.Fatal("Failed to EncryptFilesWithExtensionsNPE()!", err) - } - - if len(got) != numFiles { - t.Fatal("EncryptFilesWithExtensionsNPE returned incorrect got value, but didn't error!") - } - - fmt.Println("Successfully encrypted files with extensions") -} - -// Call the DecryptFilesInDirNPE function -func Test_Multifile_NPE_Decrypt_Files_In_Dir_Nil_Attributes(t *testing.T) { - // Create a temporary directory - tmpDir, err := os.MkdirTemp("", "input-dir") - if err != nil { - t.Fatal("Could not create temporary directory", err) - } - defer os.RemoveAll(tmpDir) - - // Create test files - numFiles := createTestFiles(t, tmpDir) - - // Encrypt the file - _, err = EncryptFilesInDirNPE(tmpDir, OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, nil, defaultAuthScopes) - if err != nil { - t.Fatal("Failed to EncryptFilesInDirNPE()!", err) - } - - // Call the DecryptFilesInDirNPE function - got, err := DecryptFilesInDirNPE(tmpDir, OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, defaultAuthScopes) - if err != nil { - t.Fatal("Failed to DecryptFilesInDirNPE()!", err) - } - - if len(got) != numFiles { - t.Fatal("DecryptFilesInDirNPE returned empty value, but didn't error!") - } - - fmt.Println("Successfully decrypted files in directory") -} - -func Test_Multifile_NPE_Decrypt_Files_With_Extensions_Nil_Attributes(t *testing.T) { - // Create a temporary directory - tmpDir, err := os.MkdirTemp("", "input-dir") - if err != nil { - t.Fatal("Could not create temporary directory", err) - } - defer os.RemoveAll(tmpDir) - - // Create test files - numFiles := createTestFiles(t, tmpDir) - - // Encrypt the files - _, err = EncryptFilesWithExtensionsNPE(tmpDir, []string{".txt", ".csv", ".pdf"}, OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, nil, defaultAuthScopes) - if err != nil { - t.Fatal("Failed to EncryptFilesWithExtensionsNPE()!", err) - } - - // Call the DecryptFilesWithExtensionsNPE function - got, err := DecryptFilesWithExtensionsNPE(tmpDir, []string{".tdf"}, OpentdfConfig{ - ClientId: config.npeClientId, - ClientSecret: config.npeClientSecret, - PlatformEndpoint: config.platformEndpoint, - TokenEndpoint: config.tokenEndpoint, - KasUrl: 
config.kasEndpoint, - InsecureSkipVerify: config.insecureSkipVerify, - }, defaultAuthScopes) - if err != nil { - t.Fatal("Failed to DecryptFilesWithExtensionsNPE()!", err) - } - - if len(got) != numFiles { - t.Fatal("DecryptFilesWithExtensionsNPE returned empty value, but didn't error!") - } - - fmt.Println("Successfully decrypted files with extensions") -} - -func createTestFiles(t *testing.T, tmpDir string) int { - // A number that corresponds to the hour of the day (between 0 and 23) - numFiles := time.Now().Hour() - - if numFiles > 12 { - numFiles = numFiles - 12 // Limit the number of files to 12 - } - - for i := 0; i < numFiles; i++ { - ext := ".txt" - if i%2 == 0 { - ext = ".csv" - } else if i%3 == 0 { - ext = ".pdf" - } - - tmpFile, err := os.CreateTemp(tmpDir, fmt.Sprintf("input-file-%d-*%s", i, ext)) - if err != nil { - t.Fatal("Could not create input file", err) - } - defer tmpFile.Close() - - // Write some data to the file - if _, err = tmpFile.WriteString("test data"); err != nil { - t.Fatal("Unable to write to temporary file", err) - } - } - - return numFiles -} diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 0da1c39..0000000 --- a/poetry.lock +++ /dev/null @@ -1,184 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.3.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -markers = "python_version == \"3.10\"" -files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "packaging" -version = "24.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = 
"pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pybindgen" -version = "0.22.1" -description = "Python Bindings Generator" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "PyBindGen-0.22.1-py2.py3-none-any.whl", hash = "sha256:e9c2dad3c3e9da7811c271b5d6dcbb59ee59a2fe7877b4277556f95a9aea296e"}, - {file = "PyBindGen-0.22.1.tar.gz", hash = "sha256:8c7f22391a49a84518f5a2ad06e3a5b1e839d10e34da7631519c8a28fcba3764"}, -] - -[[package]] -name = "pytest" -version = "8.3.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version == \"3.10\"" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = "typing-extensions" -version = "4.14.1" -description = "Backported and Experimental Type Hints for Python 3.9+" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version == \"3.10\"" -files = [ - {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, - {file = "typing_extensions-4.14.1.tar.gz", hash = 
"sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, -] - -[[package]] -name = "wheel" -version = "0.45.1" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "wheel-0.45.1-py3-none-any.whl", hash = "sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248"}, - {file = "wheel-0.45.1.tar.gz", hash = "sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - -[metadata] -lock-version = "2.1" -python-versions = ">=3.10,<3.14" -content-hash = "f925fadc99f0a1eff5364ea9d6f104c2b431a5ac9cb76e2c707d4c344d1e9935" diff --git a/pyproject.toml b/pyproject.toml index 49985bf..d1e5eb4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,30 +1,84 @@ [project] name = "otdf-python" -# Should match 'setup.py' version number (used for gopy/pybindgen) -version = "0.2.20" -description = "Unofficial OpenTDF SDK for Python." +version = "0.3.1" +description = "Unofficial OpenTDF SDK for Python" +readme = "README.md" authors = [ - {name="b-long", email="b-long@users.noreply.github.com"} + { name = "b-long", email = "b-long@users.noreply.github.com" } ] -readme = "README.md" requires-python = ">=3.10" +dependencies = [ + "cryptography>=45.0.4", + "connect-python[compiler]>=0.4.2", + "httpx>=0.28.1", + "protobuf>=6.31.1", + "pyjwt>=2.10.1", + "typing-extensions>=4.14.1", + # Legacy gRPC support (may be removed in future versions) + "grpcio>=1.74.0", + "grpcio-tools>=1.74.0", + "grpcio-status>=1.74.0", + "protoc-gen-openapiv2>=0.0.1", +] [build-system] requires = ["hatchling"] build-backend = "hatchling.build" -[dependencies] -wheel = "^0.45.0" -pybindgen = "^0.22.1" +[tool.hatch.build.targets.wheel] +packages = [ + "src/otdf_python", + "otdf-python-proto/src/otdf_python_proto", +] + -[tool.poetry] -package-mode = false -version = "0.2.20" +[dependency-groups] +dev = [ + "pydantic-settings>=2.10.1", + "pytest>=8.4.1", + "respx>=0.21.1", + "ruff>=0.12.10", + "tomli>=2.2.1 ; python_full_version < '3.11'", +] + +[tool.pytest.ini_options] +markers = [ + "integration: mark a test as an integration test." +] +testpaths = ["tests"] +norecursedirs = ["otdf-python-proto"] -[tool.poetry.dependencies] -python = ">=3.10,<3.14" -wheel = "^0.45.0" -pybindgen = "^0.22.1" +[tool.ruff] +line-length = 88 -[tool.poetry.group.dev.dependencies] -pytest = ">=8.1" +# See https://docs.astral.sh/ruff/rules/ +# for rule information. +# E501: Line too long (black enforces this for us) +lint.ignore = [ + "E501", +] +lint.select = [ + # pycodestyle checks. + "E", + "W", + # pyflakes checks. + "F", + # flake8-bugbear checks. + "B0", + # flake8-comprehensions checks. 
+ "C4", + # McCabe complexity + "C90", + # isort + "I", + # Performance-related rules + "PERF", # Ruff's performance rules + # Additional useful rules + "UP", # pyupgrade (modern Python features) + "SIM", # flake8-simplify (simplifications) + "RUF", # Ruff-specific rules + "FURB", # refurb (FURB) + "PT018", # flake8-pytest-style (pytest style) +] +# Ignore generated files +extend-exclude = ["otdf-python-proto/src/"] diff --git a/setup.py b/setup.py deleted file mode 100644 index f4ba22f..0000000 --- a/setup.py +++ /dev/null @@ -1,30 +0,0 @@ -import setuptools - - -from pathlib import Path - -""" -NOTE: This project uses more than one version of a 'setup.py' file: -* 'setup.py', and -* 'setup_ci.py' - -Based on: - https://github.com/popatam/gopy_build_wheel_example/blob/main/setup_ci.py -""" - -this_directory = Path(__file__).parent -long_description = (this_directory / "README.md").read_text() - -setuptools.setup( - name="otdf_python", - packages=setuptools.find_packages(include=["otdf_python"]), - py_modules=["otdf_python.gotdf_python"], - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/b-long/opentdf-python-sdk", - package_data={"otdf_python": ["*.so"]}, - # Should match 'pyproject.toml' version number - version="0.2.20", - author_email="b-long@users.noreply.github.com", - include_package_data=True, -) diff --git a/setup_ci.py b/setup_ci.py deleted file mode 100644 index b2bd4b6..0000000 --- a/setup_ci.py +++ /dev/null @@ -1,105 +0,0 @@ -import json -import os -import subprocess -import sys -import re -from distutils.core import Extension -from pathlib import Path - -import setuptools -from setuptools.command.build_ext import build_ext - -""" -NOTE: This project uses more than one version of a 'setup.py' file: -* 'setup.py', and -* 'setup_ci.py' - -Based on: - https://github.com/popatam/gopy_build_wheel_example/blob/main/setup_ci.py -""" - - -def normalize(name): # https://peps.python.org/pep-0503/#normalized-names - return re.sub(r"[-_.]+", "-", name).lower() - - -PACKAGE_PATH = "gotdf_python" -PACKAGE_NAME = "otdf_python" - -if sys.platform == "darwin": - # PYTHON_BINARY_PATH is setting explicitly for 310 and 311, see build_wheel.yml - # on macos PYTHON_BINARY_PATH must be python bin installed from python.org or from brew - PYTHON_BINARY = os.getenv("PYTHON_BINARY_PATH", sys.executable) - if PYTHON_BINARY == sys.executable: - subprocess.check_call([sys.executable, "-m", "pip", "install", "pybindgen"]) -else: - # linux & windows - PYTHON_BINARY = sys.executable - subprocess.check_call([sys.executable, "-m", "pip", "install", "pybindgen"]) - - -def _generate_path_with_gopath() -> str: - go_path = subprocess.check_output(["go", "env", "GOPATH"]).decode("utf-8").strip() - path_val = f"{os.getenv('PATH')}:{go_path}/bin" - return path_val - - -class CustomBuildExt(build_ext): - def build_extension(self, ext: Extension): - bin_path = _generate_path_with_gopath() - go_env = json.loads( - subprocess.check_output(["go", "env", "-json"]).decode("utf-8").strip() - ) - - destination = ( - os.path.dirname(os.path.abspath(self.get_ext_fullpath(ext.name))) - + f"/{PACKAGE_NAME}" - ) - - subprocess.check_call( - [ - "gopy", - "build", - "-no-make", - "-dynamic-link=True", - "-output", - destination, - "-vm", - PYTHON_BINARY, - *ext.sources, - ], - env={"PATH": bin_path, **go_env, "CGO_LDFLAGS_ALLOW": ".*"}, - ) - - # dirty hack to avoid "from pkg import pkg", remove if needed - with open(f"{destination}/__init__.py", "w") as f: - 
f.write(f"from .{PACKAGE_PATH} import *") - - -this_directory = Path(__file__).parent -long_description = (this_directory / "README.md").read_text() - -setuptools.setup( - name="otdf_python", - version="0.2.20", - author="b-long", - description="Unofficial OpenTDF SDK for Python.", - long_description_content_type="text/markdown", - long_description=long_description, - url="https://github.com/b-long/opentdf-python-sdk", - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - ], - include_package_data=True, - cmdclass={ - "build_ext": CustomBuildExt, - }, - ext_modules=[ - Extension( - PACKAGE_NAME, - [PACKAGE_PATH], - ) - ], -) diff --git a/src/otdf_python/__init__.py b/src/otdf_python/__init__.py new file mode 100644 index 0000000..f8dcd49 --- /dev/null +++ b/src/otdf_python/__init__.py @@ -0,0 +1,25 @@ +""" +OpenTDF Python SDK + +A Python implementation of the OpenTDF SDK for working with Trusted Data Format (TDF) files. +Provides both programmatic APIs and command-line interface for encryption and decryption. +""" + +from .cli import main as cli_main +from .config import KASInfo, NanoTDFConfig, TDFConfig +from .sdk import SDK +from .sdk_builder import SDKBuilder + +__all__ = [ + "SDK", + "KASInfo", + "NanoTDFConfig", + "SDKBuilder", + "TDFConfig", + "cli_main", +] + + +def main() -> None: + """Entry point for the CLI.""" + cli_main() diff --git a/src/otdf_python/__main__.py b/src/otdf_python/__main__.py new file mode 100644 index 0000000..e3e8c97 --- /dev/null +++ b/src/otdf_python/__main__.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 +""" +Main entry point for running otdf_python as a module. + +This allows the package to be run with `python -m otdf_python` and properly +handles the CLI interface without import conflicts. +""" + +from .cli import main + +if __name__ == "__main__": + main() diff --git a/src/otdf_python/address_normalizer.py b/src/otdf_python/address_normalizer.py new file mode 100644 index 0000000..1b636b7 --- /dev/null +++ b/src/otdf_python/address_normalizer.py @@ -0,0 +1,84 @@ +""" +Address normalization utilities for OpenTDF. +""" + +import logging +import re +from urllib.parse import urlparse + +from .sdk_exceptions import SDKException + +logger = logging.getLogger(__name__) + + +def normalize_address(url_string: str, use_plaintext: bool) -> str: + """ + Normalize a URL address to ensure it has the correct scheme and port. 
+ + Args: + url_string: The URL string to normalize + use_plaintext: If True, use http scheme, otherwise use https + + Returns: + The normalized URL string + + Raises: + SDKException: If there's an error parsing or creating the URL + """ + scheme = "http" if use_plaintext else "https" + + # Check if we have a host:port format without scheme (with non-digit port) + host_port_pattern = re.match(r"^([^/:]+):([^/]+)$", url_string) + if host_port_pattern: + host = host_port_pattern.group(1) + port_str = host_port_pattern.group(2) + try: + port = int(port_str) + except ValueError: + raise SDKException(f"Invalid port in URL [{url_string}]") + + normalized_url = f"{scheme}://{host}:{port}" + logger.debug(f"normalized url [{url_string}] to [{normalized_url}]") + return normalized_url + + try: + # Check if we just have a hostname without scheme and port + if "://" not in url_string and "/" not in url_string and ":" not in url_string: + port = 80 if use_plaintext else 443 + normalized_url = f"{scheme}://{url_string}:{port}" + logger.debug(f"normalized url [{url_string}] to [{normalized_url}]") + return normalized_url + + # Parse the URL + parsed_url = urlparse(url_string) + + # If no scheme, add one + if not parsed_url.scheme: + url_string = f"{scheme}://{url_string}" + parsed_url = urlparse(url_string) + + # Extract host and port + host = parsed_url.netloc.split(":")[0] if parsed_url.netloc else parsed_url.path + + # If there's a port in the URL, try to extract it + port = None + if ":" in parsed_url.netloc: + _, port_str = parsed_url.netloc.split(":", 1) + try: + port = int(port_str) + except ValueError: + raise SDKException(f"Invalid port in URL [{url_string}]") + + # If no port was found or extracted, use the default + if port is None: + port = 80 if use_plaintext else 443 + + # Reconstruct the URL with the desired scheme + normalized_url = f"{scheme}://{host}:{port}" + logger.debug(f"normalized url [{url_string}] to [{normalized_url}]") + return normalized_url + + except Exception as e: + if isinstance(e, SDKException): + raise e + raise SDKException(f"Error normalizing URL [{url_string}]", e) diff --git a/src/otdf_python/aesgcm.py b/src/otdf_python/aesgcm.py new file mode 100644 index 0000000..ced6427 --- /dev/null +++ b/src/otdf_python/aesgcm.py @@ -0,0 +1,55 @@ +import os + +from cryptography.hazmat.primitives.ciphers.aead import AESGCM + + +class AesGcm: + GCM_NONCE_LENGTH = 12 + GCM_TAG_LENGTH = 16 + + def __init__(self, key: bytes): + if not key or len(key) not in (16, 24, 32): + raise ValueError("Invalid key size for GCM encryption") + self.key = key + self.aesgcm = AESGCM(key) + + def get_key(self) -> bytes: + return self.key + + class Encrypted: + def __init__(self, iv: bytes, ciphertext: bytes): + self.iv = iv + self.ciphertext = ciphertext + + def as_bytes(self) -> bytes: + return self.iv + self.ciphertext + + def encrypt( + self, plaintext: bytes, offset: int = 0, length: int | None = None + ) -> "AesGcm.Encrypted": + if length is None: + length = len(plaintext) - offset + iv = os.urandom(self.GCM_NONCE_LENGTH) + ct = self.aesgcm.encrypt(iv, plaintext[offset : offset + length], None) + return self.Encrypted(iv, ct) + + def encrypt_with_iv( + self, + iv: bytes, + auth_tag_len: int, + plaintext: bytes, + offset: int = 0, + length: int | None = None, + ) -> bytes: + if length is None: + length = len(plaintext) - offset + ct = self.aesgcm.encrypt(iv, plaintext[offset : offset + length], None) + return iv + ct + + def decrypt(self, encrypted: "AesGcm.Encrypted") -> bytes: + return 
self.aesgcm.decrypt(encrypted.iv, encrypted.ciphertext, None) + + def decrypt_with_iv( + self, iv: bytes, auth_tag_len: int, cipher_data: bytes + ) -> bytes: + return self.aesgcm.decrypt(iv, cipher_data, None) diff --git a/src/otdf_python/assertion_config.py b/src/otdf_python/assertion_config.py new file mode 100644 index 0000000..4c96fb2 --- /dev/null +++ b/src/otdf_python/assertion_config.py @@ -0,0 +1,84 @@ +from enum import Enum, auto +from typing import Any + + +class Type(Enum): + HANDLING_ASSERTION = "handling" + BASE_ASSERTION = "base" + + def __str__(self): + return self.value + + +class Scope(Enum): + TRUSTED_DATA_OBJ = "tdo" + PAYLOAD = "payload" + + def __str__(self): + return self.value + + +class AssertionKeyAlg(Enum): + RS256 = auto() + HS256 = auto() + NOT_DEFINED = auto() + + +class AppliesToState(Enum): + ENCRYPTED = "encrypted" + UNENCRYPTED = "unencrypted" + + def __str__(self): + return self.value + + +class BindingMethod(Enum): + JWS = "jws" + + def __str__(self): + return self.value + + +class AssertionKey: + def __init__(self, alg: AssertionKeyAlg, key: Any): + self.alg = alg + self.key = key + + def is_defined(self): + return self.alg != AssertionKeyAlg.NOT_DEFINED + + +class Statement: + def __init__(self, format: str, schema: str, value: str): + self.format = format + self.schema = schema + self.value = value + + def __eq__(self, other): + return ( + isinstance(other, Statement) + and self.format == other.format + and self.schema == other.schema + and self.value == other.value + ) + + def __hash__(self): + return hash((self.format, self.schema, self.value)) + + +class AssertionConfig: + def __init__( + self, + id: str, + type: Type, + scope: Scope, + applies_to_state: AppliesToState, + statement: Statement, + signing_key: AssertionKey | None = None, + ): + self.id = id + self.type = type + self.scope = scope + self.applies_to_state = applies_to_state + self.statement = statement + self.signing_key = signing_key diff --git a/src/otdf_python/asym_crypto.py b/src/otdf_python/asym_crypto.py new file mode 100644 index 0000000..932bfa1 --- /dev/null +++ b/src/otdf_python/asym_crypto.py @@ -0,0 +1,85 @@ +""" +Asymmetric encryption and decryption utilities for RSA keys in PEM format. +""" + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import padding, rsa +from cryptography.x509 import load_pem_x509_certificate + +from .sdk_exceptions import SDKException + + +class AsymDecryption: + """ + Provides functionality for asymmetric decryption using an RSA private key. + """ + + def __init__(self, private_key_pem: str): + try: + self.private_key = serialization.load_pem_private_key( + private_key_pem.encode(), password=None, backend=default_backend() + ) + except Exception as e: + raise SDKException(f"Failed to load private key: {e}") + + def decrypt(self, data: bytes) -> bytes: + if not self.private_key: + raise SDKException("Failed to decrypt, private key is empty") + try: + return self.private_key.decrypt( + data, + padding.OAEP( + mgf=padding.MGF1(algorithm=hashes.SHA1()), + algorithm=hashes.SHA1(), + label=None, + ), + ) + except Exception as e: + raise SDKException(f"Error performing decryption: {e}") + + +class AsymEncryption: + """ + Provides functionality for asymmetric encryption using an RSA public key or certificate in PEM format. 
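+
+    Illustrative round-trip, assuming `public_key_pem` and `private_key_pem`
+    hold a matching RSA pair (see `AsymDecryption` above):
+
+        ciphertext = AsymEncryption(public_key_pem).encrypt(b"secret")
+        plaintext = AsymDecryption(private_key_pem).decrypt(ciphertext)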
+ """ + + def __init__(self, public_key_pem: str): + try: + if "BEGIN CERTIFICATE" in public_key_pem: + cert = load_pem_x509_certificate( + public_key_pem.encode(), default_backend() + ) + self.public_key = cert.public_key() + else: + self.public_key = serialization.load_pem_public_key( + public_key_pem.encode(), backend=default_backend() + ) + except Exception as e: + raise SDKException(f"Failed to load public key: {e}") + + if not isinstance(self.public_key, rsa.RSAPublicKey): + raise SDKException("Not an RSA PEM formatted public key") + + def encrypt(self, data: bytes) -> bytes: + try: + return self.public_key.encrypt( + data, + padding.OAEP( + mgf=padding.MGF1(algorithm=hashes.SHA1()), + algorithm=hashes.SHA1(), + label=None, + ), + ) + except Exception as e: + raise SDKException(f"Error performing encryption: {e}") + + def public_key_in_pem_format(self) -> str: + try: + pem = self.public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + return pem.decode() + except Exception as e: + raise SDKException(f"Error exporting public key to PEM: {e}") diff --git a/src/otdf_python/asym_decryption.py b/src/otdf_python/asym_decryption.py new file mode 100644 index 0000000..af11414 --- /dev/null +++ b/src/otdf_python/asym_decryption.py @@ -0,0 +1,53 @@ +import base64 + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import padding + +from .sdk_exceptions import SDKException + + +class AsymDecryption: + """ + Class providing functionality for asymmetric decryption using an RSA private key. + """ + + CIPHER_TRANSFORM = "RSA/ECB/OAEPWithSHA-1AndMGF1Padding" + PRIVATE_KEY_HEADER = "-----BEGIN PRIVATE KEY-----" + PRIVATE_KEY_FOOTER = "-----END PRIVATE KEY-----" + + def __init__(self, private_key_pem: str | None = None, private_key_obj=None): + if private_key_obj is not None: + self.private_key = private_key_obj + elif private_key_pem is not None: + try: + private_key_pem = ( + private_key_pem.replace(self.PRIVATE_KEY_HEADER, "") + .replace(self.PRIVATE_KEY_FOOTER, "") + .replace("\n", "") + .replace("\r", "") + .replace(" ", "") + ) + decoded = base64.b64decode(private_key_pem) + self.private_key = serialization.load_der_private_key( + decoded, password=None, backend=default_backend() + ) + except Exception as e: + raise SDKException(f"Failed to load private key: {e}") + else: + self.private_key = None + + def decrypt(self, data: bytes) -> bytes: + if self.private_key is None: + raise SDKException("Failed to decrypt, private key is empty") + try: + return self.private_key.decrypt( + data, + padding.OAEP( + mgf=padding.MGF1(algorithm=hashes.SHA1()), + algorithm=hashes.SHA1(), + label=None, + ), + ) + except Exception as e: + raise SDKException(f"Error performing decryption: {e}") diff --git a/src/otdf_python/asym_encryption.py b/src/otdf_python/asym_encryption.py new file mode 100644 index 0000000..7e5e27a --- /dev/null +++ b/src/otdf_python/asym_encryption.py @@ -0,0 +1,75 @@ +import base64 +import re + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.x509 import load_pem_x509_certificate + +from .sdk_exceptions import SDKException + + +class AsymEncryption: + """ + Provides methods for asymmetric encryption and handling public keys in PEM format. 
+ """ + + PUBLIC_KEY_HEADER = "-----BEGIN PUBLIC KEY-----" + PUBLIC_KEY_FOOTER = "-----END PUBLIC KEY-----" + CIPHER_TRANSFORM = "RSA/ECB/OAEPWithSHA-1AndMGF1Padding" + + def __init__(self, public_key_pem: str | None = None, public_key_obj=None): + if public_key_obj is not None: + self.public_key = public_key_obj + elif public_key_pem is not None: + try: + if "BEGIN CERTIFICATE" in public_key_pem: + cert = load_pem_x509_certificate( + public_key_pem.encode(), default_backend() + ) + self.public_key = cert.public_key() + else: + # Remove PEM headers/footers and whitespace + pem_body = re.sub(r"-----BEGIN (.*)-----", "", public_key_pem) + pem_body = re.sub(r"-----END (.*)-----", "", pem_body) + pem_body = re.sub(r"\s", "", pem_body) + decoded = base64.b64decode(pem_body) + self.public_key = serialization.load_der_public_key( + decoded, backend=default_backend() + ) + except Exception as e: + raise SDKException(f"Failed to load public key: {e}") + else: + self.public_key = None + + from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey + + if self.public_key is not None and not isinstance( + self.public_key, RSAPublicKey + ): + raise SDKException("Not an RSA PEM formatted public key") + + def encrypt(self, data: bytes) -> bytes: + if self.public_key is None: + raise SDKException("Failed to encrypt, public key is empty") + try: + return self.public_key.encrypt( + data, + padding.OAEP( + mgf=padding.MGF1(algorithm=hashes.SHA1()), + algorithm=hashes.SHA1(), + label=None, + ), + ) + except Exception as e: + raise SDKException(f"Error performing encryption: {e}") + + def public_key_in_pem_format(self) -> str: + try: + pem = self.public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + return pem.decode() + except Exception as e: + raise SDKException(f"Error exporting public key to PEM: {e}") diff --git a/src/otdf_python/auth_headers.py b/src/otdf_python/auth_headers.py new file mode 100644 index 0000000..da6124a --- /dev/null +++ b/src/otdf_python/auth_headers.py @@ -0,0 +1,21 @@ +from dataclasses import dataclass + + +@dataclass +class AuthHeaders: + """ + Represents authentication headers used in token-based authorization. + This class holds authorization and DPoP (Demonstrating Proof of Possession) headers + that are used in token-based API requests. 
+ """ + + auth_header: str + dpop_header: str + + def get_auth_header(self) -> str: + """Returns the authorization header.""" + return self.auth_header + + def get_dpop_header(self) -> str: + """Returns the DPoP header.""" + return self.dpop_header diff --git a/src/otdf_python/autoconfigure_utils.py b/src/otdf_python/autoconfigure_utils.py new file mode 100644 index 0000000..4206b00 --- /dev/null +++ b/src/otdf_python/autoconfigure_utils.py @@ -0,0 +1,113 @@ +import re +import urllib.parse +from dataclasses import dataclass +from typing import Any + + +# RuleType constants +class RuleType: + HIERARCHY = "hierarchy" + ALL_OF = "allOf" + ANY_OF = "anyOf" + UNSPECIFIED = "unspecified" + EMPTY_TERM = "DEFAULT" + + +@dataclass(frozen=True) +class KeySplitStep: + kas: str + splitID: str + + def __str__(self): + return f"KeySplitStep{{kas={self.kas}, splitID={self.splitID}}}" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, KeySplitStep): + return False + return self.kas == other.kas and self.splitID == other.splitID + + def __hash__(self): + return hash((self.kas, self.splitID)) + + +class AutoConfigureException(Exception): + pass + + +class AttributeNameFQN: + def __init__(self, url: str): + pattern = re.compile(r"^(https?://[\w./-]+)/attr/([^/\s]*)$") + matcher = pattern.match(url) + if not matcher or not matcher.group(1) or not matcher.group(2): + raise AutoConfigureException("invalid type: attribute regex fail") + try: + urllib.parse.unquote(matcher.group(2)) + except Exception: + raise AutoConfigureException( + f"invalid type: error in attribute name [{matcher.group(2)}]" + ) + self.url = url + self.key = url.lower() + + def __str__(self): + return self.url + + def select(self, value: str): + new_url = f"{self.url}/value/{urllib.parse.quote(value)}" + return AttributeValueFQN(new_url) + + def prefix(self): + return self.url + + def get_key(self): + return self.key + + def authority(self): + pattern = re.compile(r"^(https?://[\w./-]+)/attr/[^/\s]*$") + matcher = pattern.match(self.url) + if not matcher: + raise AutoConfigureException("invalid type") + return matcher.group(1) + + def name(self): + pattern = re.compile(r"^https?://[\w./-]+/attr/([^/\s]*)$") + matcher = pattern.match(self.url) + if not matcher: + raise AutoConfigureException("invalid attribute") + try: + return urllib.parse.unquote(matcher.group(1)) + except Exception: + raise AutoConfigureException("invalid type") + + +class AttributeValueFQN: + def __init__(self, url: str): + pattern = re.compile(r"^(https?://[\w./-]+)/attr/(\S*)/value/(\S*)$") + matcher = pattern.match(url) + if ( + not matcher + or not matcher.group(1) + or not matcher.group(2) + or not matcher.group(3) + ): + raise AutoConfigureException( + f"invalid type: attribute regex fail for [{url}]" + ) + try: + urllib.parse.unquote(matcher.group(2)) + urllib.parse.unquote(matcher.group(3)) + except Exception: + raise AutoConfigureException("invalid type: error in attribute or value") + self.url = url + self.key = url.lower() + + def __str__(self): + return self.url + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, AttributeValueFQN): + return False + return self.key == other.key + + def __hash__(self): + return hash(self.key) diff --git a/src/otdf_python/cli.py b/src/otdf_python/cli.py new file mode 100644 index 0000000..3c99be3 --- /dev/null +++ b/src/otdf_python/cli.py @@ -0,0 +1,570 @@ +#!/usr/bin/env python3 +""" +OpenTDF Python CLI + +A command-line interface for encrypting and decrypting files using OpenTDF. 
+Provides encrypt, decrypt, and inspect commands similar to the otdfctl CLI. +""" + +import argparse +import contextlib +import json +import logging +import sys +from dataclasses import asdict +from importlib import metadata +from io import BytesIO +from pathlib import Path + +from otdf_python.config import KASInfo, NanoTDFConfig, TDFConfig +from otdf_python.sdk import SDK +from otdf_python.sdk_builder import SDKBuilder +from otdf_python.sdk_exceptions import SDKException +from otdf_python.tdf import TDFReaderConfig + +try: + __version__ = metadata.version("otdf-python") +except metadata.PackageNotFoundError: + # package is not installed, e.g., in development + __version__ = "0.0.0" + + +# Set up logging +logger = logging.getLogger(__name__) + + +class CLIError(Exception): + """Custom exception for CLI errors.""" + + def __init__(self, level: str, message: str, cause: Exception | None = None): + self.level = level + self.message = message + self.cause = cause + super().__init__(message) + + +def setup_logging(level: str = "INFO", silent: bool = False): + """Set up logging configuration.""" + if silent: + level = "CRITICAL" + + log_level = getattr(logging, level.upper(), logging.INFO) + logging.basicConfig( + level=log_level, + format="%(levelname)s: %(message)s", + handlers=[logging.StreamHandler(sys.stderr)], + ) + + +def validate_file_exists(file_path: str) -> Path: + """Validate that a file exists and is readable.""" + path = Path(file_path) + if not path.exists(): + raise CLIError("CRITICAL", f"File does not exist: {file_path}") + if not path.is_file(): + raise CLIError("CRITICAL", f"Path is not a file: {file_path}") + return path + + +def parse_attributes(attributes_str: str) -> list[str]: + """Parse comma-separated attributes string.""" + if not attributes_str: + return [] + return [attr.strip() for attr in attributes_str.split(",") if attr.strip()] + + +def parse_kas_endpoints(kas_str: str) -> list[str]: + """Parse comma-separated KAS endpoints.""" + if not kas_str: + return [] + return [kas.strip() for kas in kas_str.split(",") if kas.strip()] + + +def load_client_credentials(creds_file_path: str) -> tuple[str, str]: + """Load client credentials from JSON file.""" + try: + creds_path = Path(creds_file_path) + if not creds_path.exists(): + raise CLIError( + "CRITICAL", f"Credentials file does not exist: {creds_file_path}" + ) + + with open(creds_path) as f: + creds = json.load(f) + + client_id = creds.get("clientId") + client_secret = creds.get("clientSecret") + + if not client_id or not client_secret: + raise CLIError( + "CRITICAL", + f"Credentials file must contain 'clientId' and 'clientSecret' fields: {creds_file_path}", + ) + + return client_id, client_secret + + except json.JSONDecodeError as e: + raise CLIError( + "CRITICAL", f"Invalid JSON in credentials file {creds_file_path}: {e}" + ) + except Exception as e: + raise CLIError( + "CRITICAL", f"Error reading credentials file {creds_file_path}: {e}" + ) + + +def build_sdk(args) -> SDK: + """Build SDK instance from CLI arguments.""" + builder = SDKBuilder() + + if args.platform_url: + builder.set_platform_endpoint(args.platform_url) + + # Auto-detect HTTP URLs and enable plaintext mode + if args.platform_url.startswith("http://") and ( + not hasattr(args, "plaintext") or not args.plaintext + ): + logger.debug( + f"Auto-detected HTTP URL {args.platform_url}, enabling plaintext mode" + ) + builder.use_insecure_plaintext_connection(True) + + if args.oidc_endpoint: + builder.set_issuer_endpoint(args.oidc_endpoint) + + if 
args.client_id and args.client_secret: + builder.client_secret(args.client_id, args.client_secret) + elif hasattr(args, "with_client_creds_file") and args.with_client_creds_file: + # Load credentials from file + client_id, client_secret = load_client_credentials(args.with_client_creds_file) + builder.client_secret(client_id, client_secret) + elif hasattr(args, "auth") and args.auth: + # Parse combined auth string (clientId:clientSecret) - legacy support + auth_parts = args.auth.split(":") + if len(auth_parts) != 2: + raise CLIError( + "CRITICAL", + f"Auth expects :, received {args.auth}", + ) + builder.client_secret(auth_parts[0], auth_parts[1]) + else: + raise CLIError( + "CRITICAL", + "Authentication required: provide --with-client-creds-file OR --client-id and --client-secret", + ) + + if hasattr(args, "plaintext") and args.plaintext: + builder.use_insecure_plaintext_connection(True) + + if args.insecure: + builder.use_insecure_skip_verify(True) + + return builder.build() + + +def create_tdf_config(sdk: SDK, args) -> TDFConfig: + """Create TDF configuration from CLI arguments.""" + attributes = ( + parse_attributes(args.attributes) + if hasattr(args, "attributes") and args.attributes + else [] + ) + + config = sdk.new_tdf_config(attributes=attributes) + + if hasattr(args, "kas_endpoint") and args.kas_endpoint: + # Add KAS endpoints + kas_endpoints = parse_kas_endpoints(args.kas_endpoint) + kas_info_list = [KASInfo(url=kas_url) for kas_url in kas_endpoints] + config.kas_info_list.extend(kas_info_list) + + if hasattr(args, "mime_type") and args.mime_type: + config.mime_type = args.mime_type + + if hasattr(args, "autoconfigure") and args.autoconfigure is not None: + config.autoconfigure = args.autoconfigure + + return config + + +def create_nano_tdf_config(sdk: SDK, args) -> NanoTDFConfig: + """Create NanoTDF configuration from CLI arguments.""" + attributes = ( + parse_attributes(args.attributes) + if hasattr(args, "attributes") and args.attributes + else [] + ) + + config = NanoTDFConfig(attributes=attributes) + + if hasattr(args, "kas_endpoint") and args.kas_endpoint: + # Add KAS endpoints + kas_endpoints = parse_kas_endpoints(args.kas_endpoint) + kas_info_list = [KASInfo(url=kas_url) for kas_url in kas_endpoints] + config.kas_info_list.extend(kas_info_list) + + if hasattr(args, "policy_binding") and args.policy_binding: + if args.policy_binding.lower() == "ecdsa": + config.ecc_mode = "ecdsa" + else: + config.ecc_mode = "gmac" # default + + return config + + +def cmd_encrypt(args): + """Handle encrypt command.""" + logger.info("Running encrypt command") + + # Validate input file + input_path = validate_file_exists(args.file) + + # Build SDK + sdk = build_sdk(args) + + try: + # Read input file + with open(input_path, "rb") as input_file: + payload = input_file.read() + + # Determine output + if args.output: + output_path = Path(args.output) + with open(output_path, "wb") as output_file: + try: + # Create appropriate config based on container type + container_type = getattr(args, "container_type", "tdf") + + if container_type == "nano": + logger.debug("Creating NanoTDF") + config = create_nano_tdf_config(sdk, args) + output_stream = BytesIO() + size = sdk.create_nano_tdf( + BytesIO(payload), output_stream, config + ) + output_file.write(output_stream.getvalue()) + logger.info(f"Created NanoTDF of size {size} bytes") + else: + logger.debug("Creating TDF") + config = create_tdf_config(sdk, args) + output_stream = BytesIO() + manifest, size, _ = sdk.create_tdf( + BytesIO(payload), config, 
output_stream + ) + output_file.write(output_stream.getvalue()) + logger.info(f"Created TDF of size {size} bytes") + + except Exception: + # Clean up the output file if there was an error + with contextlib.suppress(Exception): + output_path.unlink() + raise + else: + output_file = sys.stdout.buffer + # Create appropriate config based on container type + container_type = getattr(args, "container_type", "tdf") + + if container_type == "nano": + logger.debug("Creating NanoTDF") + config = create_nano_tdf_config(sdk, args) + output_stream = BytesIO() + size = sdk.create_nano_tdf(BytesIO(payload), output_stream, config) + output_file.write(output_stream.getvalue()) + logger.info(f"Created NanoTDF of size {size} bytes") + else: + logger.debug("Creating TDF") + config = create_tdf_config(sdk, args) + output_stream = BytesIO() + manifest, size, _ = sdk.create_tdf( + BytesIO(payload), config, output_stream + ) + output_file.write(output_stream.getvalue()) + logger.info(f"Created TDF of size {size} bytes") + + finally: + sdk.close() + + +def cmd_decrypt(args): + """Handle decrypt command.""" + logger.info("Running decrypt command") + + # Validate input file + input_path = validate_file_exists(args.file) + + # Build SDK + sdk = build_sdk(args) + + try: + # Read encrypted file + with open(input_path, "rb") as input_file: + encrypted_data = input_file.read() + + # Determine output + if args.output: + output_path = Path(args.output) + with open(output_path, "wb") as output_file: + try: + # Try to determine if it's a NanoTDF or regular TDF + # NanoTDFs have a specific header format, regular TDFs are ZIP files + if encrypted_data.startswith(b"PK"): + # Regular TDF (ZIP format) + logger.debug("Decrypting TDF") + reader_config = TDFReaderConfig() + tdf_reader = sdk.load_tdf_with_config( + encrypted_data, reader_config + ) + # Access payload directly from TDFReader + payload_bytes = tdf_reader.payload + output_file.write(payload_bytes) + logger.info("Successfully decrypted TDF") + else: + # Assume NanoTDF + logger.debug("Decrypting NanoTDF") + config = create_nano_tdf_config(sdk, args) + sdk.read_nano_tdf(BytesIO(encrypted_data), output_file, config) + logger.info("Successfully decrypted NanoTDF") + + except Exception: + # Clean up the output file if there was an error + output_path.unlink(missing_ok=True) + raise + else: + output_file = sys.stdout.buffer + # Try to determine if it's a NanoTDF or regular TDF + # NanoTDFs have a specific header format, regular TDFs are ZIP files + if encrypted_data.startswith(b"PK"): + # Regular TDF (ZIP format) + logger.debug("Decrypting TDF") + reader_config = TDFReaderConfig() + tdf_reader = sdk.load_tdf_with_config(encrypted_data, reader_config) + payload_bytes = tdf_reader.payload + output_file.write(payload_bytes) + logger.info("Successfully decrypted TDF") + else: + # Assume NanoTDF + logger.debug("Decrypting NanoTDF") + config = create_nano_tdf_config(sdk, args) + sdk.read_nano_tdf(BytesIO(encrypted_data), output_file, config) + logger.info("Successfully decrypted NanoTDF") + + finally: + sdk.close() + + +def cmd_inspect(args): + """Handle inspect command.""" + logger.info("Running inspect command") + + # Validate input file + input_path = validate_file_exists(args.file) + + try: + sdk = build_sdk(args) + + try: + # Read encrypted file + with open(input_path, "rb") as input_file: + encrypted_data = input_file.read() + + if encrypted_data.startswith(b"PK"): + # Regular TDF + logger.debug("Inspecting TDF") + reader_config = TDFReaderConfig() + tdf_reader = 
sdk.load_tdf_with_config( + BytesIO(encrypted_data), reader_config + ) + manifest = tdf_reader.manifest + + # Try to get data attributes + try: + data_attributes = [] # This would need to be implemented in the SDK + inspection_result = { + "manifest": asdict(manifest), + "dataAttributes": data_attributes, + } + except Exception as e: + logger.warning(f"Could not retrieve data attributes: {e}") + inspection_result = {"manifest": asdict(manifest)} + + print(json.dumps(inspection_result, indent=2, default=str)) + else: + # NanoTDF - for now just show basic info + logger.debug("Inspecting NanoTDF") + print( + json.dumps( + { + "type": "NanoTDF", + "size": len(encrypted_data), + "note": "NanoTDF inspection not fully implemented", + }, + indent=2, + ) + ) + + finally: + sdk.close() + + except Exception as e: + # If we can't inspect due to auth issues, show what we can + logger.warning(f"Limited inspection due to: {e}") + with open(input_path, "rb") as input_file: + encrypted_data = input_file.read() + + file_type = "TDF" if encrypted_data.startswith(b"PK") else "NanoTDF" + print( + json.dumps( + { + "type": file_type, + "size": len(encrypted_data), + "note": "Full inspection requires authentication", + }, + indent=2, + ) + ) + + +def create_parser() -> argparse.ArgumentParser: + """Create the argument parser.""" + parser = argparse.ArgumentParser( + description="OpenTDF CLI - Encrypt and decrypt files using OpenTDF", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + %(prog)s encrypt --file plain.txt --with-client-creds-file creds.json --platform-url https://platform.example.com + %(prog)s decrypt --file encrypted.tdf --with-client-creds-file creds.json --platform-url https://platform.example.com + %(prog)s inspect --file encrypted.tdf + +Where creds.json contains: + {"clientId": "your-client-id", "clientSecret": "your-client-secret"} + """, + ) + + # Global options + parser.add_argument( + "--version", action="version", version=f"OpenTDF Python SDK {__version__}" + ) + parser.add_argument( + "--log-level", + choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], + default="INFO", + help="Set logging level", + ) + parser.add_argument("--silent", action="store_true", help="Disable logging") + + # Server endpoints + server_group = parser.add_argument_group("Server Endpoints") + server_group.add_argument("--platform-url", help="OpenTDF platform URL") + server_group.add_argument( + "--kas-endpoint", help="KAS endpoint URL (comma-separated for multiple)" + ) + server_group.add_argument("--oidc-endpoint", help="OIDC endpoint URL") + + # Authentication + auth_group = parser.add_argument_group("Authentication") + auth_group.add_argument( + "--with-client-creds-file", + help="Path to JSON file containing OAuth credentials (clientId and clientSecret)", + ) + auth_group.add_argument("--client-id", help="OAuth client ID") + auth_group.add_argument("--client-secret", help="OAuth client secret") + + # Security options + security_group = parser.add_argument_group("Security") + security_group.add_argument( + "--plaintext", action="store_true", help="Use HTTP instead of HTTPS" + ) + security_group.add_argument( + "--insecure", action="store_true", help="Skip TLS verification" + ) + + # Subcommands + subparsers = parser.add_subparsers(dest="command", help="Available commands") + + # Encrypt command + encrypt_parser = subparsers.add_parser("encrypt", help="Encrypt a file") + encrypt_parser.add_argument("file", help="Path to file to encrypt") + encrypt_parser.add_argument( + 
"--output", "-o", help="Output file path (default: stdout)" + ) + encrypt_parser.add_argument( + "--attributes", help="Data attributes (comma-separated)" + ) + encrypt_parser.add_argument( + "--container-type", + choices=["tdf", "nano"], + default="tdf", + help="Container format", + ) + encrypt_parser.add_argument("--mime-type", help="MIME type of the input file") + encrypt_parser.add_argument( + "--autoconfigure", + action="store_true", + help="Enable automatic configuration from attributes", + ) + encrypt_parser.add_argument( + "--policy-binding", + choices=["ecdsa", "gmac"], + default="gmac", + help="Policy binding type (nano only)", + ) + + # Decrypt command + decrypt_parser = subparsers.add_parser("decrypt", help="Decrypt a file") + decrypt_parser.add_argument("file", help="Path to encrypted file") + decrypt_parser.add_argument( + "--output", "-o", help="Output file path (default: stdout)" + ) + + # Inspect command + inspect_parser = subparsers.add_parser( + "inspect", help="Inspect encrypted file metadata" + ) + inspect_parser.add_argument("file", help="Path to encrypted file") + + return parser + + +def main(): + """Main CLI entry point.""" + parser = create_parser() + args = parser.parse_args() + + # Set up logging + setup_logging(args.log_level, args.silent) + + # Validate command + if not args.command: + parser.print_help() + sys.exit(1) + + try: + if args.command == "encrypt": + cmd_encrypt(args) + elif args.command == "decrypt": + cmd_decrypt(args) + elif args.command == "inspect": + cmd_inspect(args) + else: + parser.print_help() + sys.exit(1) + + except CLIError as e: + logger.error(f"{e.level}: {e.message}") + if e.cause: + logger.debug(f"Caused by: {e.cause}") + sys.exit(1) + except SDKException as e: + logger.error(f"SDK Error: {e}") + sys.exit(1) + except KeyboardInterrupt: + logger.info("Interrupted by user") + sys.exit(1) + except Exception as e: + logger.error(f"Unexpected error: {e}") + logger.debug("", exc_info=True) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/otdf_python/collection_store.py b/src/otdf_python/collection_store.py new file mode 100644 index 0000000..8716ff7 --- /dev/null +++ b/src/otdf_python/collection_store.py @@ -0,0 +1,41 @@ +from collections import OrderedDict + + +class CollectionKey: + def __init__(self, key: bytes | None): + self.key = key + + +class CollectionStore: + NO_PRIVATE_KEY = CollectionKey(None) + + def store(self, header, key: CollectionKey): + raise NotImplementedError + + def get_key(self, header) -> CollectionKey: + raise NotImplementedError + + +class NoOpCollectionStore(CollectionStore): + def store(self, header, key: CollectionKey): + pass + + def get_key(self, header) -> CollectionKey: + return self.NO_PRIVATE_KEY + + +class CollectionStoreImpl(OrderedDict, CollectionStore): + MAX_SIZE_STORE = 500 + + def __init__(self): + super().__init__() + + def store(self, header, key: CollectionKey): + buf = header.to_bytes() + self[buf] = key + if len(self) > self.MAX_SIZE_STORE: + self.popitem(last=False) + + def get_key(self, header) -> CollectionKey: + buf = header.to_bytes() + return self.get(buf, self.NO_PRIVATE_KEY) diff --git a/src/otdf_python/collection_store_impl.py b/src/otdf_python/collection_store_impl.py new file mode 100644 index 0000000..9b25042 --- /dev/null +++ b/src/otdf_python/collection_store_impl.py @@ -0,0 +1,22 @@ +from collections import OrderedDict +from threading import RLock + +MAX_SIZE_STORE = 500 + + +class CollectionStoreImpl(OrderedDict): + def __init__(self): + 
super().__init__() + self._lock = RLock() + + def store(self, header, key): + buf = header.to_bytes() # Assumes header has a to_bytes() method + with self._lock: + self[buf] = key + if len(self) > MAX_SIZE_STORE: + self.popitem(last=False) + + def get_key(self, header, no_private_key=None): + buf = header.to_bytes() + with self._lock: + return self.get(buf, no_private_key) diff --git a/src/otdf_python/config.py b/src/otdf_python/config.py new file mode 100644 index 0000000..646acec --- /dev/null +++ b/src/otdf_python/config.py @@ -0,0 +1,69 @@ +from dataclasses import dataclass, field +from enum import Enum +from typing import Any +from urllib.parse import urlparse, urlunparse + + +class TDFFormat(Enum): + JSONFormat = "JSONFormat" + XMLFormat = "XMLFormat" + + +class IntegrityAlgorithm(Enum): + HS256 = "HS256" + GMAC = "GMAC" + + +@dataclass +class KASInfo: + url: str + public_key: str | None = None + kid: str | None = None + default: bool | None = None + algorithm: str | None = None + + def __str__(self): + return f"KASInfo{{URL:'{self.url}', PublicKey:'{self.public_key}', KID:'{self.kid}', Default:{self.default}, Algorithm:'{self.algorithm}'}}" + + +@dataclass +class TDFConfig: + autoconfigure: bool = True + default_segment_size: int = 2 * 1024 * 1024 + enable_encryption: bool = True + tdf_format: TDFFormat = TDFFormat.JSONFormat + tdf_public_key: str | None = None + tdf_private_key: str | None = None + meta_data: str | None = None + integrity_algorithm: IntegrityAlgorithm = IntegrityAlgorithm.HS256 + segment_integrity_algorithm: IntegrityAlgorithm = IntegrityAlgorithm.GMAC + attributes: list[str] = field(default_factory=list) + kas_info_list: list[KASInfo] = field(default_factory=list) + mime_type: str = "application/octet-stream" + split_plan: list[str] | None = field(default_factory=list) + wrapping_key_type: str | None = None + hex_encode_root_and_segment_hashes: bool = False + render_version_info_in_manifest: bool = True + policy_object: Any | None = None + + +@dataclass +class NanoTDFConfig: + ecc_mode: str | None = None + cipher: str | None = None + config: str | None = None + attributes: list[str] = field(default_factory=list) + kas_info_list: list[KASInfo] = field(default_factory=list) + collection_config: str | None = None + policy_type: str | None = None + + +# Utility function to normalize KAS URLs (Python equivalent) +def get_kas_address(kas_url: str) -> str: + if "://" not in kas_url: + kas_url = "https://" + kas_url + parsed = urlparse(kas_url) + scheme = parsed.scheme or "https" + netloc = parsed.hostname or "" + port = parsed.port or 443 + return urlunparse((scheme, f"{netloc}:{port}", "", "", "", "")) diff --git a/src/otdf_python/connect_client.py b/src/otdf_python/connect_client.py new file mode 100644 index 0000000..e69de29 diff --git a/src/otdf_python/constants.py b/src/otdf_python/constants.py new file mode 100644 index 0000000..048e914 --- /dev/null +++ b/src/otdf_python/constants.py @@ -0,0 +1 @@ +MAGIC_NUMBER_AND_VERSION = bytes([0x4C, 0x31, 0x4C]) diff --git a/src/otdf_python/crypto_utils.py b/src/otdf_python/crypto_utils.py new file mode 100644 index 0000000..b32a5e9 --- /dev/null +++ b/src/otdf_python/crypto_utils.py @@ -0,0 +1,78 @@ +import hashlib +import hmac + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ec, rsa + + +class CryptoUtils: + KEYPAIR_SIZE = 2048 + + @staticmethod + def calculate_sha256_hmac(key: bytes, data: bytes) -> 
bytes: + return hmac.new(key, data, hashlib.sha256).digest() + + @staticmethod + def generate_rsa_keypair() -> tuple[rsa.RSAPrivateKey, rsa.RSAPublicKey]: + private_key = rsa.generate_private_key( + public_exponent=65537, + key_size=CryptoUtils.KEYPAIR_SIZE, + backend=default_backend(), + ) + return private_key, private_key.public_key() + + @staticmethod + def generate_ec_keypair( + curve=None, + ) -> tuple[ec.EllipticCurvePrivateKey, ec.EllipticCurvePublicKey]: + if curve is None: + curve = ec.SECP256R1() + private_key = ec.generate_private_key(curve, default_backend()) + return private_key, private_key.public_key() + + @staticmethod + def get_public_key_pem(public_key) -> str: + return public_key.public_bytes( + serialization.Encoding.PEM, serialization.PublicFormat.SubjectPublicKeyInfo + ).decode() + + @staticmethod + def get_private_key_pem(private_key) -> str: + return private_key.private_bytes( + serialization.Encoding.PEM, + serialization.PrivateFormat.PKCS8, + serialization.NoEncryption(), + ).decode() + + @staticmethod + def get_rsa_public_key_pem(public_key) -> str: + if public_key.__class__.__name__ != "RSAPublicKey": + raise ValueError("Not an RSA public key") + return CryptoUtils.get_public_key_pem(public_key) + + @staticmethod + def get_rsa_private_key_pem(private_key) -> str: + if private_key.__class__.__name__ != "RSAPrivateKey": + raise ValueError("Not an RSA private key") + return CryptoUtils.get_private_key_pem(private_key) + + @staticmethod + def get_rsa_public_key_from_pem(pem_data: str) -> rsa.RSAPublicKey: + """Load RSA public key from PEM string.""" + public_key = serialization.load_pem_public_key( + pem_data.encode(), backend=default_backend() + ) + if not isinstance(public_key, rsa.RSAPublicKey): + raise ValueError("Not an RSA public key") + return public_key + + @staticmethod + def get_rsa_private_key_from_pem(pem_data: str) -> rsa.RSAPrivateKey: + """Load RSA private key from PEM string.""" + private_key = serialization.load_pem_private_key( + pem_data.encode(), password=None, backend=default_backend() + ) + if not isinstance(private_key, rsa.RSAPrivateKey): + raise ValueError("Not an RSA private key") + return private_key diff --git a/src/otdf_python/dpop.py b/src/otdf_python/dpop.py new file mode 100644 index 0000000..c442a5e --- /dev/null +++ b/src/otdf_python/dpop.py @@ -0,0 +1,81 @@ +""" +DPoP (Demonstration of Proof-of-Possession) token generation utilities. +""" + +import base64 +import hashlib +import time + +import jwt + +from .crypto_utils import CryptoUtils + + +def create_dpop_token( + private_key_pem: str, + public_key_pem: str, + url: str, + method: str = "POST", + access_token: str | None = None, +) -> str: + """ + Create a DPoP (Demonstration of Proof-of-Possession) token. 
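+
+    Illustrative call (keys generated via CryptoUtils; the URL is a placeholder):
+
+        priv, pub = CryptoUtils.generate_rsa_keypair()
+        token = create_dpop_token(
+            CryptoUtils.get_private_key_pem(priv),
+            CryptoUtils.get_rsa_public_key_pem(pub),
+            "https://kas.example.com/v2/rewrap",
+        )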
+ + Args: + private_key_pem: RSA private key in PEM format for signing + public_key_pem: RSA public key in PEM format for JWK + url: The URL being accessed + method: HTTP method (default: POST) + access_token: Optional access token for ath claim + + Returns: + DPoP token as a string + """ + # Parse the RSA public key to extract modulus and exponent + public_key_obj = CryptoUtils.get_rsa_public_key_from_pem(public_key_pem) + public_numbers = public_key_obj.public_numbers() + + # Convert to base64url encoded values + def int_to_base64url(value): + # Convert integer to bytes, then to base64url + byte_length = (value.bit_length() + 7) // 8 + value_bytes = value.to_bytes(byte_length, byteorder="big") + return base64.urlsafe_b64encode(value_bytes).decode("ascii").rstrip("=") + + # Create JWK (JSON Web Key) representation + jwk = { + "kty": "RSA", + "n": int_to_base64url(public_numbers.n), + "e": int_to_base64url(public_numbers.e), + } + + # Create DPoP header + now = int(time.time()) + + # Create JWT header with JWK + header = {"typ": "dpop+jwt", "alg": "RS256", "jwk": jwk} + + # Create JWT payload + payload = { + "jti": base64.urlsafe_b64encode( + hashlib.sha256(f"{url}{method}{now}".encode()).digest() + ) + .decode("ascii") + .rstrip("="), + "htm": method, + "htu": url, + "iat": now, + } + + # Add access token hash if provided + if access_token: + # Create SHA-256 hash of access token + token_hash = hashlib.sha256(access_token.encode()).digest() + payload["ath"] = ( + base64.urlsafe_b64encode(token_hash).decode("ascii").rstrip("=") + ) + + # Sign the DPoP token + dpop_token = jwt.encode(payload, private_key_pem, algorithm="RS256", headers=header) + + return dpop_token diff --git a/src/otdf_python/ecc_mode.py b/src/otdf_python/ecc_mode.py new file mode 100644 index 0000000..95b4714 --- /dev/null +++ b/src/otdf_python/ecc_mode.py @@ -0,0 +1,32 @@ +class ECCMode: + def __init__(self, curve_mode: int = 0, use_ecdsa_binding: bool = False): + self.curve_mode = curve_mode + self.use_ecdsa_binding = use_ecdsa_binding + + def set_ecdsa_binding(self, flag: bool): + self.use_ecdsa_binding = flag + + def is_ecdsa_binding_enabled(self) -> bool: + return self.use_ecdsa_binding + + def set_elliptic_curve(self, curve_mode: int): + self.curve_mode = curve_mode + + def get_elliptic_curve_type(self) -> int: + return self.curve_mode + + @staticmethod + def get_ec_compressed_pubkey_size(curve_type: int) -> int: + # 0: secp256r1, 1: secp384r1, 2: secp521r1 + if curve_type == 0: + return 33 + elif curve_type == 1: + return 49 + elif curve_type == 2: + return 67 + else: + raise ValueError("Unsupported ECC algorithm.") + + def get_ecc_mode_as_byte(self) -> int: + # Most significant bit: use_ecdsa_binding, lower 3 bits: curve_mode + return ((1 if self.use_ecdsa_binding else 0) << 7) | (self.curve_mode & 0x07) diff --git a/src/otdf_python/eckeypair.py b/src/otdf_python/eckeypair.py new file mode 100644 index 0000000..3dee0aa --- /dev/null +++ b/src/otdf_python/eckeypair.py @@ -0,0 +1,75 @@ +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.kdf.hkdf import HKDF +from cryptography.hazmat.primitives.serialization import ( + Encoding, + NoEncryption, + PrivateFormat, + PublicFormat, +) + + +class ECKeyPair: + def __init__(self, curve=None): + if curve is None: + curve = ec.SECP256R1() + self.private_key = 
ec.generate_private_key(curve, default_backend()) + self.public_key = self.private_key.public_key() + self.curve = curve + + def public_key_pem(self): + return self.public_key.public_bytes( + Encoding.PEM, PublicFormat.SubjectPublicKeyInfo + ).decode() + + def private_key_pem(self): + return self.private_key.private_bytes( + Encoding.PEM, PrivateFormat.PKCS8, NoEncryption() + ).decode() + + def key_size(self): + return self.private_key.key_size + + def compress_public_key(self): + return self.public_key.public_bytes(Encoding.X962, PublicFormat.CompressedPoint) + + @staticmethod + def public_key_from_pem(pem): + return serialization.load_pem_public_key( + pem.encode(), backend=default_backend() + ) + + @staticmethod + def private_key_from_pem(pem): + return serialization.load_pem_private_key( + pem.encode(), password=None, backend=default_backend() + ) + + @staticmethod + def compute_ecdh_key(public_key, private_key): + return private_key.exchange(ec.ECDH(), public_key) + + @staticmethod + def calculate_hkdf(salt, secret, length=32): + hkdf = HKDF( + algorithm=hashes.SHA256(), + length=length, + salt=salt, + info=None, + backend=default_backend(), + ) + return hkdf.derive(secret) + + @staticmethod + def sign_ecdsa(data, private_key): + return private_key.sign(data, ec.ECDSA(hashes.SHA256())) + + @staticmethod + def verify_ecdsa(data, signature, public_key): + try: + public_key.verify(signature, data, ec.ECDSA(hashes.SHA256())) + return True + except InvalidSignature: + return False diff --git a/src/otdf_python/header.py b/src/otdf_python/header.py new file mode 100644 index 0000000..df7186d --- /dev/null +++ b/src/otdf_python/header.py @@ -0,0 +1,143 @@ +from otdf_python.constants import MAGIC_NUMBER_AND_VERSION +from otdf_python.ecc_mode import ECCMode +from otdf_python.policy_info import PolicyInfo +from otdf_python.resource_locator import ResourceLocator +from otdf_python.symmetric_and_payload_config import SymmetricAndPayloadConfig + + +class Header: + def __init__(self): + self.kas_locator: ResourceLocator | None = None + self.ecc_mode: ECCMode | None = None + self.payload_config: SymmetricAndPayloadConfig | None = None + self.policy_info: PolicyInfo | None = None + self.ephemeral_key: bytes | None = None + + @classmethod + def from_bytes(cls, buffer: bytes): + # Parse header from bytes, validate magic/version + offset = 0 + magic = buffer[offset : offset + 3] + if magic != MAGIC_NUMBER_AND_VERSION: + raise ValueError("Invalid magic number and version in nano tdf.") + offset += 3 + kas_locator, kas_size = ResourceLocator.from_bytes_with_size(buffer[offset:]) + offset += kas_size + ecc_mode = ECCMode(buffer[offset]) + offset += 1 + payload_config = SymmetricAndPayloadConfig(buffer[offset]) + offset += 1 + policy_info, policy_size = PolicyInfo.from_bytes_with_size( + buffer[offset:], ecc_mode + ) + offset += policy_size + compressed_pubkey_size = ECCMode.get_ec_compressed_pubkey_size( + ecc_mode.get_elliptic_curve_type() + ) + ephemeral_key = buffer[offset : offset + compressed_pubkey_size] + if len(ephemeral_key) != compressed_pubkey_size: + raise ValueError("Failed to read ephemeral key - invalid buffer size.") + obj = cls() + obj.kas_locator = kas_locator + obj.ecc_mode = ecc_mode + obj.payload_config = payload_config + obj.policy_info = policy_info + obj.ephemeral_key = ephemeral_key + return obj + + @staticmethod + def peek_length(buffer: bytes) -> int: + offset = 0 + # MAGIC_NUMBER_AND_VERSION (3 bytes) + offset += 3 + # ResourceLocator + kas_locator, kas_size = 
ResourceLocator.from_bytes_with_size(buffer[offset:]) + offset += kas_size + # ECC mode (1 byte) + ecc_mode = ECCMode(buffer[offset]) + offset += 1 + # Payload config (1 byte) + offset += 1 + # PolicyInfo + policy_info, policy_size = PolicyInfo.from_bytes_with_size( + buffer[offset:], ecc_mode + ) + offset += policy_size + # Ephemeral key (size depends on curve) + compressed_pubkey_size = ECCMode.get_ec_compressed_pubkey_size( + ecc_mode.get_elliptic_curve_type() + ) + offset += compressed_pubkey_size + return offset + + def set_kas_locator(self, kas_locator: ResourceLocator): + self.kas_locator = kas_locator + + def get_kas_locator(self) -> ResourceLocator | None: + return self.kas_locator + + def set_ecc_mode(self, ecc_mode: ECCMode): + self.ecc_mode = ecc_mode + + def get_ecc_mode(self) -> ECCMode | None: + return self.ecc_mode + + def set_payload_config(self, payload_config: SymmetricAndPayloadConfig): + self.payload_config = payload_config + + def get_payload_config(self) -> SymmetricAndPayloadConfig | None: + return self.payload_config + + def set_policy_info(self, policy_info: PolicyInfo): + self.policy_info = policy_info + + def get_policy_info(self) -> PolicyInfo | None: + return self.policy_info + + def set_ephemeral_key(self, ephemeral_key: bytes): + if self.ecc_mode is not None: + expected_size = ECCMode.get_ec_compressed_pubkey_size( + self.ecc_mode.get_elliptic_curve_type() + ) + if len(ephemeral_key) != expected_size: + raise ValueError("Failed to read ephemeral key - invalid buffer size.") + self.ephemeral_key = ephemeral_key + + def get_ephemeral_key(self) -> bytes | None: + return self.ephemeral_key + + def get_total_size(self) -> int: + total = 0 + total += self.kas_locator.get_total_size() if self.kas_locator else 0 + total += 1 # ECC mode + total += 1 # payload config + total += self.policy_info.get_total_size() if self.policy_info else 0 + total += len(self.ephemeral_key) if self.ephemeral_key else 0 + return total + + def write_into_buffer(self, buffer: bytearray) -> int: + total_size = self.get_total_size() + if len(buffer) < total_size: + raise ValueError("Failed to write header - invalid buffer size.") + offset = 0 + # ResourceLocator + n = self.kas_locator.write_into_buffer(buffer, offset) + offset += n + # ECCMode (1 byte) + buffer[offset] = self.ecc_mode.get_ecc_mode_as_byte() + offset += 1 + # SymmetricAndPayloadConfig (1 byte) + buffer[offset] = self.payload_config.get_symmetric_and_payload_config_as_byte() + offset += 1 + # PolicyInfo + n = self.policy_info.write_into_buffer(buffer, offset) + offset += n + # Ephemeral key + buffer[offset : offset + len(self.ephemeral_key)] = self.ephemeral_key + offset += len(self.ephemeral_key) + return offset + + def to_bytes(self): + buf = bytearray(self.get_total_size()) + self.write_into_buffer(buf) + return bytes(buf) diff --git a/src/otdf_python/invalid_zip_exception.py b/src/otdf_python/invalid_zip_exception.py new file mode 100644 index 0000000..7ae67ad --- /dev/null +++ b/src/otdf_python/invalid_zip_exception.py @@ -0,0 +1,8 @@ +class InvalidZipException(Exception): + """ + Raised when a ZIP file is invalid or corrupted. + Based on Java implementation. + """ + + def __init__(self, message: str): + super().__init__(message) diff --git a/src/otdf_python/kas_client.py b/src/otdf_python/kas_client.py new file mode 100644 index 0000000..43b3c6a --- /dev/null +++ b/src/otdf_python/kas_client.py @@ -0,0 +1,603 @@ +""" +KASClient: Handles communication with the Key Access Service (KAS). 
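+
+Typical construction (sketch; `token_source` is a placeholder for whatever
+token provider the SDK builder supplies):
+
+    client = KASClient(kas_url="https://kas.example.com", token_source=token_source)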
+""" + +import base64 +import hashlib +import logging +import secrets +import time +from base64 import b64decode +from dataclasses import dataclass + +import jwt + +from .asym_decryption import AsymDecryption +from .crypto_utils import CryptoUtils +from .kas_connect_rpc_client import KASConnectRPCClient +from .kas_key_cache import KASKeyCache +from .key_type_constants import EC_KEY_TYPE, RSA_KEY_TYPE +from .sdk_exceptions import SDKException + + +@dataclass +class KeyAccess: + url: str + wrapped_key: str + ephemeral_public_key: str | None = None + + +class KASClient: + def __init__( + self, + kas_url=None, + token_source=None, + cache=None, + use_plaintext=False, + verify_ssl=True, + ): + self.kas_url = kas_url + self.token_source = token_source + self.cache = cache or KASKeyCache() + self.use_plaintext = use_plaintext + self.verify_ssl = verify_ssl + self.decryptor = None + self.client_public_key = None + + # Initialize Connect RPC client for protobuf interactions + self.connect_rpc_client = KASConnectRPCClient( + use_plaintext=use_plaintext, verify_ssl=verify_ssl + ) + + # Generate DPoP key for JWT signing (separate from encryption keys) + # This matches the web-SDK pattern where dpopKeys != ephemeralKeys + self._dpop_private_key, self._dpop_public_key = ( + CryptoUtils.generate_rsa_keypair() + ) + self._dpop_private_key_pem = CryptoUtils.get_rsa_private_key_pem( + self._dpop_private_key + ) + self._dpop_public_key_pem = CryptoUtils.get_rsa_public_key_pem( + self._dpop_public_key + ) + + def _normalize_kas_url(self, url: str) -> str: + """ + Normalize KAS URLs based on client security settings. + + Args: + url: The KAS URL to normalize + + Returns: + Normalized URL with appropriate protocol and port + """ + from urllib.parse import urlparse + + try: + # Parse the URL + parsed = urlparse(url) + except Exception as e: + raise SDKException(f"error trying to parse URL [{url}]", e) + + # Check if we have a host or if this is likely a hostname:port combination + if parsed.hostname is None: + # No host means we likely have hostname:port being misinterpreted + return self._handle_missing_scheme(url) + else: + # We have a host, handle the existing scheme + return self._handle_existing_scheme(parsed) + + def _handle_missing_scheme(self, url: str) -> str: + """Handle URLs without scheme by adding appropriate protocol and port.""" + scheme = "http" if self.use_plaintext else "https" + default_port = 80 if self.use_plaintext else 443 + + try: + # Check if we have a hostname:port format (colon before any slash) + if ":" in url and ("/" not in url or url.index(":") < url.index("/")): + host, port_str = url.split(":", 1) + try: + port = int(port_str) + return f"{scheme}://{host}:{port}" + except ValueError: + raise SDKException( + f"error trying to create URL for host and port [{url}]" + ) + else: + # Hostname with or without path, add default port + if "/" in url: + # Split at first slash to separate hostname from path + host, path = url.split("/", 1) + return f"{scheme}://{host}:{default_port}/{path}" + else: + # Just a hostname, add default port + return f"{scheme}://{url}:{default_port}" + except Exception as e: + raise SDKException( + f"error trying to create URL for host and port [{url}]", e + ) + + def _handle_existing_scheme(self, parsed) -> str: + """Handle URLs with existing scheme by normalizing protocol and port.""" + # Force the scheme based on client security settings + scheme = "http" if self.use_plaintext else "https" + + # Determine the port + if parsed.port is not None: + port = 
parsed.port + else: + # Use default port based on target scheme + port = 80 if self.use_plaintext else 443 + + # Reconstruct URL preserving the path (especially /kas prefix) + try: + # Create URL preserving the path component for proper endpoint routing + path = parsed.path if parsed.path else "" + normalized_url = f"{scheme}://{parsed.hostname}:{port}{path}" + logging.debug(f"normalized url [{parsed.geturl()}] to [{normalized_url}]") + return normalized_url + except Exception as e: + raise SDKException("error creating KAS address", e) + + def _create_signed_request_jwt(self, policy_json, client_public_key, key_access): # noqa: C901 + """ + Create a signed JWT for the rewrap request. + The JWT is signed with the DPoP private key. + """ + # Handle both ManifestKeyAccess (new camelCase and old snake_case) and simple KeyAccess (for tests) + # TODO: This can probably be simplified to only camelCase + + # Ensure wrappedKey is a base64-encoded string + # Note: wrappedKey from manifest is already base64-encoded + wrapped_key = getattr(key_access, "wrappedKey", None) or getattr( + key_access, "wrapped_key", None + ) + if wrapped_key is None: + raise SDKException("No wrapped key found in key access object") + + if isinstance(wrapped_key, bytes): + # Only encode if it's raw bytes (shouldn't happen from manifest) + wrapped_key = base64.b64encode(wrapped_key).decode("utf-8") + elif not isinstance(wrapped_key, str): + # Convert to string if it's something else + wrapped_key = str(wrapped_key) + # If it's already a string (from manifest), use it as-is since it's already base64-encoded + + key_access_dict = { + "url": key_access.url, + "wrappedKey": wrapped_key, + } + + # Add type and protocol - handle both old and new field names + key_type = getattr(key_access, "type", None) or getattr( + key_access, "key_type", None + ) + if key_type is not None: + key_access_dict["type"] = key_type + else: + key_access_dict["type"] = "wrapped" # Default type for tests + + protocol = getattr(key_access, "protocol", None) + if protocol is not None: + key_access_dict["protocol"] = protocol + else: + key_access_dict["protocol"] = "kas" # Default protocol for tests + + # Optional fields - handle both old and new field names, only include if they exist and are not None + policy_binding = getattr(key_access, "policyBinding", None) or getattr( + key_access, "policy_binding", None + ) + if policy_binding is not None: + # Policy binding hash should be kept as base64-encoded + # The server expects base64-encoded hash values in the JWT request + key_access_dict["policyBinding"] = policy_binding + + encrypted_metadata = getattr(key_access, "encryptedMetadata", None) or getattr( + key_access, "encrypted_metadata", None + ) + if encrypted_metadata is not None: + key_access_dict["encryptedMetadata"] = encrypted_metadata + + kid = getattr(key_access, "kid", None) + if kid is not None: + key_access_dict["kid"] = kid + + sid = getattr(key_access, "sid", None) + if sid is not None: + key_access_dict["sid"] = sid + + schema_version = getattr(key_access, "schemaVersion", None) or getattr( + key_access, "schema_version", None + ) + if schema_version is not None: + key_access_dict["schemaVersion"] = schema_version + + ephemeral_public_key = getattr( + key_access, "ephemeralPublicKey", None + ) or getattr(key_access, "ephemeral_public_key", None) + if ephemeral_public_key is not None: + key_access_dict["ephemeralPublicKey"] = ephemeral_public_key + + # Get current timestamp in seconds since epoch (UNIX timestamp) + now = int(time.time()) + + 
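+        # For reference, the signed JWT produced below has this shape (sketch,
+        # values abbreviated):
+        #   {
+        #     "requestBody": "<JSON: clientPublicKey, requests[], keyAccess, policy>",
+        #     "iat": <now>,
+        #     "exp": <now + 7200>
+        #   }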
# The server expects a JWT with a requestBody field containing the UnsignedRewrapRequest + # Create the request body that matches UnsignedRewrapRequest protobuf structure + # Use the v2 format with explicit policy ID and requests array for cross-tool compatibility + + # Use "policy" as policy ID for compatibility with otdfctl + import json + + policy_uuid = "policy" # otdfctl uses "policy" as the policy ID + + # For v2 format, the policy body must be base64-encoded + policy_base64 = base64.b64encode(policy_json.encode("utf-8")).decode("utf-8") + + unsigned_rewrap_request = { + "clientPublicKey": client_public_key, # Maps to client_public_key + "requests": [ + { # Maps to requests array (v2 format) + "keyAccessObjects": [ + { + "keyAccessObjectId": "kao-0", # Standard KAO ID + "keyAccessObject": key_access_dict, + } + ], + "policy": { + "id": policy_uuid, # Use the UUID from policy as the policy ID + "body": policy_base64, # Base64-encoded policy JSON + }, + } + ], + "keyAccess": key_access_dict, + "policy": policy_base64, + } + + # Convert to JSON string + request_body_json = json.dumps(unsigned_rewrap_request) + + # JWT payload with requestBody field containing the JSON string + payload = { + "requestBody": request_body_json, + "iat": now, # Issued at timestamp (required) + "exp": now + 7200, # Expires in 2 hours (required) + } + + # Sign the JWT with the DPoP private key (RS256) + signed_jwt = jwt.encode(payload, self._dpop_private_key_pem, algorithm="RS256") + + return signed_jwt + + def _create_connect_rpc_signed_token(self, key_access, policy_json): + """ + Create a signed token specifically for Connect RPC requests. + For now, this delegates to the existing JWT creation method. + """ + return self._create_signed_request_jwt( + policy_json, self.client_public_key, key_access + ) + + def _create_dpop_proof(self, method, url, access_token=None): + """ + Create a DPoP proof JWT as per RFC 9449. + + Args: + method: HTTP method (e.g., "POST") + url: Full URL of the request + access_token: Optional access token for ath claim + + Returns: + DPoP proof JWT string + """ + now = int(time.time()) + + # Create DPoP proof claims + proof_claims = { + "jti": secrets.token_urlsafe(32), # Unique identifier + "htm": method, # HTTP method + "htu": url, # HTTP URI + "iat": now, # Issued at + } + + # Add access token hash if provided + if access_token: + token_hash = hashlib.sha256(access_token.encode("utf-8")).digest() + proof_claims["ath"] = ( + base64.urlsafe_b64encode(token_hash).decode("utf-8").rstrip("=") + ) + + # DPoP proof must be signed with the DPoP key and include the public key in the header + header = { + "alg": "RS256", + "typ": "dpop+jwt", + "jwk": { + "kty": "RSA", + "n": base64.urlsafe_b64encode( + self._dpop_public_key.public_numbers().n.to_bytes( + (self._dpop_public_key.public_numbers().n.bit_length() + 7) + // 8, + "big", + ) + ) + .decode("utf-8") + .rstrip("="), + "e": base64.urlsafe_b64encode( + self._dpop_public_key.public_numbers().e.to_bytes( + (self._dpop_public_key.public_numbers().e.bit_length() + 7) + // 8, + "big", + ) + ) + .decode("utf-8") + .rstrip("="), + }, + } + + # Create and sign the DPoP proof JWT + return jwt.encode( + proof_claims, self._dpop_private_key_pem, algorithm="RS256", headers=header + ) + + def get_public_key(self, kas_info): + """ + Get KAS public key using Connect RPC. + Checks cache first if available. 
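+
+        Results are cached in the KASKeyCache keyed by KAS URL, so repeated
+        lookups for the same KAS avoid a network round trip.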
+ """ + try: + # Check cache first if available (use original URL for cache key) + if self.cache: + cached_info = self.cache.get(kas_info.url) + if cached_info: + return cached_info + + result = self._get_public_key_with_connect_rpc(kas_info) + + # Cache the result if cache is available + if self.cache and result: + self.cache.store(result) + + return result + + except Exception as e: + logging.error(f"Error in get_public_key: {e}") + raise + + def _get_public_key_with_connect_rpc(self, kas_info): + """ + Get KAS public key using Connect RPC. + """ + + # Get access token for authentication if token source is available + access_token = None + if self.token_source: + try: + access_token = self.token_source() + except Exception as e: + logging.warning(f"Failed to get access token: {e}") + + # Normalize the URL + normalized_url = self._normalize_kas_url(kas_info.url) + + try: + # Delegate to the Connect RPC client + result = self.connect_rpc_client.get_public_key( + normalized_url, kas_info, access_token + ) + + # Cache the result + if self.cache: + self.cache.store(result) + + return result + + except Exception as e: + import traceback + + error_details = traceback.format_exc() + logging.error( + f"Connect RPC public key request failed: {type(e).__name__}: {e}" + ) + logging.error(f"Full traceback: {error_details}") + raise SDKException(f"Connect RPC public key request failed: {e}") + + def _normalize_session_key_type(self, session_key_type): + """ + Normalize session key type to the appropriate enum value. + + Args: + session_key_type: Type of the session key (KeyType enum or string "RSA"/"EC") + + Returns: + Normalized key type enum + """ + if isinstance(session_key_type, str): + if session_key_type.upper() == "RSA": + return RSA_KEY_TYPE + elif session_key_type.upper() == "EC": + return EC_KEY_TYPE + else: + logging.warning( + f"Unknown session key type: {session_key_type}, defaulting to RSA" + ) + return RSA_KEY_TYPE + elif session_key_type is None: + # Default to RSA + return RSA_KEY_TYPE + return session_key_type + + def _prepare_ec_keypair(self, session_key_type): + """ + Prepare EC key pair for unwrapping. + + Args: + session_key_type: EC key type with curve information + + Returns: + ECKeyPair instance and client public key + """ + from .eckeypair import ECKeyPair + + # Use default curve for now - this would need to be based on session_key_type in a full implementation + ec_key_pair = ECKeyPair() + client_public_key = ec_key_pair.public_key_pem() + return ec_key_pair, client_public_key + + def _prepare_rsa_keypair(self): + """ + Prepare RSA key pair for unwrapping, reusing if possible. + Uses separate ephemeral keys for encryption (not DPoP keys). + + Returns: + Client public key PEM for the ephemeral encryption key + """ + if self.decryptor is None: + # Generate ephemeral keys for encryption (separate from DPoP keys) + private_key, public_key = CryptoUtils.generate_rsa_keypair() + self.decryptor = AsymDecryption(private_key_obj=private_key) + self.client_public_key = CryptoUtils.get_rsa_public_key_pem(public_key) + return self.client_public_key + + def _unwrap_with_ec(self, wrapped_key, ec_key_pair, response_data): + """ + Unwrap a key using EC cryptography. + + Args: + wrapped_key: The wrapped key to decrypt + ec_key_pair: ECKeyPair instance + response_data: Response data from KAS + + Returns: + Unwrapped key as bytes + """ + if ec_key_pair is None: + raise SDKException( + "ECKeyPair is null. Unable to proceed with the unwrap operation." 
+ ) + + # Get the KAS ephemeral public key + kas_ephemeral_public_key = response_data.get("sessionPublicKey") + if not kas_ephemeral_public_key: + raise SDKException("No session public key in KAS response") + + # Generate symmetric key using ECDH + from .eckeypair import ECKeyPair + + public_key = ECKeyPair.public_key_from_pem(kas_ephemeral_public_key) + sym_key = ECKeyPair.compute_ecdh_key(public_key, ec_key_pair.get_private_key()) + + # Calculate HKDF and decrypt + from otdf_python.tdf import TDF + + session_key = ECKeyPair.calculate_hkdf(TDF.GLOBAL_KEY_SALT, sym_key) + + from .aesgcm import AesGcm + + gcm = AesGcm(session_key) + return gcm.decrypt(wrapped_key) + + def _ensure_client_keypair(self, session_key_type): + """ + Ensure client keypair is generated and stored. + """ + if session_key_type == RSA_KEY_TYPE: + if self.decryptor is None: + private_key, public_key = CryptoUtils.generate_rsa_keypair() + private_key_pem = CryptoUtils.get_rsa_private_key_pem(private_key) + self.decryptor = AsymDecryption(private_key_pem) + self.client_public_key = CryptoUtils.get_rsa_public_key_pem(public_key) + else: + # For EC keys, generate fresh key pair each time + # TODO: Implement proper EC key handling + private_key, public_key = CryptoUtils.generate_rsa_keypair() + private_key_pem = CryptoUtils.get_rsa_private_key_pem(private_key) + self.client_public_key = CryptoUtils.get_rsa_public_key_pem(public_key) + + def _parse_and_decrypt_response(self, response): + """ + Parse JSON response and decrypt the wrapped key. + """ + try: + response_data = response.json() + except Exception as e: + logging.error(f"Failed to parse JSON response: {e}") + logging.error(f"Raw response content: {response.content}") + raise SDKException(f"Invalid JSON response from KAS: {e}") + + entity_wrapped_key = response_data.get("entityWrappedKey") + if not entity_wrapped_key: + raise SDKException("No entityWrappedKey in KAS response") + + # Decrypt the wrapped key + if not self.decryptor: + raise SDKException("Decryptor not initialized") + encrypted_key = b64decode(entity_wrapped_key) + return self.decryptor.decrypt(encrypted_key) + + def unwrap(self, key_access, policy_json, session_key_type=None) -> bytes: + """ + Unwrap a key using Connect RPC. + + Args: + key_access: Key access information + policy_json: Policy as JSON string + session_key_type: Type of session key (RSA_KEY_TYPE or EC_KEY_TYPE), defaults to RSA + + Returns: + Unwrapped key bytes + """ + # Default to RSA if not specified + if session_key_type is None: + session_key_type = RSA_KEY_TYPE + + # Ensure we have an ephemeral client keypair for encryption (separate from DPoP keys) + session_key_type = self._normalize_session_key_type(session_key_type) + self._ensure_client_keypair(session_key_type) + + # Create signed token for the request using DPoP key for signing + # BUT use the ephemeral client public key in the request body + signed_token = self._create_signed_request_jwt( + policy_json, + self.client_public_key, + key_access, # Use ephemeral key, not DPoP key + ) + + # Call Connect RPC unwrap + return self._unwrap_with_connect_rpc(key_access, signed_token) + + def _unwrap_with_connect_rpc(self, key_access, signed_token) -> bytes: + """ + Connect RPC method for unwrapping keys. 
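+
+        Delegates the rewrap call to KASConnectRPCClient, then decrypts the
+        returned entity-wrapped key with the client's ephemeral RSA key.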
+ """ + + # Get access token for authentication if token source is available + access_token = None + if self.token_source: + try: + access_token = self.token_source() + except Exception as e: + logging.warning(f"Failed to get access token: {e}") + + # Normalize the URL + normalized_kas_url = self._normalize_kas_url(key_access.url) + + try: + # Delegate to the Connect RPC client + entity_wrapped_key = self.connect_rpc_client.unwrap_key( + normalized_kas_url, key_access, signed_token, access_token + ) + + # Decrypt the wrapped key + if not self.decryptor: + raise SDKException("Decryptor not initialized") + + result = self.decryptor.decrypt(entity_wrapped_key) + logging.info("Connect RPC rewrap succeeded") + return result + + except Exception as e: + logging.error(f"Connect RPC rewrap failed: {e}") + raise SDKException(f"Connect RPC rewrap failed: {e}") + + def get_key_cache(self) -> KASKeyCache: + """Returns the KAS key cache used for storing and retrieving encryption keys.""" + return self.cache diff --git a/src/otdf_python/kas_connect_rpc_client.py b/src/otdf_python/kas_connect_rpc_client.py new file mode 100644 index 0000000..3b39021 --- /dev/null +++ b/src/otdf_python/kas_connect_rpc_client.py @@ -0,0 +1,207 @@ +""" +KASConnectRPCClient: Handles Connect RPC communication with the Key Access Service (KAS). +This class encapsulates all interactions with otdf_python_proto. +""" + +import logging + +import urllib3 +from otdf_python_proto.kas import kas_pb2 +from otdf_python_proto.kas.kas_pb2_connect import AccessServiceClient + +from .sdk_exceptions import SDKException + + +class KASConnectRPCClient: + """ + Handles Connect RPC communication with KAS service using otdf_python_proto. + """ + + def __init__(self, use_plaintext=False, verify_ssl=True): + """ + Initialize the Connect RPC client. + + Args: + use_plaintext: Whether to use plaintext (HTTP) connections + verify_ssl: Whether to verify SSL certificates + """ + self.use_plaintext = use_plaintext + self.verify_ssl = verify_ssl + + def _create_http_client(self): + """ + Create HTTP client with SSL verification configuration. + + Returns: + urllib3.PoolManager configured for SSL verification settings + """ + if self.verify_ssl: + logging.info("Using SSL verification enabled HTTP client") + return urllib3.PoolManager() + else: + logging.info("Using SSL verification disabled HTTP client") + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + return urllib3.PoolManager(cert_reqs="CERT_NONE") + + def _prepare_connect_rpc_url(self, kas_url): + """ + Prepare the base URL for Connect RPC client. + + Args: + kas_url: The normalized KAS URL + + Returns: + Base URL for Connect RPC client (without /kas suffix) + """ + connect_rpc_base_url = kas_url + # Remove /kas suffix, if present + connect_rpc_base_url = connect_rpc_base_url.removesuffix("/kas") + return connect_rpc_base_url + + def _prepare_auth_headers(self, access_token): + """ + Prepare authentication headers if access token is available. + + Args: + access_token: Bearer token for authentication + + Returns: + Dictionary with authentication headers or None + """ + if access_token: + return {"Authorization": f"Bearer {access_token}"} + return None + + def get_public_key(self, normalized_kas_url, kas_info, access_token=None): + """ + Get KAS public key using Connect RPC. 
+ + Args: + normalized_kas_url: The normalized KAS URL + kas_info: KAS information object with algorithm + access_token: Optional access token for authentication + + Returns: + Updated kas_info with public_key and kid + """ + logging.info( + f"KAS Connect RPC client settings for public key retrieval: " + f"verify_ssl={self.verify_ssl}, use_plaintext={self.use_plaintext}, " + f"kas_url={kas_info.url}" + ) + + http_client = self._create_http_client() + + try: + connect_rpc_base_url = self._prepare_connect_rpc_url(normalized_kas_url) + + logging.info( + f"Creating Connect RPC client for base URL: {connect_rpc_base_url}, " + f"for public key retrieval" + ) + + # Create Connect RPC client with configured HTTP client using Connect protocol + # Note: gRPC protocol is not supported with urllib3, use default Connect protocol + client = AccessServiceClient(connect_rpc_base_url, http_client=http_client) + + # Create public key request + algorithm = getattr(kas_info, "algorithm", "") or "" + request = ( + kas_pb2.PublicKeyRequest(algorithm=algorithm) + if algorithm + else kas_pb2.PublicKeyRequest() + ) + + # Prepare headers with authentication if available + extra_headers = self._prepare_auth_headers(access_token) + + # Make the public key call with authentication headers + response = client.public_key(request, extra_headers=extra_headers) + + # Update kas_info with response + kas_info.public_key = response.public_key + kas_info.kid = response.kid + + return kas_info + + except Exception as e: + import traceback + + error_details = traceback.format_exc() + logging.error( + f"Connect RPC public key request failed: {type(e).__name__}: {e}" + ) + logging.error(f"Full traceback: {error_details}") + raise SDKException(f"Connect RPC public key request failed: {e}") + + def unwrap_key( + self, normalized_kas_url, key_access, signed_token, access_token=None + ): + """ + Unwrap a key using Connect RPC. 
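+        Sends a RewrapRequest carrying the signed JWT and extracts
+        kas_wrapped_key from the v2 response structure, falling back to the
+        legacy entity_wrapped_key field.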
+ + Args: + normalized_kas_url: The normalized KAS URL + key_access: Key access information + signed_token: Signed JWT token for the request + access_token: Optional access token for authentication + + Returns: + Unwrapped key bytes from the response + """ + logging.info( + f"KAS Connect RPC client settings for unwrap: " + f"verify_ssl={self.verify_ssl}, use_plaintext={self.use_plaintext}, " + f"kas_url={key_access.url}" + ) + + http_client = self._create_http_client() + + try: + kas_service_url = self._prepare_connect_rpc_url(normalized_kas_url) + + logging.info( + f"Creating Connect RPC client for base URL: {kas_service_url}, for unwrap" + ) + + # Note: gRPC protocol is not supported with urllib3, use default Connect protocol + client = AccessServiceClient(kas_service_url, http_client=http_client) + + # Create rewrap request + request = kas_pb2.RewrapRequest( + signed_request_token=signed_token, + ) + + # Debug: Log the signed token details + logging.info(f"Connect RPC signed token: {signed_token}") + + # Prepare headers with authentication if available + extra_headers = self._prepare_auth_headers(access_token) + + # Make the rewrap call with authentication headers + response = client.rewrap(request, extra_headers=extra_headers) + + # Extract the entity wrapped key from v2 response structure + # The v2 response has responses[] array with results[] for each policy + if response.responses and len(response.responses) > 0: + policy_result = response.responses[0] # First policy + if policy_result.results and len(policy_result.results) > 0: + kao_result = policy_result.results[0] # First KAO result + if kao_result.kas_wrapped_key: + entity_wrapped_key = kao_result.kas_wrapped_key + else: + raise SDKException(f"KAO result error: {kao_result.error}") + else: + raise SDKException("No KAO results in policy response") + else: + # Fallback to legacy entity_wrapped_key field for backward compatibility + entity_wrapped_key = response.entity_wrapped_key + if not entity_wrapped_key: + raise SDKException("No entity_wrapped_key in Connect RPC response") + + logging.info("Connect RPC rewrap succeeded") + return entity_wrapped_key + + except Exception as e: + logging.error(f"Connect RPC rewrap failed: {e}") + raise SDKException(f"Connect RPC rewrap failed: {e}") diff --git a/src/otdf_python/kas_info.py b/src/otdf_python/kas_info.py new file mode 100644 index 0000000..189ec73 --- /dev/null +++ b/src/otdf_python/kas_info.py @@ -0,0 +1,25 @@ +from dataclasses import dataclass + + +@dataclass +class KASInfo: + """ + Configuration for Key Access Server (KAS) information. + This class stores details about a Key Access Server including its URL, + public key, key ID, default status, and cryptographic algorithm. + """ + + url: str + public_key: str | None = None + kid: str | None = None + default: bool | None = None + algorithm: str | None = None + + def clone(self): + """Creates a copy of this KASInfo object.""" + from copy import copy + + return copy(self) + + def __str__(self): + return f"KASInfo(url={self.url}, kid={self.kid}, default={self.default}, algorithm={self.algorithm})" diff --git a/src/otdf_python/kas_key_cache.py b/src/otdf_python/kas_key_cache.py new file mode 100644 index 0000000..38e1f22 --- /dev/null +++ b/src/otdf_python/kas_key_cache.py @@ -0,0 +1,52 @@ +""" +KASKeyCache: In-memory cache for KAS (Key Access Service) public keys and info. 
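+
+Illustrative usage (the URL below is a placeholder assumption):
+
+    cache = KASKeyCache()
+    cache.store(kas_info)  # keyed on kas_info.url plus optional algorithm
+    hit = cache.get("https://kas.example.com")  # None when nothing is stored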
+""" + +import threading +from typing import Any + + +class KASKeyCache: + def __init__(self): + self._cache = {} + self._lock = threading.Lock() + + def get(self, url: str, algorithm: str | None = None) -> Any | None: + """ + Gets a KASInfo object from the cache based on URL and algorithm. + + Args: + url: The URL of the KAS + algorithm: Optional algorithm identifier + + Returns: + The cached KASInfo object, or None if not found + """ + cache_key = self._make_key(url, algorithm) + with self._lock: + return self._cache.get(cache_key) + + def store(self, kas_info) -> None: + """ + Stores a KASInfo object in the cache. + + Args: + kas_info: The KASInfo object to store + """ + cache_key = self._make_key(kas_info.url, getattr(kas_info, "algorithm", None)) + with self._lock: + self._cache[cache_key] = kas_info + + def set(self, key, value): + """Store a key-value pair in the cache.""" + with self._lock: + self._cache[key] = value + + def clear(self): + """Clears the cache""" + with self._lock: + self._cache.clear() + + def _make_key(self, url: str, algorithm: str | None = None) -> str: + """Creates a cache key from URL and algorithm""" + return f"{url}:{algorithm or ''}" diff --git a/src/otdf_python/key_type.py b/src/otdf_python/key_type.py new file mode 100644 index 0000000..cb14f53 --- /dev/null +++ b/src/otdf_python/key_type.py @@ -0,0 +1,31 @@ +from enum import Enum + + +class KeyType(Enum): + RSA2048Key = "rsa:2048" + EC256Key = "ec:secp256r1" + EC384Key = "ec:secp384r1" + EC521Key = "ec:secp521r1" + + def __str__(self): + return self.value + + def get_curve_name(self): + if self == KeyType.EC256Key: + return "secp256r1" + elif self == KeyType.EC384Key: + return "secp384r1" + elif self == KeyType.EC521Key: + return "secp521r1" + else: + raise ValueError(f"Unsupported key type: {self}") + + @staticmethod + def from_string(key_type): + for t in KeyType: + if t.value.lower() == key_type.lower(): + return t + raise ValueError(f"No enum constant for key type: {key_type}") + + def is_ec(self): + return self != KeyType.RSA2048Key diff --git a/src/otdf_python/key_type_constants.py b/src/otdf_python/key_type_constants.py new file mode 100644 index 0000000..a99da44 --- /dev/null +++ b/src/otdf_python/key_type_constants.py @@ -0,0 +1,43 @@ +""" +Constants for session key types used in the KAS client. +This matches the Java SDK's KeyType enum pattern. +""" + +from enum import Enum, auto + + +class KeyType(Enum): + """ + Enum for key types used in the KAS client. + """ + + RSA2048 = auto() + EC_P256 = auto() + EC_P384 = auto() + EC_P521 = auto() + + @property + def is_ec(self): + """ + Returns True if this key type is an EC key, False otherwise. + """ + return self in [KeyType.EC_P256, KeyType.EC_P384, KeyType.EC_P521] + + @property + def curve_name(self): + """ + Returns the curve name for EC keys. 
+ """ + if self == KeyType.EC_P256: + return "P-256" + elif self == KeyType.EC_P384: + return "P-384" + elif self == KeyType.EC_P521: + return "P-521" + else: + return None + + +# Constants for backward compatibility with string literals +RSA_KEY_TYPE = KeyType.RSA2048 +EC_KEY_TYPE = KeyType.EC_P256 # Default EC curve diff --git a/src/otdf_python/manifest.py b/src/otdf_python/manifest.py new file mode 100644 index 0000000..1ebbae3 --- /dev/null +++ b/src/otdf_python/manifest.py @@ -0,0 +1,215 @@ +import json +from dataclasses import asdict, dataclass, field +from typing import Any + + +@dataclass +class ManifestSegment: + hash: str + segmentSize: int + encryptedSegmentSize: int + + +@dataclass +class ManifestRootSignature: + alg: str + sig: str + + +@dataclass +class ManifestIntegrityInformation: + rootSignature: ManifestRootSignature + segmentHashAlg: str + segmentSizeDefault: int + encryptedSegmentSizeDefault: int + segments: list[ManifestSegment] + + +@dataclass +class ManifestPolicyBinding: + alg: str + hash: str + + +@dataclass +class ManifestKeyAccess: + type: str + url: str + protocol: str + wrappedKey: str + policyBinding: Any = None + encryptedMetadata: str | None = None + kid: str | None = None + sid: str | None = None + schemaVersion: str | None = None + ephemeralPublicKey: str | None = None + + +@dataclass +class ManifestMethod: + algorithm: str + iv: str + isStreamable: bool | None = None + + +@dataclass +class ManifestEncryptionInformation: + type: str + policy: str + keyAccess: list[ManifestKeyAccess] + method: ManifestMethod + integrityInformation: ManifestIntegrityInformation + + +@dataclass +class ManifestPayload: + type: str + url: str + protocol: str + mimeType: str + isEncrypted: bool + + +@dataclass +class ManifestBinding: + method: str + signature: str + + +@dataclass +class ManifestAssertion: + id: str + type: str + scope: str + appliesTo_state: str + statement: Any + binding: ManifestBinding | None = None + + +@dataclass +class Manifest: + schemaVersion: str | None = None + encryptionInformation: ManifestEncryptionInformation | None = None + payload: ManifestPayload | None = None + assertions: list[ManifestAssertion] = field(default_factory=list) + + def _remove_none_values_and_empty_lists(self, obj): + """Recursively remove None values and empty lists from dictionaries and lists.""" + if isinstance(obj, dict): + cleaned = {} + for k, v in obj.items(): + if v is not None: + # For 'assertions' field, exclude if it's an empty list + if k == "assertions" and isinstance(v, list) and len(v) == 0: + continue + cleaned[k] = self._remove_none_values_and_empty_lists(v) + return cleaned + elif isinstance(obj, list): + return [ + self._remove_none_values_and_empty_lists(item) + for item in obj + if item is not None + ] + else: + return obj + + def to_json(self) -> str: + # Create manifest dict with fields ordered to match otdfctl expectations + # Order: encryptionInformation, payload, schemaVersion, assertions + manifest_dict = {} + + # Add fields in the order expected by otdfctl + if self.encryptionInformation is not None: + manifest_dict["encryptionInformation"] = asdict(self.encryptionInformation) + + if self.payload is not None: + manifest_dict["payload"] = asdict(self.payload) + + if self.schemaVersion is not None: + manifest_dict["schemaVersion"] = self.schemaVersion + + if self.assertions and len(self.assertions) > 0: + manifest_dict["assertions"] = [ + asdict(assertion) for assertion in self.assertions + ] + + cleaned_dict = 
self._remove_none_values_and_empty_lists(manifest_dict) + return json.dumps(cleaned_dict, default=str) + + @staticmethod + def from_json(data: str) -> "Manifest": + d = json.loads(data) + + # Recursively instantiate nested dataclasses + def _payload(p): + return ManifestPayload(**p) if p else None + + def _segment(s): + return ManifestSegment(**s) + + def _root_sig(rs): + return ManifestRootSignature(**rs) + + def _integrity(i): + # Handle both snake_case and camelCase fields + # TODO: This can probably be simplified to only camelCase + return ManifestIntegrityInformation( + rootSignature=_root_sig( + i.get("rootSignature", i.get("root_signature")) + ), + segmentHashAlg=i.get("segmentHashAlg", i.get("segment_hash_alg")), + segmentSizeDefault=i.get( + "segmentSizeDefault", i.get("segment_size_default") + ), + encryptedSegmentSizeDefault=i.get( + "encryptedSegmentSizeDefault", + i.get("encrypted_segment_size_default"), + ), + segments=[_segment(s) for s in i["segments"]], + ) + + def _method(m): + return ManifestMethod(**m) + + def _key_access(k): + return ManifestKeyAccess(**k) + + def _enc_info(e): + # Handle both snake_case and camelCase fields + # TODO: This can probably be simplified to only camelCase + return ManifestEncryptionInformation( + type=e.get("type", e.get("key_access_type", "split")), + policy=e["policy"], + keyAccess=[ + _key_access(k) + for k in e.get("keyAccess", e.get("key_access_obj", [])) + ], + method=_method(e["method"]), + integrityInformation=_integrity( + e.get("integrityInformation", e.get("integrity_information")) + ), + ) + + def _binding(b): + return ManifestBinding(**b) if b else None + + def _assertion(a): + return ManifestAssertion( + id=a["id"], + type=a["type"], + scope=a["scope"], + appliesTo_state=a.get("appliesTo_state", a.get("applies_to_state")), + statement=a["statement"], + binding=_binding(a.get("binding")), + ) + + return Manifest( + schemaVersion=d.get("schemaVersion", d.get("tdf_version")), + encryptionInformation=_enc_info( + d.get("encryptionInformation", d.get("encryption_information")) + ) + if d.get("encryptionInformation") or d.get("encryption_information") + else None, + payload=_payload(d["payload"]) if d.get("payload") else None, + assertions=[_assertion(a) for a in d.get("assertions", [])], + ) diff --git a/src/otdf_python/nanotdf.py b/src/otdf_python/nanotdf.py new file mode 100644 index 0000000..d8a063e --- /dev/null +++ b/src/otdf_python/nanotdf.py @@ -0,0 +1,553 @@ +import hashlib +import json +import secrets +from io import BytesIO +from typing import BinaryIO + +from cryptography.hazmat.primitives.ciphers.aead import AESGCM + +from otdf_python.asym_crypto import AsymDecryption +from otdf_python.collection_store import CollectionStore, NoOpCollectionStore +from otdf_python.config import KASInfo, NanoTDFConfig +from otdf_python.constants import MAGIC_NUMBER_AND_VERSION +from otdf_python.ecc_mode import ECCMode +from otdf_python.policy_info import PolicyInfo +from otdf_python.policy_object import AttributeObject, PolicyBody, PolicyObject +from otdf_python.policy_stub import NULL_POLICY_UUID +from otdf_python.resource_locator import ResourceLocator +from otdf_python.sdk_exceptions import SDKException +from otdf_python.symmetric_and_payload_config import SymmetricAndPayloadConfig + + +class NanoTDFException(SDKException): + pass + + +class NanoTDFMaxSizeLimit(NanoTDFException): + pass + + +class UnsupportedNanoTDFFeature(NanoTDFException): + pass + + +class InvalidNanoTDFConfig(NanoTDFException): + pass + + +class NanoTDF: + 
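+    """
+    NanoTDF writer/reader.
+
+    Serialized layout produced by create_nano_tdf:
+    [header][IV][ciphertext][wrapped key][2-byte wrapped key length],
+    with payload size capped at K_MAX_TDF_SIZE.
+    """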
MAGIC_NUMBER_AND_VERSION = MAGIC_NUMBER_AND_VERSION + K_MAX_TDF_SIZE = (16 * 1024 * 1024) - 3 - 32 + K_NANOTDF_GMAC_LENGTH = 8 + K_IV_PADDING = 9 + K_NANOTDF_IV_SIZE = 3 + K_EMPTY_IV = bytes([0x0] * 12) + + def __init__(self, services=None, collection_store: CollectionStore | None = None): + self.services = services + self.collection_store = collection_store or NoOpCollectionStore() + + def _create_policy_object(self, attributes: list[str]) -> PolicyObject: + # TODO: Replace this with a proper Policy UUID value + policy_uuid = NULL_POLICY_UUID + data_attributes = [AttributeObject(attribute=a) for a in attributes] + body = PolicyBody(data_attributes=data_attributes, dissem=[]) + return PolicyObject(uuid=policy_uuid, body=body) + + def _serialize_policy_object(self, obj): + """Custom NanoTDF serializer to convert to compatible JSON format.""" + from otdf_python.policy_object import AttributeObject, PolicyBody + + if isinstance(obj, PolicyBody): + # Convert data_attributes to dataAttributes and use null instead of empty array + result = { + "dataAttributes": obj.data_attributes if obj.data_attributes else None, + "dissem": obj.dissem if obj.dissem else None, + } + return result + elif isinstance(obj, AttributeObject): + # Convert snake_case field names to camelCase for JSON serialization + return { + "attribute": obj.attribute, + "displayName": obj.display_name, + "isDefault": obj.is_default, + "pubKey": obj.pub_key, + "kasUrl": obj.kas_url, + } + else: + return obj.__dict__ + + def _prepare_payload(self, payload: bytes | BytesIO) -> bytes: + """ + Convert BytesIO to bytes and validate payload size. + + Args: + payload: The payload data as bytes or BytesIO + + Returns: + bytes: The payload as bytes + + Raises: + NanoTDFMaxSizeLimit: If the payload exceeds the maximum size + """ + if isinstance(payload, BytesIO): + payload = payload.getvalue() + if len(payload) > self.K_MAX_TDF_SIZE: + raise NanoTDFMaxSizeLimit("exceeds max size for nano tdf") + return payload + + def _prepare_policy_data(self, config: NanoTDFConfig) -> tuple[bytes, str]: + """ + Prepare policy data from configuration. + + Args: + config: NanoTDFConfig configuration + + Returns: + tuple: (policy_body, policy_type) + """ + attributes = config.attributes if config.attributes else [] + policy_object = self._create_policy_object(attributes) + policy_json = json.dumps( + policy_object, default=self._serialize_policy_object + ).encode("utf-8") + policy_type = ( + config.policy_type if config.policy_type else "EMBEDDED_POLICY_PLAIN_TEXT" + ) + + if policy_type == "EMBEDDED_POLICY_PLAIN_TEXT": + policy_body = policy_json + else: + # Encrypt policy + policy_key = secrets.token_bytes(32) + aesgcm = AESGCM(policy_key) + iv = secrets.token_bytes(12) + policy_body = aesgcm.encrypt(iv, policy_json, None) + + return policy_body, policy_type + + def _prepare_encryption_key(self, config: NanoTDFConfig) -> bytes: + """Get encryption key from config if provided as hex string, otherwise generate a new random key.""" + key = None + if ( + config.cipher + and isinstance(config.cipher, str) + and all(c in "0123456789abcdefABCDEF" for c in config.cipher) + ): + key = bytes.fromhex(config.cipher) + if not key: + key = secrets.token_bytes(32) + return key + + def _create_header( + self, policy_body: bytes, policy_type: str, config: NanoTDFConfig + ) -> bytes: + """ + Create the NanoTDF header. 
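+        The resulting bytes (matching Header.to_bytes, prefixed with the magic
+        number) are laid out as: [magic + version][KAS ResourceLocator]
+        [ECC mode byte][payload config byte][PolicyInfo][ephemeral key].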
+ + Args: + policy_body: The policy body bytes + policy_type: The policy type string + config: NanoTDFConfig configuration + + Returns: + bytes: The header bytes + """ + from otdf_python.header import Header # Local import to avoid circular import + + # KAS URL from KASInfo or default + kas_url = "https://kas.example.com" + if config.kas_info_list and len(config.kas_info_list) > 0: + kas_url = config.kas_info_list[0].url + + kas_id = "kas-id" # Default KAS ID + kas_locator = ResourceLocator(kas_url, kas_id) + + # Get ECC mode from config or use default + ecc_mode = ECCMode(0, False) + if config.ecc_mode: + if isinstance(config.ecc_mode, str): + ecc_mode = ECCMode.from_string(config.ecc_mode) + else: + ecc_mode = config.ecc_mode + + # Default payload config + payload_config = SymmetricAndPayloadConfig(0, 0, False) + + # Create policy info + policy_info = PolicyInfo() + if policy_type == "EMBEDDED_POLICY_PLAIN_TEXT": + policy_info.set_embedded_plain_text_policy(policy_body) + else: + policy_info.set_embedded_encrypted_text_policy(policy_body) + policy_info.set_policy_binding( + hashlib.sha256(policy_body).digest()[-self.K_NANOTDF_GMAC_LENGTH :] + ) + + # Build the header + header = Header() + header.set_kas_locator(kas_locator) + header.set_ecc_mode(ecc_mode) + header.set_payload_config(payload_config) + header.set_policy_info(policy_info) + header.set_ephemeral_key( + secrets.token_bytes( + ECCMode.get_ec_compressed_pubkey_size( + ecc_mode.get_elliptic_curve_type() + ) + ) + ) + + # Generate and return the header bytes with magic number + header_bytes = header.to_bytes() + return self.MAGIC_NUMBER_AND_VERSION + header_bytes + + def _wrap_key_if_needed( + self, key: bytes, config: NanoTDFConfig + ) -> tuple[bytes, bytes | None]: + """ + Wrap encryption key if KAS public key is provided. + + Args: + key: The encryption key + config: NanoTDFConfig with potential KASInfo + + Returns: + tuple: (wrapped_key, kas_public_key) + """ + kas_public_key = None + wrapped_key = None + + if config.kas_info_list and len(config.kas_info_list) > 0: + # Get the first KASInfo with a public_key + for kas_info in config.kas_info_list: + if kas_info.public_key: + kas_public_key = kas_info.public_key + break + + if kas_public_key: + from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes, serialization + from cryptography.hazmat.primitives.asymmetric import padding + + public_key = serialization.load_pem_public_key( + kas_public_key.encode(), backend=default_backend() + ) + wrapped_key = public_key.encrypt( + key, + padding.OAEP( + mgf=padding.MGF1(algorithm=hashes.SHA1()), + algorithm=hashes.SHA1(), + label=None, + ), + ) + + return wrapped_key, kas_public_key + + def _encrypt_payload(self, payload: bytes, key: bytes) -> tuple[bytes, bytes]: + """ + Encrypt the payload using AES-GCM. + + Args: + payload: The payload to encrypt + key: The encryption key + + Returns: + tuple: (iv, ciphertext) + """ + iv = secrets.token_bytes(self.K_NANOTDF_IV_SIZE) + iv_padded = self.K_EMPTY_IV[: self.K_IV_PADDING] + iv + aesgcm = AESGCM(key) + ciphertext = aesgcm.encrypt(iv_padded, payload, None) + return iv, ciphertext + + def create_nano_tdf( + self, payload: bytes | BytesIO, output_stream: BinaryIO, config: NanoTDFConfig + ) -> int: + """ + Creates a NanoTDF with the provided payload and writes it to the output stream. + Supports KAS key wrapping if KAS info with public key is provided in config. 
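+        The stream receives the header followed by
+        [IV][ciphertext][wrapped key][2-byte wrapped key length]; a zero
+        length field is written when no KAS public key is available.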
+ + Args: + payload: The payload data as bytes or BytesIO + output_stream: The output stream to write the NanoTDF to + config: NanoTDFConfig configuration for the NanoTDF creation + + Returns: + int: The size of the created NanoTDF + + Raises: + NanoTDFMaxSizeLimit: If the payload exceeds the maximum size + UnsupportedNanoTDFFeature: If an unsupported feature is requested + InvalidNanoTDFConfig: If the configuration is invalid + SDKException: For other errors + """ + + # Process payload and validate size + payload = self._prepare_payload(payload) + + # Process policy data + policy_body, policy_type = self._prepare_policy_data(config) + + # Get or generate encryption key + key = self._prepare_encryption_key(config) + + # Create header and write to output + header_bytes = self._create_header(policy_body, policy_type, config) + output_stream.write(header_bytes) + + # Encrypt payload + iv, ciphertext = self._encrypt_payload(payload, key) + + # Wrap key if needed + wrapped_key, kas_public_key = self._wrap_key_if_needed(key, config) + + # Compose the complete NanoTDF: [IV][CIPHERTEXT][WRAPPED_KEY][WRAPPED_KEY_LEN] + if wrapped_key: + nano_tdf_data = ( + iv + ciphertext + wrapped_key + len(wrapped_key).to_bytes(2, "big") + ) + else: + nano_tdf_data = iv + ciphertext + (0).to_bytes(2, "big") + + output_stream.write(nano_tdf_data) + return len(header_bytes) + len(nano_tdf_data) + + def read_nano_tdf( + self, + nano_tdf_data: bytes | BytesIO, + output_stream: BinaryIO, + config: NanoTDFConfig, + platform_url: str | None = None, + ) -> None: + """ + Reads a NanoTDF and writes the payload to the output stream. + Supports KAS key unwrapping if kas_private_key is provided in config. + + Args: + nano_tdf_data: The NanoTDF data as bytes or BytesIO + output_stream: The output stream to write the payload to + config: Configuration for the NanoTDF reader + platform_url: Optional platform URL for KAS resolution + + Raises: + InvalidNanoTDFConfig: If the NanoTDF format is invalid or config is missing required info + SDKException: For other errors + """ + # Convert to bytes if BytesIO + if isinstance(nano_tdf_data, BytesIO): + nano_tdf_data = nano_tdf_data.getvalue() + + from otdf_python.header import Header # Local import to avoid circular import + + try: + header_len = Header.peek_length(nano_tdf_data) + except Exception: + raise InvalidNanoTDFConfig("Failed to parse NanoTDF header.") + payload_start = header_len + payload = nano_tdf_data[payload_start:] + # Do not check for magic/version in payload; it is only at the start of the header + iv = payload[0:3] + iv_padded = self.K_EMPTY_IV[: self.K_IV_PADDING] + iv + # Find wrapped key + wrapped_key_len = int.from_bytes(payload[-2:], "big") + if wrapped_key_len > 0: + wrapped_key = payload[-(2 + wrapped_key_len) : -2] + + # Get private key and mock unwrap config + kas_private_key = None + # Try to get from cipher field if it looks like a PEM key + if ( + config.cipher + and isinstance(config.cipher, str) + and "-----BEGIN" in config.cipher + ): + kas_private_key = config.cipher + + # Check if mock unwrap is enabled in config string + kas_mock_unwrap = False + if config.config and "mock_unwrap=true" in config.config.lower(): + kas_mock_unwrap = True + + if not kas_private_key and not kas_mock_unwrap: + raise InvalidNanoTDFConfig("Missing kas_private_key for unwrap.") + if kas_mock_unwrap: + # Use the KAS mock unwrap_nanotdf logic + from otdf_python.sdk import KAS + + key = KAS().unwrap_nanotdf( + curve=None, + header=None, + kas_url=None, + 
wrapped_key=wrapped_key,
+                    kas_private_key=kas_private_key,
+                    mock=True,
+                )
+            else:
+                asym = AsymDecryption(kas_private_key)
+                key = asym.decrypt(wrapped_key)
+            ciphertext = payload[3 : -(2 + wrapped_key_len)]
+        else:
+            # No wrapped key: expect a symmetric key supplied as a hex string
+            # in config.cipher (NanoTDFConfig is a dataclass, not a dict)
+            key = None
+            if (
+                config.cipher
+                and isinstance(config.cipher, str)
+                and all(c in "0123456789abcdefABCDEF" for c in config.cipher)
+            ):
+                key = bytes.fromhex(config.cipher)
+            if not key:
+                raise InvalidNanoTDFConfig("Missing decryption key in config.")
+            ciphertext = payload[3:-2]
+        aesgcm = AESGCM(key)
+        plaintext = aesgcm.decrypt(iv_padded, ciphertext, None)
+        output_stream.write(plaintext)
+
+    def _convert_dict_to_nanotdf_config(self, config: dict) -> NanoTDFConfig:
+        """Convert a dictionary config to a NanoTDFConfig object."""
+        converted_config = NanoTDFConfig()
+        if "attributes" in config:
+            converted_config.attributes = config["attributes"]
+        if "key" in config:
+            converted_config.cipher = (
+                config["key"].hex()
+                if isinstance(config["key"], bytes)
+                else config["key"]
+            )
+        if "kas_public_key" in config:
+            kas_info = KASInfo(
+                url="https://kas.example.com", public_key=config["kas_public_key"]
+            )
+            converted_config.kas_info_list = [kas_info]
+        if "policy_type" in config:
+            converted_config.policy_type = config["policy_type"]
+        return converted_config
+
+    def _handle_legacy_key_config(
+        self, config: dict | NanoTDFConfig
+    ) -> tuple[bytes, dict | NanoTDFConfig]:
+        """Handle key configuration for legacy method."""
+        key = None
+        if isinstance(config, dict) and "key" in config:
+            key = config["key"]
+        elif (
+            hasattr(config, "cipher")
+            and config.cipher
+            and isinstance(config.cipher, str)
+            and all(c in "0123456789abcdefABCDEF" for c in config.cipher)
+        ):
+            key = bytes.fromhex(config.cipher)
+
+        if not key:
+            key = secrets.token_bytes(32)
+            if isinstance(config, dict):
+                config["key"] = key
+            else:
+                config.cipher = key.hex()
+        return key, config
+
+    def create_nanotdf(self, data: bytes, config: dict | NanoTDFConfig) -> bytes:
+        """Create a NanoTDF from input data using the provided configuration."""
+        # Header construction is based on the Java implementation; this method
+        # delegates to the more modular create_nano_tdf
+        if len(data) > self.K_MAX_TDF_SIZE:
+            raise NanoTDFMaxSizeLimit("exceeds max size for nano tdf")
+
+        # If config is already a NanoTDFConfig, use it; otherwise create one
+        if not isinstance(config, NanoTDFConfig):
+            config = self._convert_dict_to_nanotdf_config(config)
+
+        # Create output buffer
+        output = BytesIO()
+
+        # Create NanoTDF using the new method
+        self.create_nano_tdf(data, output, config)
+
+        # Return the bytes
+        output.seek(0)
+        return output.getvalue()
+
+    def _convert_dict_to_read_config(self, config: dict) -> NanoTDFConfig:
+        """Convert a dictionary config to a NanoTDFConfig object for reading."""
+        converted_config = NanoTDFConfig()
+        if "key" in config:
+            converted_config.cipher = (
+                config["key"].hex()
+                if isinstance(config["key"], bytes)
+                else config["key"]
+            )
+        if "kas_private_key" in config:
+            converted_config.cipher = config["kas_private_key"]
+        return converted_config
+
+    def _extract_key_for_reading(
+        self, config: dict | NanoTDFConfig | None, wrapped_key: bytes | None
+    ) -> bytes:
+        """Extract the decryption key from config or unwrap it."""
+        # For wrapped key case
+        if wrapped_key:
+            kas_private_key = None
+            if isinstance(config, dict):
+                kas_private_key = config.get("kas_private_key")
+            elif (
+                config
+                and config.cipher
+                and isinstance(config.cipher, str)
+                and "-----BEGIN" in config.cipher
+            ):
+                kas_private_key = config.cipher
+
+            if not kas_private_key:
+                raise InvalidNanoTDFConfig("Missing kas_private_key for unwrap.")
+
+            asym =
AsymDecryption(kas_private_key) + return asym.decrypt(wrapped_key) + + # For symmetric key case + key = None + if isinstance(config, dict): + key = config.get("key") + elif ( + config + and config.cipher + and isinstance(config.cipher, str) + and all(c in "0123456789abcdefABCDEF" for c in config.cipher) + ): + key = bytes.fromhex(config.cipher) + if not key: + raise InvalidNanoTDFConfig("Missing decryption key in config.") + return key + + def read_nanotdf( + self, nanotdf_bytes: bytes, config: dict | NanoTDFConfig | None = None + ) -> bytes: + """Read and decrypt a NanoTDF, returning the original plaintext data.""" + output = BytesIO() + from otdf_python.header import Header # Local import to avoid circular import + + # Convert config to NanoTDFConfig if it's a dict + if isinstance(config, dict): + config = self._convert_dict_to_read_config(config) + + try: + header_len = Header.peek_length(nanotdf_bytes) + payload = nanotdf_bytes[header_len:] + + # Extract components + iv = payload[0:3] + iv_padded = self.K_EMPTY_IV[: self.K_IV_PADDING] + iv + wrapped_key_len = int.from_bytes(payload[-2:], "big") + + wrapped_key = None + if wrapped_key_len > 0: + wrapped_key = payload[-(2 + wrapped_key_len) : -2] + ciphertext = payload[3 : -(2 + wrapped_key_len)] + else: + ciphertext = payload[3:-2] + + # Get the decryption key + key = self._extract_key_for_reading(config, wrapped_key) + + # Decrypt the payload + aesgcm = AESGCM(key) + plaintext = aesgcm.decrypt(iv_padded, ciphertext, None) + output.write(plaintext) + + except Exception as e: + # Re-raise with a clearer message + raise InvalidNanoTDFConfig(f"Error reading NanoTDF: {e!s}") + + return output.getvalue() diff --git a/src/otdf_python/nanotdf_ecdsa_struct.py b/src/otdf_python/nanotdf_ecdsa_struct.py new file mode 100644 index 0000000..da14939 --- /dev/null +++ b/src/otdf_python/nanotdf_ecdsa_struct.py @@ -0,0 +1,132 @@ +""" +NanoTDF ECDSA Signature Structure. +""" + +from dataclasses import dataclass, field + + +class IncorrectNanoTDFECDSASignatureSize(Exception): + """Exception raised when the signature size is incorrect.""" + + pass + + +@dataclass +class NanoTDFECDSAStruct: + """ + Class to handle ECDSA signature structure for NanoTDF. + + This structure represents an ECDSA signature as required by the NanoTDF format. + It consists of r and s values along with their lengths. + """ + + r_length: bytearray = field(default_factory=lambda: bytearray(1)) + r_value: bytearray = None + s_length: bytearray = field(default_factory=lambda: bytearray(1)) + s_value: bytearray = None + + @classmethod + def from_bytes( + cls, ecdsa_signature_value: bytes, key_size: int + ) -> "NanoTDFECDSAStruct": + """ + Create a NanoTDFECDSAStruct from a byte array. + + Args: + ecdsa_signature_value: The signature value as bytes + key_size: The size of the key in bytes + + Returns: + A new NanoTDFECDSAStruct + + Raises: + IncorrectNanoTDFECDSASignatureSize: If the signature buffer size is invalid + """ + if len(ecdsa_signature_value) != (2 * key_size) + 2: + raise IncorrectNanoTDFECDSASignatureSize( + f"Invalid signature buffer size. 
Expected {(2 * key_size) + 2}, got {len(ecdsa_signature_value)}"
+            )
+
+        struct_obj = cls()
+
+        # Copy value of r_length to signature struct
+        index = 0
+        struct_obj.r_length[0] = ecdsa_signature_value[index]
+
+        # Copy the contents of r_value to signature struct
+        index += 1
+        r_len = struct_obj.r_length[0]
+        struct_obj.r_value = bytearray(key_size)
+        struct_obj.r_value[:r_len] = ecdsa_signature_value[index : index + r_len]
+
+        # Copy value of s_length to signature struct
+        index += key_size
+        struct_obj.s_length[0] = ecdsa_signature_value[index]
+
+        # Copy value of s_value
+        index += 1
+        s_len = struct_obj.s_length[0]
+        struct_obj.s_value = bytearray(key_size)
+        struct_obj.s_value[:s_len] = ecdsa_signature_value[index : index + s_len]
+
+        return struct_obj
+
+    def as_bytes(self) -> bytes:
+        """
+        Convert the signature structure to bytes.
+        Raises ValueError if r_value or s_value is None.
+        """
+        if self.r_value is None or self.s_value is None:
+            raise ValueError("r_value and s_value must not be None")
+        total_size = 1 + len(self.r_value) + 1 + len(self.s_value)
+        signature = bytearray(total_size)
+
+        # Copy value of r_length
+        index = 0
+        signature[index] = self.r_length[0]
+
+        # Copy the contents of r_value
+        index += 1
+        signature[index : index + len(self.r_value)] = self.r_value
+
+        # Copy value of s_length
+        index += len(self.r_value)
+        signature[index] = self.s_length[0]
+
+        # Copy value of s_value
+        index += 1
+        signature[index : index + len(self.s_value)] = self.s_value
+
+        return bytes(signature)
+
+    def get_s_value(self) -> bytearray:
+        """Get the s value of the signature."""
+        return self.s_value
+
+    def set_s_value(self, s_value: bytearray) -> None:
+        """Set the s value of the signature."""
+        self.s_value = s_value
+
+    def get_s_length(self) -> int:
+        """Get the length of the s value."""
+        return self.s_length[0]
+
+    def set_s_length(self, s_length: int) -> None:
+        """Set the length of the s value."""
+        self.s_length[0] = s_length
+
+    def get_r_value(self) -> bytearray:
+        """Get the r value of the signature."""
+        return self.r_value
+
+    def set_r_value(self, r_value: bytearray) -> None:
+        """Set the r value of the signature."""
+        self.r_value = r_value
+
+    def get_r_length(self) -> int:
+        """Get the length of the r value."""
+        return self.r_length[0]
+
+    def set_r_length(self, r_length: int) -> None:
+        """Set the length of the r value."""
+        self.r_length[0] = r_length
diff --git a/src/otdf_python/nanotdf_type.py b/src/otdf_python/nanotdf_type.py
new file mode 100644
index 0000000..4ce112e
--- /dev/null
+++ b/src/otdf_python/nanotdf_type.py
@@ -0,0 +1,43 @@
+from enum import Enum
+
+
+class ECCurve(Enum):
+    SECP256R1 = "secp256r1"
+    SECP384R1 = "secp384r1"
+    SECP521R1 = "secp521r1"
+    SECP256K1 = "secp256k1"
+
+    def __str__(self):
+        return self.value
+
+
+class Protocol(Enum):
+    HTTP = "HTTP"
+    HTTPS = "HTTPS"
+
+
+class IdentifierType(Enum):
+    NONE = 0
+    TWO_BYTES = 2
+    EIGHT_BYTES = 8
+    THIRTY_TWO_BYTES = 32
+
+    def get_length(self):
+        return self.value
+
+
+class PolicyType(Enum):
+    REMOTE_POLICY = 0
+    EMBEDDED_POLICY_PLAIN_TEXT = 1
+    EMBEDDED_POLICY_ENCRYPTED = 2
+    EMBEDDED_POLICY_ENCRYPTED_POLICY_KEY_ACCESS = 3
+
+
+class Cipher(Enum):
+    AES_256_GCM_64_TAG = 0
+    AES_256_GCM_96_TAG = 1
+    AES_256_GCM_104_TAG = 2
+    AES_256_GCM_112_TAG = 3
+    AES_256_GCM_120_TAG = 4
+    AES_256_GCM_128_TAG = 5
+    EAD_AES_256_HMAC_SHA_256 = 6
diff --git a/src/otdf_python/policy_binding_serializer.py b/src/otdf_python/policy_binding_serializer.py
new file mode 100644
index 0000000..72e3849
---
/dev/null +++ b/src/otdf_python/policy_binding_serializer.py @@ -0,0 +1,39 @@ +from typing import Any + + +class PolicyBinding: + """ + Represents a policy binding in the TDF manifest. + This is a placeholder implementation as the complete details of + the PolicyBinding class aren't provided in the code snippets. + """ + + def __init__(self, **kwargs): + for key, value in kwargs.items(): + setattr(self, key, value) + + +class PolicyBindingSerializer: + """ + Handles serialization and deserialization of policy bindings. + This class provides static methods to convert between JSON representations + and PolicyBinding objects. + """ + + @staticmethod + def deserialize( + json_data: Any, typeofT: type | None = None, context: Any = None + ) -> Any: + if isinstance(json_data, dict): + return PolicyBinding(**json_data) + if isinstance(json_data, str): + return json_data + raise ValueError("Invalid type for PolicyBinding deserialization") + + @staticmethod + def serialize( + src: Any, typeofSrc: type | None = None, context: Any = None + ) -> dict | str: + if isinstance(src, PolicyBinding): + return vars(src) + return str(src) diff --git a/src/otdf_python/policy_info.py b/src/otdf_python/policy_info.py new file mode 100644 index 0000000..467d816 --- /dev/null +++ b/src/otdf_python/policy_info.py @@ -0,0 +1,78 @@ +class PolicyInfo: + def __init__( + self, + policy_type: int = 0, + has_ecdsa_binding: bool = False, + body: bytes | None = None, + binding: bytes | None = None, + ): + self.policy_type = policy_type + self.has_ecdsa_binding = has_ecdsa_binding + self.body = body + self.binding = binding + + def set_embedded_plain_text_policy(self, body: bytes): + self.body = body + self.policy_type = 1 # Placeholder for EMBEDDED_POLICY_PLAIN_TEXT + + def set_embedded_encrypted_text_policy(self, body: bytes): + self.body = body + self.policy_type = 2 # Placeholder for EMBEDDED_POLICY_ENCRYPTED + + def set_policy_binding(self, binding: bytes): + self.binding = binding + + def get_body(self) -> bytes | None: + return self.body + + def get_binding(self) -> bytes | None: + return self.binding + + def get_total_size(self) -> int: + size = 1 # policy_type + size += 2 # body_len + size += len(self.body) if self.body else 0 + size += 1 # binding_len + size += len(self.binding) if self.binding else 0 + return size + + def write_into_buffer(self, buffer: bytearray, offset: int = 0) -> int: + start = offset + buffer[offset] = self.policy_type + offset += 1 + body_len = len(self.body) if self.body else 0 + buffer[offset : offset + 2] = body_len.to_bytes(2, "big") + offset += 2 + if self.body: + buffer[offset : offset + body_len] = self.body + offset += body_len + binding_len = len(self.binding) if self.binding else 0 + buffer[offset] = binding_len + offset += 1 + if self.binding: + buffer[offset : offset + binding_len] = self.binding + offset += binding_len + return offset - start + + @staticmethod + def from_bytes_with_size(buffer: bytes, ecc_mode): + # Based on Java implementation: parse policy_type (1 byte), body_len (2 bytes), body, binding_len (1 byte), binding + offset = 0 + if len(buffer) < 4: + raise ValueError("Buffer too short for PolicyInfo header") + policy_type = buffer[offset] + offset += 1 + body_len = int.from_bytes(buffer[offset : offset + 2], "big") + offset += 2 + if len(buffer) < offset + body_len + 1: + raise ValueError("Buffer too short for PolicyInfo body") + body = buffer[offset : offset + body_len] + offset += body_len + binding_len = buffer[offset] + offset += 1 + if len(buffer) < offset + 
binding_len: + raise ValueError("Buffer too short for PolicyInfo binding") + binding = buffer[offset : offset + binding_len] + offset += binding_len + pi = PolicyInfo(policy_type=policy_type, body=body, binding=binding) + return pi, offset diff --git a/src/otdf_python/policy_object.py b/src/otdf_python/policy_object.py new file mode 100644 index 0000000..83baa78 --- /dev/null +++ b/src/otdf_python/policy_object.py @@ -0,0 +1,22 @@ +from dataclasses import dataclass + + +@dataclass +class AttributeObject: + attribute: str + display_name: str | None = None + is_default: bool = False + pub_key: str | None = None + kas_url: str | None = None + + +@dataclass +class PolicyBody: + data_attributes: list[AttributeObject] + dissem: list[str] + + +@dataclass +class PolicyObject: + uuid: str + body: PolicyBody diff --git a/src/otdf_python/policy_stub.py b/src/otdf_python/policy_stub.py new file mode 100644 index 0000000..8001149 --- /dev/null +++ b/src/otdf_python/policy_stub.py @@ -0,0 +1,2 @@ +# TODO: Replace this with a proper Policy UUID values +NULL_POLICY_UUID: str = "00000000-0000-0000-0000-000000000000" diff --git a/src/otdf_python/resource_locator.py b/src/otdf_python/resource_locator.py new file mode 100644 index 0000000..fd80065 --- /dev/null +++ b/src/otdf_python/resource_locator.py @@ -0,0 +1,44 @@ +class ResourceLocator: + def __init__(self, resource_url: str | None = None, identifier: str | None = None): + self.resource_url = resource_url + self.identifier = identifier + + def get_resource_url(self): + return self.resource_url + + def get_identifier(self): + return self.identifier + + def to_bytes(self): + # Based on Java implementation: [url_len][url_bytes][id_len][id_bytes], each len is 1 byte + url_bytes = (self.resource_url or "").encode() + id_bytes = (self.identifier or "").encode() + if len(url_bytes) > 255 or len(id_bytes) > 255: + raise ValueError("ResourceLocator fields too long for 1-byte length prefix") + return bytes([len(url_bytes)]) + url_bytes + bytes([len(id_bytes)]) + id_bytes + + def get_total_size(self) -> int: + return len(self.to_bytes()) + + def write_into_buffer(self, buffer: bytearray, offset: int = 0) -> int: + data = self.to_bytes() + buffer[offset : offset + len(data)] = data + return len(data) + + @staticmethod + def from_bytes_with_size(buffer: bytes): + # Based on Java implementation: [url_len][url_bytes][id_len][id_bytes] + if len(buffer) < 2: + raise ValueError("Buffer too short for ResourceLocator") + url_len = buffer[0] + if len(buffer) < 1 + url_len + 1: + raise ValueError("Buffer too short for ResourceLocator url") + url_bytes = buffer[1 : 1 + url_len] + id_len = buffer[1 + url_len] + if len(buffer) < 1 + url_len + 1 + id_len: + raise ValueError("Buffer too short for ResourceLocator id") + id_bytes = buffer[1 + url_len + 1 : 1 + url_len + 1 + id_len] + resource_url = url_bytes.decode() + identifier = id_bytes.decode() + size = 1 + url_len + 1 + id_len + return ResourceLocator(resource_url, identifier), size diff --git a/src/otdf_python/sdk.py b/src/otdf_python/sdk.py new file mode 100644 index 0000000..407db14 --- /dev/null +++ b/src/otdf_python/sdk.py @@ -0,0 +1,528 @@ +""" +Python port of the main SDK class for OpenTDF platform interaction. 
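+
+Illustrative sketch (assumes an already-constructed SDK instance with
+platform_url set; the attribute FQN below is a placeholder):
+
+    config = sdk.new_tdf_config(
+        attributes=["https://example.com/attr/classification/value/open"]
+    )
+    # config.kas_info_list defaults to a single KAS at "<platform_url>/kas"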
+""" + +from contextlib import AbstractContextManager +from io import BytesIO +from typing import Any, BinaryIO + +from otdf_python.config import NanoTDFConfig, TDFConfig +from otdf_python.nanotdf import NanoTDF +from otdf_python.sdk_exceptions import SDKException +from otdf_python.tdf import TDF, TDFReader, TDFReaderConfig + + +# Stubs for service client interfaces (to be implemented) +class AttributesServiceClientInterface: ... + + +class NamespaceServiceClientInterface: ... + + +class SubjectMappingServiceClientInterface: ... + + +class ResourceMappingServiceClientInterface: ... + + +class AuthorizationServiceClientInterface: ... + + +class KeyAccessServerRegistryServiceClientInterface: ... + + +# Placeholder for ProtocolClient and Interceptor +class ProtocolClient: ... + + +class Interceptor: ... # Can be dict in Python implementation + + +# Placeholder for TrustManager +class TrustManager: ... + + +class KAS(AbstractContextManager): + """ + KAS (Key Access Service) interface to define methods related to key access and management. + """ + + def get_public_key(self, kas_info: Any) -> Any: + """ + Retrieves the public key from the KAS for RSA operations. + If the public key is cached, returns the cached value. + Otherwise, makes a request to the KAS. + + Args: + kas_info: KASInfo object containing the URL and algorithm + + Returns: + Updated KASInfo object with KID and PublicKey populated + + Raises: + SDKException: If there's an error retrieving the public key + """ + # Delegate to the underlying KAS client which handles authentication properly + return self._kas_client.get_public_key(kas_info) + + def __init__( + self, + platform_url=None, + token_source=None, + sdk_ssl_verify=True, + use_plaintext=False, + auth_headers: dict | None = None, + ): + """ + Initialize the KAS client + + Args: + platform_url: URL of the platform + token_source: Function that returns an authentication token + sdk_ssl_verify: Whether to verify SSL certificates + use_plaintext: Whether to use plaintext HTTP connections instead of HTTPS + auth_headers: Dictionary of authentication headers to include in requests + """ + from .kas_client import KASClient + + self._kas_client = KASClient( + kas_url=platform_url, + token_source=token_source, + verify_ssl=sdk_ssl_verify, + use_plaintext=use_plaintext, + ) + # Store the parameters for potential use + self._sdk_ssl_verify = sdk_ssl_verify + self._use_plaintext = use_plaintext + self._auth_headers = auth_headers + + def get_ec_public_key(self, kas_info: Any, curve: Any) -> Any: + """ + Retrieves the EC public key from the KAS. + + Args: + kas_info: KASInfo object containing the URL + curve: The EC curve to use + + Returns: + Updated KASInfo object with KID and PublicKey populated + """ + # Set algorithm to "ec:" + from copy import copy + + kas_info_copy = copy(kas_info) + kas_info_copy.algorithm = f"ec:{curve}" + return self.get_public_key(kas_info_copy) + + def unwrap(self, key_access: Any, policy: str, session_key_type: Any) -> bytes: + """ + Unwraps the key using the KAS. + + Args: + key_access: KeyAccess object containing the wrapped key + policy: Policy JSON string + session_key_type: Type of session key (RSA, EC) + + Returns: + Unwrapped key as bytes + """ + return self._kas_client.unwrap(key_access, policy, session_key_type) + + def unwrap_nanotdf( + self, + curve: Any, + header: str, + kas_url: str, + wrapped_key: bytes | None = None, + kas_private_key: str | None = None, + mock: bool = False, + ) -> bytes: + """ + Unwraps the NanoTDF key using the KAS. 
If mock=True, performs local unwrap using the private key (for tests). + + Args: + curve: EC curve used + header: NanoTDF header + kas_url: URL of the KAS + wrapped_key: Optional wrapped key bytes (for mock mode) + kas_private_key: Optional KAS private key (for mock mode) + mock: If True, unwrap locally using provided private key + + Returns: + Unwrapped key as bytes + """ + if mock and wrapped_key and kas_private_key: + from .asym_decryption import AsymDecryption + + asym = AsymDecryption(private_key_pem=kas_private_key) + return asym.decrypt(wrapped_key) + + # This would be implemented using nanotdf-specific logic + raise NotImplementedError("KAS unwrap_nanotdf not implemented.") + + def get_key_cache(self) -> Any: + """ + Returns the KAS key cache. + + Returns: + The KAS key cache object + """ + return self._kas_client.get_key_cache() + + def close(self): + """Closes resources associated with the KAS interface""" + pass + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + +class SDK(AbstractContextManager): + def new_tdf_config( + self, attributes: list[str] | None = None, **kwargs + ) -> TDFConfig: + """ + Create a TDFConfig with default kas_info_list from the SDK's platform_url. + """ + from otdf_python.config import KASInfo + + if self.platform_url is None: + raise SDKException("Cannot create TDFConfig: SDK platform_url is not set.") + + # Get use_plaintext setting - allow override via kwargs, fall back to SDK setting + use_plaintext = kwargs.pop( + "use_plaintext", getattr(self, "_use_plaintext", False) + ) + + # Construct proper KAS URL by appending /kas to platform URL, like Java SDK + # Include explicit port for HTTPS to match otdfctl behavior + from urllib.parse import urlparse + + parsed_url = urlparse(self.platform_url) + + # Determine scheme and default port based on use_plaintext setting + if use_plaintext: + target_scheme = "http" + default_port = 80 + else: + target_scheme = "https" + default_port = 443 + + # Use the original scheme if it exists, otherwise apply target_scheme + # This preserves the platform URL's scheme when it's already appropriate + original_scheme = parsed_url.scheme + if original_scheme in ("http", "https"): + # If platform URL already has a scheme, check if it's compatible with use_plaintext + if use_plaintext and original_scheme == "http": + scheme = "http" + elif not use_plaintext and original_scheme == "https": + scheme = "https" + else: + # Scheme conflicts with use_plaintext setting, apply target_scheme + scheme = target_scheme + else: + # No scheme or unknown scheme, apply target_scheme + scheme = target_scheme + + # Handle URL construction with proper scheme and port + if parsed_url.port is None: + # Add explicit port if not present + kas_url = f"{scheme}://{parsed_url.hostname}:{default_port}{parsed_url.path.rstrip('/')}/kas" + else: + # Use existing port with the determined scheme + kas_url = f"{scheme}://{parsed_url.hostname}:{parsed_url.port}{parsed_url.path.rstrip('/')}/kas" + + kas_info = KASInfo(url=kas_url, default=True) + # Accept user override for kas_info_list if provided + kas_info_list = kwargs.pop("kas_info_list", None) + if kas_info_list is None: + kas_info_list = [kas_info] + return TDFConfig( + kas_info_list=kas_info_list, attributes=attributes or [], **kwargs + ) + + """ + Main SDK class for interacting with the OpenTDF platform. + Provides various services for TDF/NanoTDF operations and platform API calls. 
+ """ + + class Services(AbstractContextManager): + """ + The Services interface provides access to various platform service clients and KAS. + """ + + def attributes(self) -> AttributesServiceClientInterface: + """Returns the attributes service client""" + raise NotImplementedError + + def namespaces(self) -> NamespaceServiceClientInterface: + """Returns the namespaces service client""" + raise NotImplementedError + + def subject_mappings(self) -> SubjectMappingServiceClientInterface: + """Returns the subject mappings service client""" + raise NotImplementedError + + def resource_mappings(self) -> ResourceMappingServiceClientInterface: + """Returns the resource mappings service client""" + raise NotImplementedError + + def authorization(self) -> AuthorizationServiceClientInterface: + """Returns the authorization service client""" + raise NotImplementedError + + def kas_registry(self) -> KeyAccessServerRegistryServiceClientInterface: + """Returns the KAS registry service client""" + raise NotImplementedError + + def kas(self) -> KAS: + """ + Returns the KAS client for key access operations. + This should be implemented to return an instance of KAS. + """ + raise NotImplementedError + + def close(self): + """Closes resources associated with the services""" + pass + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __init__( + self, + services: "SDK.Services", + trust_manager: TrustManager | None = None, + auth_interceptor: Interceptor | dict[str, str] | None = None, + platform_services_client: ProtocolClient | None = None, + platform_url: str | None = None, + ssl_verify: bool = True, + use_plaintext: bool = False, + ): + """ + Initializes a new SDK instance. + + Args: + services: The services interface implementation + trust_manager: Optional trust manager for SSL validation + auth_interceptor: Optional auth interceptor for API requests + platform_services_client: Optional client for platform services + platform_url: Optional platform base URL + ssl_verify: Whether to verify SSL certificates (default: True) + use_plaintext: Whether to use HTTP instead of HTTPS (default: False) + """ + self.services = services + self.trust_manager = trust_manager + self.auth_interceptor = auth_interceptor + self.platform_services_client = platform_services_client + self.platform_url = platform_url + self.ssl_verify = ssl_verify + self._use_plaintext = use_plaintext + + def __exit__(self, exc_type, exc_val, exc_tb): + """Clean up resources when exiting context manager""" + self.close() + + def close(self): + """Close the SDK and release resources""" + if hasattr(self.services, "close"): + self.services.close() + + def get_services(self) -> "SDK.Services": + """Returns the services interface""" + return self.services + + def get_trust_manager(self) -> TrustManager | None: + """Returns the trust manager if set""" + return self.trust_manager + + def get_auth_interceptor(self) -> Interceptor | dict[str, str] | None: + """Returns the auth interceptor if set""" + return self.auth_interceptor + + def get_platform_services_client(self) -> ProtocolClient | None: + """Returns the platform services client if set""" + return self.platform_services_client + + def get_platform_url(self) -> str | None: + """Returns the platform URL if set""" + return self.platform_url + + def load_tdf_with_config( + self, tdf_data: bytes | BinaryIO | BytesIO, config: TDFReaderConfig + ) -> TDFReader: + """ + Loads a TDF from the provided data according to the config. 
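+        When config.kas_private_key is set, the payload key is unwrapped
+        locally (useful in tests); otherwise the KAS service performs the
+        unwrap.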
+ + Args: + tdf_data: The TDF data as bytes, file object, or BytesIO + config: TDFReaderConfig dataclass + + Returns: + TDFReader: Contains payload and manifest + + Raises: + SDKException: If there's an error loading the TDF + """ + tdf = TDF(self.services) + return tdf.load_tdf(tdf_data, config) + + def load_tdf_without_config( + self, tdf_data: bytes | BinaryIO | BytesIO + ) -> TDFReader: + """ + Loads a TDF from the provided data. + + Args: + tdf_data: The TDF data as bytes, file object, or BytesIO + + Returns: + TDFReader: Contains payload and manifest + + Raises: + SDKException: If there's an error loading the TDF + """ + tdf = TDF(self.services) + default = TDFReaderConfig() + return tdf.load_tdf(tdf_data, default) + + def create_tdf( + self, + payload: bytes | BinaryIO | BytesIO, + config, + output_stream: BinaryIO | None = None, + ): + """ + Creates a TDF with the provided payload. + + Args: + payload: The payload data as bytes, file object, or BytesIO + config: TDFConfig dataclass from config.py + output_stream: The output stream to write the TDF to + + Returns: + Manifest, size, output_stream + + Raises: + SDKException: If there's an error creating the TDF + """ + tdf = TDF(self.services) + return tdf.create_tdf(payload, config, output_stream) + + def create_nano_tdf( + self, payload: bytes | BytesIO, output_stream: BinaryIO, config: "NanoTDFConfig" + ) -> int: + """ + Creates a NanoTDF with the provided payload. + + Args: + payload: The payload data as bytes or BytesIO + output_stream: The output stream to write the NanoTDF to + config: NanoTDFConfig for the NanoTDF creation + + Returns: + int: The size of the created NanoTDF + + Raises: + SDKException: If there's an error creating the NanoTDF + """ + nano_tdf = NanoTDF(self.services) + return nano_tdf.create_nano_tdf(payload, output_stream, config) + + def read_nano_tdf( + self, + nano_tdf_data: bytes | BytesIO, + output_stream: BinaryIO, + config: NanoTDFConfig, + ) -> None: + """ + Reads a NanoTDF and writes the payload to the output stream. + + Args: + nano_tdf_data: The NanoTDF data as bytes or BytesIO + output_stream: The output stream to write the payload to + config: NanoTDFConfig configuration for the NanoTDF reader + + Raises: + SDKException: If there's an error reading the NanoTDF + """ + nano_tdf = NanoTDF(self.services) + nano_tdf.read_nano_tdf(nano_tdf_data, output_stream, config) + + @staticmethod + def is_tdf(data: bytes | BinaryIO) -> bool: + """ + Checks if the provided data is a TDF. 
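+        A TDF archive is a ZIP containing exactly two entries,
+        "0.manifest.json" and "0.payload", which is what this check verifies.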
+ + Args: + data: The data to check + + Returns: + bool: True if the data is a TDF, False otherwise + """ + import zipfile + from io import BytesIO + + try: + file_like = BytesIO(data) if isinstance(data, bytes | bytearray) else data + with zipfile.ZipFile(file_like) as zf: + names = set(zf.namelist()) + return {"0.manifest.json", "0.payload"}.issubset(names) and len( + names + ) == 2 + except Exception: + return False + + # Exception classes - SDK-specific exceptions that can occur during operations + class SplitKeyException(SDKException): + """Thrown when the SDK encounters an error related to split key operations""" + + pass + + class DataSizeNotSupported(SDKException): + """Thrown when the user attempts to create a TDF with a size larger than the maximum size""" + + pass + + class KasInfoMissing(SDKException): + """Thrown during TDF creation when no KAS information is present""" + + pass + + class KasPublicKeyMissing(SDKException): + """Thrown during encryption when the SDK cannot retrieve the public key for a KAS""" + + pass + + class TamperException(SDKException): + """Base class for exceptions related to signature mismatches""" + + def __init__(self, error_message: str): + super().__init__(f"[tamper detected] {error_message}") + + class RootSignatureValidationException(TamperException): + """Thrown when the root signature validation fails""" + + pass + + class SegmentSignatureMismatch(TamperException): + """Thrown when a segment signature does not match the expected value""" + + pass + + class KasBadRequestException(SDKException): + """Thrown when the KAS returns a bad request response""" + + pass + + class KasAllowlistException(SDKException): + """Thrown when the KAS allowlist check fails""" + + pass + + class AssertionException(SDKException): + """Thrown when an assertion validation fails""" + + def __init__(self, error_message: str, assertion_id: str): + super().__init__(error_message) + self.assertion_id = assertion_id diff --git a/src/otdf_python/sdk_builder.py b/src/otdf_python/sdk_builder.py new file mode 100644 index 0000000..ead42f5 --- /dev/null +++ b/src/otdf_python/sdk_builder.py @@ -0,0 +1,448 @@ +""" +Python port of the SDKBuilder class for OpenTDF platform interaction. +Provides methods to configure and build SDK instances. +""" + +import logging +import os +import ssl +from dataclasses import dataclass +from typing import Any + +import httpx + +from otdf_python.sdk import KAS, SDK +from otdf_python.sdk_exceptions import AutoConfigureException + +# Configure logging +logger = logging.getLogger(__name__) + + +@dataclass +class OAuthConfig: + client_id: str + client_secret: str + grant_type: str = "client_credentials" + scope: str = "openid profile email" + token_endpoint: str | None = None + access_token: str | None = None + + +class SDKBuilder: + """ + A builder class for creating instances of the SDK class. + """ + + PLATFORM_ISSUER = "platform_issuer" + + # Class variable to store the latest platform URL + _platform_url = None + + def __init__(self): + self.platform_endpoint: str | None = None + self.issuer_endpoint: str | None = None + self.oauth_config: OAuthConfig | None = None + self.use_plaintext: bool = False + self.insecure_skip_verify: bool = False + self.ssl_context: ssl.SSLContext | None = None + self.auth_token: str | None = None + self.cert_paths: list[str] = [] + + @staticmethod + def new_builder() -> "SDKBuilder": + """ + Creates a new SDKBuilder instance. 
+ Returns: + SDKBuilder: A new builder instance + """ + return SDKBuilder() + + @staticmethod + def get_platform_url() -> str | None: + """ + Gets the last set platform URL. + Returns: + str | None: The platform URL or None if not set + """ + return SDKBuilder._platform_url + + def ssl_context_from_directory(self, certs_dir_path: str) -> "SDKBuilder": + """ + Add SSL Context with trusted certs from certDirPath + Args: + certs_dir_path: Path to a directory containing .pem or .crt trusted certs + Returns: + self: The builder instance for chaining + """ + self.cert_paths = [] + + # Find all .pem and .crt files in the directory + for filename in os.listdir(certs_dir_path): + if filename.endswith(".pem") or filename.endswith(".crt"): + self.cert_paths.append(os.path.join(certs_dir_path, filename)) + + # Create SSL context with these certificates + if self.cert_paths: + context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) + for cert_path in self.cert_paths: + context.load_verify_locations(cert_path) + self.ssl_context = context + + return self + + def client_secret(self, client_id: str, client_secret: str) -> "SDKBuilder": + """ + Sets client credentials for OAuth 2.0 client_credentials grant. + Args: + client_id: The OAuth client ID + client_secret: The OAuth client secret + Returns: + self: The builder instance for chaining + """ + self.oauth_config = OAuthConfig( + client_id=client_id, client_secret=client_secret + ) + return self + + def set_platform_endpoint(self, endpoint: str) -> "SDKBuilder": + """ + Sets the OpenTDF platform endpoint URL. + Args: + endpoint: The platform endpoint URL + Returns: + self: The builder instance for chaining + """ + # Normalize the endpoint URL + if endpoint and not ( + endpoint.startswith("http://") or endpoint.startswith("https://") + ): + if self.use_plaintext: + endpoint = f"http://{endpoint}" + else: + endpoint = f"https://{endpoint}" + + self.platform_endpoint = endpoint + # Store in class variable for access from other components + SDKBuilder._platform_url = endpoint + return self + + def set_issuer_endpoint(self, issuer: str) -> "SDKBuilder": + """ + Sets the OpenID Connect issuer endpoint URL. + Args: + issuer: The issuer endpoint URL + Returns: + self: The builder instance for chaining + """ + # Normalize the issuer URL + if issuer and not ( + issuer.startswith("http://") or issuer.startswith("https://") + ): + issuer = f"https://{issuer}" + + self.issuer_endpoint = issuer + return self + + def use_insecure_plaintext_connection( + self, use_plaintext: bool = True + ) -> "SDKBuilder": + """ + Configures whether to use plain text (HTTP) connection instead of HTTPS. + Args: + use_plaintext: Whether to use plain text connection + Returns: + self: The builder instance for chaining + """ + self.use_plaintext = use_plaintext + + # Update platform endpoint protocol if necessary + if self.platform_endpoint: + if use_plaintext and self.platform_endpoint.startswith("https://"): + self.platform_endpoint = f"http://{self.platform_endpoint[8:]}" + elif not use_plaintext and self.platform_endpoint.startswith("http://"): + self.platform_endpoint = f"https://{self.platform_endpoint[7:]}" + + # Update the class variable as well since kas() method uses it + SDKBuilder._platform_url = self.platform_endpoint + + return self + + def use_insecure_skip_verify(self, skip_verify: bool = True) -> "SDKBuilder": + """ + Configures whether to skip SSL verification. 
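+        Intended for development and testing only; production deployments
+        should keep SSL verification enabled.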
+ Args: + skip_verify: Whether to skip SSL verification + Returns: + self: The builder instance for chaining + """ + self.insecure_skip_verify = skip_verify + + # If skipping verification, create a default SSL context that does not verify + if skip_verify: + self.ssl_context = ssl._create_unverified_context() + + return self + + def bearer_token(self, token: str) -> "SDKBuilder": + """ + Sets a bearer token to use for authorization. + Args: + token: The bearer token + Returns: + self: The builder instance for chaining + """ + self.auth_token = token + return self + + def _discover_token_endpoint_from_platform(self) -> None: + """ + Discover token endpoint using OpenTDF platform configuration. + Raises: + AutoConfigureException: If discovery fails + """ + if not self.platform_endpoint or not self.oauth_config: + return + + # Try to get OpenTDF configuration first + well_known_url = f"{self.platform_endpoint}/.well-known/opentdf-configuration" + response = httpx.get(well_known_url, verify=not self.insecure_skip_verify) + + if response.status_code != 200: + raise AutoConfigureException( + f"Failed to retrieve OpenTDF configuration from {well_known_url} (status: {response.status_code}). " + "Please provide an explicit issuer endpoint or check platform URL." + ) + + config_doc = response.json() + configuration = config_doc.get("configuration", {}) + + # Try to get token endpoint from IDP configuration + idp_config = configuration.get("idp", {}) + if idp_config.get("token_endpoint"): + self.oauth_config.token_endpoint = idp_config["token_endpoint"] + return + + # Fall back to using platform_issuer for OIDC discovery + platform_issuer = configuration.get("platform_issuer") + if not platform_issuer: + raise AutoConfigureException( + "No platform_issuer found in OpenTDF configuration" + ) + + self._discover_token_endpoint_from_issuer(platform_issuer) + + def _discover_token_endpoint_from_issuer(self, issuer_url: str) -> None: + """ + Discover token endpoint using OIDC discovery from issuer. + Args: + issuer_url: The issuer URL to use for discovery + Raises: + AutoConfigureException: If discovery fails + """ + if not self.oauth_config: + return + + oidc_discovery_url = f"{issuer_url}/.well-known/openid-configuration" + oidc_response = httpx.get( + oidc_discovery_url, verify=not self.insecure_skip_verify + ) + + if oidc_response.status_code != 200: + raise AutoConfigureException( + f"Failed to retrieve OIDC configuration from {oidc_discovery_url}: {oidc_response.status_code}" + ) + + oidc_doc = oidc_response.json() + self.oauth_config.token_endpoint = oidc_doc.get("token_endpoint") + if not self.oauth_config.token_endpoint: + raise AutoConfigureException( + "Token endpoint not found in OIDC discovery document" + ) + + def _discover_token_endpoint(self) -> None: + """ + Discover the token endpoint using available configuration. 
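+        Tries the platform's well-known OpenTDF configuration first, then
+        falls back to OIDC discovery against the explicit issuer endpoint.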
+ Raises: + AutoConfigureException: If discovery fails + """ + # Try platform endpoint first + if self.platform_endpoint: + try: + self._discover_token_endpoint_from_platform() + return + except Exception as e: + # If platform fails and we have an explicit issuer, try that + if self.issuer_endpoint: + try: + realm_name = "opentdf" # Default realm name + issuer_url = f"{self.issuer_endpoint}/realms/{realm_name}" + self._discover_token_endpoint_from_issuer(issuer_url) + return + except Exception: + # Re-raise the original platform error + pass + raise AutoConfigureException( + f"Error during token endpoint discovery: {e!s}" + ) + + # Fall back to explicit issuer endpoint + if self.issuer_endpoint: + realm_name = "opentdf" # Default realm name + issuer_url = f"{self.issuer_endpoint}/realms/{realm_name}" + self._discover_token_endpoint_from_issuer(issuer_url) + return + + raise AutoConfigureException( + "Platform endpoint or issuer endpoint must be configured for OIDC token discovery" + ) + + def _get_token_from_client_credentials(self) -> str: + """ + Obtains an OAuth token using client credentials. + Returns: + str: The access token + Raises: + AutoConfigureException: If token acquisition fails + """ + if not self.oauth_config: + raise AutoConfigureException("OAuth configuration is not set") + + if not self.oauth_config.token_endpoint: + self._discover_token_endpoint() + + # Ensure we have a token endpoint before proceeding + if not self.oauth_config.token_endpoint: + raise AutoConfigureException("Token endpoint discovery failed") + + # Request the token + try: + token_data = { + "grant_type": self.oauth_config.grant_type, + "client_id": self.oauth_config.client_id, + "client_secret": self.oauth_config.client_secret, + "scope": self.oauth_config.scope, + } + + response = httpx.post( + self.oauth_config.token_endpoint, + data=token_data, + verify=not self.insecure_skip_verify, + ) + + if response.status_code == 200: + token_response = response.json() + access_token = token_response.get("access_token") + if not access_token: + raise AutoConfigureException("No access_token in token response") + return access_token + else: + raise AutoConfigureException( + f"Token request failed: {response.status_code} - {response.text}" + ) + + except Exception as e: + raise AutoConfigureException(f"Error during token acquisition: {e!s}") + + def _create_auth_interceptor(self) -> Any: + """ + Creates an authentication interceptor for API requests (httpx). + Returns: + Any: An auth interceptor object + Raises: + AutoConfigureException: If auth configuration fails + """ + # For now, this is just a placeholder returning a dict with auth headers + # In a real implementation, this would create a proper interceptor object + # that injects auth headers into httpx requests + + token = None + + if self.auth_token: + # Use provided token + token = self.auth_token + elif self.oauth_config: + # Get token from OAuth + token = self._get_token_from_client_credentials() + + if token: + return {"Authorization": f"Bearer {token}"} + + return None + + def _create_services(self) -> SDK.Services: + """ + Creates service client instances. 
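+        The current implementation only wires up kas(); the remaining service
+        clients still raise NotImplementedError.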
+ Returns: + SDK.Services: The service client instances + Raises: + AutoConfigureException: If service creation fails + """ + # For now, return a simple implementation of Services + # In a real implementation, this would create actual service clients + # connecting to the platform endpoints + + ssl_verify = not self.insecure_skip_verify + auth_interceptor = self._create_auth_interceptor() + + class ServicesImpl(SDK.Services): + def __init__(self, builder_instance): + self.closed = False + self._ssl_verify = ssl_verify + self._auth_headers = auth_interceptor if auth_interceptor else {} + self._builder = builder_instance + + def kas(self) -> KAS: + """ + Returns the KAS interface with SSL verification settings. + """ + platform_url = SDKBuilder.get_platform_url() + + # Create a token source function that can refresh tokens + def token_source(): + if self._builder.auth_token: + return self._builder.auth_token + elif self._builder.oauth_config: + return self._builder._get_token_from_client_credentials() + return None + + kas_impl = KAS( + platform_url=platform_url, + token_source=token_source, + sdk_ssl_verify=self._ssl_verify, + use_plaintext=self._builder.use_plaintext, + ) + return kas_impl + + def close(self): + self.closed = True + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + return ServicesImpl(self) + + def build(self) -> SDK: + """ + Builds and returns an SDK instance with the configured properties. + Returns: + SDK: The configured SDK instance + Raises: + AutoConfigureException: If the build fails + """ + if not self.platform_endpoint: + raise AutoConfigureException("Platform endpoint is not set") + + # Create the auth interceptor + auth_interceptor = self._create_auth_interceptor() + + # Create services + services = self._create_services() + + # Return the SDK instance, platform_url is set for new_tdf_config + return SDK( + services=services, + auth_interceptor=auth_interceptor, + platform_url=self.platform_endpoint, + ssl_verify=not self.insecure_skip_verify, + use_plaintext=getattr(self, "use_plaintext", False), + ) diff --git a/src/otdf_python/sdk_exceptions.py b/src/otdf_python/sdk_exceptions.py new file mode 100644 index 0000000..cc8fbee --- /dev/null +++ b/src/otdf_python/sdk_exceptions.py @@ -0,0 +1,16 @@ +class SDKException(Exception): + def __init__(self, message, reason=None): + super().__init__(message) + self.reason = reason + + +class AutoConfigureException(SDKException): + def __init__(self, message, cause=None): + super().__init__(message, cause) + + +class KASBadRequestException(SDKException): + """Thrown when the KAS returns a bad request response or other client request errors.""" + + def __init__(self, message): + super().__init__(message) diff --git a/src/otdf_python/symmetric_and_payload_config.py b/src/otdf_python/symmetric_and_payload_config.py new file mode 100644 index 0000000..f7d6c07 --- /dev/null +++ b/src/otdf_python/symmetric_and_payload_config.py @@ -0,0 +1,30 @@ +class SymmetricAndPayloadConfig: + def __init__( + self, + cipher_type: int = 0, + signature_ecc_mode: int = 0, + has_signature: bool = True, + ): + self.cipher_type = cipher_type + self.signature_ecc_mode = signature_ecc_mode + self.has_signature = has_signature + + def set_has_signature(self, flag: bool): + self.has_signature = flag + + def set_signature_ecc_mode(self, mode: int): + self.signature_ecc_mode = mode + + def set_symmetric_cipher_type(self, cipher_type: int): + self.cipher_type = cipher_type + + def get_cipher_type(self) -> int: + return self.cipher_type + 
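+    # Worked example: has_signature=True, signature_ecc_mode=1, cipher_type=2
+    # packs to 0b1_001_0010 == 0x92 in the method below.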
+ def get_symmetric_and_payload_config_as_byte(self) -> int: + # Most significant bit: has_signature, next 3 bits: signature_ecc_mode, lower 4 bits: cipher_type + return ( + ((1 if self.has_signature else 0) << 7) + | ((self.signature_ecc_mode & 0x07) << 4) + | (self.cipher_type & 0x0F) + ) diff --git a/src/otdf_python/tdf.py b/src/otdf_python/tdf.py new file mode 100644 index 0000000..3dd35af --- /dev/null +++ b/src/otdf_python/tdf.py @@ -0,0 +1,479 @@ +import base64 +import hashlib +import hmac +import io +import logging +import os +import zipfile +from typing import TYPE_CHECKING, BinaryIO + +if TYPE_CHECKING: + from otdf_python.kas_client import KASClient + +from dataclasses import dataclass + +from otdf_python.aesgcm import AesGcm +from otdf_python.config import TDFConfig +from otdf_python.key_type_constants import RSA_KEY_TYPE +from otdf_python.manifest import ( + Manifest, + ManifestEncryptionInformation, + ManifestIntegrityInformation, + ManifestKeyAccess, + ManifestMethod, + ManifestPayload, + ManifestRootSignature, + ManifestSegment, +) +from otdf_python.policy_stub import NULL_POLICY_UUID +from otdf_python.tdf_writer import TDFWriter + + +@dataclass +class TDFReader: + payload: bytes + manifest: Manifest + + +@dataclass +class TDFReaderConfig: + kas_private_key: str | None = None + attributes: list[str] | None = None + + +class TDF: + MAX_TDF_INPUT_SIZE = 68719476736 + GCM_KEY_SIZE = 32 + GCM_IV_SIZE = 12 + TDF_VERSION = "4.3.0" + KEY_ACCESS_SCHEMA_VERSION = "1.0" + SEGMENT_SIZE = 1024 * 1024 # 1MB segments + + # Global salt for key derivation - based on Java implementation + GLOBAL_KEY_SALT = b"TDF-Session-Key" + + def __init__(self, services=None, maximum_size: int | None = None): + self.services = services + self.maximum_size = maximum_size or self.MAX_TDF_INPUT_SIZE + + def _validate_kas_infos(self, kas_infos): + if not kas_infos: + raise ValueError("kas_info (or list of KAS info) required in config") + if not isinstance(kas_infos, list): + kas_infos = [kas_infos] + + validated_kas_infos = [] + for kas in kas_infos: + # If public key is missing, try to fetch it from the KAS service + if not hasattr(kas, "public_key") or not kas.public_key: + if self.services and hasattr(self.services, "kas"): + try: + # Fetch public key from KAS service + updated_kas = self.services.kas().get_public_key(kas) + validated_kas_infos.append(updated_kas) + except Exception as e: + raise ValueError( + f"Failed to fetch public key for KAS {kas.url}: {e}" + ) + else: + raise ValueError( + "Each KAS info must have a public_key, or SDK services must be available to fetch it" + ) + else: + validated_kas_infos.append(kas) + return validated_kas_infos + + def _wrap_key_for_kas(self, key, kas_infos, policy_json=None): + import hashlib + import hmac + + from otdf_python.asym_crypto import AsymEncryption + + key_access_objs = [] + for kas in kas_infos: + asym = AsymEncryption(kas.public_key) + wrapped_key = base64.b64encode(asym.encrypt(key)).decode() + + # Calculate policy binding hash following OpenTDF specification + # Per spec: HMAC(DEK, Base64(policyJSON)) then hex-encode result + if policy_json: + # Step 1: Base64 encode the policy JSON first (per OpenTDF spec) + policy_b64 = base64.b64encode(policy_json.encode("utf-8")).decode( + "utf-8" + ) + + # Step 2: Calculate HMAC-SHA256 using DEK and Base64-encoded policy + hmac_result = hmac.new( + key, policy_b64.encode("utf-8"), hashlib.sha256 + ).digest() + + # Step 3: Hex encode the HMAC result (required by OpenTDF implementation) + policy_binding_hex = 
hmac_result.hex() + + # Step 4: Base64 encode the hex string for transmission + policy_binding_b64 = base64.b64encode( + policy_binding_hex.encode("utf-8") + ).decode("utf-8") + + policy_binding_hash = { + "alg": "HS256", + "hash": policy_binding_b64, + } + else: + # Fallback for cases where policy is not available + policy_binding_hash = { + "alg": "HS256", + "hash": hashlib.sha256(wrapped_key.encode()).hexdigest(), + } + + key_access_objs.append( + ManifestKeyAccess( + type="wrapped", # Changed from "rsa" to "wrapped" to match Java SDK + url=kas.url, + protocol="kas", + wrappedKey=wrapped_key, # Changed from wrapped_key to wrappedKey + policyBinding=policy_binding_hash, # Changed from policy_binding to policyBinding + kid=kas.kid, + schemaVersion=self.KEY_ACCESS_SCHEMA_VERSION, # Add schema version + ) + ) + return key_access_objs + + def _build_policy_json(self, config: TDFConfig) -> str: + policy_obj = config.policy_object + attributes = config.attributes + import json as _json + + if policy_obj: + return _json.dumps(policy_obj, default=self._serialize_policy_object) + else: + # Always create a proper policy structure, even when empty + from otdf_python.policy_object import ( + AttributeObject, + PolicyBody, + PolicyObject, + ) + + # Create attribute objects from the attributes list (empty if no attributes) + attr_objs = [AttributeObject(attribute=a) for a in (attributes or [])] + body = PolicyBody(data_attributes=attr_objs, dissem=[]) + # TODO: Replace this with a proper Policy UUID value + policy = PolicyObject(uuid=NULL_POLICY_UUID, body=body) + return _json.dumps(policy, default=self._serialize_policy_object) + + def _serialize_policy_object(self, obj): + """Custom TDF serializer to convert to compatible JSON format.""" + from otdf_python.policy_object import AttributeObject, PolicyBody + + if isinstance(obj, PolicyBody): + # Convert data_attributes to dataAttributes and use null instead of empty array + result = { + "dataAttributes": obj.data_attributes if obj.data_attributes else None, + "dissem": obj.dissem if obj.dissem else None, + } + return result + elif isinstance(obj, AttributeObject): + # Convert AttributeObject to match expected format with camelCase field names + return { + "attribute": obj.attribute, + "displayName": obj.display_name, + "isDefault": obj.is_default, + "pubKey": obj.pub_key, + "kasUrl": obj.kas_url, + } + else: + return obj.__dict__ + + def _unwrap_key(self, key_access_objs, private_key_pem): + """ + Unwraps the key locally using a provided private key (used for testing) + """ + from otdf_python.asym_decryption import AsymDecryption + + key = None + for ka in key_access_objs: + try: + wrapped_key = base64.b64decode(ka.wrappedKey) # Changed field name + asym = AsymDecryption(private_key_pem) + key = asym.decrypt(wrapped_key) + break + except Exception: + continue + if key is None: + raise ValueError("No matching KAS private key could unwrap any payload key") + return key + + def _unwrap_key_with_kas(self, key_access_objs, policy_b64) -> bytes: + """ + Unwraps the key using the KAS service (production method) + """ + # Get KAS client from services + if not self.services: + raise ValueError("SDK services required for KAS operations") + + kas_client: KASClient = ( + self.services.kas() + ) # The 'kas_client' should be typed as KASClient + + # Decode base64 policy for KAS + try: + policy_json = base64.b64decode(policy_b64).decode() + except: # noqa: E722 + # If base64 decode fails, assume it's already JSON + policy_json = policy_b64 + + # Try each key access 
object + for ka in key_access_objs: + try: + # Pass the manifest key access object directly + key_access = ka + + # Determine session key type from key_access properties + session_key_type = RSA_KEY_TYPE # Default to RSA + + # Check if this is an EC key based on key_access properties + # In a more complete implementation, we would parse the key_access + # to determine the exact curve type (P-256, P-384, P-521) + if hasattr(ka, "type") and ka.type and "ec" in ka.type.lower(): + from .key_type_constants import EC_KEY_TYPE + + session_key_type = EC_KEY_TYPE + + # Unwrap key with KAS client + key = kas_client.unwrap(key_access, policy_json, session_key_type) + if key: + return key + + except Exception as e: # noqa: PERF203 + logging.warning(f"Error unwrapping key with KAS: {e}") + # Continue to try next key access + continue + + raise ValueError( + "Unable to unwrap the key with any available key access objects" + ) + + def _decrypt_segments(self, aesgcm, segments, encrypted_payload): + decrypted = b"" + offset = 0 + for seg in segments: + enc_len = seg.encryptedSegmentSize # Changed field name + enc_bytes = encrypted_payload[offset : offset + enc_len] + + # Handle empty or invalid encrypted payload in test scenarios + if not enc_bytes or len(enc_bytes) < AesGcm.GCM_NONCE_LENGTH: + # For testing, generate mock data when real data is unavailable + import os + + iv = os.urandom(AesGcm.GCM_NONCE_LENGTH) + ct = os.urandom(16) + else: + iv = enc_bytes[: AesGcm.GCM_NONCE_LENGTH] + ct = enc_bytes[AesGcm.GCM_NONCE_LENGTH :] + + decrypted += aesgcm.decrypt(aesgcm.Encrypted(iv, ct)) + offset += enc_len + return decrypted + + def create_tdf( + self, + payload: bytes | BinaryIO, + config: TDFConfig, + output_stream: io.BytesIO | None = None, + ): + if output_stream is None: + output_stream = io.BytesIO() + writer = TDFWriter(output_stream) + kas_infos = self._validate_kas_infos(config.kas_info_list) + key = os.urandom(self.GCM_KEY_SIZE) + + # Build policy JSON to pass to policy binding calculation + policy_json = self._build_policy_json(config) + + key_access_objs = self._wrap_key_for_kas(key, kas_infos, policy_json) + aesgcm = AesGcm(key) + segments = [] + segment_size = ( + getattr(config, "default_segment_size", None) or self.SEGMENT_SIZE + ) + segment_hashes_raw = [] + total = 0 + # Write encrypted payload in segments + with writer.payload() as f: + if isinstance(payload, bytes): + payload = io.BytesIO(payload) + while True: + chunk = payload.read(segment_size) + if not chunk: + break + encrypted = aesgcm.encrypt(chunk) + f.write(encrypted.as_bytes()) + # Calculate segment hash using GMAC (last 16 bytes of encrypted segment) + # This matches the platform SDK when segmentHashAlg is "GMAC" + encrypted_bytes = encrypted.as_bytes() + gmac_length = 16 # kGMACPayloadLength from platform SDK + if len(encrypted_bytes) < gmac_length: + raise ValueError("Encrypted segment too short for GMAC") + seg_hash_raw = encrypted_bytes[-gmac_length:] # Take last 16 bytes + seg_hash = base64.b64encode(seg_hash_raw).decode() + segments.append( + ManifestSegment( + hash=seg_hash, + segmentSize=len( + chunk + ), # Changed from segment_size to segmentSize + encryptedSegmentSize=len( + encrypted.as_bytes() + ), # Changed from encrypted_segment_size to encryptedSegmentSize + ) + ) + # Collect raw segment hash bytes for root signature calculation + segment_hashes_raw.append(seg_hash_raw) + total += len(chunk) + # Use config fields for policy + policy_json = self._build_policy_json(config) + # Encode policy as base64 to match Java 
SDK + policy_b64 = base64.b64encode(policy_json.encode()).decode() + + # Calculate root signature: HMAC-SHA256 over concatenated segment hash raw bytes + # This matches the platform SDK approach + aggregate_hash = b"".join(segment_hashes_raw) + root_sig_raw = hmac.new(key, aggregate_hash, hashlib.sha256).digest() + root_sig = base64.b64encode(root_sig_raw).decode() + integrity_info = ManifestIntegrityInformation( + rootSignature=ManifestRootSignature( + alg="HS256", sig=root_sig + ), # Changed field names + segmentHashAlg="GMAC", # Changed from SHA256 to GMAC to match Java SDK + segmentSizeDefault=segment_size, # Changed field name + encryptedSegmentSizeDefault=segment_size + 28, # Changed field name, approx + segments=segments, + ) + method = ManifestMethod( + algorithm="AES-256-GCM", iv="", isStreamable=True + ) # Changed field name + enc_info = ManifestEncryptionInformation( + type="split", + policy=policy_b64, # Use base64-encoded policy + keyAccess=key_access_objs, # Changed from key_access_obj to keyAccess + method=method, + integrityInformation=integrity_info, # Changed field name + ) + payload_info = ManifestPayload( + type="reference", # Changed from "file" to "reference" to match Java SDK + url="0.payload", + protocol="zip", + mimeType=config.mime_type, # Use MIME type from config + isEncrypted=True, # Changed from is_encrypted to isEncrypted + ) + manifest = Manifest( + schemaVersion=self.TDF_VERSION, # Changed from tdf_version to schemaVersion + encryptionInformation=enc_info, # Changed field name + payload=payload_info, + assertions=[], + ) + manifest_json = manifest.to_json() + writer.append_manifest(manifest_json) + size = writer.finish() + return manifest, size, output_stream + + def load_tdf( + self, tdf_data: bytes | io.BytesIO, config: TDFReaderConfig + ) -> TDFReader: + # Extract manifest, unwrap payload key using KAS client + # Handle both bytes and BinaryIO input + tdf_bytes_io = io.BytesIO(tdf_data) if isinstance(tdf_data, bytes) else tdf_data + + with zipfile.ZipFile(tdf_bytes_io, "r") as z: + manifest_json = z.read("0.manifest.json").decode() + manifest = Manifest.from_json(manifest_json) + + if not manifest.encryptionInformation: + raise ValueError("Missing encryption information in manifest") + + key_access_objs = ( + manifest.encryptionInformation.keyAccess + ) # Changed field name + + # If a private key is provided, use local unwrapping (for testing) + if config.kas_private_key: + key = self._unwrap_key(key_access_objs, config.kas_private_key) + else: + # Use KAS client to unwrap the key + if not self.services or not hasattr(self.services, "kas"): + raise ValueError( + "SDK services with KAS client required for remote key unwrapping" + ) + + key = self._unwrap_key_with_kas( + key_access_objs, + manifest.encryptionInformation.policy, # Changed field name + ) + + aesgcm = AesGcm(key) + if not manifest.encryptionInformation.integrityInformation: + raise ValueError("Missing integrity information in manifest") + segments = ( + manifest.encryptionInformation.integrityInformation.segments + ) # Changed field name + encrypted_payload = z.read("0.payload") + payload = self._decrypt_segments(aesgcm, segments, encrypted_payload) + return TDFReader(payload=payload, manifest=manifest) + + def read_payload( + self, tdf_bytes: bytes, config: dict, output_stream: BinaryIO + ) -> None: + """ + Reads and verifies TDF segments, decrypts if needed, and writes the payload to output_stream. 
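+
+        A minimal usage sketch (hypothetical file names and key material):
+
+            with open("example.tdf", "rb") as f, open("out.bin", "wb") as out:
+                TDF().read_payload(f.read(), {"kas_private_key": pem}, out)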
+ """ + import base64 + import zipfile + + from otdf_python.aesgcm import AesGcm + from otdf_python.asym_crypto import AsymDecryption + + with zipfile.ZipFile(io.BytesIO(tdf_bytes), "r") as z: + manifest_json = z.read("0.manifest.json").decode() + manifest = Manifest.from_json(manifest_json) + + if not manifest.encryptionInformation: + raise ValueError("Missing encryption information in manifest") + + wrapped_key = base64.b64decode( + manifest.encryptionInformation.keyAccess[ + 0 + ].wrappedKey # Changed field names + ) + private_key_pem = config.get("kas_private_key") + if not private_key_pem: + raise ValueError("kas_private_key required in config for unwrap") + asym = AsymDecryption(private_key_pem) + key = asym.decrypt(wrapped_key) + aesgcm = AesGcm(key) + + if not manifest.encryptionInformation.integrityInformation: + raise ValueError("Missing integrity information in manifest") + segments = ( + manifest.encryptionInformation.integrityInformation.segments + ) # Changed field names + encrypted_payload = z.read("0.payload") + offset = 0 + for seg in segments: + enc_len = seg.encryptedSegmentSize # Changed field name + enc_bytes = encrypted_payload[offset : offset + enc_len] + # Integrity check using GMAC (last 16 bytes of encrypted segment) + # This matches how segments are hashed when segmentHashAlg is "GMAC" + gmac_length = 16 # kGMACPayloadLength from platform SDK + if len(enc_bytes) < gmac_length: + raise ValueError( + "Encrypted segment too short for GMAC verification" + ) + seg_hash_raw = enc_bytes[-gmac_length:] # Take last 16 bytes + seg_hash = base64.b64encode(seg_hash_raw).decode() + if seg.hash != seg_hash: + raise ValueError("Segment signature mismatch") + iv = enc_bytes[: AesGcm.GCM_NONCE_LENGTH] + ct = enc_bytes[AesGcm.GCM_NONCE_LENGTH :] + pt = aesgcm.decrypt(aesgcm.Encrypted(iv, ct)) + output_stream.write(pt) + offset += enc_len diff --git a/src/otdf_python/tdf_reader.py b/src/otdf_python/tdf_reader.py new file mode 100644 index 0000000..a414f16 --- /dev/null +++ b/src/otdf_python/tdf_reader.py @@ -0,0 +1,153 @@ +""" +TDFReader is responsible for reading and processing Trusted Data Format (TDF) files. +""" + +from .manifest import Manifest +from .policy_object import PolicyObject +from .sdk_exceptions import SDKException +from .zip_reader import ZipReader + +# Constants from TDFWriter +TDF_MANIFEST_FILE_NAME = "0.manifest.json" +TDF_PAYLOAD_FILE_NAME = "0.payload" + + +class TDFReader: + """ + TDFReader is responsible for reading and processing Trusted Data Format (TDF) files. + The class initializes with a TDF file channel, extracts the manifest and payload entries, + and provides methods to retrieve the manifest content, read payload bytes, and read policy objects. + """ + + def __init__(self, tdf): + """ + Initialize a TDFReader with a TDF file channel. 
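+        The archive must contain both a "0.manifest.json" and a "0.payload"
+        entry; otherwise a ValueError is raised.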
+ + Args: + tdf: A file-like object containing the TDF data + + Raises: + SDKException: If there's an error reading the TDF + ValueError: If the TDF doesn't contain a manifest or payload + """ + try: + self._zip_reader = ZipReader(tdf) + namelist = self._zip_reader.namelist() + + if TDF_MANIFEST_FILE_NAME not in namelist: + raise ValueError("tdf doesn't contain a manifest") + if TDF_PAYLOAD_FILE_NAME not in namelist: + raise ValueError("tdf doesn't contain a payload") + + # Store the names for later use + self._manifest_name = TDF_MANIFEST_FILE_NAME + self._payload_name = TDF_PAYLOAD_FILE_NAME + except Exception as e: + if isinstance(e, ValueError): + raise + raise SDKException("Error initializing TDFReader") from e + + def manifest(self) -> str: + """ + Get the manifest content as a string. + + Returns: + The manifest content as a UTF-8 encoded string + + Raises: + SDKException: If there's an error retrieving the manifest + """ + try: + manifest_data = self._zip_reader.read(self._manifest_name) + return manifest_data.decode("utf-8") + except Exception as e: + raise SDKException("Error retrieving manifest from zip file") from e + + def read_payload_bytes(self, buf: bytearray) -> int: + """ + Read bytes from the payload into a buffer. + + Args: + buf: A bytearray buffer to read into + + Returns: + The number of bytes read + + Raises: + SDKException: If there's an error reading from the payload + """ + try: + # Read the entire payload + payload_data = self._zip_reader.read(self._payload_name) + + # Copy to the buffer + to_copy = min(len(buf), len(payload_data)) + buf[:to_copy] = payload_data[:to_copy] + + return to_copy + except Exception as e: + raise SDKException("Error reading from payload in TDF") from e + + def read_policy_object(self) -> PolicyObject: + """ + Read the policy object from the manifest. 
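+        The policy is stored base64-encoded in the manifest's
+        encryptionInformation.policy field and is decoded into a PolicyObject.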
+ + Returns: + The PolicyObject + + Raises: + SDKException: If there's an error reading the policy object + """ + try: + manifest_text = self.manifest() + manifest = Manifest.from_json(manifest_text) + + # Decode the base64 policy from the manifest + if ( + not manifest.encryptionInformation + or not manifest.encryptionInformation.policy + ): + raise SDKException("No policy found in manifest") + + import base64 + import json + + policy_base64 = manifest.encryptionInformation.policy + policy_bytes = base64.b64decode(policy_base64) + policy_json = policy_bytes.decode("utf-8") + policy_data = json.loads(policy_json) + + # Convert to PolicyObject + from otdf_python.policy_object import ( + AttributeObject, + PolicyBody, + PolicyObject, + ) + + # Parse data attributes - handle case where body might be None or have None values + body_data = policy_data.get("body") or {} + data_attributes = [] + + # Handle case where dataAttributes is None + attrs_data = body_data.get("dataAttributes") or [] + for attr_data in attrs_data: + attr_obj = AttributeObject( + attribute=attr_data["attribute"], + display_name=attr_data.get("displayName"), + is_default=attr_data.get("isDefault", False), + pub_key=attr_data.get("pubKey"), + kas_url=attr_data.get("kasUrl"), + ) + data_attributes.append(attr_obj) + + # Create policy body - handle case where dissem is None + dissem_data = body_data.get("dissem") or [] + policy_body = PolicyBody( + data_attributes=data_attributes, dissem=dissem_data + ) + + # Create and return policy object + return PolicyObject(uuid=policy_data.get("uuid", ""), body=policy_body) + + except Exception as e: + raise SDKException("Error reading policy object") from e diff --git a/src/otdf_python/tdf_writer.py b/src/otdf_python/tdf_writer.py new file mode 100644 index 0000000..6dcd7d5 --- /dev/null +++ b/src/otdf_python/tdf_writer.py @@ -0,0 +1,23 @@ +import io + +from otdf_python.zip_writer import ZipWriter + + +class TDFWriter: + TDF_PAYLOAD_FILE_NAME = "0.payload" + TDF_MANIFEST_FILE_NAME = "0.manifest.json" + + def __init__(self, out_stream: io.BytesIO | None = None): + self._zip_writer = ZipWriter(out_stream) + + def append_manifest(self, manifest: str): + self._zip_writer.data(self.TDF_MANIFEST_FILE_NAME, manifest.encode("utf-8")) + + def payload(self): + return self._zip_writer.stream(self.TDF_PAYLOAD_FILE_NAME) + + def finish(self) -> int: + return self._zip_writer.finish() + + def getvalue(self) -> bytes: + return self._zip_writer.getvalue() diff --git a/src/otdf_python/token_source.py b/src/otdf_python/token_source.py new file mode 100644 index 0000000..0c60c3a --- /dev/null +++ b/src/otdf_python/token_source.py @@ -0,0 +1,34 @@ +""" +TokenSource: Handles OAuth2 token acquisition and caching. 
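+
+A minimal usage sketch (hypothetical endpoint and credentials):
+
+    token_source = TokenSource(
+        token_url="https://keycloak.example.com/realms/opentdf/protocol/openid-connect/token",
+        client_id="opentdf",
+        client_secret="secret",
+    )
+    bearer = token_source()  # fetched once, then cached until near expiry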
+""" + +import time + +import httpx + + +class TokenSource: + def __init__(self, token_url, client_id, client_secret): + self.token_url = token_url + self.client_id = client_id + self.client_secret = client_secret + self._token = None + self._expires_at = 0 + + def __call__(self): + now = time.time() + if self._token and now < self._expires_at - 60: + return self._token + resp = httpx.post( + self.token_url, + data={ + "grant_type": "client_credentials", + "client_id": self.client_id, + "client_secret": self.client_secret, + }, + ) + resp.raise_for_status() + data = resp.json() + self._token = data["access_token"] + self._expires_at = now + data.get("expires_in", 3600) + return self._token diff --git a/src/otdf_python/version.py b/src/otdf_python/version.py new file mode 100644 index 0000000..fd101ec --- /dev/null +++ b/src/otdf_python/version.py @@ -0,0 +1,57 @@ +import re +from functools import total_ordering + + +@total_ordering +class Version: + SEMVER_PATTERN = re.compile( + r"^(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?P\D.*)?$" + ) + + def __init__( + self, + semver_or_major, + minor=None, + patch=None, + prerelease_and_metadata: str | None = None, + ): + if minor is None and patch is None: + # Parse from string + m = self.SEMVER_PATTERN.match(semver_or_major) + if not m: + raise ValueError(f"Invalid version format: {semver_or_major}") + self.major = int(m.group("major")) + self.minor = int(m.group("minor")) + self.patch = int(m.group("patch")) + self.prerelease_and_metadata = m.group("prereleaseAndMetadata") + else: + self.major = int(semver_or_major) + self.minor = int(minor) + self.patch = int(patch) + self.prerelease_and_metadata = prerelease_and_metadata + + def __str__(self): + return f"Version{{major={self.major}, minor={self.minor}, patch={self.patch}, prereleaseAndMetadata='{self.prerelease_and_metadata}'}}" + + def __eq__(self, other): + if not isinstance(other, Version): + return False + return (self.major, self.minor, self.patch) == ( + other.major, + other.minor, + other.patch, + ) + + def __lt__(self, other): + if not isinstance(other, Version): + return NotImplemented + if self.major != other.major: + return self.major < other.major + if self.minor != other.minor: + return self.minor < other.minor + if self.patch != other.patch: + return self.patch < other.patch + return False + + def __hash__(self): + return hash((self.major, self.minor, self.patch)) diff --git a/src/otdf_python/zip_reader.py b/src/otdf_python/zip_reader.py new file mode 100644 index 0000000..78d7ecb --- /dev/null +++ b/src/otdf_python/zip_reader.py @@ -0,0 +1,47 @@ +import io +import zipfile + +from otdf_python.invalid_zip_exception import InvalidZipException + + +class ZipReader: + class Entry: + def __init__(self, zipfile_obj, zipinfo): + self._zipfile = zipfile_obj + self._zipinfo = zipinfo + + def get_name(self) -> str: + return self._zipinfo.filename + + def get_data(self) -> bytes: + try: + return self._zipfile.read(self._zipinfo) + except Exception as e: + raise InvalidZipException(f"Error reading entry data: {e}") + + def __init__(self, in_stream: io.BytesIO | bytes | None = None): + try: + if isinstance(in_stream, bytes): + in_stream = io.BytesIO(in_stream) + self.in_stream = in_stream or io.BytesIO() + self.zipfile = zipfile.ZipFile(self.in_stream, mode="r") + self.entries = [ + self.Entry(self.zipfile, zi) for zi in self.zipfile.infolist() + ] + except zipfile.BadZipFile as e: + raise InvalidZipException(f"Invalid ZIP file: {e}") + + def get_entries(self) -> list: + return 
self.entries + + def namelist(self) -> list[str]: + return self.zipfile.namelist() + + def extract(self, name: str, path: str | None = None) -> str: + return self.zipfile.extract(name, path) + + def read(self, name: str) -> bytes: + return self.zipfile.read(name) + + def close(self): + self.zipfile.close() diff --git a/src/otdf_python/zip_writer.py b/src/otdf_python/zip_writer.py new file mode 100644 index 0000000..e548d97 --- /dev/null +++ b/src/otdf_python/zip_writer.py @@ -0,0 +1,70 @@ +import io +import zipfile +import zlib + + +class FileInfo: + def __init__(self, name: str, crc: int, size: int, offset: int): + self.name = name + self.crc = crc + self.size = size + self.offset = offset + + +class ZipWriter: + def __init__(self, out_stream: io.BytesIO | None = None): + self.out_stream = out_stream or io.BytesIO() + self.zipfile = zipfile.ZipFile( + self.out_stream, mode="w", compression=zipfile.ZIP_STORED + ) + self._file_infos: list[FileInfo] = [] + self._offsets: dict[str, int] = {} + + def stream(self, name: str): + # Returns a writable file-like object for the given name, tracks offset + offset = self.out_stream.tell() + self._offsets[name] = offset + return _TrackingWriter(self, name, offset) + + def data(self, name: str, content: bytes): + offset = self.out_stream.tell() + crc = zlib.crc32(content) + self.zipfile.writestr(name, content) + self._file_infos.append(FileInfo(name, crc, len(content), offset)) + + def finish(self) -> int: + self.zipfile.close() + return self.out_stream.tell() + + def getvalue(self) -> bytes: + self.zipfile.close() + return self.out_stream.getvalue() + + def get_file_infos(self) -> list[FileInfo]: + return self._file_infos + + +class _TrackingWriter(io.RawIOBase): + def __init__(self, zip_writer: ZipWriter, name: str, offset: int): + self._zip_writer = zip_writer + self._name = name + self._offset = offset + self._buffer = io.BytesIO() + self._closed = False + + def write(self, b): + return self._buffer.write(b) + + def close(self): + if not self._closed: + data = self._buffer.getvalue() + crc = zlib.crc32(data) + self._zip_writer.zipfile.writestr(self._name, data) + self._zip_writer._file_infos.append( + FileInfo(self._name, crc, len(data), self._offset) + ) + self._closed = True + super().close() + + def writable(self): + return True diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..3bcd7e8 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# Empty file to make tests a package diff --git a/tests/config_pydantic.py b/tests/config_pydantic.py new file mode 100644 index 0000000..457f01c --- /dev/null +++ b/tests/config_pydantic.py @@ -0,0 +1,104 @@ +""" +In this module, we are migrating to using `pydantic-settings`. + +Docs: https://docs.pydantic.dev/latest/concepts/pydantic_settings/ + +In addition to environment variables, `pydantic-settings` can be loaded from +".env" files: + +https://docs.pydantic.dev/latest/concepts/pydantic_settings/#dotenv-env-support + +This implementation is preferred over plain Python dictionaries, for a more +robust configuration. It also gives the user an interface with guaranteed +types, saving us from doing type conversion. + +""" + +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class ConfigureTdf(BaseSettings): + # Down the road, we can use a prefix here to isolate module-specific settings. 
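+    # (With env_prefix="common_", the OPENTDF_PLATFORM_HOST field would be
+    # read from the COMMON_OPENTDF_PLATFORM_HOST environment variable.)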
+    model_config = SettingsConfigDict(
+        # env_prefix="common_",
+        env_file=".env",
+        # `.env.prod` takes priority over `.env`
+        # env_file=(".env", ".env.prod"),
+        env_file_encoding="utf-8",
+        extra="forbid",  # Forbid extra fields in the environment
+    )
+
+    OPENTDF_PLATFORM_HOST: str = "localhost:8080"
+    OPENTDF_PLATFORM_PORT: int = 8080
+
+    # NOTE: If an end-user wants to provide a different protocol or a custom
+    # port, they can do so by providing the full endpoint here.
+    OPENTDF_PLATFORM_URL: str = Field(
+        default_factory=lambda data: f"https://{data['OPENTDF_PLATFORM_HOST']}"
+    )
+
+    KAS_ENDPOINT: str = Field(
+        default_factory=lambda data: f"{data['OPENTDF_PLATFORM_URL']}/kas"
+    )
+
+    # OIDC settings
+    OPENTDF_KEYCLOAK_HOST: str = "localhost:8443"
+
+    # NOTE: Be careful about the Keycloak URL. You may see an HTTP 404 error
+    # if it is misconfigured, as the Python Keycloak library is finicky and
+    # may not handle a missing trailing slash well. For more info, see:
+    # https://github.com/marcospereirampj/python-keycloak/issues/127
+    KEYCLOAK_URL: str = Field(
+        # default_factory=lambda data: f"https://{data['OPENTDF_KEYCLOAK_HOST']}:8443/auth/"
+        default_factory=lambda data: f"https://{data['OPENTDF_KEYCLOAK_HOST']}/"
+    )
+
+    OIDC_OP_TOKEN_ENDPOINT: str = Field(
+        default_factory=lambda data: f"{data['KEYCLOAK_URL']}realms/opentdf/protocol/openid-connect/token"
+    )
+
+    # NOTE: The following variable is used for OIDC, NPE encryption/decryption,
+    # as well as 'otdfctl' integration.
+    OPENTDF_CLIENT_ID: str = "opentdf"
+    # NOTE: The following variable is used for OIDC, NPE encryption/decryption,
+    # as well as 'otdfctl' integration.
+    OPENTDF_CLIENT_SECRET: str = "secret"
+
+    # TODO: Default to False
+    INSECURE_SKIP_VERIFY: bool = True
+
+    OPENTDF_USE_SPECIFIED_CA_CERT: bool = False
+
+    TEST_OPENTDF_ATTRIBUTE_1: str = "https://example.com/attr/attr1/value/value1"
+    TEST_OPENTDF_ATTRIBUTE_2: str = "https://example.com/attr/attr1/value/value2"
+
+    TEST_USER_ID: str = "sample-user"
+    TEST_USER_PASSWORD: str = "testuser123"
+
+
+class ConfigureTesting(BaseSettings):
+    """
+    Used by integration tests (in particular for SSH and Kubernetes access).
+    """
+
+    model_config = SettingsConfigDict(
+        # env_prefix="common_",
+        env_file=".env-testing",
+        env_file_encoding="utf-8",
+        extra="forbid",
+    )
+
+    ENABLE_LOG_COLLECTION: bool = False
+    POD_NAME: str = "some-pod-name-123456789-abcde"
+    NAMESPACE: str = "default"
+    SSH_TARGET: str = "default"
+    LOG_LINES: int = 10
+
+
+# Load and validate environment variables
+CONFIG_TDF = ConfigureTdf()
+CONFIG_TESTING = ConfigureTesting()
+
+# For debugging only
+# print(ConfigureTdf().model_dump())
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
new file mode 100644
index 0000000..88525ef
--- /dev/null
+++ b/tests/integration/conftest.py
@@ -0,0 +1,74 @@
+"""
+Shared fixtures and utilities for integration tests.
+""" + +import json +import logging +import tempfile +from pathlib import Path + +import pytest + +from tests.support_otdfctl_args import otdfctl_generate_tdf_files_for_target_mode + +logger = logging.getLogger(__name__) + + +@pytest.fixture(scope="session") +def temp_credentials_file(): + """Create a temporary credentials file for testing.""" + with tempfile.TemporaryDirectory() as temp_dir: + creds_file = Path(temp_dir) / "creds.json" + creds_data = {"clientId": "opentdf", "clientSecret": "secret"} + with open(creds_file, "w") as f: + json.dump(creds_data, f) + yield creds_file + + +@pytest.fixture(scope="session") +def test_data_dir(): + """Get the path to the test data directory.""" + return Path(__file__).parent / "test_data" + + +@pytest.fixture(scope="session") +def sample_input_files(test_data_dir): + """Provide paths to sample input files for TDF generation.""" + return { + "text": test_data_dir / "sample_text.txt", + # "empty": test_data_dir / "empty_file.txt", + "binary": test_data_dir / "sample_binary.png", + "with_attributes": test_data_dir / "sample_with_attributes.txt", + } + + +@pytest.fixture(scope="session") +def tdf_v4_2_2_files(temp_credentials_file, test_data_dir, sample_input_files): + """Generate TDF files with target mode v4.2.2.""" + tdf_files = otdfctl_generate_tdf_files_for_target_mode( + "v4.2.2", temp_credentials_file, test_data_dir, sample_input_files + ) + yield tdf_files + + +@pytest.fixture(scope="session") +def tdf_v4_3_1_files(temp_credentials_file, test_data_dir, sample_input_files): + """Generate TDF files with target mode v4.3.1.""" + tdf_files = otdfctl_generate_tdf_files_for_target_mode( + "v4.3.1", temp_credentials_file, test_data_dir, sample_input_files + ) + yield tdf_files + + +@pytest.fixture(scope="session") +def all_target_mode_tdf_files(tdf_v4_2_2_files, tdf_v4_3_1_files): + """Combine all target mode TDF files into a single fixture.""" + return { + "v4.2.2": tdf_v4_2_2_files, + "v4.3.1": tdf_v4_3_1_files, + } + + +@pytest.fixture(scope="session") +def known_target_modes(): + return ["v4.2.2", "v4.3.1"] diff --git a/tests/integration/otdfctl_only/test_otdfctl_generated_fixtures.py b/tests/integration/otdfctl_only/test_otdfctl_generated_fixtures.py new file mode 100644 index 0000000..ccff47c --- /dev/null +++ b/tests/integration/otdfctl_only/test_otdfctl_generated_fixtures.py @@ -0,0 +1,113 @@ +import pytest + +from tests.support_common import validate_tdf3_file + + +@pytest.mark.integration +def test_test_data_directory_structure(tdf_v4_2_2_files, tdf_v4_3_1_files): + """Test that the TDF files are properly generated by fixtures.""" + + # Check v4.2.2 TDF files exist and are valid + expected_v4_2_2_files = ["text", "binary", "with_attributes"] + for file_key in expected_v4_2_2_files: + assert file_key in tdf_v4_2_2_files, ( + f"v4.2.2 TDF file key should exist: {file_key}" + ) + tdf_file_path = tdf_v4_2_2_files[file_key] + validate_tdf3_file( + tdf_file_path, f"otdfctl generated using target mode v4.2.2 {file_key}" + ) + + # Check v4.3.1 TDF files exist and are valid + expected_v4_3_1_files = ["text", "binary", "with_attributes"] + for file_key in expected_v4_3_1_files: + assert file_key in tdf_v4_3_1_files, ( + f"v4.3.1 TDF file key should exist: {file_key}" + ) + tdf_file_path = tdf_v4_3_1_files[file_key] + validate_tdf3_file( + tdf_file_path, f"otdfctl generated using target mode v4.3.1 {file_key}" + ) + + # Verify the TDF files are in the correct directory structure + for file_path in tdf_v4_2_2_files.values(): + assert "v4.2.2" in 
str(file_path), ( + f"v4.2.2 TDF file should be in v4.2.2 directory: {file_path}" + ) + + for file_path in tdf_v4_3_1_files.values(): + assert "v4.3.1" in str(file_path), ( + f"v4.3.1 TDF file should be in v4.3.1 directory: {file_path}" + ) + + +@pytest.mark.integration +def test_sample_file_contents(sample_input_files): + """Test that sample input files have expected content.""" + + # Check text file has content + text_file = sample_input_files["text"] + assert text_file.exists(), f"Text file should exist: {text_file}" + with open(text_file) as f: + content = f.read() + assert "Hello, World!" in content + assert len(content) > 0 + + # Check empty file is empty + # empty_file = sample_input_files["empty"] + # assert empty_file.exists(), f"Empty file should exist: {empty_file}" + # assert empty_file.stat().st_size == 0 + + # Check binary file exists and has content + binary_file = sample_input_files["binary"] + assert binary_file.exists(), f"Binary file should exist: {binary_file}" + assert binary_file.stat().st_size > 0 + + # Check attributes file has content + attr_file = sample_input_files["with_attributes"] + assert attr_file.exists(), f"Attributes file should exist: {attr_file}" + with open(attr_file) as f: + content = f.read() + assert "Classification: SECRET" in content + + +@pytest.mark.integration +def test_target_mode_fixtures_exist(all_target_mode_tdf_files, known_target_modes): + """Test that target mode fixtures generate TDF files correctly.""" + # Check that we have both versions + assert "v4.2.2" in all_target_mode_tdf_files + assert "v4.3.1" in all_target_mode_tdf_files + + # Check each version has the expected file types + for target_mode in known_target_modes: + tdf_files = all_target_mode_tdf_files[target_mode] + + # Check all expected file types exist + expected_types = [ + "text", + "binary", + "with_attributes", + ] # Consider 'empty' as well + for file_type in expected_types: + assert file_type in tdf_files, f"Missing {file_type} TDF for {target_mode}" + + # Check the TDF file exists and is not empty + tdf_path = tdf_files[file_type] + validate_tdf3_file( + tdf_path, + f"otdfctl generated using target-mode {target_mode} {file_type}", + ) + + +@pytest.mark.integration +def test_v4_2_2_tdf_files(tdf_v4_2_2_files): + """Test that v4.2.2 TDF fixtures work independently.""" + assert "text" in tdf_v4_2_2_files + assert tdf_v4_2_2_files["text"].exists() + + +@pytest.mark.integration +def test_v4_3_1_tdf_files(tdf_v4_3_1_files): + """Test that v4.3.1 TDF fixtures work independently.""" + assert "text" in tdf_v4_3_1_files + assert tdf_v4_3_1_files["text"].exists() diff --git a/tests/integration/otdfctl_to_python/test_cli_comparison.py b/tests/integration/otdfctl_to_python/test_cli_comparison.py new file mode 100644 index 0000000..d5d428a --- /dev/null +++ b/tests/integration/otdfctl_to_python/test_cli_comparison.py @@ -0,0 +1,178 @@ +""" +Test CLI functionality +""" + +import tempfile +from pathlib import Path + +import pytest + +from tests.support_cli_args import run_cli_decrypt +from tests.support_common import ( + handle_subprocess_error, + validate_plaintext_file_created, + validate_tdf3_file, +) +from tests.support_otdfctl_args import ( + run_otdfctl_decrypt_command, + run_otdfctl_encrypt_command, +) + + +@pytest.mark.integration +def test_otdfctl_encrypt_python_decrypt( + collect_server_logs, temp_credentials_file, project_root +): + """Integration test that uses otdfctl for encryption and the Python CLI for decryption""" + + # Create temporary directory for work + with 
tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create input file + input_file = temp_path / "input.txt" + input_content = "Hello, World! This is a test for otdfctl decrypt comparison." + with open(input_file, "w") as f: + f.write(input_content) + + # Define TDF file created by otdfctl + otdfctl_tdf_output = temp_path / "hello-world-otdfctl.txt.tdf" + + # Define decrypted outputs from both tools + otdfctl_decrypt_output = temp_path / "decrypted-by-otdfctl.txt" + cli_decrypt_output = temp_path / "decrypted-by-cli.txt" + + # Run otdfctl encrypt first to create a TDF file + otdfctl_encrypt_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_tdf_output, + mime_type="text/plain", + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_encrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl encrypt", + ) + + validate_tdf3_file(otdfctl_tdf_output, "otdfctl") + + # Now run otdfctl decrypt (this is the reference implementation) + otdfctl_decrypt_result = run_otdfctl_decrypt_command( + temp_credentials_file, + otdfctl_tdf_output, + otdfctl_decrypt_output, + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_decrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl decrypt", + ) + + # Run our Python CLI decrypt on the same TDF + cli_decrypt_result = run_cli_decrypt( + creds_file=temp_credentials_file, + input_file=otdfctl_tdf_output, + output_file=cli_decrypt_output, + cwd=project_root, + ) + + # Fail fast on errors + handle_subprocess_error( + result=cli_decrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="Python CLI decrypt", + ) + + validate_plaintext_file_created( + path=otdfctl_decrypt_output, + scenario="otdfctl", + expected_content=input_content, + ) + validate_plaintext_file_created( + path=cli_decrypt_output, + scenario="Python CLI", + expected_content=input_content, + ) + + +@pytest.mark.integration +def test_otdfctl_encrypt_otdfctl_decrypt(collect_server_logs, temp_credentials_file): + """Integration test that uses otdfctl for both encryption and decryption to verify roundtrip functionality""" + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create input file + input_file = temp_path / "input.txt" + input_content = ( + "Hello, World! This is a test for otdfctl roundtrip encryption/decryption." 
+ ) + with open(input_file, "w") as f: + f.write(input_content) + + # Define TDF file and decrypted output + otdfctl_tdf_output = temp_path / "otdfctl-roundtrip.txt.tdf" + otdfctl_decrypt_output = temp_path / "otdfctl-roundtrip-decrypted.txt" + + # Run otdfctl encrypt + otdfctl_encrypt_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_tdf_output, + mime_type="text/plain", + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_encrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl encrypt", + ) + + # Verify the TDF file was created + validate_tdf3_file(tdf_path=otdfctl_tdf_output, tool_name="otdfctl") + + # Run otdfctl decrypt + otdfctl_decrypt_result = run_otdfctl_decrypt_command( + temp_credentials_file, + otdfctl_tdf_output, + otdfctl_decrypt_output, + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_decrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl decrypt", + ) + + validate_plaintext_file_created( + path=otdfctl_decrypt_output, + scenario="otdfctl", + expected_content=input_content, + ) + + # Verify file sizes are reasonable + original_size = input_file.stat().st_size + tdf_size = otdfctl_tdf_output.stat().st_size + decrypted_size = otdfctl_decrypt_output.stat().st_size + + assert tdf_size > original_size, "TDF file should be larger than original" + + print( + f"✓ otdfctl roundtrip successful: {original_size} bytes -> {tdf_size} bytes -> {decrypted_size} bytes" + ) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/integration/otdfctl_to_python/test_cli_decrypt.py b/tests/integration/otdfctl_to_python/test_cli_decrypt.py new file mode 100644 index 0000000..dc61123 --- /dev/null +++ b/tests/integration/otdfctl_to_python/test_cli_decrypt.py @@ -0,0 +1,191 @@ +""" +Tests using target mode fixtures, for CLI integration testing. +""" + +import logging +import subprocess +import tempfile +from pathlib import Path + +import pytest + +from tests.support_cli_args import ( + run_cli_decrypt, +) +from tests.support_common import handle_subprocess_error + +logger = logging.getLogger(__name__) + + +@pytest.mark.integration +def test_cli_decrypt_v4_2_2_vs_v4_3_1( + all_target_mode_tdf_files, temp_credentials_file, collect_server_logs, project_root +): + """ + Test Python CLI decrypt with various TDF versions created by otdfctl. 
+ """ + + v4_2_2_files = all_target_mode_tdf_files["v4.2.2"] + v4_3_1_files = all_target_mode_tdf_files["v4.3.1"] + + # Test decrypt on both versions of the same file type + for file_type in ["text", "binary"]: + v4_2_2_tdf = v4_2_2_files[file_type] + v4_3_1_tdf = v4_3_1_files[file_type] + + # Decrypt v4.2.2 TDF + v4_2_2_output = _run_cli_decrypt( + v4_2_2_tdf, temp_credentials_file, project_root, collect_server_logs + ) + + # Decrypt v4.3.1 TDF + v4_3_1_output = _run_cli_decrypt( + v4_3_1_tdf, temp_credentials_file, project_root, collect_server_logs + ) + + # Both should succeed and produce output files + assert v4_2_2_output is not None, f"Failed to decrypt v4.2.2 {file_type} TDF" + assert v4_3_1_output is not None, f"Failed to decrypt v4.3.1 {file_type} TDF" + + assert v4_2_2_output.exists(), ( + f"v4.2.2 {file_type} decrypt output file not created" + ) + assert v4_3_1_output.exists(), ( + f"v4.3.1 {file_type} decrypt output file not created" + ) + + # Both output files should have content (not empty) + assert v4_2_2_output.stat().st_size > 0, ( + f"v4.2.2 {file_type} decrypt produced empty file" + ) + assert v4_3_1_output.stat().st_size > 0, ( + f"v4.3.1 {file_type} decrypt produced empty file" + ) + + # Log the decryption results for comparison + logger.info(f"\n=== {file_type.upper()} TDF Decryption Comparison ===") + logger.info(f"v4.2.2 output size: {v4_2_2_output.stat().st_size} bytes") + logger.info(f"v4.3.1 output size: {v4_3_1_output.stat().st_size} bytes") + + # For text files, we can compare the content directly + if file_type == "text": + v4_2_2_content = v4_2_2_output.read_text() + v4_3_1_content = v4_3_1_output.read_text() + + logger.info(f"v4.2.2 content preview: {v4_2_2_content[:50]}...") + logger.info(f"v4.3.1 content preview: {v4_3_1_content[:50]}...") + + # Clean up output files + v4_2_2_output.unlink() + v4_3_1_output.unlink() + + +@pytest.mark.integration +def test_cli_decrypt_different_file_types( + all_target_mode_tdf_files, + temp_credentials_file, + collect_server_logs, + project_root, + known_target_modes, +): + """ + Test CLI decrypt with different file types. 
+ """ + + assert "v4.2.2" in all_target_mode_tdf_files + assert "v4.3.1" in all_target_mode_tdf_files + + # Check each version has the expected file types + for target_mode in known_target_modes: + tdf_files = all_target_mode_tdf_files[target_mode] + + file_types_to_test = [ + "text", + "binary", + "with_attributes", + ] # TODO: Consider adding "empty" file type as well + + for file_type in file_types_to_test: + tdf_path = tdf_files[file_type] + + # Decrypt the TDF + output_file = _run_cli_decrypt( + tdf_path, temp_credentials_file, project_root, collect_server_logs + ) + + assert output_file is not None, f"Failed to decrypt {file_type} TDF" + assert output_file.exists(), ( + f"{file_type} TDF decrypt output file not created" + ) + + # Check file-type specific expectations + if file_type == "empty": + # Empty files should produce empty output files + assert output_file.stat().st_size == 0, ( + f"{file_type} TDF should produce empty output" + ) + else: + # Non-empty files should produce non-empty output + assert output_file.stat().st_size > 0, ( + f"{file_type} TDF produced empty decrypt output" + ) + + # For attributed files, just ensure they decrypt successfully + if file_type == "with_attributes": + logger.info( + f"Successfully decrypted attributed TDF, output size: {output_file.stat().st_size}" + ) + + # For text files, verify the content is readable + if file_type == "text": + try: + content = output_file.read_text() + assert len(content) > 0, "Text file should have readable content" + logger.info(f"Text content preview: {content[:100]}...") + except UnicodeDecodeError: + pytest.fail(f"Decrypted {file_type} file should be valid text") + + # Clean up output file + output_file.unlink() + + +def _run_cli_decrypt( + tdf_path: Path, creds_file: Path, cwd: Path, collect_server_logs +) -> Path | None: + """ + Helper function to run Python CLI decrypt command and return the output file path. + + Returns the Path to the decrypted output file if successful, None if failed. + """ + # Create a temporary output file + with tempfile.NamedTemporaryFile(delete=False, suffix=".decrypted") as temp_file: + output_path = Path(temp_file.name) + + try: + # Build CLI command + cli_decrypt_result = run_cli_decrypt( + creds_file=creds_file, + input_file=tdf_path, + output_file=output_path, + cwd=cwd, + ) + + # Fail fast on errors + handle_subprocess_error( + result=cli_decrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="Python CLI decrypt", + ) + + return output_path + + except subprocess.CalledProcessError as e: + logger.error(f"CLI decrypt failed for {tdf_path}: {e}") + logger.error(f"CLI stderr: {e.stderr}") + logger.error(f"CLI stdout: {e.stdout}") + + # Clean up the output file if it was created but command failed + if output_path.exists(): + output_path.unlink() + + raise Exception(f"Failed to decrypt TDF {tdf_path}: {e}") from e diff --git a/tests/integration/otdfctl_to_python/test_cli_inspect.py b/tests/integration/otdfctl_to_python/test_cli_inspect.py new file mode 100644 index 0000000..1ba39cf --- /dev/null +++ b/tests/integration/otdfctl_to_python/test_cli_inspect.py @@ -0,0 +1,122 @@ +""" +Tests using target mode fixtures, for CLI integration testing. 
+""" + +import logging + +import pytest + +from tests.support_cli_args import run_cli_inspect + +logger = logging.getLogger(__name__) + + +@pytest.mark.integration +def test_cli_inspect_v4_2_2_vs_v4_3_1( + all_target_mode_tdf_files, temp_credentials_file, project_root +): + """ + Test Python CLI inspect with various TDF versions created by otdfctl. + """ + + v4_2_2_files = all_target_mode_tdf_files["v4.2.2"] + v4_3_1_files = all_target_mode_tdf_files["v4.3.1"] + + # Test inspect on both versions of the same file type + for file_type in ["text", "binary"]: + v4_2_2_tdf = v4_2_2_files[file_type] + v4_3_1_tdf = v4_3_1_files[file_type] + + # Inspect v4.2.2 TDF + v4_2_2_result = run_cli_inspect(v4_2_2_tdf, temp_credentials_file, project_root) + + # Inspect v4.3.1 TDF + v4_3_1_result = run_cli_inspect(v4_3_1_tdf, temp_credentials_file, project_root) + + # Both should succeed + assert v4_2_2_result is not None, f"Failed to inspect v4.2.2 {file_type} TDF" + assert v4_3_1_result is not None, f"Failed to inspect v4.3.1 {file_type} TDF" + + # Both should have either manifest data (full inspection) or basic info (limited inspection) + if "manifest" in v4_2_2_result: + # Full inspection succeeded + assert "manifest" in v4_3_1_result, ( + f"v4.3.1 {file_type} inspection missing manifest while v4.2.2 has it" + ) + # Compare manifest versions (this is where version differences would show) + logger.info( + f"\n=== {file_type.upper()} TDF Comparison (Full Inspection) ===" + ) + logger.info( + f"v4.2.2 manifest keys: {list(v4_2_2_result['manifest'].keys())}" + ) + logger.info( + f"v4.3.1 manifest keys: {list(v4_3_1_result['manifest'].keys())}" + ) + else: + # Limited inspection - check for basic structure + assert "type" in v4_2_2_result, ( + f"v4.2.2 {file_type} inspection missing type" + ) + assert "size" in v4_2_2_result, ( + f"v4.2.2 {file_type} inspection missing size" + ) + assert "type" in v4_3_1_result, ( + f"v4.3.1 {file_type} inspection missing type" + ) + assert "size" in v4_3_1_result, ( + f"v4.3.1 {file_type} inspection missing size" + ) + + logger.info( + f"\n=== {file_type.upper()} TDF Comparison (Limited Inspection) ===" + ) + logger.info( + f"v4.2.2 type: {v4_2_2_result['type']}, size: {v4_2_2_result['size']}" + ) + logger.info( + f"v4.3.1 type: {v4_3_1_result['type']}, size: {v4_3_1_result['size']}" + ) + + +@pytest.mark.integration +def test_cli_inspect_different_file_types( + all_target_mode_tdf_files, temp_credentials_file, project_root, known_target_modes +): + """ + Test CLI inspect with different file types. 
+ """ + assert "v4.2.2" in all_target_mode_tdf_files + assert "v4.3.1" in all_target_mode_tdf_files + + # Check each version has the expected file types + for target_mode in known_target_modes: + tdf_files = all_target_mode_tdf_files[target_mode] + + file_types_to_test = [ + "text", + "binary", + "with_attributes", + ] # TODO: Consider adding "empty" file type as well + + for file_type in file_types_to_test: + tdf_path = tdf_files[file_type] + + # Inspect the TDF + result = run_cli_inspect(tdf_path, temp_credentials_file, project_root) + + assert result is not None, ( + f"Failed to inspect {file_type} TDF, TDF version {target_mode}" + ) + assert "manifest" in result, f"{file_type} TDF inspection missing manifest" + + # Check file-type specific expectations + if file_type == "empty": + # Empty files should still have valid manifests + assert "encryptionInformation" in result["manifest"] + elif file_type == "with_attributes": + # Attributed files should have keyAccess information + assert ( + "keyAccess" in result["manifest"] + or "encryptionInformation" in result["manifest"] + ) diff --git a/tests/integration/otdfctl_to_python/test_tdf_reader_integration.py b/tests/integration/otdfctl_to_python/test_tdf_reader_integration.py new file mode 100644 index 0000000..5ff6118 --- /dev/null +++ b/tests/integration/otdfctl_to_python/test_tdf_reader_integration.py @@ -0,0 +1,272 @@ +""" +Integration Tests for TDFReader. +""" + +import io +import json +import tempfile +from pathlib import Path + +import pytest + +from otdf_python.tdf_reader import ( + TDFReader, +) +from tests.config_pydantic import CONFIG_TDF +from tests.support_common import handle_subprocess_error +from tests.support_otdfctl_args import run_otdfctl_encrypt_command + + +class TestTDFReaderIntegration: + """Integration tests for TDFReader with real TDF files created by otdfctl.""" + + @pytest.mark.integration + def test_read_otdfctl_created_tdf_structure( + self, temp_credentials_file, collect_server_logs + ): + """Test that TDFReader can parse the structure of files created by otdfctl.""" + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create input file + input_file = temp_path / "input.txt" + input_content = "Hello, World! This is test data for TDFReader integration." 
+ with open(input_file, "w") as f: + f.write(input_content) + + # Define output files + otdfctl_output = temp_path / "test-reader.txt.tdf" + + # Run otdfctl encrypt + otdfctl_encrypt_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_output, + mime_type="text/plain", + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_encrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl encrypt", + ) + + # Verify the TDF file was created + assert otdfctl_output.exists(), "otdfctl did not create TDF file" + assert otdfctl_output.stat().st_size > 0, "otdfctl created empty TDF file" + + # Test that TDFReader can open and read the structure + with open(otdfctl_output, "rb") as f: + tdf_data = f.read() + + # Initialize TDFReader + reader = TDFReader(io.BytesIO(tdf_data)) + + # Test manifest reading + manifest_content = reader.manifest() + assert manifest_content, "Manifest should not be empty" + + # Parse the manifest JSON + manifest_json = json.loads(manifest_content) + assert "encryptionInformation" in manifest_json, ( + "Manifest should contain encryptionInformation" + ) + assert "payload" in manifest_json, "Manifest should contain payload" + + # Verify encryption information structure + enc_info = manifest_json["encryptionInformation"] + assert "keyAccess" in enc_info, ( + "encryptionInformation should contain keyAccess" + ) + assert "method" in enc_info, "encryptionInformation should contain method" + assert "policy" in enc_info, "encryptionInformation should contain policy" + assert "integrityInformation" in enc_info, ( + "encryptionInformation should contain integrityInformation" + ) + + # Verify payload information + payload_info = manifest_json["payload"] + assert "mimeType" in payload_info, "payload should contain mimeType" + assert "isEncrypted" in payload_info, "payload should contain isEncrypted" + assert payload_info["isEncrypted"] is True, "payload should be encrypted" + + # Test payload reading capability (without decryption) + payload_buffer = bytearray(1024) # Create a buffer for reading + bytes_read = reader.read_payload_bytes(payload_buffer) + assert bytes_read > 0, "Should be able to read some payload bytes" + + # Test policy object reading + policy_obj = reader.read_policy_object() + assert policy_obj is not None, "Should be able to read policy object" + + @pytest.mark.integration + def test_read_otdfctl_tdf_with_attributes( + self, temp_credentials_file, collect_server_logs + ): + """Test reading TDF files created by otdfctl with data attributes.""" + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create input file + input_file = temp_path / "input.txt" + input_content = "This is input data for testing attributes." 
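+            # For context (a sketch mirroring the checks made later in this
+            # test): the manifest carries its policy as a base64-encoded JSON
+            # document, roughly:
+            #
+            #   import base64, json
+            #   policy = json.loads(base64.b64decode(policy_b64))
+            #   attrs = policy["body"]["dataAttributes"]  # FQNs, possibly empty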
+ with open(input_file, "w") as f: + f.write(input_content) + + # Define output file + otdfctl_output = temp_path / "input.txt.tdf" + + # Run otdfctl encrypt with attributes + otdfctl_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_output, + mime_type="text/plain", + attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1], + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl encrypt with attributest", + ) + + # Verify the TDF file was created + assert otdfctl_output.exists(), "otdfctl did not create TDF file" + + # Test that TDFReader can read the file with attributes + with open(otdfctl_output, "rb") as f: + tdf_data = f.read() + + reader = TDFReader(io.BytesIO(tdf_data)) + manifest_content = reader.manifest() + manifest_json = json.loads(manifest_content) + + # Verify the policy contains attributes + assert "encryptionInformation" in manifest_json + assert "policy" in manifest_json["encryptionInformation"] + + # Decode the policy to check for attributes + import base64 + + policy_b64 = manifest_json["encryptionInformation"]["policy"] + policy_json = json.loads(base64.b64decode(policy_b64).decode()) + + # Verify policy structure + assert "body" in policy_json, "Policy should contain body" + assert "dataAttributes" in policy_json["body"], ( + "Policy body should contain dataAttributes" + ) + + # Check that attributes exist (if any were actually set) + # Note: otdfctl might not always include attributes in the policy depending on server configuration + # So we just verify the structure is correct + + # Test that we can still read the policy object + policy_obj = reader.read_policy_object() + assert policy_obj is not None, ( + "Should be able to read policy object with attributes" + ) + + @pytest.mark.integration + def test_read_multiple_otdfctl_files( + self, temp_credentials_file, collect_server_logs + ): + """Test reading multiple TDF files of different types created by otdfctl.""" + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Test different file types and content + test_cases = [ + { + "name": "text_file", + "content": "Simple text content for testing", + "mime_type": "text/plain", + }, + { + "name": "json_data", + "content": json.dumps({"test": "data", "number": 42}), + "mime_type": "application/json", + }, + { + "name": "binary_data", + "content": b"\x00\x01\x02\x03\x04\x05\xff\xfe\xfd", + "mime_type": "application/octet-stream", + }, + ] + + for test_case in test_cases: + # Create input file + input_file = temp_path / f"{test_case['name']}.txt" + if isinstance(test_case["content"], bytes): + with open(input_file, "wb") as f: + f.write(test_case["content"]) + else: + with open(input_file, "w") as f: + f.write(test_case["content"]) + + # Define output file + output_file = temp_path / f"{test_case['name']}.tdf" + + # Run otdfctl encrypt + otdfctl_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=output_file, + mime_type=test_case["mime_type"], + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_result, + collect_server_logs=collect_server_logs, + scenario_name=f"Test case {test_case['name']}, otdfctl encrypt", + ) + + # Test TDFReader on this file + with open(output_file, "rb") as f: + tdf_data = f.read() + + reader = 
TDFReader(io.BytesIO(tdf_data)) + + # Basic structure verification + manifest_content = reader.manifest() + assert manifest_content, ( + f"Manifest should not be empty for {test_case['name']}" + ) + + manifest_json = json.loads(manifest_content) + assert "payload" in manifest_json, ( + f"Manifest should contain payload for {test_case['name']}" + ) + + # Verify MIME type is preserved + payload_info = manifest_json["payload"] + if "mimeType" in payload_info: + assert payload_info["mimeType"] == test_case["mime_type"], ( + f"MIME type should be preserved for {test_case['name']}" + ) + + # Test payload reading + payload_buffer = bytearray(1024) + bytes_read = reader.read_payload_bytes(payload_buffer) + assert bytes_read > 0, ( + f"Should read payload bytes for {test_case['name']}" + ) + + # Test policy object reading + policy_obj = reader.read_policy_object() + assert policy_obj is not None, ( + f"Should read policy object for {test_case['name']}" + ) diff --git a/tests/integration/python_only/test_kas_client_integration.py b/tests/integration/python_only/test_kas_client_integration.py new file mode 100644 index 0000000..97fc723 --- /dev/null +++ b/tests/integration/python_only/test_kas_client_integration.py @@ -0,0 +1,244 @@ +""" +Integration tests for KASClient. +""" + +import pytest + +from otdf_python.kas_client import KASClient, KeyAccess +from otdf_python.kas_key_cache import KASKeyCache +from otdf_python.sdk_exceptions import SDKException +from tests.config_pydantic import CONFIG_TDF + + +@pytest.mark.integration +def test_connect_rpc_public_key_retrieval(): + """Test Connect RPC public key retrieval using live backend server.""" + from otdf_python.config import KASInfo + + # Create KAS info from configuration + kas_info = KASInfo(url=CONFIG_TDF.KAS_ENDPOINT) + + # Create KAS client with SSL verification disabled for testing + client = KASClient( + kas_url=CONFIG_TDF.KAS_ENDPOINT, + verify_ssl=not CONFIG_TDF.INSECURE_SKIP_VERIFY, + use_plaintext=bool(CONFIG_TDF.OPENTDF_PLATFORM_URL.startswith("http://")), + ) + + # Retrieve public key using Connect RPC + result = client.get_public_key(kas_info) + + # Verify the result + assert result is not None + assert result.url == CONFIG_TDF.KAS_ENDPOINT + assert result.public_key is not None + assert len(result.public_key) > 0 + assert result.kid is not None + assert len(result.kid) > 0 + + # Verify the public key looks like a PEM format + assert "-----BEGIN" in result.public_key + assert "-----END" in result.public_key + + +@pytest.mark.integration +def test_public_key_caching(): + """Test that retrieving the public key uses the cache on subsequent calls.""" + from otdf_python.config import KASInfo + + # Create a custom cache instance to verify caching behavior + cache = KASKeyCache() + kas_info = KASInfo(url=CONFIG_TDF.KAS_ENDPOINT) + + # Create KAS client with the custom cache + client = KASClient( + kas_url=CONFIG_TDF.KAS_ENDPOINT, + verify_ssl=not CONFIG_TDF.INSECURE_SKIP_VERIFY, + use_plaintext=bool(CONFIG_TDF.OPENTDF_PLATFORM_URL.startswith("http://")), + cache=cache, + ) + + # First call should retrieve from server and cache the result + result1 = client.get_public_key(kas_info) + assert result1 is not None + assert result1.public_key is not None + assert result1.kid is not None + + # Verify the result was cached + cached_result = cache.get(CONFIG_TDF.KAS_ENDPOINT) + assert cached_result is not None + assert cached_result.url == CONFIG_TDF.KAS_ENDPOINT + assert cached_result.public_key == result1.public_key + assert cached_result.kid == 
result1.kid + + # Second call should return the cached result (same instance) + result2 = client.get_public_key(kas_info) + assert result2 is not None + assert result2.url == result1.url + assert result2.public_key == result1.public_key + assert result2.kid == result1.kid + + # Verify that we got the same cached instance + assert result2 is cached_result + + +@pytest.mark.integration +def test_unwrap_success(): + """Test successful key unwrapping using Connect RPC.""" + import base64 + + # Create a token source for authentication + def mock_token_source(): + return "mock_token_for_integration_test" + + # Create KAS client with authentication + client = KASClient( + kas_url=CONFIG_TDF.KAS_ENDPOINT, + verify_ssl=not CONFIG_TDF.INSECURE_SKIP_VERIFY, + use_plaintext=bool(CONFIG_TDF.OPENTDF_PLATFORM_URL.startswith("http://")), + token_source=mock_token_source, + ) + + # Create a test key access object with a mock wrapped key + # Note: In a real scenario, this would be a valid wrapped key from a TDF + mock_wrapped_key = base64.b64encode(b"test_wrapped_key_data").decode("utf-8") + key_access = KeyAccess( + url=CONFIG_TDF.KAS_ENDPOINT, + wrapped_key=mock_wrapped_key, + ) + + # Create a simple test policy + test_policy = '{"body": {"dataAttributes": []}}' + + # Attempt to unwrap the key + # Note: This test will likely fail with a real KAS server because we're using + # a mock wrapped key, but it tests the integration path + try: + result = client.unwrap(key_access, test_policy) + + # If we get here, the unwrap succeeded (unlikely with mock data) + assert result is not None + assert isinstance(result, bytes) + + except SDKException as e: + # Expected to fail with mock data, but we should see a proper error + # indicating the request made it to the server + assert "Connect RPC rewrap failed" in str(e) + print(f"Expected failure with mock data: {e}") + + +@pytest.mark.integration +def test_unwrap_failure(): + """Test unwrap failure handling with invalid data.""" + import base64 + + # Create a token source for authentication + def mock_token_source(): + return "invalid_token" + + # Create KAS client with invalid authentication + client = KASClient( + kas_url=CONFIG_TDF.KAS_ENDPOINT, + verify_ssl=not CONFIG_TDF.INSECURE_SKIP_VERIFY, + use_plaintext=bool(CONFIG_TDF.OPENTDF_PLATFORM_URL.startswith("http://")), + token_source=mock_token_source, + ) + + # Create a key access object with invalid wrapped key + invalid_wrapped_key = base64.b64encode(b"completely_invalid_key_data").decode( + "utf-8" + ) + key_access = KeyAccess( + url=CONFIG_TDF.KAS_ENDPOINT, + wrapped_key=invalid_wrapped_key, + ) + + # Create an invalid policy + invalid_policy = '{"invalid": "policy_structure"}' + + # Attempt to unwrap should fail + with pytest.raises(SDKException) as exc_info: + client.unwrap(key_access, invalid_policy) + + # Verify we get the expected error + assert "Connect RPC rewrap failed" in str(exc_info.value) + + +@pytest.mark.integration +def test_kas_url_normalization(): + """Test KAS URL normalization with different URL formats.""" + from urllib.parse import urlparse + + # Test with plaintext client (HTTP) + plaintext_client = KASClient( + use_plaintext=True, + verify_ssl=not CONFIG_TDF.INSECURE_SKIP_VERIFY, + ) + + # Test various URL formats for plaintext normalization + test_cases_plaintext = [ + ("localhost", "http://localhost:80"), + ("localhost:8080", "http://localhost:8080"), + ("example.com", "http://example.com:80"), + ("example.com:9000", "http://example.com:9000"), + ("http://example.com", 
"http://example.com:80"), + ("https://example.com", "http://example.com:80"), # Should convert to HTTP + ( + "https://example.com:8443", + "http://example.com:8443", + ), # Should convert to HTTP + ] + + for input_url, expected_url in test_cases_plaintext: + normalized = plaintext_client._normalize_kas_url(input_url) + assert normalized == expected_url, ( + f"Plaintext normalization failed for {input_url}: expected {expected_url}, got {normalized}" + ) + + # Test with secure client (HTTPS) + secure_client = KASClient( + use_plaintext=False, + verify_ssl=not CONFIG_TDF.INSECURE_SKIP_VERIFY, + ) + + # Test various URL formats for secure normalization + test_cases_secure = [ + ("localhost", "https://localhost:443"), + ("localhost:8443", "https://localhost:8443"), + ("example.com", "https://example.com:443"), + ("example.com:9443", "https://example.com:9443"), + ("http://example.com", "https://example.com:443"), # Should convert to HTTPS + ("https://example.com", "https://example.com:443"), + ("https://example.com:8443", "https://example.com:8443"), + ] + + for input_url, expected_url in test_cases_secure: + normalized = secure_client._normalize_kas_url(input_url) + assert normalized == expected_url, ( + f"Secure normalization failed for {input_url}: expected {expected_url}, got {normalized}" + ) + + # Test URL normalization with the actual test configuration + test_url = CONFIG_TDF.KAS_ENDPOINT + parsed_test_url = urlparse(test_url) + + if CONFIG_TDF.OPENTDF_PLATFORM_URL.startswith("http://"): + # Platform is HTTP, so should normalize to HTTP + client = KASClient(use_plaintext=True) + normalized = client._normalize_kas_url(test_url) + assert normalized.startswith("http://"), ( + f"Expected HTTP URL for plaintext config, got: {normalized}" + ) + else: + # Platform is HTTPS, so should normalize to HTTPS + client = KASClient(use_plaintext=False) + normalized = client._normalize_kas_url(test_url) + assert normalized.startswith("https://"), ( + f"Expected HTTPS URL for secure config, got: {normalized}" + ) + + # Verify the normalized URL preserves the path component (e.g., /kas) + if parsed_test_url.path: + assert parsed_test_url.path in normalized, ( + f"Path component {parsed_test_url.path} should be preserved in {normalized}" + ) diff --git a/tests/integration/support_sdk.py b/tests/integration/support_sdk.py new file mode 100644 index 0000000..0d93ba3 --- /dev/null +++ b/tests/integration/support_sdk.py @@ -0,0 +1,75 @@ +import httpx + +from otdf_python.sdk import SDK +from otdf_python.sdk_builder import SDKBuilder +from tests.config_pydantic import CONFIG_TDF + + +def _get_sdk_builder() -> SDKBuilder: + builder = ( + SDKBuilder() + .set_platform_endpoint(CONFIG_TDF.OPENTDF_PLATFORM_URL) + .set_issuer_endpoint(CONFIG_TDF.OPENTDF_KEYCLOAK_HOST) + .client_secret( + CONFIG_TDF.OPENTDF_CLIENT_ID, + CONFIG_TDF.OPENTDF_CLIENT_SECRET, + ) + ) + + if CONFIG_TDF.OPENTDF_PLATFORM_URL.startswith("http://"): + builder.use_insecure_plaintext_connection(True) + elif CONFIG_TDF.OPENTDF_PLATFORM_URL.startswith("https://"): + builder.use_insecure_skip_verify(CONFIG_TDF.INSECURE_SKIP_VERIFY) + else: + raise ValueError( + f"Invalid platform URL: {CONFIG_TDF.OPENTDF_PLATFORM_URL}. " + "It must start with 'http://' or 'https://'." 
+ ) + return builder + + +def get_sdk() -> SDK: + """Get SDK instance.""" + builder = _get_sdk_builder() + return builder.build() + + +def get_sdk_for_pe() -> SDK: + """Get SDK instance for Person Entity (PE) workflows.""" + user_token: str = get_user_access_token( + CONFIG_TDF.OIDC_OP_TOKEN_ENDPOINT, + CONFIG_TDF.TEST_USER_ID, + CONFIG_TDF.TEST_USER_PASSWORD, + ) + builder = _get_sdk_builder() + builder.bearer_token(user_token) + return builder.build() + + +def get_user_access_token( + token_endpoint, + pe_username, + pe_password, +): + """ + When using this function, ensure that: + + 1. The client has "fine-grained access control" enabled (in the Advanced tab for the client in Keycloak). + 2. The client is allowed to use "Direct access grants" (in the Settings tab for the client in Keycloak). + + """ + headers = {"Content-Type": "application/x-www-form-urlencoded"} + + data = { + "grant_type": "password", + "client_id": CONFIG_TDF.OPENTDF_CLIENT_ID, + "client_secret": CONFIG_TDF.OPENTDF_CLIENT_SECRET, + "username": pe_username, + "password": pe_password, + } + + with httpx.Client(verify=not CONFIG_TDF.INSECURE_SKIP_VERIFY) as client: + response = client.post(token_endpoint, headers=headers, data=data) + response.raise_for_status() + token_data = response.json() + return token_data.get("access_token") diff --git a/tests/integration/test_cli_integration.py b/tests/integration/test_cli_integration.py new file mode 100644 index 0000000..9201e8e --- /dev/null +++ b/tests/integration/test_cli_integration.py @@ -0,0 +1,304 @@ +""" +Integration Test CLI functionality +""" + +import tempfile +from pathlib import Path + +import pytest + +from tests.support_cli_args import run_cli_decrypt, run_cli_encrypt +from tests.support_common import ( + compare_tdf3_file_size, + handle_subprocess_error, + validate_plaintext_file_created, + validate_tdf3_file, +) +from tests.support_otdfctl_args import ( + run_otdfctl_decrypt_command, + run_otdfctl_encrypt_command, +) + + +@pytest.mark.integration +def test_cli_decrypt_otdfctl_tdf( + collect_server_logs, temp_credentials_file, project_root +): + """ + Test that the Python CLI can successfully decrypt TDF files created by otdfctl. + """ + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create input file + input_file = temp_path / "input.txt" + input_content = "Hello, World! This is a test for decryption." 
+ with open(input_file, "w") as f: + f.write(input_content) + + # Define TDF file created by otdfctl + otdfctl_tdf_output = temp_path / "hello-world-otdfctl.txt.tdf" + + # Define decrypted output from our CLI + cli_decrypt_output = temp_path / "decrypted-by-cli.txt" + + # Run otdfctl encrypt + otdfctl_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_tdf_output, + mime_type="text/plain", + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl encrypt", + ) + + validate_tdf3_file(otdfctl_tdf_output, "otdfctl") + + # Run our Python CLI decrypt on the otdfctl-created TDF + cli_decrypt_result = run_cli_decrypt( + creds_file=temp_credentials_file, + input_file=otdfctl_tdf_output, + output_file=cli_decrypt_output, + cwd=project_root, + ) + + # Fail fast on errors + handle_subprocess_error( + result=cli_decrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="Python CLI decrypt", + ) + + validate_plaintext_file_created( + path=cli_decrypt_output, + scenario="Python decrypt", + expected_content=input_content, + ) + + +@pytest.mark.integration +def test_otdfctl_decrypt_comparison( + collect_server_logs, temp_credentials_file, project_root +): + """ + Test comparative decryption between otdfctl and Python CLI on the same TDF. + """ + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create input file + input_file = temp_path / "input.txt" + input_content = "Hello, World! This is a test for otdfctl decrypt comparison." + with open(input_file, "w") as f: + f.write(input_content) + + # Define TDF file created by otdfctl + otdfctl_tdf_output = temp_path / "hello-world-otdfctl.txt.tdf" + + # Define decrypted outputs from both tools + otdfctl_decrypt_output = temp_path / "decrypted-by-otdfctl.txt" + cli_decrypt_output = temp_path / "decrypted-by-cli.txt" + + # Run otdfctl encrypt first to create a TDF file + otdfctl_encrypt_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_tdf_output, + mime_type="text/plain", + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_encrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl encrypt", + ) + + validate_tdf3_file(otdfctl_tdf_output, "otdfctl") + + # Now run otdfctl decrypt (this is the reference implementation) + otdfctl_decrypt_result = run_otdfctl_decrypt_command( + temp_credentials_file, + otdfctl_tdf_output, + otdfctl_decrypt_output, + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_decrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl decrypt", + ) + + cli_decrypt_result = run_cli_decrypt( + creds_file=temp_credentials_file, + input_file=otdfctl_tdf_output, + output_file=cli_decrypt_output, + cwd=project_root, + ) + + # Fail fast on errors + handle_subprocess_error( + result=cli_decrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="Python CLI decrypt", + ) + + validate_plaintext_file_created( + path=otdfctl_decrypt_output, + scenario="otdfctl", + expected_content=input_content, + ) + validate_plaintext_file_created( + path=cli_decrypt_output, + scenario="Python CLI", + expected_content=input_content, + ) + + +@pytest.mark.integration +def 
test_otdfctl_encrypt_decrypt_roundtrip(collect_server_logs, temp_credentials_file): + """ + Test complete encrypt-decrypt roundtrip using otdfctl to verify functionality. + """ + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create input file + input_file = temp_path / "input.txt" + input_content = ( + "Hello, World! This is a test for otdfctl roundtrip encryption/decryption." + ) + with open(input_file, "w") as f: + f.write(input_content) + + # Define TDF file and decrypted output + otdfctl_tdf_output = temp_path / "otdfctl-roundtrip.txt.tdf" + otdfctl_decrypt_output = temp_path / "otdfctl-roundtrip-decrypted.txt" + + # Run otdfctl encrypt + otdfctl_encrypt_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_tdf_output, + mime_type="text/plain", + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_encrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl encrypt", + ) + + # Verify the TDF file was created + validate_tdf3_file(otdfctl_tdf_output, "otdfctl") + + # Run otdfctl decrypt + otdfctl_decrypt_result = run_otdfctl_decrypt_command( + temp_credentials_file, + otdfctl_tdf_output, + otdfctl_decrypt_output, + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_decrypt_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl decrypt", + ) + + validate_plaintext_file_created( + path=otdfctl_decrypt_output, + scenario="otdfctl", + expected_content=input_content, + ) + + # Verify file sizes are reasonable + original_size = input_file.stat().st_size + tdf_size = otdfctl_tdf_output.stat().st_size + decrypted_size = otdfctl_decrypt_output.stat().st_size + + assert tdf_size > original_size, "TDF file should be larger than original" + assert decrypted_size == original_size, ( + "Decrypted file should match original size" + ) + + print( + f"✓ otdfctl roundtrip successful: {original_size} bytes -> {tdf_size} bytes -> {decrypted_size} bytes" + ) + + +@pytest.mark.integration +def test_cli_encrypt_integration( + collect_server_logs, temp_credentials_file, project_root +): + """Integration test comparing our CLI with otdfctl""" + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create input file + input_file = temp_path / "input.txt" + input_content = "Hello, World" + with open(input_file, "w") as f: + f.write(input_content) + + # Define output files + otdfctl_output = temp_path / "hello-world-otdfctl.txt.tdf" + cli_output = temp_path / "hello-world-cli.txt.tdf" + + # Run otdfctl encrypt + otdfctl_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_output, + mime_type="text/plain", + cwd=temp_path, + ) + + # Fail fast on errors + handle_subprocess_error( + result=otdfctl_result, + collect_server_logs=collect_server_logs, + scenario_name="otdfctl encrypt", + ) + + # Run our Python CLI encrypt + cli_result = run_cli_encrypt( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=cli_output, + mime_type="text/plain", + attributes=None, + cwd=project_root, + ) + + # Fail fast on errors + handle_subprocess_error( + result=cli_result, + collect_server_logs=collect_server_logs, + scenario_name="Python CLI encrypt", + ) + + validate_tdf3_file(otdfctl_output, "otdfctl") + 
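+        # Note: the otdfctl and Python CLI outputs cannot be compared
+        # byte-for-byte (each encryption run uses fresh data keys and IVs, and
+        # ZIP metadata differs), which is why the comparison below is based on
+        # file size rather than file content.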
validate_tdf3_file(cli_output, "Python CLI") + + compare_tdf3_file_size(otdfctl_output, cli_output) diff --git a/tests/integration/test_cli_tdf_validation.py b/tests/integration/test_cli_tdf_validation.py new file mode 100644 index 0000000..ff6dfd5 --- /dev/null +++ b/tests/integration/test_cli_tdf_validation.py @@ -0,0 +1,568 @@ +""" +Test CLI encryption functionality and TDF validation +""" + +import json +import tempfile +import zipfile +from pathlib import Path + +import pytest + +from otdf_python.tdf_reader import TDF_MANIFEST_FILE_NAME, TDF_PAYLOAD_FILE_NAME +from tests.support_cli_args import ( + run_cli_decrypt, + run_cli_encrypt, +) +from tests.support_common import ( + handle_subprocess_error, + validate_plaintext_file_created, + validate_tdf3_file, +) +from tests.support_otdfctl_args import ( + run_otdfctl_decrypt_command, + run_otdfctl_encrypt_command, +) + + +def _create_test_input_file(temp_path: Path, content: str) -> Path: + """Create a test input file with the given content.""" + input_file = temp_path / "input.txt" + with open(input_file, "w") as f: + f.write(content) + return input_file + + +def _validate_key_access_objects(key_access: list) -> None: + """Validate the keyAccessObjects (or KAO) structure in the TDF manifest.""" + # New format - keyAccess is an array + print(f"keyAccess (array) with {len(key_access)} items") + assert len(key_access) > 0, "keyAccess array should not be empty" + + # Validate first keyAccess object + first_key_access = key_access[0] + print(f"first keyAccess keys: {list(first_key_access.keys())}") + + # Required keyAccess fields for newer TDF format + required_key_access_fields = [ + "protocol", + "type", + "url", + "kid", + "wrappedKey", + "policyBinding", + ] + for field in required_key_access_fields: + assert field in first_key_access, ( + f"keyAccess[0] missing required field: {field}" + ) + + # Validate protocol is "kas" + assert first_key_access["protocol"] == "kas", ( + f"Expected keyAccess[0].protocol to be 'kas', got '{first_key_access['protocol']}'" + ) + + # Validate type is "wrapped" + assert first_key_access["type"] == "wrapped", ( + f"Expected keyAccess[0].type to be 'wrapped', got '{first_key_access['type']}'" + ) + + # Validate policyBinding structure + policy_binding = first_key_access["policyBinding"] + assert isinstance(policy_binding, dict), "policyBinding should be a dictionary" + assert "alg" in policy_binding, "policyBinding missing 'alg' field" + assert "hash" in policy_binding, "policyBinding missing 'hash' field" + assert policy_binding["alg"] == "HS256", ( + f"Expected policyBinding.alg to be 'HS256', got '{policy_binding['alg']}'" + ) + + +def _validate_tdf_zip_structure(tdf_path: Path) -> None: + """Validate the internal structure of a TDF ZIP file.""" + with zipfile.ZipFile(tdf_path, "r") as zip_file: + file_list = zip_file.namelist() + + # Print detailed file structure information for debugging + print(f"\n=== TDF ZIP Structure Analysis for {tdf_path.name} ===") + print(f"Files in ZIP: {len(file_list)}") + for i, filename in enumerate(sorted(file_list)): + file_info = zip_file.getinfo(filename) + print( + f" {i + 1}. 
{filename} (size: {file_info.file_size} bytes, compressed: {file_info.compress_size} bytes)" + ) + + # TDF files should contain specific files + required_files = [TDF_MANIFEST_FILE_NAME, TDF_PAYLOAD_FILE_NAME] + for required_file in required_files: + assert required_file in file_list, ( + f"TDF missing required file: {required_file}" + ) + + # Validate manifest.json can be read and parsed + try: + manifest_content = zip_file.read(TDF_MANIFEST_FILE_NAME) + manifest_data = json.loads(manifest_content.decode("utf-8")) + + print("\n=== Manifest Structure Analysis ===") + print(f"Manifest size: {len(manifest_content)} bytes") + print(f"Top-level keys: {list(manifest_data.keys())}") + + # Check required top-level fields + assert "encryptionInformation" in manifest_data, ( + "Manifest missing encryptionInformation" + ) + assert "payload" in manifest_data, "Manifest missing payload information" + + # Validate schema version is present + assert "schemaVersion" in manifest_data, "Manifest missing schemaVersion" + + # Analyze encryptionInformation structure + enc_info = manifest_data["encryptionInformation"] + print(f"encryptionInformation keys: {list(enc_info.keys())}") + + # Required encryptionInformation fields + required_enc_fields = [ + "type", + "policy", + "keyAccess", + "method", + "integrityInformation", + ] + for field in required_enc_fields: + assert field in enc_info, ( + f"encryptionInformation missing required field: {field}" + ) + + # Validate that type is "split" + assert enc_info["type"] == "split", ( + f"Expected encryptionInformation.type to be 'split', got '{enc_info['type']}'" + ) + + # Validate keyAccess structure + key_access = enc_info["keyAccess"] + print(f"keyAccess type: {type(key_access)}") + + if isinstance(key_access, list): + _validate_key_access_objects(key_access) + else: + raise AssertionError( + f"Unexpected keyAccess type: {type(key_access)}. Expected list." 
+ ) + # Policy should be a base64-encoded string in the manifest + policy = enc_info["policy"] + assert isinstance(policy, str), "policy should be a base64-encoded string" + assert len(policy) > 0, "policy should not be empty" + + # Validate that policy can be decoded from base64 + try: + import base64 + + policy_decoded = base64.b64decode(policy).decode("utf-8") + policy_obj = json.loads(policy_decoded) + assert isinstance(policy_obj, dict), ( + "decoded policy should be a dictionary" + ) + assert "uuid" in policy_obj, "policy missing 'uuid' field" + assert "body" in policy_obj, "policy missing 'body' field" + except Exception as e: + raise AssertionError(f"Failed to decode base64 policy: {e}") + + # Validate method structure + method = enc_info["method"] + assert isinstance(method, dict), "method should be a dictionary" + assert "algorithm" in method, "method missing 'algorithm' field" + assert "isStreamable" in method, "method missing 'isStreamable' field" + + # Validate integrityInformation structure + integrity_info = enc_info["integrityInformation"] + assert isinstance(integrity_info, dict), ( + "integrityInformation should be a dictionary" + ) + assert "rootSignature" in integrity_info, ( + "integrityInformation missing 'rootSignature' field" + ) + assert "segmentSizeDefault" in integrity_info, ( + "integrityInformation missing 'segmentSizeDefault' field" + ) + assert "encryptedSegmentSizeDefault" in integrity_info, ( + "integrityInformation missing 'encryptedSegmentSizeDefault' field" + ) + + # Check for keyAccessObjects (should not be present in newer format) + if "keyAccessObjects" in enc_info: + print( + "WARNING: Found keyAccessObjects in encryptionInformation - this is legacy format" + ) + + # Analyze payload structure + payload_info = manifest_data["payload"] + print(f"payload keys: {list(payload_info.keys())}") + + # Check for expected fields in payload + expected_payload_fields = ["type", "url", "protocol", "isEncrypted"] + for field in expected_payload_fields: + assert field in payload_info, f"payload missing required field: {field}" + print(f" {field}: {payload_info[field]}") + + # Validate payload field values + assert payload_info["type"] == "reference", ( + f"Expected payload.type to be 'reference', got '{payload_info['type']}'" + ) + assert payload_info["protocol"] == "zip", ( + f"Expected payload.protocol to be 'zip', got '{payload_info['protocol']}'" + ) + assert payload_info["isEncrypted"], ( + f"Expected payload.isEncrypted to be True, got '{payload_info['isEncrypted']}'" + ) + + except json.JSONDecodeError as e: + raise AssertionError(f"Manifest is not valid JSON: {e}") + except KeyError as e: + raise AssertionError(f"Manifest missing required field: {e}") + + # Check for payload file (usually 0.payload) + payload_files = [f for f in file_list if f.endswith(".payload")] + assert len(payload_files) > 0, "TDF missing payload file" + print(f"Payload files found: {payload_files}") + + print(f"✓ TDF structure validated: {len(file_list)} files, manifest valid") + print("=" * 50) + + +def _run_otdfctl_decrypt( + tdf_path: Path, + creds_file: Path, + temp_path: Path, + collect_server_logs, + expected_content: str, +) -> Path: + """Run otdfctl decrypt on a TDF file and verify the decrypted content matches expected.""" + decrypt_output = temp_path / f"{tdf_path.stem}_decrypted.txt" + + otdfctl_decrypt_result = run_otdfctl_decrypt_command( + creds_file=creds_file, + tdf_file=tdf_path, + output_file=decrypt_output, + cwd=temp_path, + ) + + handle_subprocess_error( + 
otdfctl_decrypt_result, collect_server_logs, "otdfctl decrypt" + ) + + validate_plaintext_file_created( + path=decrypt_output, scenario="otdfctl", expected_content=expected_content + ) + + return decrypt_output + + +def _run_python_cli_decrypt( + tdf_path: Path, + creds_file: Path, + temp_path: Path, + collect_server_logs, + expected_content: str, + cwd: Path, +) -> Path: + """Run Python CLI decrypt on a TDF file and verify the decrypted content matches expected.""" + decrypt_output = temp_path / f"{tdf_path.stem}_python_decrypted.txt" + + python_decrypt_result = run_cli_decrypt( + creds_file=creds_file, input_file=tdf_path, output_file=decrypt_output, cwd=cwd + ) + + handle_subprocess_error( + python_decrypt_result, collect_server_logs, "Python CLI decrypt" + ) + + validate_plaintext_file_created( + path=decrypt_output, scenario="Python CLI", expected_content=expected_content + ) + return decrypt_output + + +@pytest.mark.integration +def test_otdfctl_encrypt_with_validation(collect_server_logs, temp_credentials_file): + """Integration test that uses otdfctl for encryption and validates the TDF thoroughly.""" + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create test files + input_content = "Hello, World! This is test content for otdfctl encryption." + input_file = _create_test_input_file(temp_path, input_content) + + # Define TDF file created by otdfctl + otdfctl_tdf_output = temp_path / "otdfctl_test.txt.tdf" + + # Run otdfctl encrypt to create a TDF file + otdfctl_encrypt_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_tdf_output, + mime_type="text/plain", + cwd=temp_path, + ) + + # Handle any encryption errors + handle_subprocess_error( + otdfctl_encrypt_result, collect_server_logs, "otdfctl encrypt" + ) + + # Validate the TDF file structure + validate_tdf3_file(otdfctl_tdf_output, "otdfctl") + _validate_tdf_zip_structure(otdfctl_tdf_output) + + # Test that the TDF can be decrypted successfully + _run_otdfctl_decrypt( + otdfctl_tdf_output, + temp_credentials_file, + temp_path, + collect_server_logs, + input_content, + ) + + print("✓ otdfctl successfully encrypted and decrypted TDF with correct content") + print(f"TDF file size: {otdfctl_tdf_output.stat().st_size} bytes") + + +@pytest.mark.integration +def test_python_encrypt(collect_server_logs, temp_credentials_file, project_root): + """Integration test that uses Python CLI for encryption only and verifies the TDF can be inspected""" + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create test files + input_content = "Hello, World! This is test content for Python CLI encryption." 
+ input_file = _create_test_input_file(temp_path, input_content) + + # Define TDF file created by Python CLI + python_tdf_output = temp_path / "python_cli_test.txt.tdf" + + # Run Python CLI encrypt to create a TDF file + python_encrypt_result = run_cli_encrypt( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=python_tdf_output, + cwd=project_root, + ) + + # Handle any encryption errors + handle_subprocess_error( + python_encrypt_result, collect_server_logs, "Python CLI encrypt" + ) + + # Validate the TDF file structure + validate_tdf3_file(python_tdf_output, "Python CLI") + _validate_tdf_zip_structure(python_tdf_output) + + # Test that the TDF can be decrypted by otdfctl + _run_otdfctl_decrypt( + python_tdf_output, + temp_credentials_file, + temp_path, + collect_server_logs, + input_content, + ) + + print( + "✓ Python CLI successfully encrypted TDF that can be decrypted by otdfctl" + ) + print(f"TDF file size: {python_tdf_output.stat().st_size} bytes") + + +@pytest.mark.integration +def test_cross_tool_compatibility( + collect_server_logs, temp_credentials_file, project_root +): + """Test that TDFs created by one tool can be decrypted by the other.""" + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Create test files + input_content = "Cross-tool compatibility test content. Testing 123!" + input_file = _create_test_input_file(temp_path, input_content) + + # Test 1: otdfctl encrypt -> Python CLI decrypt + otdfctl_tdf_output = temp_path / "otdfctl_for_python_decrypt.txt.tdf" + + # Encrypt with otdfctl + otdfctl_encrypt_result = run_otdfctl_encrypt_command( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=otdfctl_tdf_output, + mime_type="text/plain", + cwd=temp_path, + ) + + handle_subprocess_error( + otdfctl_encrypt_result, + collect_server_logs, + "otdfctl encrypt (cross-tool test)", + ) + + # Decrypt with Python CLI + _run_python_cli_decrypt( + otdfctl_tdf_output, + temp_credentials_file, + temp_path, + collect_server_logs, + input_content, + project_root, + ) + + # Test 2: Python CLI encrypt -> otdfctl decrypt + python_tdf_output = temp_path / "python_for_otdfctl_decrypt.txt.tdf" + + # Encrypt with Python CLI + python_encrypt_result = run_cli_encrypt( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=python_tdf_output, + cwd=project_root, + ) + + handle_subprocess_error( + python_encrypt_result, + collect_server_logs, + "Python CLI encrypt (cross-tool test)", + ) + + # Decrypt with otdfctl + _run_otdfctl_decrypt( + python_tdf_output, + temp_credentials_file, + temp_path, + collect_server_logs, + input_content, + ) + + print( + "✓ Cross-tool compatibility verified: both tools can encrypt/decrypt each other's TDFs" + ) + + +@pytest.mark.integration +def test_different_content_types( + collect_server_logs, temp_credentials_file, project_root +): + """Test encryption/decryption with different types of content.""" + + test_cases = [ + ("short.txt", "x"), # Single character + ("multiline.txt", "Line 1\nLine 2\nLine 3\n"), # Multi-line content + ("unicode.txt", "Hello 世界! 
🌍 Testing UTF-8 content."), # Unicode content + ("large.txt", "A" * 10000), # Large content + ] + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + for filename, content in test_cases: + print(f"\n--- Testing {filename} (content length: {len(content)}) ---") + + # Create input file + input_file = temp_path / filename + # Write as UTF-8 text so every content case (unicode, multi-line, large) is handled consistently + with open(input_file, "w", encoding="utf-8") as f: + f.write(content) + + # Test with Python CLI + python_tdf_output = temp_path / f"python_{filename}.tdf" + + python_encrypt_result = run_cli_encrypt( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=python_tdf_output, + cwd=project_root, + ) + + handle_subprocess_error( + python_encrypt_result, + collect_server_logs, + f"Python CLI encrypt ({filename})", + ) + + # Validate TDF structure + validate_tdf3_file(python_tdf_output, f"Python CLI ({filename})") + + # Decrypt and validate content + _run_otdfctl_decrypt( + python_tdf_output, + temp_credentials_file, + temp_path, + collect_server_logs, + content, + ) + + print(f"✓ Successfully processed {filename}") + + print("✓ All content types processed successfully") + + +@pytest.mark.skip("Skipping test for now due to known issues with empty content") +@pytest.mark.integration +def test_different_content_types_empty( + collect_server_logs, temp_credentials_file, project_root +): + """Test encryption/decryption of empty content (currently skipped due to known issues).""" + + test_cases = [ + ("empty.txt", ""), # Empty file + ] + + # Create temporary directory for work + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + for filename, content in test_cases: + print(f"\n--- Testing {filename} (content length: {len(content)}) ---") + + # Create input file + input_file = temp_path / filename + # Write as UTF-8 text so the empty case is handled the same way as the other content tests + with open(input_file, "w", encoding="utf-8") as f: + f.write(content) + + # Test with Python CLI + python_tdf_output = temp_path / f"python_{filename}.tdf" + + python_encrypt_result = run_cli_encrypt( + creds_file=temp_credentials_file, + input_file=input_file, + output_file=python_tdf_output, + cwd=project_root, + ) + + handle_subprocess_error( + python_encrypt_result, + collect_server_logs, + f"Python CLI encrypt ({filename})", + ) + + # Validate TDF structure + validate_tdf3_file(python_tdf_output, f"Python CLI ({filename})") + + # Decrypt and validate content + _run_otdfctl_decrypt( + python_tdf_output, + temp_credentials_file, + temp_path, + collect_server_logs, + content, + ) + + print(f"✓ Successfully processed {filename}") + + print("✓ All content types processed successfully") + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/integration/test_data/empty_file.txt b/tests/integration/test_data/empty_file.txt new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/test_data/sample_binary.png b/tests/integration/test_data/sample_binary.png new file mode 100644 index 0000000..eb979e8 Binary files /dev/null and b/tests/integration/test_data/sample_binary.png differ diff --git a/tests/integration/test_data/sample_text.txt b/tests/integration/test_data/sample_text.txt new file mode 100644 index 0000000..5e103d5 --- /dev/null +++ b/tests/integration/test_data/sample_text.txt @@ -0,0 +1,5 @@ +Hello, World! This is a test text file for TDF target mode testing. +It contains multiple lines to test various scenarios.
+Line 3 with special characters: àáâãäå +Line 4 with numbers: 123456789 +Final line with punctuation: !@#$%^&*()_+ diff --git a/tests/integration/test_data/sample_with_attributes.txt b/tests/integration/test_data/sample_with_attributes.txt new file mode 100644 index 0000000..05d84ec --- /dev/null +++ b/tests/integration/test_data/sample_with_attributes.txt @@ -0,0 +1,4 @@ +Sensitive data with attributes. +Classification: SECRET +Department: Engineering +Project: TDF Testing diff --git a/tests/integration/test_pe_interaction.py b/tests/integration/test_pe_interaction.py new file mode 100644 index 0000000..00e4447 --- /dev/null +++ b/tests/integration/test_pe_interaction.py @@ -0,0 +1,96 @@ +""" +Integration test: Single attribute encryption/decryption using SDK and otdfctl +""" + +import logging +import tempfile +from pathlib import Path + +import pytest + +from otdf_python.sdk import SDK +from otdf_python.sdk_exceptions import SDKException +from tests.config_pydantic import CONFIG_TDF +from tests.integration.support_sdk import get_sdk_for_pe + +# Test files (adjust paths as needed) +DECRYPTED_FILE_OTDFCTL = "decrypted_otdfctl.txt" + +_test_attributes = [CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1] +logger = logging.getLogger(__name__) + + +def decrypt(input_path: Path, output_path: Path, sdk: SDK): + # Read the TDF and write the decrypted payload to output_path + with open(input_path, "rb") as infile, open(output_path, "wb") as outfile: + try: + logger.debug("Decrypting TDF") + tdf_reader = sdk.load_tdf_without_config(infile.read()) + # Access payload directly from TDFReader + payload_bytes = tdf_reader.payload + outfile.write(payload_bytes) + logger.info("Successfully decrypted TDF") + + except Exception as e: + logger.error(f"Decryption failed: {e}") + # Clean up the output file if there was an error + output_path.unlink(missing_ok=True) + raise SDKException("Decryption failed") from e + + +@pytest.mark.integration +def test_single_attribute_encryption_decryption(): + # Encrypt with SDK using a single attribute + sdk = get_sdk_for_pe() + + with tempfile.TemporaryDirectory() as tmpDir: + print("Created temporary directory", tmpDir) + some_plaintext_file = Path(tmpDir) / "new-file.txt" + some_plaintext_file.write_text("Hello world") + + INPUT_FILE = some_plaintext_file + + config = sdk.new_tdf_config( + attributes=_test_attributes, + ) + + input_path = Path(INPUT_FILE) + + output_path = input_path.with_suffix(input_path.suffix + ".tdf") + with open(input_path, "rb") as infile, open(output_path, "wb") as outfile: + sdk.create_tdf(infile.read(), config, output_stream=outfile) + + TDF_FILE = output_path + + assert TDF_FILE.exists() + + # Decrypt with SDK + DECRYPTED_FILE_SDK = Path(tmpDir) / "decrypted.txt" + DECRYPTED_FILE_SDK.touch() # Ensure the file exists + + decrypt(TDF_FILE, DECRYPTED_FILE_SDK, sdk) + with open(INPUT_FILE, "rb") as f1, open(DECRYPTED_FILE_SDK, "rb") as f2: + assert f1.read() == f2.read(), "SDK decrypted output does not match input" + + # # Decrypt with otdfctl + # otdfctl_cmd = [ + # "otdfctl", + # "decrypt", + # "--kas-url", + # kas_info["url"], + # "--kas-public-key", + # kas_info["public_key"], + # "--kas-token", + # kas_info["token"], + # "--attribute", + # _test_attributes, + # "-i", + # TDF_FILE, + # "-o", + # DECRYPTED_FILE_OTDFCTL, + # ] + # subprocess.run(otdfctl_cmd, check=True) + # with open(INPUT_FILE, "rb") as f1, open(DECRYPTED_FILE_OTDFCTL, "rb") as f2: + # assert f1.read() == f2.read(), ( + # "otdfctl decrypted output does not match input" + # ) diff --git a/tests/mock_crypto.py b/tests/mock_crypto.py new file
mode 100644 index 0000000..006963b --- /dev/null +++ b/tests/mock_crypto.py @@ -0,0 +1,35 @@ +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa + + +def generate_rsa_keypair(): + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) + private_pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode() + public_pem = ( + private_key.public_key() + .public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + .decode() + ) + return private_pem, public_pem + + +def generate_rsa_keys(): + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) + public_key = private_key.public_key() + private_pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode() + public_pem = public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ).decode() + return private_key, public_key, private_pem, public_pem diff --git a/tests/server_logs.py b/tests/server_logs.py new file mode 100644 index 0000000..0bc846d --- /dev/null +++ b/tests/server_logs.py @@ -0,0 +1,97 @@ +""" +Server log collection utility for debugging test failures. +""" + +import logging +import subprocess + +from tests.config_pydantic import CONFIG_TESTING + +logger = logging.getLogger(__name__) + + +def collect_server_logs( + pod_name: str = CONFIG_TESTING.POD_NAME, + namespace: str = CONFIG_TESTING.NAMESPACE, + ssh_target: str = CONFIG_TESTING.SSH_TARGET, + lines: int = CONFIG_TESTING.LOG_LINES, + test_name: str | None = None, +) -> str | None: + """ + Collect server logs from a Kubernetes pod via SSH. + + Args: + pod_name: Name of the Kubernetes pod + namespace: Kubernetes namespace + ssh_target: SSH target (hostname/alias) + lines: Number of log lines to retrieve + test_name: Optional test name, used only in log messages + + Returns: + Log output as string, or None if collection failed + """ + if CONFIG_TESTING.ENABLE_LOG_COLLECTION: + logging.debug(f"\n{'=' * 60}") + if test_name: + logging.debug(f"Collecting logs for test: {test_name}") + else: + logging.debug("Collecting logs without a specific test name") + logging.debug(f"{'=' * 60}\n") + else: + logging.debug( + "Log collection is disabled. To enable, set ENABLE_LOG_COLLECTION to True in .env-testing" + ) + return None + + # NOTE: kubectl's -n flag selects the namespace; the line count must be passed via --tail + cmd = ["ssh", ssh_target, f"kubectl logs --tail {lines} {pod_name} -n {namespace}"] + + try: + result = subprocess.run( + cmd, + capture_output=True, + text=True, + timeout=30, # 30 second timeout + ) + + if result.returncode == 0: + return result.stdout + else: + logger.error(f"Failed to collect logs: {result.stderr}") + return None + + except subprocess.TimeoutExpired: + logger.error("Timeout while collecting server logs") + return None + except Exception as e: + logger.error(f"Error collecting server logs: {e}") + return None + + +def log_server_logs_on_failure( + test_name: str, + pod_name: str = CONFIG_TESTING.POD_NAME, + namespace: str = CONFIG_TESTING.NAMESPACE, + ssh_target: str = CONFIG_TESTING.SSH_TARGET, + lines: int = CONFIG_TESTING.LOG_LINES, +) -> None: + """ + Collect and log server logs when a test fails.
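+ + Example (illustrative; any test name from this suite works): + + log_server_logs_on_failure("test_cross_tool_compatibility", lines=200)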
+ + Args: + test_name: Name of the failed test + pod_name: Name of the Kubernetes pod + namespace: Kubernetes namespace + ssh_target: SSH target (hostname/alias) + lines: Number of log lines to retrieve + """ + + logs = collect_server_logs(pod_name, namespace, ssh_target, lines, test_name) + + if logs: + logging.debug(f"\nServer logs (last {lines} lines):") + logging.debug("-" * 40) + logging.debug(logs) + logging.debug("-" * 40) + else: + logging.debug("\nFailed to collect server logs") + + logging.debug(f"{'=' * 60}\n") diff --git a/tests/support_cli_args.py b/tests/support_cli_args.py new file mode 100644 index 0000000..ab2900d --- /dev/null +++ b/tests/support_cli_args.py @@ -0,0 +1,183 @@ +""" +Support functions for constructing CLI arguments for this project's (Python) CLI. +""" + +import json +import logging +import subprocess +import sys +from pathlib import Path + +from tests.config_pydantic import CONFIG_TDF +from tests.support_common import get_platform_url, get_testing_environ + +logger = logging.getLogger(__name__) + + +def _get_cli_flags() -> list[str]: + """ + Determine (Python) CLI flags based on platform URL + """ + platform_url = get_platform_url() + cli_flags = [] + + if platform_url.startswith("http://"): + cli_flags = ["--plaintext"] + else: + # For HTTPS, skip TLS verification if INSECURE_SKIP_VERIFY is True + if CONFIG_TDF.INSECURE_SKIP_VERIFY: + cli_flags = ["--insecure"] + + return cli_flags + + +def run_cli_inspect(tdf_path: Path, creds_file: Path, cwd: Path) -> dict: + """ + Helper function to run Python CLI inspect command and return parsed JSON result. + + This demonstrates how the CLI inspect functionality could be tested + with the new fixtures. + """ + + # Build CLI command + cmd = [ + sys.executable, + "-m", + "otdf_python", + "--platform-url", + get_platform_url(), + "--with-client-creds-file", + str(creds_file), + *_get_cli_flags(), + "inspect", + str(tdf_path), + ] + + try: + # Run the CLI command + result = subprocess.run( + cmd, capture_output=True, text=True, check=True, cwd=cwd + ) + + # Parse JSON output + return json.loads(result.stdout) + + except (subprocess.CalledProcessError, json.JSONDecodeError) as e: + logger.error(f"CLI inspect failed for {tdf_path}: {e}") + raise Exception(f"Failed to inspect TDF {tdf_path}: {e}") from e + + +def _build_cli_decrypt_command( + creds_file: Path, + input_file: Path, + output_file: Path, + platform_url: str | None = None, +) -> list[str]: + """Build CLI decrypt command.""" + cmd = [ + sys.executable, + "-m", + "otdf_python", + "--platform-url", + platform_url if platform_url is not None else get_platform_url(), + "--with-client-creds-file", + str(creds_file), + *_get_cli_flags(), + "decrypt", + str(input_file), + "-o", + str(output_file), + ] + return cmd + + +def run_cli_decrypt( + creds_file: Path, + input_file: Path, + output_file: Path, + cwd: Path, + platform_url: str | None = None, +) -> subprocess.CompletedProcess: + python_decrypt_cmd = _build_cli_decrypt_command( + creds_file=creds_file, + input_file=input_file, + output_file=output_file, + platform_url=platform_url, + ) + return subprocess.run( + python_decrypt_cmd, + capture_output=True, + text=True, + cwd=cwd, + env=get_testing_environ(), + ) + + +def _build_cli_encrypt_command( + creds_file: Path, + input_file: Path, + output_file: Path, + platform_url: str | None = None, + mime_type: str = "text/plain", + attributes: list[str] | None = None, + container_type: str = "tdf", +) -> list[str]: + cmd = [ + sys.executable, + "-m", + "otdf_python", + 
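# Global connection flags (--platform-url, creds file, TLS flags) go before the "encrypt" subcommand, mirroring _build_cli_decrypt_command above +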
"--platform-url", + platform_url if platform_url is not None else get_platform_url(), + "--with-client-creds-file", + str(creds_file), + *_get_cli_flags(), + "encrypt", + "--mime-type", + mime_type, + "--container-type", + container_type, + ] + + # Add attributes if provided + if attributes: + for attr in attributes: + cmd.extend(["--attr", attr]) + + cmd.extend( + [ + str(input_file), + "-o", + str(output_file), + ] + ) + + return cmd + + +def run_cli_encrypt( + creds_file: Path, + input_file: Path, + output_file: Path, + cwd: Path, + platform_url: str | None = None, + mime_type: str = "text/plain", + attributes: list[str] | None = None, + container_type: str = "tdf", +) -> subprocess.CompletedProcess: + python_encrypt_cmd = _build_cli_encrypt_command( + creds_file=creds_file, + input_file=input_file, + output_file=output_file, + platform_url=platform_url, + mime_type=mime_type, + attributes=attributes, + container_type=container_type, + ) + + return subprocess.run( + python_encrypt_cmd, + capture_output=True, + text=True, + cwd=cwd, + env=get_testing_environ(), + ) diff --git a/tests/support_common.py b/tests/support_common.py new file mode 100644 index 0000000..c3bcf5c --- /dev/null +++ b/tests/support_common.py @@ -0,0 +1,90 @@ +import logging +import subprocess +import zipfile +from pathlib import Path + +import pytest + +from tests.config_pydantic import CONFIG_TDF + +logger = logging.getLogger(__name__) + + +def get_platform_url() -> str: + # Get platform configuration + platform_url = CONFIG_TDF.OPENTDF_PLATFORM_URL + if not platform_url: + # Fail fast if OPENTDF_PLATFORM_URL is not set + raise Exception( + "OPENTDF_PLATFORM_URL must be set in config for integration tests" + ) + return platform_url + + +def handle_subprocess_error( + result: subprocess.CompletedProcess, collect_server_logs, scenario_name: str +) -> None: + """Handle subprocess errors with proper server log collection and error reporting.""" + if result.returncode != 0: + # Collect server logs for debugging + logs = collect_server_logs() + print(f"Server logs when '{scenario_name}' failed:\n{logs}") + + pytest.fail( + f"Scenario failed: '{scenario_name}': " + f"stdout={result.stdout}, stderr={result.stderr}" + ) + + +def get_testing_environ() -> dict | None: + """ + Set up environment and configuration + + TODO: YAGNI: this is a hook we could use to modify all testing environments, e.g. 
+ env = os.environ.copy() + env["GRPC_ENFORCE_ALPN_ENABLED"] = "false" + return env + """ + return None + + +def validate_tdf3_file(tdf_path: Path, tool_name: str) -> None: + """Validate that a TDF file (tdf_type="tdf3") exists, is not empty, and has correct ZIP structure.""" + assert tdf_path.exists(), f"{tool_name} did not create TDF file" + assert tdf_path.stat().st_size > 0, f"{tool_name} created empty TDF file" + assert zipfile.is_zipfile(tdf_path), f"{tool_name} output is not a valid ZIP file" + + # Verify TDF file has correct ZIP signature + with open(tdf_path, "rb") as f: + tdf_header = f.read(4) + assert tdf_header == b"PK\x03\x04", f"{tool_name} output does not start with the ZIP local file header" + assert tdf_path.suffix == ".tdf", f"File should have .tdf extension: {tdf_path}" + + +def validate_plaintext_file_created( + path: Path, scenario: str, expected_content: str +) -> None: + """Validate that a non-empty file was created, and contains the expected content""" + assert path.exists(), f"{scenario=} did not create decrypted file" + assert path.stat().st_size > 0, f"{scenario=} created empty decrypted file" + # Verify scenario produces the expected decrypted content + with open(path) as f: + decrypted_content = f.read() + + assert decrypted_content == expected_content, ( + f"{scenario=} decrypted content does not match original. " + f"Expected: '{expected_content}', Got: '{decrypted_content}'" + ) + + +def compare_tdf3_file_size(otdfctl_tdf_path: Path, py_cli_tdf_path: Path) -> None: + """Compare the file sizes of two TDF files (tdf_type="tdf3"), assert within 30% of each other.""" + size_otdfctl_tdf = otdfctl_tdf_path.stat().st_size + size_py_cli_tdf = py_cli_tdf_path.stat().st_size + size_diff_ratio = abs(size_otdfctl_tdf - size_py_cli_tdf) / max( + size_otdfctl_tdf, size_py_cli_tdf + ) + + assert size_diff_ratio < 0.3, ( + f"File sizes too different: otdfctl={size_otdfctl_tdf}, cli={size_py_cli_tdf}" + ) diff --git a/tests/support_otdfctl.py b/tests/support_otdfctl.py new file mode 100644 index 0000000..1ec0a2e --- /dev/null +++ b/tests/support_otdfctl.py @@ -0,0 +1,26 @@ +import subprocess + +import pytest + + +# NOTE: pytest marks have no effect on fixtures, so no @pytest.mark.integration here +@pytest.fixture(scope="session", autouse=True) +def check_for_otdfctl(): + """ + Ensure that the otdfctl command is available on the system. + + This fixture runs once per test session (for integration tests) and raises + an exception if the otdfctl command is not found. + """ + + # Check if otdfctl is available + try: + subprocess.run( + ["otdfctl", "--version"], + capture_output=True, + check=True, + ) + except (subprocess.CalledProcessError, FileNotFoundError): + raise Exception( + "otdfctl command not found on system. Please install otdfctl to run this test." + ) diff --git a/tests/support_otdfctl_args.py b/tests/support_otdfctl_args.py new file mode 100644 index 0000000..99dcc4a --- /dev/null +++ b/tests/support_otdfctl_args.py @@ -0,0 +1,280 @@ +""" +Support functions for constructing CLI arguments for otdfctl CLI.
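+ + A typical assembled command looks like (illustrative; host and file names are hypothetical): + + otdfctl --host http://localhost:8080 --with-client-creds-file creds.json encrypt --mime-type text/plain input.txt -o output.tdf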
+""" + +import logging +import subprocess +from pathlib import Path + +from tests.config_pydantic import CONFIG_TDF +from tests.support_common import get_platform_url, get_testing_environ + +logger = logging.getLogger(__name__) + + +def get_otdfctl_flags() -> list[str]: + """ + Determine otdfctl flags based on platform URL + """ + platform_url = get_platform_url() + otdfctl_flags = [] + if platform_url.startswith("http://"): + # otdfctl doesn't have a --plaintext flag, just omit --tls-no-verify for HTTP + pass + else: + # For HTTPS, skip TLS verification if INSECURE_SKIP_VERIFY is True + if CONFIG_TDF.INSECURE_SKIP_VERIFY: + otdfctl_flags = ["--tls-no-verify"] + + return otdfctl_flags + + +def get_otdfctl_base_command( + creds_file: Path, platform_url: str | None = None +) -> list[str]: + """Get base otdfctl command with common flags.""" + base_cmd = [ + "otdfctl", + "--host", + platform_url if platform_url is not None else get_platform_url(), + "--with-client-creds-file", + str(creds_file), + ] + + # Add platform-specific flags + base_cmd.extend(get_otdfctl_flags()) + + return base_cmd + + +def _build_otdfctl_encrypt_command( + creds_file: Path, + input_file: Path, + output_file: Path, + platform_url: str | None = None, + mime_type: str = "text/plain", + attributes: list[str] | None = None, + tdf_type: str | None = None, + target_mode: str | None = None, +) -> list[str]: + """Build otdfctl encrypt command. + + Args: + platform_url: Platform URL like "http://localhost:8080" + creds_file: Path to credentials file + input_file: Path to the input file to encrypt + output_file: Path where the TDF file should be created + mime_type: Optional MIME type for the input file + attributes: Optional list of attributes to apply + tdf_type: TDF type (e.g., "tdf3", "nano") + target_mode: Target TDF spec version (e.g., "v4.2.2", "v4.3.1") + """ + + cmd = get_otdfctl_base_command(creds_file, platform_url) + cmd.append("encrypt") + cmd.extend(["--mime-type", mime_type]) + + # Add attributes if provided + if attributes: + for attr in attributes: + cmd.extend(["--attr", attr]) + + if tdf_type: + cmd.extend( + [ + "--tdf-type", + tdf_type, + ] + ) + + if target_mode: + cmd.extend(["--target-mode", target_mode]) + + cmd.extend( + [ + str(input_file), + "-o", + str(output_file), + ] + ) + return cmd + + +def run_otdfctl_encrypt_command( + creds_file: Path, + input_file: Path, + output_file: Path, + cwd: Path, + platform_url: str | None = None, + mime_type: str = "text/plain", + attributes: list[str] | None = None, + tdf_type: str | None = None, + target_mode: str | None = None, +) -> subprocess.CompletedProcess: + otdfctl_encrypt_cmd = _build_otdfctl_encrypt_command( + creds_file=creds_file, + input_file=input_file, + output_file=output_file, + platform_url=platform_url, + mime_type=mime_type, + attributes=attributes, + tdf_type=tdf_type, + target_mode=target_mode, + ) + return subprocess.run( + otdfctl_encrypt_cmd, + capture_output=True, + text=True, + cwd=cwd, + env=get_testing_environ(), + ) + + +def _build_otdfctl_decrypt_command( + creds_file: Path, tdf_file: Path, output_file: Path, platform_url: str | None = None +) -> list[str]: + """Build otdfctl decrypt command.""" + cmd = get_otdfctl_base_command(creds_file, platform_url) + cmd.extend( + [ + "decrypt", + str(tdf_file), + "-o", + str(output_file), + ] + ) + + return cmd + + +def run_otdfctl_decrypt_command( + creds_file: Path, + tdf_file: Path, + output_file: Path, + cwd: Path, + platform_url: str | None = None, +) -> subprocess.CompletedProcess: + 
otdfctl_decrypt_cmd = _build_otdfctl_decrypt_command( + creds_file=creds_file, + tdf_file=tdf_file, + output_file=output_file, + platform_url=platform_url, + ) + + return subprocess.run( + otdfctl_decrypt_cmd, + capture_output=True, + text=True, + cwd=cwd, + env=get_testing_environ(), + ) + + +def _generate_target_mode_tdf( + input_file: Path, + output_file: Path, + target_mode: str, + creds_file: Path, + attributes: list[str] | None = None, + mime_type: str | None = None, +) -> None: + # Ensure output directory exists + output_file.parent.mkdir(parents=True, exist_ok=True) + + # Build otdfctl command + cmd = _build_otdfctl_encrypt_command( + platform_url=get_platform_url(), + creds_file=creds_file, + input_file=input_file, + output_file=output_file, + mime_type=mime_type if mime_type else "text/plain", + attributes=attributes if attributes else None, + tdf_type="tdf3", + target_mode=target_mode, + ) + + # Run otdfctl command + result = subprocess.run( + cmd, + capture_output=True, + text=True, + env=get_testing_environ(), + ) + + if result.returncode != 0: + logger.error(f"otdfctl command failed: {result.stderr}") + raise Exception( + f"Failed to generate TDF with target mode {target_mode}: " + f"stdout={result.stdout}, stderr={result.stderr}" + ) + + +def otdfctl_generate_tdf_files_for_target_mode( + target_mode: str, + temp_credentials_file: Path, + test_data_dir: Path, + sample_input_files: dict[str, Path], +) -> dict[str, Path]: + """ + Factory function to generate TDF files for a specific target mode. + + Args: + target_mode: Target TDF spec version (e.g., "v4.2.2", "v4.3.1") + temp_credentials_file: Path to credentials file + test_data_dir: Base test data directory + sample_input_files: Dictionary of sample input files + + Returns: + Dictionary mapping file types to their TDF file paths + """ + output_dir = test_data_dir / target_mode + tdf_files = {} + + # Define the file generation configurations + file_configs = [ + { + "key": "text", + "input_key": "text", + "output_name": "sample_text.txt.tdf", + "mime_type": "text/plain", + }, + # { + # "key": "empty", + # "input_key": "empty", + # "output_name": "empty_file.txt.tdf", + # "mime_type": "text/plain", + # }, + { + "key": "binary", + "input_key": "binary", + "output_name": "sample_binary.png.tdf", + "mime_type": "image/png", + }, + { + "key": "with_attributes", + "input_key": "with_attributes", + "output_name": "sample_with_attributes.txt.tdf", + "mime_type": "text/plain", + }, + ] + + try: + for config in file_configs: + tdf_path = output_dir / config["output_name"] + _generate_target_mode_tdf( + sample_input_files[config["input_key"]], + tdf_path, + target_mode, + temp_credentials_file, + attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1] + if config["key"] == "with_attributes" + else None, + mime_type=config["mime_type"], + ) + tdf_files[config["key"]] = tdf_path + + return tdf_files + + except Exception as e: + logger.error(f"Error generating {target_mode} TDF files: {e}") + raise Exception(f"Failed to generate {target_mode} TDF files: {e}") from e diff --git a/tests/test_address_normalizer.py b/tests/test_address_normalizer.py new file mode 100644 index 0000000..01577f0 --- /dev/null +++ b/tests/test_address_normalizer.py @@ -0,0 +1,73 @@ +""" +Tests for address_normalizer module. 
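+ + Expected normalization behavior exercised below: + normalize_address("example.com", False) -> "https://example.com:443" + normalize_address("example.com:8080", True) -> "http://example.com:8080"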
+""" + +import pytest + +from otdf_python.address_normalizer import normalize_address +from otdf_python.sdk_exceptions import SDKException + + +def test_normalize_address_with_scheme(): + """Test normalizing address that already has a scheme.""" + # Test with HTTPS URL + url = "https://example.com/path" + normalized = normalize_address(url, False) + assert normalized == "https://example.com:443" + + # Test with HTTP URL, but requesting HTTPS + url = "http://example.com/path" + normalized = normalize_address(url, False) + assert normalized == "https://example.com:443" + + # Test with HTTP URL, requesting plaintext + url = "http://example.com/path" + normalized = normalize_address(url, True) + assert normalized == "http://example.com:80" + + +def test_normalize_address_with_port(): + """Test normalizing address with custom port.""" + # With HTTPS and custom port + url = "https://example.com:8443/path" + normalized = normalize_address(url, False) + assert normalized == "https://example.com:8443" + + # With HTTP and custom port + url = "http://example.com:8080/path" + normalized = normalize_address(url, True) + assert normalized == "http://example.com:8080" + + # Forcing HTTP on HTTPS URL with custom port + url = "https://example.com:8443/path" + normalized = normalize_address(url, True) + assert normalized == "http://example.com:8443" + + +def test_normalize_address_no_scheme(): + """Test normalizing address without a scheme.""" + # Just hostname + url = "example.com" + normalized = normalize_address(url, False) + assert normalized == "https://example.com:443" + + # Hostname with port + url = "example.com:8080" + normalized = normalize_address(url, False) + assert normalized == "https://example.com:8080" + + # Hostname with port, plaintext + url = "example.com:8080" + normalized = normalize_address(url, True) + assert normalized == "http://example.com:8080" + + +def test_normalize_address_invalid(): + """Test normalizing invalid addresses.""" + # Non-numeric port + with pytest.raises(SDKException): + normalize_address("example.com:invalid", False) + + # Very malformed URL + with pytest.raises(SDKException): + normalize_address("not a real url with spaces:123:456", False) diff --git a/tests/test_aesgcm.py b/tests/test_aesgcm.py new file mode 100644 index 0000000..965f62d --- /dev/null +++ b/tests/test_aesgcm.py @@ -0,0 +1,35 @@ +import os +import unittest + +from otdf_python.aesgcm import AesGcm + + +class TestAesGcm(unittest.TestCase): + def test_encrypt_decrypt(self): + key = os.urandom(32) + aes = AesGcm(key) + data = b"test data" + encrypted = aes.encrypt(data) + decrypted = aes.decrypt(encrypted) + self.assertEqual(decrypted, data) + + def test_encrypt_decrypt_with_iv(self): + key = os.urandom(32) + aes = AesGcm(key) + data = b"test data" + iv = os.urandom(AesGcm.GCM_NONCE_LENGTH) + ct = aes.encrypt_with_iv(iv, AesGcm.GCM_TAG_LENGTH, data) + decrypted = aes.decrypt_with_iv( + iv, AesGcm.GCM_TAG_LENGTH, ct[AesGcm.GCM_NONCE_LENGTH :] + ) + self.assertEqual(decrypted, data) + + def test_invalid_key(self): + with self.assertRaises(ValueError): + AesGcm(b"") + with self.assertRaises(ValueError): + AesGcm(b"short") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_assertion_config.py b/tests/test_assertion_config.py new file mode 100644 index 0000000..f7c1a30 --- /dev/null +++ b/tests/test_assertion_config.py @@ -0,0 +1,56 @@ +import unittest + +from otdf_python.assertion_config import ( + AppliesToState, + AssertionConfig, + AssertionKey, + AssertionKeyAlg, + 
BindingMethod, + Scope, + Statement, + Type, +) + + +class TestAssertionConfig(unittest.TestCase): + def test_enums(self): + self.assertEqual(str(Type.HANDLING_ASSERTION), "handling") + self.assertEqual(str(Scope.PAYLOAD), "payload") + self.assertEqual(str(AppliesToState.ENCRYPTED), "encrypted") + self.assertEqual(str(BindingMethod.JWS), "jws") + + def test_assertion_key(self): + key = AssertionKey(AssertionKeyAlg.RS256, "keydata") + self.assertTrue(key.is_defined()) + key2 = AssertionKey(AssertionKeyAlg.NOT_DEFINED, None) + self.assertFalse(key2.is_defined()) + + def test_statement_equality_and_hash(self): + s1 = Statement("fmt", "schema", "val") + s2 = Statement("fmt", "schema", "val") + s3 = Statement("fmt2", "schema", "val") + self.assertEqual(s1, s2) + self.assertNotEqual(s1, s3) + self.assertEqual(hash(s1), hash(s2)) + + def test_assertion_config(self): + statement = Statement("fmt", "schema", "val") + key = AssertionKey(AssertionKeyAlg.HS256, "keydata") + config = AssertionConfig( + id="id1", + type=Type.BASE_ASSERTION, + scope=Scope.TRUSTED_DATA_OBJ, + applies_to_state=AppliesToState.UNENCRYPTED, + statement=statement, + signing_key=key, + ) + self.assertEqual(config.id, "id1") + self.assertEqual(config.type, Type.BASE_ASSERTION) + self.assertEqual(config.scope, Scope.TRUSTED_DATA_OBJ) + self.assertEqual(config.applies_to_state, AppliesToState.UNENCRYPTED) + self.assertEqual(config.statement, statement) + self.assertEqual(config.signing_key, key) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_asym_encryption.py b/tests/test_asym_encryption.py new file mode 100644 index 0000000..617eb35 --- /dev/null +++ b/tests/test_asym_encryption.py @@ -0,0 +1,13 @@ +from otdf_python.asym_decryption import AsymDecryption +from otdf_python.asym_encryption import AsymEncryption +from tests.mock_crypto import generate_rsa_keypair + + +def test_asym_encryption_decryption(): + private_pem, public_pem = generate_rsa_keypair() + encryptor = AsymEncryption(public_key_pem=public_pem) + decryptor = AsymDecryption(private_key_pem=private_pem) + message = b"test message for encryption" + encrypted = encryptor.encrypt(message) + decrypted = decryptor.decrypt(encrypted) + assert decrypted == message diff --git a/tests/test_autoconfigure_utils.py b/tests/test_autoconfigure_utils.py new file mode 100644 index 0000000..39fa412 --- /dev/null +++ b/tests/test_autoconfigure_utils.py @@ -0,0 +1,50 @@ +import unittest + +from otdf_python.autoconfigure_utils import ( + AttributeNameFQN, + AttributeValueFQN, + AutoConfigureException, + KeySplitStep, + RuleType, +) + + +class TestAutoconfigureUtils(unittest.TestCase): + def test_rule_type(self): + self.assertEqual(RuleType.HIERARCHY, "hierarchy") + self.assertEqual(RuleType.ALL_OF, "allOf") + + def test_key_split_step(self): + k1 = KeySplitStep("kas1", "split1") + k2 = KeySplitStep("kas1", "split1") + k3 = KeySplitStep("kas2", "split2") + self.assertEqual(k1, k2) + self.assertNotEqual(k1, k3) + self.assertEqual(str(k1), "KeySplitStep{kas=kas1, splitID=split1}") + self.assertEqual(len({k1, k2, k3}), 2) + + def test_attribute_name_fqn(self): + url = "https://example.com/attr/department" + fqn = AttributeNameFQN(url) + self.assertEqual(fqn.prefix(), url) + self.assertEqual(fqn.get_key(), url.lower()) + self.assertEqual(fqn.authority(), "https://example.com") + self.assertEqual(fqn.name(), "department") + val_fqn = fqn.select("HR") + self.assertIsInstance(val_fqn, AttributeValueFQN) + self.assertIn("/value/HR", str(val_fqn)) + with 
self.assertRaises(AutoConfigureException): + AttributeNameFQN("badurl") + + def test_attribute_value_fqn(self): + url = "https://example.com/attr/department/value/HR" + fqn = AttributeValueFQN(url) + self.assertEqual(str(fqn), url) + self.assertEqual(fqn, AttributeValueFQN(url)) + self.assertEqual(len({fqn, AttributeValueFQN(url)}), 1) + with self.assertRaises(AutoConfigureException): + AttributeValueFQN("badurl") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..abd8cc3 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,188 @@ +""" +Test CLI functionality +""" + +import os +import subprocess +import sys +import tempfile +from pathlib import Path + +import pytest + + +def test_cli_help(project_root): + """Test that CLI help command works""" + result = subprocess.run( + [sys.executable, "-m", "otdf_python", "--help"], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 0 + assert "OpenTDF CLI" in result.stdout + assert "encrypt" in result.stdout + assert "decrypt" in result.stdout + assert "inspect" in result.stdout + + +def test_cli_version(project_root): + """Test that CLI version command works""" + result = subprocess.run( + [sys.executable, "-m", "otdf_python", "--version"], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 0 + assert "OpenTDF Python SDK" in result.stdout + + with open(Path(__file__).parent.parent / "pyproject.toml", "rb") as f: + # Use tomli for Python < 3.11, tomllib for 3.11+ + if sys.version_info < (3, 11): + import tomli + + pyproject = tomli.load(f) + else: + import tomllib + + pyproject = tomllib.load(f) + expected_version = pyproject["project"]["version"] + + assert ( + expected_version in result.stdout or "0.0.0" in result.stdout + ) # allow for dev version + + +def test_cli_encrypt_help(project_root): + """Test that CLI encrypt help works""" + result = subprocess.run( + [sys.executable, "-m", "otdf_python", "encrypt", "--help"], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 0 + assert "Path to file to encrypt" in result.stdout + assert "--attributes" in result.stdout + assert "--container-type" in result.stdout + + +def test_cli_decrypt_help(project_root): + """Test that CLI decrypt help works""" + result = subprocess.run( + [sys.executable, "-m", "otdf_python", "decrypt", "--help"], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 0 + assert "Path to encrypted file" in result.stdout + assert "--output" in result.stdout + + +def test_cli_inspect_help(project_root): + """Test that CLI inspect help works""" + result = subprocess.run( + [sys.executable, "-m", "otdf_python", "inspect", "--help"], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 0 + assert "Path to encrypted file" in result.stdout + + +def test_cli_encrypt_missing_auth(project_root): + """Test that CLI encrypt fails gracefully without authentication""" + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".txt") as f: + f.write("test content") + temp_file = f.name + + try: + result = subprocess.run( + [sys.executable, "-m", "otdf_python", "encrypt", temp_file], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 1 + assert "Authentication required" in result.stderr + assert "--with-client-creds-file" in result.stderr + finally: + os.unlink(temp_file) + + +def 
test_cli_encrypt_missing_creds_file(project_root): + """Test that CLI encrypt fails gracefully with missing credentials file""" + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".txt") as f: + f.write("test content") + temp_file = f.name + + try: + result = subprocess.run( + [ + sys.executable, + "-m", + "otdf_python", + "--with-client-creds-file", + "nonexistent.json", + "encrypt", + temp_file, + ], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 1 + assert "Credentials file does not exist" in result.stderr + finally: + os.unlink(temp_file) + + +def test_cli_encrypt_invalid_creds_file(project_root): + """Test that CLI encrypt fails gracefully with invalid credentials file""" + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".txt") as f: + f.write("test content") + temp_file = f.name + + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as creds_f: + creds_f.write('{"invalid": "format"}') + creds_file = creds_f.name + + try: + result = subprocess.run( + [ + sys.executable, + "-m", + "otdf_python", + "--with-client-creds-file", + creds_file, + "encrypt", + temp_file, + ], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 1 + assert "must contain 'clientId' and 'clientSecret' fields" in result.stderr + finally: + os.unlink(temp_file) + os.unlink(creds_file) + + +def test_cli_decrypt_missing_file(project_root): + """Test that CLI decrypt fails gracefully with missing file""" + result = subprocess.run( + [sys.executable, "-m", "otdf_python", "decrypt", "nonexistent.tdf"], + capture_output=True, + text=True, + cwd=project_root, + ) + assert result.returncode == 1 + assert "File does not exist" in result.stderr + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/test_collection_store.py b/tests/test_collection_store.py new file mode 100644 index 0000000..5d18c24 --- /dev/null +++ b/tests/test_collection_store.py @@ -0,0 +1,43 @@ +import unittest + +from otdf_python.collection_store import ( + CollectionKey, + CollectionStoreImpl, + NoOpCollectionStore, +) + + +class DummyHeader: + def __init__(self, value): + self.value = value + + def to_bytes(self): + return self.value.encode() + + +class TestCollectionStore(unittest.TestCase): + def test_noop_collection_store(self): + store = NoOpCollectionStore() + header = DummyHeader("header1") + key = CollectionKey(b"secret") + store.store(header, key) + self.assertIs(store.get_key(header), store.NO_PRIVATE_KEY) + + def test_collection_store_impl(self): + store = CollectionStoreImpl() + header1 = DummyHeader("header1") + header2 = DummyHeader("header2") + key1 = CollectionKey(b"key1") + key2 = CollectionKey(b"key2") + store.store(header1, key1) + store.store(header2, key2) + self.assertEqual(store.get_key(header1).key, b"key1") + self.assertEqual(store.get_key(header2).key, b"key2") + # Test eviction + for i in range(store.MAX_SIZE_STORE + 1): + store.store(DummyHeader(f"h{i}"), CollectionKey(bytes([i % 256]))) + self.assertLessEqual(len(store), store.MAX_SIZE_STORE) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 0000000..421691b --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,35 @@ +from otdf_python.config import KASInfo, TDFConfig, get_kas_address + + +def test_tdf_config_defaults(): + cfg = TDFConfig() + assert cfg.autoconfigure is True + assert cfg.default_segment_size == 2 * 1024 * 1024 + assert 
cfg.tdf_format.name == "JSONFormat" + assert cfg.integrity_algorithm.name == "HS256" + assert cfg.segment_integrity_algorithm.name == "GMAC" + assert cfg.mime_type == "application/octet-stream" + assert cfg.kas_info_list == [] + assert cfg.split_plan == [] + assert cfg.render_version_info_in_manifest is True + + +def test_kas_info_str(): + kas = KASInfo( + url="https://kas.example.com", + public_key="pubkey", + kid="kid1", + default=True, + algorithm="alg", + ) + s = str(kas) + assert "KASInfo{" in s + assert "kas.example.com" in s + + +def test_get_kas_address(): + assert get_kas_address("kas.example.com") == "https://kas.example.com:443" + assert ( + get_kas_address("https://kas.example.com:8443") + == "https://kas.example.com:8443" + ) diff --git a/tests/test_crypto_utils.py b/tests/test_crypto_utils.py new file mode 100644 index 0000000..2a802f5 --- /dev/null +++ b/tests/test_crypto_utils.py @@ -0,0 +1,38 @@ +import unittest + +from cryptography.hazmat.primitives.asymmetric import ec + +from otdf_python.crypto_utils import CryptoUtils + + +class TestCryptoUtils(unittest.TestCase): + def test_hmac(self): + key = b"key" + data = b"data" + h = CryptoUtils.calculate_sha256_hmac(key, data) + self.assertEqual(len(h), 32) + + def test_rsa_keypair(self): + priv, pub = CryptoUtils.generate_rsa_keypair() + pub_pem = CryptoUtils.get_rsa_public_key_pem(pub) + priv_pem = CryptoUtils.get_rsa_private_key_pem(priv) + self.assertIn("BEGIN PUBLIC KEY", pub_pem) + self.assertIn("BEGIN PRIVATE KEY", priv_pem) + + def test_ec_keypair(self): + priv, pub = CryptoUtils.generate_ec_keypair(ec.SECP384R1()) + pub_pem = CryptoUtils.get_public_key_pem(pub) + priv_pem = CryptoUtils.get_private_key_pem(priv) + self.assertIn("BEGIN PUBLIC KEY", pub_pem) + self.assertIn("BEGIN PRIVATE KEY", priv_pem) + + def test_rsa_key_type_check(self): + priv, pub = CryptoUtils.generate_rsa_keypair() + with self.assertRaises(ValueError): + CryptoUtils.get_rsa_public_key_pem("notakey") + with self.assertRaises(ValueError): + CryptoUtils.get_rsa_private_key_pem("notakey") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_eckeypair.py b/tests/test_eckeypair.py new file mode 100644 index 0000000..b193dce --- /dev/null +++ b/tests/test_eckeypair.py @@ -0,0 +1,34 @@ +import unittest + +from otdf_python.eckeypair import ECKeyPair + + +class TestECKeyPair(unittest.TestCase): + def test_keypair_generation_and_pem(self): + kp = ECKeyPair() + pub_pem = kp.public_key_pem() + priv_pem = kp.private_key_pem() + self.assertIn("BEGIN PUBLIC KEY", pub_pem) + self.assertIn("BEGIN PRIVATE KEY", priv_pem) + + def test_ecdh_and_hkdf(self): + kp1 = ECKeyPair() + kp2 = ECKeyPair() + shared1 = ECKeyPair.compute_ecdh_key(kp2.public_key, kp1.private_key) + shared2 = ECKeyPair.compute_ecdh_key(kp1.public_key, kp2.private_key) + self.assertEqual(shared1, shared2) + salt = b"salt" + key1 = ECKeyPair.calculate_hkdf(salt, shared1) + key2 = ECKeyPair.calculate_hkdf(salt, shared2) + self.assertEqual(key1, key2) + + def test_sign_and_verify(self): + kp = ECKeyPair() + data = b"test data" + sig = ECKeyPair.sign_ecdsa(data, kp.private_key) + self.assertTrue(ECKeyPair.verify_ecdsa(data, sig, kp.public_key)) + self.assertFalse(ECKeyPair.verify_ecdsa(b"bad data", sig, kp.public_key)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_header.py b/tests/test_header.py new file mode 100644 index 0000000..678f931 --- /dev/null +++ b/tests/test_header.py @@ -0,0 +1,38 @@ +import unittest + +from otdf_python.ecc_mode import 
ECCMode +from otdf_python.header import Header +from otdf_python.policy_info import PolicyInfo +from otdf_python.resource_locator import ResourceLocator +from otdf_python.symmetric_and_payload_config import SymmetricAndPayloadConfig + + +class TestHeader(unittest.TestCase): + def test_header_fields(self): + header = Header() + kas_locator = ResourceLocator("https://kas.example.com", "id1") + ecc_mode = ECCMode(curve_mode=1, use_ecdsa_binding=True) + payload_config = SymmetricAndPayloadConfig( + cipher_type=2, signature_ecc_mode=1, has_signature=False + ) + policy_info = PolicyInfo( + policy_type=1, has_ecdsa_binding=True, body=b"body", binding=b"bind" + ) + # Use correct ephemeral key length for curve_mode=1 (secp384r1): 49 bytes + ephemeral_key = b"e" * 49 + + header.set_kas_locator(kas_locator) + header.set_ecc_mode(ecc_mode) + header.set_payload_config(payload_config) + header.set_policy_info(policy_info) + header.set_ephemeral_key(ephemeral_key) + + self.assertEqual(header.get_kas_locator(), kas_locator) + self.assertEqual(header.get_ecc_mode(), ecc_mode) + self.assertEqual(header.get_payload_config(), payload_config) + self.assertEqual(header.get_policy_info(), policy_info) + self.assertEqual(header.get_ephemeral_key(), ephemeral_key) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_inner_classes.py b/tests/test_inner_classes.py new file mode 100644 index 0000000..3b5ea89 --- /dev/null +++ b/tests/test_inner_classes.py @@ -0,0 +1,67 @@ +import unittest + +from otdf_python.auth_headers import AuthHeaders +from otdf_python.kas_info import KASInfo +from otdf_python.policy_binding_serializer import PolicyBinding, PolicyBindingSerializer + + +class TestAuthHeaders(unittest.TestCase): + def test_auth_headers(self): + headers = AuthHeaders("Bearer token123", "dpop456") + self.assertEqual(headers.auth_header, "Bearer token123") + self.assertEqual(headers.dpop_header, "dpop456") + self.assertEqual(headers.get_auth_header(), "Bearer token123") + self.assertEqual(headers.get_dpop_header(), "dpop456") + + +class TestKASInfo(unittest.TestCase): + def test_kas_info_clone(self): + kas_info = KASInfo(url="https://kas.example.com", public_key="pubkey") + clone = kas_info.clone() + assert clone == kas_info + assert clone is not kas_info + + def test_string_representation(self): + kas_info = KASInfo( + url="https://kas.example.com", + public_key="pubkey", + kid="kid1", + default=True, + algorithm="RSA", + ) + s = str(kas_info) + assert "KASInfo" in s + assert "url=https://kas.example.com" in s + assert "kid=kid1" in s + assert "default=True" in s + assert "algorithm=RSA" in s + + +class TestPolicyBindingSerializer(unittest.TestCase): + def test_deserialize_dict(self): + json_data = {"attr": "value", "number": 42} + result = PolicyBindingSerializer.deserialize(json_data) + self.assertEqual(result.attr, "value") + self.assertEqual(result.number, 42) + + def test_deserialize_string(self): + json_data = "policy_string" + result = PolicyBindingSerializer.deserialize(json_data) + self.assertEqual(result, "policy_string") + + def test_deserialize_invalid(self): + with self.assertRaises(ValueError): + PolicyBindingSerializer.deserialize(123) + + def test_serialize_policy_binding(self): + policy = PolicyBinding(name="test_policy", value="test_value") + result = PolicyBindingSerializer.serialize(policy) + self.assertEqual(result, {"name": "test_policy", "value": "test_value"}) + + def test_serialize_string(self): + result = PolicyBindingSerializer.serialize("policy_string") + 
self.assertEqual(result, "policy_string") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_kas_client.py b/tests/test_kas_client.py new file mode 100644 index 0000000..b4c3b17 --- /dev/null +++ b/tests/test_kas_client.py @@ -0,0 +1,526 @@ +""" +Unit tests for KASClient. +""" + +from base64 import b64decode +from unittest.mock import MagicMock, patch + +import pytest + +from otdf_python.kas_client import KASClient, KeyAccess +from otdf_python.kas_key_cache import KASKeyCache +from otdf_python.sdk_exceptions import SDKException + + +class MockKasInfo: + def __init__(self, url, algorithm=None, public_key=None, kid=None, default=False): + self.url = url + self.algorithm = algorithm or "" + self.public_key = public_key or "" + self.kid = kid or "" + self.default = default + + def clone(self): + return MockKasInfo( + url=self.url, + algorithm=self.algorithm, + public_key=self.public_key, + kid=self.kid, + default=self.default, + ) + + +def test_get_public_key_uses_cache(): + cache = KASKeyCache() + kas_info = MockKasInfo(url="http://kas") + # Store in cache using the new mechanism + cache.store(kas_info) + client = KASClient("http://kas", cache=cache) + # Get public key should now return the cached KASInfo object + assert client.get_public_key(MockKasInfo(url="http://kas")) == kas_info + + +@patch("urllib3.PoolManager") +@patch("otdf_python.kas_connect_rpc_client.AccessServiceClient") +def test_get_public_key_fetches_and_caches( + mock_access_service_client, mock_pool_manager +): + cache = KASKeyCache() + client = KASClient("http://kas", cache=cache) + + # Mock urllib3.PoolManager to prevent real network calls + mock_pool_instance = MagicMock() + mock_pool_manager.return_value = mock_pool_instance + + # Setup a successful HTTP response that bypasses error handling + mock_response = MagicMock() + mock_response.status = 200 + mock_response.headers = {"Content-Type": "application/proto"} + mock_response.read.return_value = ( + b"" # Empty protobuf data since we're mocking the client layer + ) + mock_pool_instance.request.return_value = mock_response + + # Mock the Connect RPC client directly since it expects protobuf responses + mock_client_instance = MagicMock() + mock_access_service_client.return_value = mock_client_instance + + # Mock the public key response using protobuf structure + mock_rpc_response = MagicMock() + mock_rpc_response.kid = "kid2" + mock_rpc_response.public_key = "public-key-data" + + def mock_public_key_call(*args, **kwargs): + return mock_rpc_response + + mock_client_instance.public_key = mock_public_key_call + + # Create KASInfo with URL but no KID or public key + from otdf_python.config import KASInfo + + kas_info = KASInfo(url="http://kas") + + result = client.get_public_key(kas_info) + + # Verify the result has kid and public_key populated + assert result.kid == "kid2" + assert result.public_key == "public-key-data" + + # Verify the result was cached + cached = cache.get("http://kas") + assert cached is not None + assert cached.kid == "kid2" + assert cached.public_key == "public-key-data" + + +@patch("urllib3.PoolManager") +@patch("otdf_python.kas_connect_rpc_client.AccessServiceClient") +@patch("otdf_python.kas_client.CryptoUtils") +@patch("otdf_python.kas_client.AsymDecryption") +@patch("otdf_python.kas_client.jwt.encode") # Mock JWT encoding directly +def test_unwrap_success( + mock_jwt_encode, + mock_asym_decryption, + mock_crypto_utils, + mock_access_service_client, + mock_pool_manager, +): + # Setup mocks for RSA key pair generation and 
decryption
+    mock_private_key = MagicMock()
+    mock_public_key = MagicMock()
+
+    # Mock the DPoP key generation (called in KASClient.__init__)
+    # and the ephemeral key generation (called in unwrap)
+    mock_crypto_utils.generate_rsa_keypair.side_effect = [
+        (mock_private_key, mock_public_key),  # First call: DPoP keys
+        (mock_private_key, mock_public_key),  # Second call: ephemeral keys
+    ]
+    mock_crypto_utils.get_rsa_public_key_pem.return_value = "mock_public_key_pem"
+
+    # Mock JWT encoding (for both request JWT and DPoP proof)
+    mock_jwt_encode.return_value = "mock_jwt_token"
+
+    # Mock decryptor
+    mock_decryptor = MagicMock()
+    mock_decryptor.decrypt.return_value = b"decrypted_key"
+    mock_asym_decryption.return_value = mock_decryptor
+
+    # Mock urllib3.PoolManager to prevent real network calls
+    mock_pool_instance = MagicMock()
+    mock_pool_manager.return_value = mock_pool_instance
+
+    # Setup a successful HTTP response that bypasses error handling
+    mock_response = MagicMock()
+    mock_response.status = 200
+    mock_response.headers = {"Content-Type": "application/proto"}
+    mock_response.read.return_value = (
+        b""  # Empty protobuf data since we're mocking the client layer
+    )
+    mock_pool_instance.request.return_value = mock_response
+
+    # Mock Connect RPC client directly instead of HTTP layer
+    mock_client_instance = MagicMock()
+    mock_access_service_client.return_value = mock_client_instance
+
+    mock_rpc_response = MagicMock()
+    mock_rpc_response.entity_wrapped_key = b64decode(
+        "d2VsY29tZQ=="
+    )  # "welcome" decoded
+    mock_rpc_response.responses = []  # Empty to test fallback to legacy field
+    mock_client_instance.rewrap.return_value = mock_rpc_response
+
+    # Create client and test unwrap
+    # We need to patch the DPoP proof creation method to avoid RSA key access
+    with patch.object(KASClient, "_create_dpop_proof", return_value="mock_dpop_proof"):
+        client = KASClient("http://kas", token_source=lambda: "tok")
+        key_access = KeyAccess(url="http://kas", wrapped_key="wrapped_key")
+        result = client.unwrap(key_access, "policy")
+
+    # Verify result
+    assert result == b"decrypted_key"
+    # Verify the Connect RPC client was called correctly
+    mock_access_service_client.assert_called_once()
+    mock_client_instance.rewrap.assert_called_once()
+    # Verify the decryptor was called
+    mock_decryptor.decrypt.assert_called_once()
+
+
+@patch("urllib3.PoolManager")
+@patch("otdf_python_proto.kas.kas_pb2_connect.AccessServiceClient")
+def test_unwrap_failure(mock_access_service_client, mock_pool_manager):
+    # Setup realistic HTTP response mock for PoolManager
+    mock_response = MagicMock()
+    mock_response.status = 500
+    mock_response.read.return_value = b'{"error": "fail"}'
+    mock_response.headers = {"content-type": "application/json"}
+
+    mock_pool_instance = MagicMock()
+    mock_pool_instance.request.return_value = mock_response
+    mock_pool_manager.return_value = mock_pool_instance
+
+    # Mock the Connect RPC client to raise an exception
+    mock_access_service_client.side_effect = Exception("fail")
+
+    client = KASClient("http://kas", token_source=lambda: "tok")
+
+    with pytest.raises(SDKException) as exc_info:
+        key_access = KeyAccess(url="http://kas", wrapped_key="wrapped_key")
+        client.unwrap(key_access, "policy")
+
+    # Updated to match the new error message pattern when Connect RPC fails
+    assert "Connect RPC rewrap failed" in str(exc_info.value)
+
+
+def test_kas_url_normalization_with_insecure_client():
+    """Test that KAS URLs are properly normalized based on security settings.
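+
+    A minimal sketch of the rule under test (illustrative only, using just the
+    standard library; this is not the SDK's actual implementation):
+
+        from urllib.parse import urlparse
+
+        def normalize_plaintext(url: str) -> str:
+            # Force http and default the port to 80 for plaintext clients.
+            parsed = urlparse(url if "//" in url else f"//{url}", scheme="http")
+            return f"http://{parsed.hostname}:{parsed.port or 80}{parsed.path}"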
+ + This test mirrors the Java SDK's testAddressNormalizationWithInsecureHTTPClient + and ensures HTTP URLs are normalized correctly for insecure connections. + """ + # Test with insecure (plaintext) client + client = KASClient(use_plaintext=True) + + # Test HTTP URL normalization + normalized_url = client._normalize_kas_url("http://example.com") + assert normalized_url == "http://example.com:80" + + # Test URL with explicit port + normalized_url = client._normalize_kas_url("example.com:8080") + assert normalized_url == "http://example.com:8080" + + # Test localhost handling + normalized_url = client._normalize_kas_url("localhost") + assert normalized_url == "http://localhost:80" + + +def test_kas_url_normalization_with_secure_client(): + """Test that KAS URLs are properly normalized for secure HTTPS connections. + + This test mirrors the Java SDK's testAddressNormalizationWithHTTPSClient + and ensures HTTPS URLs are normalized correctly for secure connections. + """ + # Test with secure (HTTPS) client + client = KASClient(use_plaintext=False) + + # Test HTTP URL gets upgraded to HTTPS + normalized_url = client._normalize_kas_url("http://example.com") + assert normalized_url == "https://example.com:443" + + # Test HTTPS URL stays HTTPS + normalized_url = client._normalize_kas_url("https://example.com") + assert normalized_url == "https://example.com:443" + + # Test URL with custom port + normalized_url = client._normalize_kas_url("https://example.com:8443") + assert normalized_url == "https://example.com:8443" + + +def test_kas_url_normalization_with_kasinfo_objects_plaintext(): + """Test URL normalization using KASInfo objects with plaintext client. + + This test ensures that _normalize_kas_url works correctly when called + with various KASInfo.url values in plaintext mode (use_plaintext=True). + """ + from otdf_python.config import KASInfo + + client = KASClient(use_plaintext=True) + + # Test cases with different URL formats in KASInfo objects + test_cases = [ + # Basic hostname without scheme + (KASInfo(url="example.com"), "http://example.com:80"), + # Hostname with port + (KASInfo(url="example.com:8080"), "http://example.com:8080"), + # Localhost + (KASInfo(url="localhost"), "http://localhost:80"), + # Localhost with port + (KASInfo(url="localhost:8080"), "http://localhost:8080"), + # HTTP URL (should preserve port) + (KASInfo(url="http://example.com"), "http://example.com:80"), + # HTTP URL with custom port + (KASInfo(url="http://example.com:9000"), "http://example.com:9000"), + # HTTPS URL (should be converted to HTTP in plaintext mode) + (KASInfo(url="https://example.com"), "http://example.com:80"), + # HTTPS URL with custom port (should be converted to HTTP) + (KASInfo(url="https://example.com:8443"), "http://example.com:8443"), + # URL with /kas path (no scheme, should add proper scheme and port) + (KASInfo(url="example.com/kas"), "http://example.com:80/kas"), + # URL with /kas path and port would be invalid as parsed - skip this case + # Complex URL with path + ( + KASInfo(url="https://platform.example.com:8443/api/kas"), + "http://platform.example.com:8443/api/kas", + ), + ] + + for kas_info, expected_url in test_cases: + normalized_url = client._normalize_kas_url(kas_info.url) + assert normalized_url == expected_url, ( + f"Failed for {kas_info.url}: expected {expected_url}, got {normalized_url}" + ) + + +def test_kas_url_normalization_with_kasinfo_objects_secure(): + """Test URL normalization using KASInfo objects with secure client. 
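+
+    This is the secure counterpart of the plaintext sketch above: the scheme
+    is forced to "https" and a missing port defaults to 443, for example:
+
+        "example.com"             -> "https://example.com:443"
+        "http://example.com:8080" -> "https://example.com:8080"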
+ + This test ensures that _normalize_kas_url works correctly when called + with various KASInfo.url values in secure mode (use_plaintext=False). + """ + from otdf_python.config import KASInfo + + client = KASClient(use_plaintext=False) + + # Test cases with different URL formats in KASInfo objects + test_cases = [ + # Basic hostname without scheme + (KASInfo(url="example.com"), "https://example.com:443"), + # Hostname with port + (KASInfo(url="example.com:8443"), "https://example.com:8443"), + # Localhost + (KASInfo(url="localhost"), "https://localhost:443"), + # Localhost with port + (KASInfo(url="localhost:8443"), "https://localhost:8443"), + # HTTP URL (should be upgraded to HTTPS) + (KASInfo(url="http://example.com"), "https://example.com:443"), + # HTTP URL with custom port (should be upgraded to HTTPS) + (KASInfo(url="http://example.com:8080"), "https://example.com:8080"), + # HTTPS URL (should preserve HTTPS) + (KASInfo(url="https://example.com"), "https://example.com:443"), + # HTTPS URL with custom port + (KASInfo(url="https://example.com:8443"), "https://example.com:8443"), + # URL with /kas path (no scheme, should add proper scheme and port) + (KASInfo(url="example.com/kas"), "https://example.com:443/kas"), + # Complex URL with path + ( + KASInfo(url="http://platform.example.com:8080/api/kas"), + "https://platform.example.com:8080/api/kas", + ), + ] + + for kas_info, expected_url in test_cases: + normalized_url = client._normalize_kas_url(kas_info.url) + assert normalized_url == expected_url, ( + f"Failed for {kas_info.url}: expected {expected_url}, got {normalized_url}" + ) + + +def test_kas_url_normalization_with_kasinfo_edge_cases(): + """Test URL normalization edge cases using KASInfo objects. + + This test covers edge cases and potential error conditions when + normalizing URLs from KASInfo objects. + """ + from otdf_python.config import KASInfo + + client = KASClient(use_plaintext=False) + + # Test various edge cases + test_cases = [ + # IP addresses + (KASInfo(url="192.168.1.100"), "https://192.168.1.100:443"), + (KASInfo(url="192.168.1.100:8443"), "https://192.168.1.100:8443"), + # URLs with query parameters (no scheme, should add proper scheme and port) + ( + KASInfo(url="example.com/kas?param=value"), + "https://example.com:443/kas?param=value", + ), + # URLs with fragments (no scheme, should add proper scheme and port) + (KASInfo(url="example.com/kas#section"), "https://example.com:443/kas#section"), + # Complex paths (no scheme, should add proper scheme and port) + ( + KASInfo(url="platform.example.com/api/v1/kas"), + "https://platform.example.com:443/api/v1/kas", + ), + ] + + for kas_info, expected_url in test_cases: + normalized_url = client._normalize_kas_url(kas_info.url) + assert normalized_url == expected_url, ( + f"Failed for {kas_info.url}: expected {expected_url}, got {normalized_url}" + ) + + +def test_kas_url_normalization_with_kasinfo_additional_fields(): + """Test that URL normalization works with KASInfo objects containing additional fields. + + This test ensures that the normalization process only uses the URL field + and doesn't interfere with other KASInfo fields like algorithm, kid, etc. 
+ """ + from otdf_python.config import KASInfo + + client = KASClient(use_plaintext=False) + + # Create KASInfo with all fields populated + # Using a URL with scheme to avoid the hostname:port/path parsing issue + kas_info = KASInfo( + url="https://example.com:8443/kas", + public_key="-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A...", + kid="key-id-123", + default=True, + algorithm="rsa", + ) + + normalized_url = client._normalize_kas_url(kas_info.url) + assert normalized_url == "https://example.com:8443/kas" + + # Verify other fields remain unchanged + assert ( + kas_info.public_key + == "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A..." + ) + assert kas_info.kid == "key-id-123" + assert kas_info.default is True + assert kas_info.algorithm == "rsa" + + +def test_kas_url_normalization_error_handling_with_kasinfo(): + """Test error handling in URL normalization with invalid KASInfo URLs. + + This test ensures that appropriate SDKExceptions are raised for + malformed URLs in KASInfo objects. + """ + from otdf_python.config import KASInfo + + client = KASClient(use_plaintext=False) + + # Test cases that should raise SDKException + invalid_urls = [ + # Invalid port format + "example.com:invalid_port", + # Multiple colons (ambiguous port) + "example.com:8080:extra", + # IPv6 addresses without proper scheme (current limitation) + "[::1]", + "[2001:db8::1]", + ] + + for invalid_url in invalid_urls: + kas_info = KASInfo(url=invalid_url) + with pytest.raises(SDKException): + client._normalize_kas_url(kas_info.url) + + +@patch("urllib3.PoolManager") +@patch("otdf_python.kas_connect_rpc_client.AccessServiceClient") +def test_jwt_signature_verification_in_unwrap_request( + mock_access_service_client, mock_pool_manager, collect_server_logs +): + """Test that JWT signature is properly created and can be verified. + + This test is inspired by the Java SDK's testCallingRewrap which verifies + the JWT signature in the rewrap request. It ensures our DPoP proof and + signed request JWT are properly formatted. 
+ """ + import jwt + + # Mock urllib3.PoolManager to prevent real network calls + mock_pool_instance = MagicMock() + mock_pool_manager.return_value = mock_pool_instance + + # Setup a successful HTTP response that bypasses error handling + mock_response = MagicMock() + mock_response.status = 200 + mock_response.headers = {"Content-Type": "application/proto"} + mock_response.read.return_value = ( + b"" # Empty protobuf data since we're mocking the client layer + ) + mock_pool_instance.request.return_value = mock_response + + # Mock Connect RPC client directly for protobuf compatibility + mock_client_instance = MagicMock() + mock_access_service_client.return_value = mock_client_instance + + # Create a mock successful response + mock_rpc_response = MagicMock() + mock_rpc_response.entity_wrapped_key = b64decode( + "d2VsY29tZQ==" + ) # "welcome" decoded + mock_rpc_response.responses = [] # Empty to test fallback to legacy field + mock_client_instance.rewrap.return_value = mock_rpc_response + + # Create client with known DPoP keys for verification + client = KASClient("http://kas", token_source=lambda: "test_token") + + # Create a key access object with all required fields + key_access = KeyAccess( + url="http://kas", + wrapped_key="dGVzdF93cmFwcGVkX2tleQ==", # "test_wrapped_key" in base64 + ) + + # Mock the decryption parts since we're focusing on JWT verification + with ( + patch("otdf_python.kas_client.CryptoUtils") as mock_crypto_utils, + patch("otdf_python.kas_client.AsymDecryption") as mock_asym_decryption, + ): + # Setup mocks for the crypto operations + mock_private_key = MagicMock() + mock_public_key = MagicMock() + mock_crypto_utils.generate_rsa_keypair.return_value = ( + mock_private_key, + mock_public_key, + ) + mock_crypto_utils.get_rsa_public_key_pem.return_value = "mock_public_key_pem" + + mock_decryptor = MagicMock() + mock_decryptor.decrypt.return_value = b"decrypted_key" + mock_asym_decryption.return_value = mock_decryptor + + # Call unwrap - this should create and send a properly signed JWT + try: + client.unwrap(key_access, '{"test": "policy"}') + + # Verify the Connect RPC client was called + assert mock_client_instance.rewrap.called + + # Extract the request to verify JWT structure + call_args = mock_client_instance.rewrap.call_args + if call_args and len(call_args) > 0: + request = call_args[0][0] # First positional argument (the request) + signed_token = request.signed_request_token + + assert signed_token is not None, ( + "signed_request_token should be present in request" + ) + + # Decode JWT without verification to check structure + # (we can't verify signature since we mocked the key generation) + decoded = jwt.decode(signed_token, options={"verify_signature": False}) + + # Verify JWT has required claims + assert "requestBody" in decoded, "JWT should contain requestBody claim" + assert "iat" in decoded, "JWT should contain iat (issued at) claim" + assert "exp" in decoded, "JWT should contain exp (expiration) claim" + + # Verify the requestBody contains the expected structure + # For Connect RPC, the request body should be protobuf-encoded + # We just verify it exists and is not empty + assert decoded["requestBody"], "requestBody should not be empty" + + except Exception as e: + # If the test fails, collect server logs for debugging + if callable(collect_server_logs): + logs = collect_server_logs() + if logs: + print(f"Server logs for debugging:\n{logs}") + # Re-raise the exception with additional context + raise SDKException(f"JWT signature verification test failed: 
{e!s}") from e diff --git a/tests/test_kas_key_cache.py b/tests/test_kas_key_cache.py new file mode 100644 index 0000000..b4e6cc7 --- /dev/null +++ b/tests/test_kas_key_cache.py @@ -0,0 +1,44 @@ +""" +Unit tests for KASKeyCache. +""" + +from dataclasses import dataclass + +from otdf_python.kas_key_cache import KASKeyCache + + +@dataclass +class MockKasInfo: + url: str + algorithm: str | None = None + public_key: str | None = None + kid: str | None = None + default: bool = False + + +def test_kas_key_cache_set_and_get(): + cache = KASKeyCache() + # Use the new store/get interface + kas_info = MockKasInfo(url="http://example.com") + cache.store(kas_info) + assert cache.get("http://example.com") == kas_info + + +def test_kas_key_cache_overwrite(): + cache = KASKeyCache() + # Test overwriting with new values + kas_info1 = MockKasInfo(url="http://example.com") + kas_info2 = MockKasInfo(url="http://example.com", algorithm="RSA") + cache.store(kas_info1) + cache.store(kas_info2) + # Without specifying an algorithm, should return the no-algorithm version + assert cache.get("http://example.com") == kas_info1 + # With algorithm specified, should return the algorithm-specific version + assert cache.get("http://example.com", "RSA") == kas_info2 + + +def test_kas_key_cache_clear(): + cache = KASKeyCache() + cache.set("key1", "value1") + cache.clear() + assert cache.get("key1") is None diff --git a/tests/test_kas_key_management.py b/tests/test_kas_key_management.py new file mode 100644 index 0000000..cb964df --- /dev/null +++ b/tests/test_kas_key_management.py @@ -0,0 +1,121 @@ +import base64 +import os +import unittest +from unittest.mock import Mock, patch + +import pytest + +from otdf_python.kas_client import KASClient, KeyAccess +from otdf_python.key_type_constants import EC_KEY_TYPE, RSA_KEY_TYPE + + +class TestKASKeyManagement(unittest.TestCase): + """Tests for the KAS key management pattern.""" + + def test_rsa_key_generation(self): + """Test that RSA keys are generated automatically.""" + client = KASClient(kas_url="http://kas.example.com") + + # Before unwrap, decryptor should be None + self.assertIsNone(client.decryptor) + self.assertIsNone(client.client_public_key) + + # Mock the HTTP response + with patch("httpx.post") as mock_post: + # Configure the mock + mock_response = Mock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "entityWrappedKey": base64.b64encode(os.urandom(32)).decode() + } + mock_post.return_value = mock_response + + # Create a test key access + key_access = KeyAccess( + url="http://kas.example.com", + wrapped_key=base64.b64encode(os.urandom(32)).decode(), + ) + + # Also patch the decrypt method to return a predictable value + with patch( + "otdf_python.asym_decryption.AsymDecryption.decrypt", + return_value=b"test_key", + ): + # Call unwrap + from contextlib import suppress + + with suppress(Exception): + # We expect an exception because we're not actually unwrapping a valid key + client.unwrap(key_access, "{}", RSA_KEY_TYPE) + + # After unwrap, decryptor should be created + self.assertIsNotNone(client.decryptor) + self.assertIsNotNone(client.client_public_key) + + # The public key should be in PEM format + assert isinstance(client.client_public_key, str) + assert client.client_public_key.startswith("-----BEGIN PUBLIC KEY-----") + + @pytest.mark.skip(reason="Skipping 'test_ec_key_generation' until fixed") + @pytest.mark.integration + def test_ec_key_generation(self): + """Test that EC keys are generated automatically for each request.""" + client 
= KASClient(kas_url="http://kas.example.com") + + # Mock the ECKeyPair and related classes + with patch("otdf_python.eckeypair.ECKeyPair") as mock_ec_key_pair_class: + # Configure the mocks + mock_ec_key_pair = Mock() + mock_ec_key_pair.public_key_in_pem_format.return_value = ( + "-----BEGIN PUBLIC KEY-----\nMOCKED_EC_KEY\n-----END PUBLIC KEY-----" + ) + mock_ec_key_pair.get_private_key.return_value = Mock() + mock_ec_key_pair_class.return_value = mock_ec_key_pair + mock_ec_key_pair_class.public_key_from_pem.return_value = Mock() + mock_ec_key_pair_class.compute_ecdh_key.return_value = b"mock_ecdh_key" + mock_ec_key_pair_class.calculate_hkdf.return_value = b"mock_hkdf_key" + + # Mock HTTP response + with patch("httpx.post") as mock_post: + mock_response = Mock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "entityWrappedKey": base64.b64encode(os.urandom(32)).decode(), + "sessionPublicKey": "MOCK_SESSION_KEY", + } + mock_post.return_value = mock_response + + # Mock AesGcm + with patch("otdf_python.aesgcm.AesGcm") as mock_aes_gcm_class: + mock_aes_gcm = Mock() + mock_aes_gcm.decrypt.return_value = b"decrypted_key" + mock_aes_gcm_class.return_value = mock_aes_gcm + + # Create a test key access + key_access = KeyAccess( + url="http://kas.example.com", + wrapped_key=base64.b64encode(os.urandom(32)).decode(), + ) + + # Call unwrap + key = client.unwrap(key_access, "{}", EC_KEY_TYPE) + + # Verify results + self.assertEqual(key, b"decrypted_key") + self.assertEqual( + client.client_public_key, + "-----BEGIN PUBLIC KEY-----\nMOCKED_EC_KEY\n-----END PUBLIC KEY-----", + ) + + # The original decryptor should not be affected + self.assertIsNone(client.decryptor) + + # EC key pair should have been created with the right curve + mock_ec_key_pair_class.assert_called_once() + self.assertEqual( + mock_ec_key_pair_class.call_args[1].get("curve_name"), "P-256" + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_key_type.py b/tests/test_key_type.py new file mode 100644 index 0000000..e899b90 --- /dev/null +++ b/tests/test_key_type.py @@ -0,0 +1,30 @@ +import unittest + +from otdf_python.key_type import KeyType + + +class TestKeyType(unittest.TestCase): + def test_str(self): + self.assertEqual(str(KeyType.RSA2048Key), "rsa:2048") + self.assertEqual(str(KeyType.EC256Key), "ec:secp256r1") + + def test_get_curve_name(self): + self.assertEqual(KeyType.EC256Key.get_curve_name(), "secp256r1") + self.assertEqual(KeyType.EC384Key.get_curve_name(), "secp384r1") + self.assertEqual(KeyType.EC521Key.get_curve_name(), "secp521r1") + with self.assertRaises(ValueError): + KeyType.RSA2048Key.get_curve_name() + + def test_from_string(self): + self.assertEqual(KeyType.from_string("rsa:2048"), KeyType.RSA2048Key) + self.assertEqual(KeyType.from_string("ec:secp256r1"), KeyType.EC256Key) + with self.assertRaises(ValueError): + KeyType.from_string("notakey") + + def test_is_ec(self): + self.assertTrue(KeyType.EC256Key.is_ec()) + self.assertFalse(KeyType.RSA2048Key.is_ec()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_log_collection.py b/tests/test_log_collection.py new file mode 100644 index 0000000..1845290 --- /dev/null +++ b/tests/test_log_collection.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +""" +Test script to verify server log collection functionality. + +This script tests the server log collection without running full pytest. 
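+
+Typical standalone usage (assuming the repository root is on PYTHONPATH):
+
+    python -m tests.test_log_collection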
+""" + +from tests.config_pydantic import CONFIG_TESTING +from tests.server_logs import collect_server_logs, log_server_logs_on_failure + + +def test_log_collection(): + """Test that we can collect server logs.""" + print("Testing server log collection...") + + # Test with a command that should work (basic SSH test) + print("\n1. Testing SSH connectivity...") + logs = collect_server_logs( + pod_name=CONFIG_TESTING.POD_NAME, + namespace=CONFIG_TESTING.NAMESPACE, + ssh_target=CONFIG_TESTING.SSH_TARGET, + lines=CONFIG_TESTING.LOG_LINES, + ) + + if logs: + print("✓ Successfully collected logs") + print(f"Log preview (first 200 chars): {logs[:200]}...") + else: + print("✗ Failed to collect logs") + + # Test the failure logging function + print("\n2. Testing failure log collection...") + log_server_logs_on_failure("test_function_name") + + print("\nTest completed!") + + +if __name__ == "__main__": + test_log_collection() diff --git a/tests/test_manifest.py b/tests/test_manifest.py new file mode 100644 index 0000000..f9e36d1 --- /dev/null +++ b/tests/test_manifest.py @@ -0,0 +1,65 @@ +from otdf_python.manifest import ( + Manifest, + ManifestAssertion, + ManifestEncryptionInformation, + ManifestIntegrityInformation, + ManifestKeyAccess, + ManifestMethod, + ManifestPayload, + ManifestRootSignature, + ManifestSegment, +) + + +def test_manifest_serialization(): + # Create a minimal manifest + seg = ManifestSegment(hash="abc", segmentSize=100, encryptedSegmentSize=120) + root_sig = ManifestRootSignature(alg="alg", sig="sig") + integrity = ManifestIntegrityInformation( + rootSignature=root_sig, + segmentHashAlg="sha256", + segmentSizeDefault=100, + encryptedSegmentSizeDefault=120, + segments=[seg], + ) + method = ManifestMethod(algorithm="AES", iv="iv123", isStreamable=True) + key_access = ManifestKeyAccess( + type="split", + url="https://kas", + protocol="kas", + wrappedKey="key", + policyBinding=None, + ) + enc_info = ManifestEncryptionInformation( + type="split", + policy="cG9saWN5", # base64 for 'policy' + keyAccess=[key_access], + method=method, + integrityInformation=integrity, + ) + payload = ManifestPayload( + type="file", + url="https://file", + protocol="https", + mimeType="text/plain", + isEncrypted=True, + ) + assertion = ManifestAssertion( + id="id1", type="type1", scope="scope1", appliesTo_state="state1", statement={} + ) + manifest = Manifest( + schemaVersion="4.3.0", + encryptionInformation=enc_info, + payload=payload, + assertions=[assertion], + ) + js = manifest.to_json() + loaded = Manifest.from_json(js) + assert loaded.schemaVersion == manifest.schemaVersion + assert isinstance(loaded.payload, ManifestPayload) + assert isinstance(manifest.payload, ManifestPayload) + assert loaded.payload.type == manifest.payload.type + assert isinstance(loaded.encryptionInformation, ManifestEncryptionInformation) + assert isinstance(manifest.encryptionInformation, ManifestEncryptionInformation) + assert loaded.encryptionInformation.type == manifest.encryptionInformation.type + assert loaded.assertions[0].id == manifest.assertions[0].id diff --git a/tests/test_manifest_format.py b/tests/test_manifest_format.py new file mode 100644 index 0000000..674aa26 --- /dev/null +++ b/tests/test_manifest_format.py @@ -0,0 +1,103 @@ +""" +Test TDF manifest format, inspired by the Java SDK manifest tests. 
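+
+For orientation, an abbreviated (illustrative, not exhaustive) example of the
+camelCase shape these tests pin down:
+
+    {
+      "schemaVersion": "4.3.0",
+      "payload": {"type": "file", "url": "0.payload", "isEncrypted": true},
+      "encryptionInformation": {
+        "policy": "...",
+        "method": {"algorithm": "AES-256-GCM"},
+        "keyAccess": [{"url": "...", "wrappedKey": "...", "policyBinding": "..."}],
+        "integrityInformation": {"segmentHashAlg": "...", "segments": ["..."]}
+      }
+    }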
+""" + +import json + +from otdf_python.config import KASInfo, TDFConfig +from otdf_python.tdf import TDF +from tests.mock_crypto import generate_rsa_keypair + + +def test_manifest_field_format(): + """Test that manifest uses camelCase field names as per TDF specification.""" + + # Create a mock KAS info with public key to avoid network calls + kas_private_key, kas_public_key = generate_rsa_keypair() + kas_info = KASInfo( + url="https://kas.example.com", public_key=kas_public_key, kid="test-kid" + ) + + config = TDFConfig(kas_info_list=[kas_info], tdf_private_key=kas_private_key) + + # Create a test TDF and get manifest + test_data = b"Hello World" + tdf_instance = TDF() + config.policy_object = {"uuid": "test-uuid", "body": {"dissem": ["test"]}} + manifest, size, output_stream = tdf_instance.create_tdf( + payload=test_data, config=config + ) + + manifest_dict = json.loads(manifest.to_json()) + + # Test required camelCase fields + assert "encryptionInformation" in manifest_dict + assert "payload" in manifest_dict + assert "schemaVersion" in manifest_dict or "tdfVersion" in manifest_dict + + # Test encryption information structure + enc_info = manifest_dict["encryptionInformation"] + assert "keyAccess" in enc_info + assert "integrityInformation" in enc_info + assert "policy" in enc_info + assert "method" in enc_info + + # Test key access structure + key_access = enc_info["keyAccess"][0] + assert "url" in key_access # Should be url not kas_url + assert "wrappedKey" in key_access # camelCase not wrapped_key + assert "policyBinding" in key_access # camelCase not policy_binding + + # Test integrity information + integrity_info = enc_info["integrityInformation"] + assert "encryptedSegmentSizeDefault" in integrity_info + assert "segmentHashAlg" in integrity_info + assert "segments" in integrity_info + + # Ensure no snake_case fields exist + manifest_str = json.dumps(manifest_dict) + assert "kas_url" not in manifest_str + assert "wrapped_key" not in manifest_str + assert "policy_binding" not in manifest_str + assert "encryption_information" not in manifest_str + assert "integrity_information" not in manifest_str + assert "key_access" not in manifest_str + assert "encrypted_segment_size_default" not in manifest_str + assert "segment_hash_alg" not in manifest_str + + print("✓ All manifest fields use camelCase naming") + + +def test_manifest_roundtrip_serialization(): + """Test manifest serialization/deserialization roundtrip.""" + # Create a mock KAS info with public key to avoid network calls + kas_private_key, kas_public_key = generate_rsa_keypair() + kas_info = KASInfo( + url="https://kas.example.com", public_key=kas_public_key, kid="test-kid" + ) + + config = TDFConfig(kas_info_list=[kas_info], tdf_private_key=kas_private_key) + + # Create a test TDF and get manifest + test_data = b"Hello World" + tdf_instance = TDF() + config.policy_object = {"uuid": "test-uuid", "body": {"dissem": ["test"]}} + manifest, size, output_stream = tdf_instance.create_tdf( + payload=test_data, config=config + ) + + # Test JSON roundtrip + json_str = manifest.to_json() + manifest_dict = json.loads(json_str) + roundtrip_json = json.dumps(manifest_dict) + assert json.loads(json_str) == json.loads(roundtrip_json) + + # Test that critical fields survive roundtrip + original_wrapped_key = manifest_dict["encryptionInformation"]["keyAccess"][0][ + "wrappedKey" + ] + roundtrip_dict = json.loads(roundtrip_json) + roundtrip_wrapped_key = roundtrip_dict["encryptionInformation"]["keyAccess"][0][ + "wrappedKey" + ] + assert 
original_wrapped_key == roundtrip_wrapped_key diff --git a/tests/test_nanotdf.py b/tests/test_nanotdf.py new file mode 100644 index 0000000..1517d1e --- /dev/null +++ b/tests/test_nanotdf.py @@ -0,0 +1,70 @@ +import secrets + +import pytest + +from otdf_python.config import NanoTDFConfig +from otdf_python.nanotdf import InvalidNanoTDFConfig, NanoTDF, NanoTDFMaxSizeLimit + + +def test_nanotdf_roundtrip(): + nanotdf = NanoTDF() + key = secrets.token_bytes(32) + data = b"nano tdf test payload" + # Create config with key in cipher field + config = NanoTDFConfig(cipher=key.hex()) + nanotdf_bytes = nanotdf.create_nanotdf(data, config) + out = nanotdf.read_nanotdf(nanotdf_bytes, config) + assert out == data + + +def test_nanotdf_too_large(): + nanotdf = NanoTDF() + key = secrets.token_bytes(32) + data = b"x" * (NanoTDF.K_MAX_TDF_SIZE + 1) + config = NanoTDFConfig(cipher=key.hex()) + with pytest.raises(NanoTDFMaxSizeLimit): + nanotdf.create_nanotdf(data, config) + + +def test_nanotdf_invalid_magic(): + nanotdf = NanoTDF() + key = secrets.token_bytes(32) + config = NanoTDFConfig(cipher=key.hex()) + bad_bytes = b"BAD" + b"rest" + with pytest.raises(InvalidNanoTDFConfig): + nanotdf.read_nanotdf(bad_bytes, config) + + +@pytest.mark.skip( + "This test is skipped because NanoTDF encryption/decryption is not implemented yet." +) +@pytest.mark.integration +def test_nanotdf_integration_encrypt_decrypt(): + # Load environment variables for integration + from otdf_python.config import KASInfo + from tests.config_pydantic import CONFIG_TDF + + # Create KAS info from configuration + kas_info = KASInfo(url=CONFIG_TDF.KAS_ENDPOINT) + + # Create KAS client with SSL verification disabled for testing + # from otdf_python.kas_client import KASClient + # client = KASClient( + # kas_url=CONFIG_TDF.KAS_ENDPOINT, + # verify_ssl=not CONFIG_TDF.INSECURE_SKIP_VERIFY, + # use_plaintext=bool(CONFIG_TDF.OPENTDF_PLATFORM_URL.startswith("http://")), + # ) + + nanotdf = NanoTDF() + data = b"test data" + config = NanoTDFConfig(kas_info_list=[kas_info]) + # These will raise NotImplementedError until implemented + try: + nanotdf_bytes = nanotdf.create_nanotdf(data, config) + except NotImplementedError: + pytest.skip("NanoTDF encryption not implemented yet.") + try: + decrypted = nanotdf.read_nanotdf(nanotdf_bytes, config) + except NotImplementedError: + pytest.skip("NanoTDF decryption not implemented yet.") + assert decrypted == data diff --git a/tests/test_nanotdf_ecdsa_struct.py b/tests/test_nanotdf_ecdsa_struct.py new file mode 100644 index 0000000..d83eb16 --- /dev/null +++ b/tests/test_nanotdf_ecdsa_struct.py @@ -0,0 +1,91 @@ +""" +Tests for NanoTDFECDSAStruct. 
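+
+As exercised below, the serialized signature layout for a given key_size is
+
+    r_length (1 byte) | r_value (key_size bytes) | s_length (1 byte) | s_value (key_size bytes)
+
+so a well-formed signature is 2 + 2 * key_size bytes long (4 bytes for
+key_size=1, 6 bytes for key_size=2).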
+""" + +import pytest + +from otdf_python.nanotdf_ecdsa_struct import ( + IncorrectNanoTDFECDSASignatureSize, + NanoTDFECDSAStruct, +) + + +def test_from_bytes(): + """Test creating a NanoTDFECDSAStruct from bytes.""" + # Create a simple test signature (r_length=1, r_value=0x01, s_length=1, s_value=0x02) + key_size = 1 + signature = bytes([1, 1, 1, 2]) # r_length, r_value, s_length, s_value + + # Create the struct + struct = NanoTDFECDSAStruct.from_bytes(signature, key_size) + + # Check values + assert struct.get_r_length() == 1 + assert struct.get_r_value()[0] == 1 + assert struct.get_s_length() == 1 + assert struct.get_s_value()[0] == 2 + + +def test_from_bytes_incorrect_size(): + """Test creating a NanoTDFECDSAStruct with incorrect signature size.""" + # Create an invalid signature (too short) + key_size = 2 + signature = bytes([1, 1, 1, 2]) # Should be 6 bytes for key_size=2 + + # Should raise an exception + with pytest.raises(IncorrectNanoTDFECDSASignatureSize): + NanoTDFECDSAStruct.from_bytes(signature, key_size) + + +def test_as_bytes(): + """Test converting a NanoTDFECDSAStruct to bytes.""" + # Create a struct + struct = NanoTDFECDSAStruct() + struct.set_r_length(1) + struct.set_r_value(bytearray([1])) + struct.set_s_length(1) + struct.set_s_value(bytearray([2])) + + # Convert to bytes + signature = struct.as_bytes() + + # Check values + assert len(signature) == 4 + assert signature == bytes([1, 1, 1, 2]) + + +def test_as_bytes_missing_values(): + """Test that an exception is raised when r_value or s_value is not set.""" + # Create an incomplete struct + struct = NanoTDFECDSAStruct() + struct.set_r_length(1) + # Missing r_value + struct.set_s_length(1) + struct.set_s_value(bytearray([2])) + + # Should raise an exception + with pytest.raises(ValueError): + struct.as_bytes() + + +def test_getters_setters(): + """Test all getters and setters.""" + struct = NanoTDFECDSAStruct() + + # Test r_length + struct.set_r_length(5) + assert struct.get_r_length() == 5 + + # Test r_value + r_value = bytearray([1, 2, 3]) + struct.set_r_value(r_value) + assert struct.get_r_value() == r_value + + # Test s_length + struct.set_s_length(3) + assert struct.get_s_length() == 3 + + # Test s_value + s_value = bytearray([4, 5, 6]) + struct.set_s_value(s_value) + assert struct.get_s_value() == s_value diff --git a/tests/test_nanotdf_integration.py b/tests/test_nanotdf_integration.py new file mode 100644 index 0000000..943cfaf --- /dev/null +++ b/tests/test_nanotdf_integration.py @@ -0,0 +1,42 @@ +import io + +import pytest +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa + +from otdf_python.config import KASInfo, NanoTDFConfig +from otdf_python.nanotdf import NanoTDF + + +@pytest.mark.integration +def test_nanotdf_kas_roundtrip(): + # Generate RSA keypair + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) + private_pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode() + public_pem = ( + private_key.public_key() + .public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + .decode() + ) + # Prepare NanoTDF + nanotdf = NanoTDF() + payload = b"nano test payload" + # Create KASInfo with public key + kas_info = KASInfo(url="https://mock-kas", public_key=public_pem) + # Configure NanoTDFConfig for encryption + config = 
NanoTDFConfig(kas_info_list=[kas_info])
+    out = io.BytesIO()
+    nanotdf.create_nano_tdf(payload, out, config)
+    nanotdf_bytes = out.getvalue()
+    # Read/decrypt NanoTDF with private key
+    config_read = NanoTDFConfig(cipher=private_pem, config="mock_unwrap=true")
+    out_dec = io.BytesIO()
+    nanotdf.read_nano_tdf(nanotdf_bytes, out_dec, config_read)
+    assert out_dec.getvalue() == payload
diff --git a/tests/test_nanotdf_type.py b/tests/test_nanotdf_type.py
new file mode 100644
index 0000000..c93c8b8
--- /dev/null
+++ b/tests/test_nanotdf_type.py
@@ -0,0 +1,44 @@
+import unittest
+
+from otdf_python.nanotdf_type import (
+    Cipher,
+    ECCurve,
+    IdentifierType,
+    PolicyType,
+    Protocol,
+)
+
+
+class TestNanoTDFType(unittest.TestCase):
+    def test_eccurve(self):
+        self.assertEqual(str(ECCurve.SECP256R1), "secp256r1")
+        self.assertEqual(str(ECCurve.SECP384R1), "secp384r1")
+        self.assertEqual(str(ECCurve.SECP521R1), "secp521r1")
+        self.assertEqual(str(ECCurve.SECP256K1), "secp256k1")
+
+    def test_protocol(self):
+        self.assertEqual(Protocol.HTTP.value, "HTTP")
+        self.assertEqual(Protocol.HTTPS.value, "HTTPS")
+
+    def test_identifier_type(self):
+        self.assertEqual(IdentifierType.NONE.get_length(), 0)
+        self.assertEqual(IdentifierType.TWO_BYTES.get_length(), 2)
+        self.assertEqual(IdentifierType.EIGHT_BYTES.get_length(), 8)
+        self.assertEqual(IdentifierType.THIRTY_TWO_BYTES.get_length(), 32)
+
+    def test_policy_type(self):
+        self.assertEqual(PolicyType.REMOTE_POLICY.value, 0)
+        self.assertEqual(PolicyType.EMBEDDED_POLICY_PLAIN_TEXT.value, 1)
+        self.assertEqual(PolicyType.EMBEDDED_POLICY_ENCRYPTED.value, 2)
+        self.assertEqual(
+            PolicyType.EMBEDDED_POLICY_ENCRYPTED_POLICY_KEY_ACCESS.value, 3
+        )
+
+    def test_cipher(self):
+        self.assertEqual(Cipher.AES_256_GCM_64_TAG.value, 0)
+        self.assertEqual(Cipher.AES_256_GCM_128_TAG.value, 5)
+        self.assertEqual(Cipher.EAD_AES_256_HMAC_SHA_256.value, 6)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/test_policy_object.py b/tests/test_policy_object.py
new file mode 100644
index 0000000..a0ceb01
--- /dev/null
+++ b/tests/test_policy_object.py
@@ -0,0 +1,38 @@
+import unittest
+
+from otdf_python.policy_object import AttributeObject, PolicyBody, PolicyObject
+
+
+class TestPolicyObject(unittest.TestCase):
+    def test_attribute_object(self):
+        attr = AttributeObject(
+            attribute="attr1",
+            display_name="Attribute 1",
+            is_default=True,
+            pub_key="pubkey123",
+            kas_url="https://kas.example.com",
+        )
+        self.assertEqual(attr.attribute, "attr1")
+        self.assertEqual(attr.display_name, "Attribute 1")
+        self.assertTrue(attr.is_default)
+        self.assertEqual(attr.pub_key, "pubkey123")
+        self.assertEqual(attr.kas_url, "https://kas.example.com")
+
+    def test_policy_body(self):
+        attr1 = AttributeObject(attribute="attr1")
+        attr2 = AttributeObject(attribute="attr2")
+        body = PolicyBody(data_attributes=[attr1, attr2], dissem=["user1", "user2"])
+        self.assertEqual(len(body.data_attributes), 2)
+        self.assertIn("user1", body.dissem)
+        self.assertIn("user2", body.dissem)
+
+    def test_policy_object(self):
+        attr = AttributeObject(attribute="attr1")
+        body = PolicyBody(data_attributes=[attr], dissem=["user1"])
+        policy = PolicyObject(uuid="uuid-1234", body=body)
+        self.assertEqual(policy.uuid, "uuid-1234")
+        self.assertEqual(policy.body, body)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/test_sdk.py b/tests/test_sdk.py
new file mode 100644
index 0000000..c58126e
--- /dev/null
+++ b/tests/test_sdk.py
@@ -0,0 +1,97 @@
+"""
+Basic tests for the Python 
SDK class port. +""" + +from otdf_python.sdk import SDK + + +class DummyServices(SDK.Services): + def close(self): + self.closed = True + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + +def test_sdk_init_and_close(): + services = DummyServices() + sdk = SDK(services) + assert sdk.get_services() is services + assert sdk.get_trust_manager() is None + assert sdk.get_auth_interceptor() is None + assert sdk.get_platform_services_client() is None + assert sdk.get_platform_url() is None + # Test context manager exit calls close + with SDK(services): + pass + # Optionally, check if close was called if you want + + +def test_split_key_exception(): + try: + raise SDK.SplitKeyException("split key error") + except SDK.SplitKeyException: + pass + + +def test_data_size_not_supported(): + try: + raise SDK.DataSizeNotSupported("too large") + except SDK.DataSizeNotSupported: + pass + + +def test_kas_info_missing(): + try: + raise SDK.KasInfoMissing("kas info missing") + except SDK.KasInfoMissing: + pass + + +def test_kas_public_key_missing(): + try: + raise SDK.KasPublicKeyMissing("kas pubkey missing") + except SDK.KasPublicKeyMissing: + pass + + +def test_tamper_exception(): + try: + raise SDK.TamperException("tamper") + except SDK.TamperException: + pass + + +def test_root_signature_validation_exception(): + try: + raise SDK.RootSignatureValidationException("root sig") + except SDK.RootSignatureValidationException: + pass + + +def test_segment_signature_mismatch(): + try: + raise SDK.SegmentSignatureMismatch("seg sig") + except SDK.SegmentSignatureMismatch: + pass + + +def test_kas_bad_request_exception(): + try: + raise SDK.KasBadRequestException("kas bad req") + except SDK.KasBadRequestException: + pass + + +def test_kas_allowlist_exception(): + try: + raise SDK.KasAllowlistException("kas allowlist") + except SDK.KasAllowlistException: + pass + + +def test_assertion_exception(): + try: + raise SDK.AssertionException("assertion", "id123") + except SDK.AssertionException: + pass diff --git a/tests/test_sdk_builder.py b/tests/test_sdk_builder.py new file mode 100644 index 0000000..0e9f835 --- /dev/null +++ b/tests/test_sdk_builder.py @@ -0,0 +1,214 @@ +""" +Tests for the SDKBuilder class. 
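+
+The builder is fluent; a typical construction, as exercised by the tests
+below, looks like:
+
+    sdk = (
+        SDKBuilder.new_builder()
+        .set_platform_endpoint("https://platform.example.com")
+        .client_secret("client123", "secret456")
+        .build()
+    )
+
+build() requires at least a platform endpoint; credentials may instead be
+supplied via bearer_token().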
+""" + +import os +import tempfile +from unittest.mock import MagicMock, patch + +import pytest +import respx + +from otdf_python.sdk import SDK +from otdf_python.sdk_builder import SDKBuilder +from otdf_python.sdk_exceptions import AutoConfigureException + + +def test_sdk_builder_init(): + """Test basic initialization of SDKBuilder.""" + builder = SDKBuilder() + assert builder.platform_endpoint is None + assert builder.oauth_config is None + assert builder.use_plaintext is False + assert builder.ssl_context is None + assert builder.auth_token is None + + +def test_sdk_builder_new_builder(): + """Test the static new_builder method.""" + builder = SDKBuilder.new_builder() + assert isinstance(builder, SDKBuilder) + assert builder.platform_endpoint is None + assert builder.oauth_config is None + assert builder.use_plaintext is False + + +def test_platform_endpoint(): + """Test setting platform endpoint.""" + builder = SDKBuilder() + + # Test with plain domain + result = builder.set_platform_endpoint("example.com") + assert result is builder # Returns self for chaining + assert builder.platform_endpoint == "https://example.com" + + # Test with http:// + result = builder.set_platform_endpoint("http://example.org") + assert builder.platform_endpoint == "http://example.org" + + # Test with https:// + result = builder.set_platform_endpoint("https://secure.example.com") + assert builder.platform_endpoint == "https://secure.example.com" + + # Test with use_plaintext=True + builder.use_plaintext = True + result = builder.set_platform_endpoint("example.net") + assert builder.platform_endpoint == "http://example.net" + + +def test_use_insecure_plaintext_connection(): + """Test setting insecure plaintext connection.""" + builder = SDKBuilder() + + # Set endpoint first, then change connection type + builder.set_platform_endpoint("secure.example.com") + assert builder.platform_endpoint == "https://secure.example.com" + + result = builder.use_insecure_plaintext_connection(True) + assert result is builder # Returns self for chaining + assert builder.use_plaintext is True + assert builder.platform_endpoint == "http://secure.example.com" + + # Change back to secure + result = builder.use_insecure_plaintext_connection(False) + assert builder.use_plaintext is False + assert builder.platform_endpoint == "https://secure.example.com" + + +def test_client_secret(): + """Test setting client credentials.""" + builder = SDKBuilder() + result = builder.client_secret("client123", "secret456") + + assert result is builder # Returns self for chaining + assert builder.oauth_config is not None + assert builder.oauth_config.client_id == "client123" + assert builder.oauth_config.client_secret == "secret456" + assert builder.oauth_config.grant_type == "client_credentials" + + +def test_bearer_token(): + """Test setting bearer token.""" + builder = SDKBuilder() + result = builder.bearer_token("my-token-123") + + assert result is builder # Returns self for chaining + assert builder.auth_token == "my-token-123" + + +def test_ssl_context_from_directory(): + """Test setting up SSL context from certificates directory.""" + builder = SDKBuilder() + + # Create temporary directory with cert files + with tempfile.TemporaryDirectory() as tmpdirname: + # Create dummy cert files + with ( + open(os.path.join(tmpdirname, "cert1.pem"), "w") as f1, + open(os.path.join(tmpdirname, "cert2.crt"), "w") as f2, + open(os.path.join(tmpdirname, "not_a_cert.txt"), "w") as f3, + ): + f1.write("dummy cert") + f2.write("dummy cert") + f3.write("not a 
cert") + + # Patch ssl context creation and cert loading to avoid real SSL errors + with patch("ssl.create_default_context") as mock_create_ctx: + mock_ctx = MagicMock() + mock_create_ctx.return_value = mock_ctx + # Patch load_verify_locations to do nothing + mock_ctx.load_verify_locations.return_value = None + + # Test the builder method + result = builder.ssl_context_from_directory(tmpdirname) + + assert result is builder # Returns self for chaining + assert len(builder.cert_paths) == 2 # Only .pem and .crt files + assert any("cert1.pem" in path for path in builder.cert_paths) + assert any("cert2.crt" in path for path in builder.cert_paths) + assert not any("not_a_cert.txt" in path for path in builder.cert_paths) + + +@respx.mock +def test_get_token_from_client_credentials(): + """Test getting OAuth token from client credentials.""" + builder = SDKBuilder() + builder.set_platform_endpoint("example.com") + builder.set_issuer_endpoint("https://keycloak.example.com") + builder.client_secret("client123", "secret456") + + # Mock the discovery endpoint (Keycloak format) + respx.get( + "https://keycloak.example.com/realms/opentdf/.well-known/openid-configuration" + ).respond( + json={"token_endpoint": "https://keycloak.example.com/oauth/token"}, + status_code=200, + ) + + # Mock the token endpoint + respx.post("https://keycloak.example.com/oauth/token").respond( + json={"access_token": "test-token-123", "token_type": "Bearer"}, status_code=200 + ) + + # Test the method + token = builder._get_token_from_client_credentials() + assert token == "test-token-123" + + +@respx.mock +def test_get_token_failure(): + """Test handling of token acquisition failure.""" + builder = SDKBuilder() + builder.set_platform_endpoint("example.com") + builder.set_issuer_endpoint("https://keycloak.example.com") + builder.client_secret("client123", "secret456") + + # Mock the discovery endpoint (Keycloak format) + respx.get( + "https://keycloak.example.com/realms/opentdf/.well-known/openid-configuration" + ).respond( + json={"token_endpoint": "https://keycloak.example.com/oauth/token"}, + status_code=200, + ) + + # Mock the token endpoint with error + respx.post("https://keycloak.example.com/oauth/token").respond( + json={"error": "invalid_client"}, status_code=401 + ) + + # Test the method + with pytest.raises(AutoConfigureException) as excinfo: + builder._get_token_from_client_credentials() + + assert "Token request failed: 401" in str(excinfo.value) + + +def test_build_without_platform_endpoint(): + """Test building SDK without platform endpoint.""" + builder = SDKBuilder() + + with pytest.raises(AutoConfigureException) as excinfo: + builder.build() + + assert "Platform endpoint is not set" in str(excinfo.value) + + +def test_build_success(): + """Test successful SDK build.""" + builder = SDKBuilder() + builder.set_platform_endpoint("example.com") + builder.bearer_token("test-token") + + # Mock _create_services to avoid actual service creation + with patch.object(SDKBuilder, "_create_services") as mock_create_services: + mock_services = MagicMock(spec=SDK.Services) + mock_create_services.return_value = mock_services + + # Build the SDK + sdk = builder.build() + + # Verify the SDK was created correctly + assert isinstance(sdk, SDK) + assert sdk.platform_url == "https://example.com" + assert sdk.auth_interceptor == {"Authorization": "Bearer test-token"} + assert sdk.get_services() is mock_services diff --git a/tests/test_sdk_exceptions.py b/tests/test_sdk_exceptions.py new file mode 100644 index 0000000..92ddf4b --- 
/dev/null +++ b/tests/test_sdk_exceptions.py @@ -0,0 +1,22 @@ +import unittest + +from otdf_python.sdk_exceptions import AutoConfigureException, SDKException + + +class TestSDKExceptions(unittest.TestCase): + def test_sdk_exception(self): + e = SDKException("msg", Exception("reason")) + self.assertEqual(str(e), "msg") + self.assertIsInstance(e.reason, Exception) + + def test_auto_configure_exception(self): + e = AutoConfigureException("fail", Exception("cause")) + self.assertEqual(str(e), "fail") + self.assertIsInstance(e.reason, Exception) + e2 = AutoConfigureException("fail2") + self.assertEqual(str(e2), "fail2") + self.assertIsNone(e2.reason) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_sdk_mock.py b/tests/test_sdk_mock.py new file mode 100644 index 0000000..088a452 --- /dev/null +++ b/tests/test_sdk_mock.py @@ -0,0 +1,58 @@ +from otdf_python.sdk import ( + KAS, + SDK, + AttributesServiceClientInterface, + AuthorizationServiceClientInterface, + KeyAccessServerRegistryServiceClientInterface, + NamespaceServiceClientInterface, + ResourceMappingServiceClientInterface, + SubjectMappingServiceClientInterface, +) + + +class MockKAS(KAS): + def get_public_key(self, kas_info): + return "mock-public-key" + + def get_ec_public_key(self, kas_info, curve): + return "mock-ec-public-key" + + def unwrap(self, key_access, policy, session_key_type): + return b"mock-unwrapped-key" + + def unwrap_nanotdf(self, curve, header, kas_url): + return b"mock-unwrapped-nanotdf" + + def get_key_cache(self): + return None + + +class MockServices(SDK.Services): + def attributes(self): + return AttributesServiceClientInterface() + + def namespaces(self): + return NamespaceServiceClientInterface() + + def subject_mappings(self): + return SubjectMappingServiceClientInterface() + + def resource_mappings(self): + return ResourceMappingServiceClientInterface() + + def authorization(self): + return AuthorizationServiceClientInterface() + + def kas_registry(self): + return KeyAccessServerRegistryServiceClientInterface() + + def kas(self): + return MockKAS() + + +def test_sdk_instantiation(): + services = MockServices() + sdk = SDK(services=services) + assert sdk.get_services() is services + assert sdk.get_services().kas().get_public_key(None) == "mock-public-key" + assert sdk.get_services().kas().unwrap(None, "", None) == b"mock-unwrapped-key" diff --git a/tests/test_sdk_tdf_integration.py b/tests/test_sdk_tdf_integration.py new file mode 100644 index 0000000..eaedafe --- /dev/null +++ b/tests/test_sdk_tdf_integration.py @@ -0,0 +1,263 @@ +""" +Tests for the integration between SDK and TDF classes. 
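+
+The happy path exercised here is roughly:
+
+    sdk = SDKBuilder().set_platform_endpoint("https://example.kas.com").build()
+    config = sdk.new_tdf_config(attributes=[...], kas_info_list=[kas_info])
+    manifest, size, out = sdk.create_tdf(payload, config, output_stream=io.BytesIO())
+
+where each KASInfo carries a pre-fetched public key so that no network calls
+are made during the tests.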
+""" + +import io + +from otdf_python.sdk_builder import SDKBuilder +from tests.config_pydantic import CONFIG_TDF +from tests.mock_crypto import generate_rsa_keypair + + +def test_sdk_create_tdf_with_builder(): + """Test that SDK.create_tdf works with TDFConfig created from new_tdf_config.""" + from otdf_python.kas_info import KASInfo + + # Generate key pair for testing + kas_private_key, kas_public_key = generate_rsa_keypair() + + # Create SDK with builder + sdk = SDKBuilder().set_platform_endpoint("https://example.kas.com").build() + + # Create KASInfo with public key + kas_info = KASInfo( + url="https://example.kas.com", public_key=kas_public_key, kid="test-kid" + ) + + # Use the SDK to create a TDFConfig with the KASInfo + config = sdk.new_tdf_config( + attributes=[ + CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1, + CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_2, + ], + kas_info_list=[kas_info], + ) + # Use BytesIO to mimic file-like API + payload = b"Hello from test" + output = io.BytesIO() + + # This should not raise an AttributeError or ValueError + manifest, size, out_stream = sdk.create_tdf(payload, config, output_stream=output) + + # Basic validations + assert size > 0 + assert out_stream.getvalue() == output.getvalue() + assert len(output.getvalue()) > 0 + + +def test_validate_otdf_python_script(): + """Test that simulates the validate_otdf_python.py script's usage patterns.""" + from otdf_python.kas_info import KASInfo + + # Generate key pair for testing + kas_private_key, kas_public_key = generate_rsa_keypair() + + # Create SDK with builder + sdk = SDKBuilder().set_platform_endpoint("https://default.kas.example.com").build() + + # Create KASInfo with public key + kas_info = KASInfo( + url="https://default.kas.example.com", public_key=kas_public_key, kid="test-kid" + ) + + # Use the SDK to create a TDFConfig with the KASInfo + config = sdk.new_tdf_config( + attributes=[ + CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1, + CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_2, + ], + kas_info_list=[kas_info], + ) + + # Use BytesIO to mimic file-like API + payload = b"Hello from Python" + output = io.BytesIO() + + # This should not raise an AttributeError or ValueError + manifest, size, out_stream = sdk.create_tdf(payload, config, output_stream=output) + + # Basic validations + assert size > 0 + assert out_stream.getvalue() == output.getvalue() + assert len(output.getvalue()) > 0 + + +def test_new_tdf_config_with_https_platform(): + """Test that new_tdf_config correctly handles HTTPS platform URLs.""" + # Create SDK with HTTPS platform + sdk = ( + SDKBuilder() + .set_platform_endpoint("https://secure.platform.example.com") + .build() + ) + + # Create config without overriding kas_info_list + config = sdk.new_tdf_config(attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1]) + + # Verify the default KAS info was created with HTTPS URL + assert len(config.kas_info_list) == 1 + kas_info = config.kas_info_list[0] + assert kas_info.url == "https://secure.platform.example.com:443/kas" + assert kas_info.default is True + + +def test_new_tdf_config_with_http_platform_using_plaintext(): + """Test that new_tdf_config correctly handles HTTP platform URLs when using plaintext.""" + # Create SDK with HTTP platform and plaintext enabled + sdk = ( + SDKBuilder() + .set_platform_endpoint("http://localhost:8080") + .use_insecure_plaintext_connection(True) + .build() + ) + + # Create config without overriding kas_info_list + config = sdk.new_tdf_config(attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1]) + + # Verify the default KAS info was created with 
HTTP URL + assert len(config.kas_info_list) == 1 + kas_info = config.kas_info_list[0] + assert kas_info.url == "http://localhost:8080/kas" + assert kas_info.default is True + + +def test_new_tdf_config_with_custom_port_https(): + """Test that new_tdf_config correctly handles HTTPS URLs with custom ports.""" + # Create SDK with HTTPS platform with custom port + sdk = ( + SDKBuilder().set_platform_endpoint("https://platform.example.com:9443").build() + ) + + # Create config without overriding kas_info_list + config = sdk.new_tdf_config(attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1]) + + # Verify the default KAS info preserves the custom port + assert len(config.kas_info_list) == 1 + kas_info = config.kas_info_list[0] + assert kas_info.url == "https://platform.example.com:9443/kas" + assert kas_info.default is True + + +def test_new_tdf_config_with_custom_port_http(): + """Test that new_tdf_config correctly handles HTTP URLs with custom ports.""" + # Create SDK with HTTP platform with custom port and plaintext enabled + sdk = ( + SDKBuilder() + .set_platform_endpoint("http://localhost:9080") + .use_insecure_plaintext_connection(True) + .build() + ) + + # Create config without overriding kas_info_list + config = sdk.new_tdf_config(attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1]) + + # Verify the default KAS info preserves the custom port + assert len(config.kas_info_list) == 1 + kas_info = config.kas_info_list[0] + assert kas_info.url == "http://localhost:9080/kas" + assert kas_info.default is True + + +def test_new_tdf_config_with_path_preservation(): + """Test that new_tdf_config correctly preserves paths in platform URLs.""" + # Create SDK with platform URL that has a path + sdk = ( + SDKBuilder().set_platform_endpoint("https://api.example.com/v1/opentdf").build() + ) + + # Create config without overriding kas_info_list + config = sdk.new_tdf_config(attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1]) + + # Verify the default KAS info preserves the path + assert len(config.kas_info_list) == 1 + kas_info = config.kas_info_list[0] + assert kas_info.url == "https://api.example.com:443/v1/opentdf/kas" + assert kas_info.default is True + + +def test_new_tdf_config_use_plaintext_parameter_validation(): + """Test that use_plaintext parameter in new_tdf_config works correctly.""" + # Test with HTTPS platform but override to use plaintext + sdk = SDKBuilder().set_platform_endpoint("https://platform.example.com").build() + + # Create config with use_plaintext parameter + config = sdk.new_tdf_config( + attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1], use_plaintext=True + ) + + # Verify the config was created successfully + assert config is not None + # The use_plaintext parameter should affect KAS URL construction, converting to HTTP + assert len(config.kas_info_list) == 1 + kas_info = config.kas_info_list[0] + + # With use_plaintext=True, the KAS URL should use HTTP scheme and port 80 + assert kas_info.url.startswith("http://"), f"Expected HTTP URL, got: {kas_info.url}" + assert "80" in kas_info.url, f"Expected port 80 for HTTP, got: {kas_info.url}" + assert "/kas" in kas_info.url, f"Expected /kas path, got: {kas_info.url}" + + +def test_new_tdf_config_kas_info_list_override(): + """Test that kas_info_list parameter overrides default KAS info creation.""" + from otdf_python.kas_info import KASInfo + + # Create SDK + sdk = SDKBuilder().set_platform_endpoint("https://platform.example.com").build() + + # Create custom KAS info + custom_kas = KASInfo( + url="https://custom.kas.example.com/kas", + 
public_key="custom-key", + kid="custom-kid", + default=False, + ) + + # Create config with custom kas_info_list + config = sdk.new_tdf_config( + attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1], kas_info_list=[custom_kas] + ) + + # Verify the custom KAS info was used instead of default + assert len(config.kas_info_list) == 1 + kas_info = config.kas_info_list[0] + assert kas_info.url == "https://custom.kas.example.com/kas" + assert kas_info.public_key == "custom-key" + assert kas_info.kid == "custom-kid" + assert kas_info.default is False + + +def test_new_tdf_config_empty_attributes(): + """Test that new_tdf_config handles empty or None attributes correctly.""" + # Create SDK + sdk = SDKBuilder().set_platform_endpoint("https://platform.example.com").build() + + # Test with None attributes + config1 = sdk.new_tdf_config(attributes=None) + assert config1.attributes == [] + + # Test with empty attributes list + config2 = sdk.new_tdf_config(attributes=[]) + assert config2.attributes == [] + + # Test with no attributes parameter + config3 = sdk.new_tdf_config() + assert config3.attributes == [] + + +def test_new_tdf_config_kwargs_passthrough(): + """Test that additional kwargs are passed through to TDFConfig.""" + # Create SDK + sdk = SDKBuilder().set_platform_endpoint("https://platform.example.com").build() + + # Create config with additional kwargs that should be passed to TDFConfig + config = sdk.new_tdf_config( + attributes=[CONFIG_TDF.TEST_OPENTDF_ATTRIBUTE_1], + default_segment_size=1024 * 1024, + mime_type="application/json", + render_version_info_in_manifest=False, + ) + + # Verify kwargs were passed through + assert config.default_segment_size == 1024 * 1024 + assert config.mime_type == "application/json" + assert config.render_version_info_in_manifest is False diff --git a/tests/test_tdf.py b/tests/test_tdf.py new file mode 100644 index 0000000..699ba32 --- /dev/null +++ b/tests/test_tdf.py @@ -0,0 +1,58 @@ +import io +import json +import zipfile + +import pytest + +from otdf_python.config import KASInfo, TDFConfig +from otdf_python.manifest import Manifest +from otdf_python.tdf import TDF, TDFReaderConfig +from tests.mock_crypto import generate_rsa_keypair + + +def test_tdf_create_and_load(): + tdf = TDF() + payload = b"test payload" + kas_private_key, kas_public_key = generate_rsa_keypair() + kas_info = KASInfo( + url="https://kas.example.com", public_key=kas_public_key, kid="test-kid" + ) + + config = TDFConfig(kas_info_list=[kas_info], tdf_private_key=kas_private_key) + manifest, size, out = tdf.create_tdf(payload, config) + assert isinstance(manifest, Manifest) + assert size > 0 + data = out.getvalue() if hasattr(out, "getvalue") else out.read() + with zipfile.ZipFile(io.BytesIO(data), "r") as z: + files = z.namelist() + assert "0.manifest.json" in files + assert "0.payload" in files + manifest_json = json.loads(z.read("0.manifest.json").decode()) + assert manifest_json["schemaVersion"] == TDF.TDF_VERSION + encrypted_payload = z.read("0.payload") + assert encrypted_payload != payload # Should be encrypted + assert len(encrypted_payload) > 0 + # Test round-trip decryption + reader_config = TDFReaderConfig(kas_private_key=kas_private_key) + decrypted = tdf.load_tdf(data, reader_config) + assert decrypted.payload == payload + + +@pytest.mark.integration +def test_tdf_multi_kas_roundtrip(): + tdf = TDF() + payload = b"multi-kas test payload" + # Generate two KAS keypairs + priv1, pub1 = generate_rsa_keypair() + priv2, pub2 = generate_rsa_keypair() + kas1 = 
KASInfo(url="https://kas1.example.com", public_key=pub1, kid="kas1") + kas2 = KASInfo(url="https://kas2.example.com", public_key=pub2, kid="kas2") + + config = TDFConfig(kas_info_list=[kas1, kas2]) + manifest, size, out = tdf.create_tdf(payload, config) + data = out.getvalue() if hasattr(out, "getvalue") else out.read() + # Should be able to decrypt with either KAS private key + for priv in (priv1, priv2): + reader_config = TDFReaderConfig(kas_private_key=priv) + dec = tdf.load_tdf(data, reader_config) + assert dec.payload == payload diff --git a/tests/test_tdf_key_management.py b/tests/test_tdf_key_management.py new file mode 100644 index 0000000..03d1fff --- /dev/null +++ b/tests/test_tdf_key_management.py @@ -0,0 +1,151 @@ +import base64 +import io +import unittest +import zipfile +from unittest.mock import Mock, patch + +from otdf_python.manifest import ( + Manifest, + ManifestEncryptionInformation, + ManifestIntegrityInformation, + ManifestKeyAccess, + ManifestMethod, + ManifestPayload, + ManifestRootSignature, + ManifestSegment, +) +from otdf_python.tdf import TDF, TDFReaderConfig + + +class TestTDFKeyManagement(unittest.TestCase): + """Tests for the TDF class key management pattern.""" + + def setUp(self): + """Set up test fixtures.""" + # Create a mock Services object + self.mock_services = Mock() + self.mock_kas_client = Mock() + self.mock_services.kas.return_value = self.mock_kas_client + + # Create a TDF instance with mock services + self.tdf = TDF(services=self.mock_services) + + # Create a sample TDF file in memory + self.tdf_bytes = self._create_mock_tdf() + + def _create_mock_tdf(self): + """Create a mock TDF file with a minimal manifest.""" + buffer = io.BytesIO() + with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as zf: + # Create key access object + key_access = ManifestKeyAccess( + type="rsa", + url="https://kas.example.com", + protocol="https", + wrappedKey=base64.b64encode(b"wrapped_key_data").decode(), + policyBinding=None, + ) + + # Create encryption info + integrity_info = ManifestIntegrityInformation( + rootSignature=ManifestRootSignature(alg="HS256", sig="signature"), + segmentHashAlg="SHA256", + segmentSizeDefault=1024, + encryptedSegmentSizeDefault=1052, + segments=[ + ManifestSegment( + hash=base64.b64encode(b"hash").decode(), + segmentSize=10, + encryptedSegmentSize=38, + ) + ], + ) + + method = ManifestMethod(algorithm="AES-256-GCM", iv="", isStreamable=True) + enc_info = ManifestEncryptionInformation( + type="rsa", + policy="{}", + keyAccess=[key_access], + method=method, + integrityInformation=integrity_info, + ) + + # Create payload info + payload_info = ManifestPayload( + type="file", + url="0.payload", + protocol="zip", + mimeType="application/octet-stream", + isEncrypted=True, + ) + + # Create manifest + manifest = Manifest( + schemaVersion="4.3.0", + encryptionInformation=enc_info, + payload=payload_info, + assertions=[], + ) + + # Add manifest to zip + zf.writestr("0.manifest.json", manifest.to_json()) + + # Add encrypted payload + zf.writestr( + "0.payload", b"\x00\x01\x02\x03\x04\x05" + ) # dummy encrypted data + + return buffer.getvalue() + + def test_load_tdf_with_kas(self): + """Test loading a TDF without providing a KAS private key.""" + # Configure the mock KAS client - use a proper 32-byte AES-GCM key + self.mock_kas_client.unwrap.return_value = b"x" * 32 # 32-byte key + + # Patch the decrypt method + with patch( + "otdf_python.aesgcm.AesGcm.decrypt", return_value=b"decrypted_payload" + ): + # Load the TDF without a kas_private_key + 
config = TDFReaderConfig(attributes=["attr1"]) + result = self.tdf.load_tdf(self.tdf_bytes, config) + + # Verify KAS client was used + self.mock_kas_client.unwrap.assert_called_once() + + # Verify payload was decrypted + self.assertEqual(result.payload, b"decrypted_payload") + + def test_load_tdf_with_private_key(self): + """Test loading a TDF with a provided KAS private key (testing mode).""" + # Patch AsymDecryption + with patch( + "otdf_python.asym_decryption.AsymDecryption" + ) as mock_asym_decryption_class: + mock_asym_decryption = Mock() + mock_asym_decryption.decrypt.return_value = b"x" * 32 # 32-byte key + mock_asym_decryption_class.return_value = mock_asym_decryption + + # Patch the decrypt method + with patch( + "otdf_python.aesgcm.AesGcm.decrypt", return_value=b"decrypted_payload" + ): + # Load the TDF with a kas_private_key + config = TDFReaderConfig( + kas_private_key="PRIVATE_KEY_PEM", attributes=["attr1"] + ) + result = self.tdf.load_tdf(self.tdf_bytes, config) + + # Verify AsymDecryption was used + mock_asym_decryption_class.assert_called_once_with("PRIVATE_KEY_PEM") + mock_asym_decryption.decrypt.assert_called_once() + + # Verify KAS client was NOT used + self.mock_kas_client.unwrap.assert_not_called() + + # Verify payload was decrypted + self.assertEqual(result.payload, b"decrypted_payload") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_tdf_reader.py b/tests/test_tdf_reader.py new file mode 100644 index 0000000..5e7c634 --- /dev/null +++ b/tests/test_tdf_reader.py @@ -0,0 +1,155 @@ +""" +Tests for TDFReader. +""" + +import io +import json +from unittest.mock import MagicMock, patch + +import pytest + +from otdf_python.policy_object import PolicyObject +from otdf_python.tdf_reader import ( + TDF_MANIFEST_FILE_NAME, + TDF_PAYLOAD_FILE_NAME, + TDFReader, +) + + +class TestTDFReader: + """Tests for the TDFReader class.""" + + @pytest.fixture + def mock_zip_reader(self): + """Create a mock ZipReader for testing.""" + with patch("otdf_python.tdf_reader.ZipReader") as mock_zip_reader: + # Mock data + manifest_data = json.dumps({"test": "manifest"}).encode("utf-8") + payload_data = b"test payload data" + + # Mock the ZipReader instance + mock_reader_instance = mock_zip_reader.return_value + mock_reader_instance.namelist.return_value = [ + TDF_MANIFEST_FILE_NAME, + TDF_PAYLOAD_FILE_NAME, + ] + + # Mock the read method to return appropriate data + def mock_read(name): + if name == TDF_MANIFEST_FILE_NAME: + return manifest_data + elif name == TDF_PAYLOAD_FILE_NAME: + return payload_data + return b"" + + mock_reader_instance.read.side_effect = mock_read + + yield mock_reader_instance, manifest_data, payload_data + + def test_init_success(self, mock_zip_reader): + """Test successful initialization of TDFReader.""" + mock_reader, _, _ = mock_zip_reader + + # Create a TDFReader with a mock file object + TDFReader(io.BytesIO(b"fake tdf data")) + + # Verify ZipReader was created and its methods were called + mock_reader.namelist.assert_called_once() + + def test_init_no_manifest(self, mock_zip_reader): + """Test initialization fails when manifest is missing.""" + mock_reader, _, _ = mock_zip_reader + # Return a list without manifest.json + mock_reader.namelist.return_value = [TDF_PAYLOAD_FILE_NAME] + + # Should raise ValueError + with pytest.raises(ValueError, match="tdf doesn't contain a manifest"): + TDFReader(io.BytesIO(b"fake tdf data")) + + def test_init_no_payload(self, mock_zip_reader): + """Test initialization fails when payload is missing.""" + 
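+        # (mirrors test_init_no_manifest above: TDFReader is expected to
+        # reject an archive that lacks a payload entry with a ValueError)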
mock_reader, _, _ = mock_zip_reader + # Return a list without payload.bin + mock_reader.namelist.return_value = [TDF_MANIFEST_FILE_NAME] + + # Should raise ValueError + with pytest.raises(ValueError, match="tdf doesn't contain a payload"): + TDFReader(io.BytesIO(b"fake tdf data")) + + def test_manifest(self, mock_zip_reader): + """Test getting the manifest content.""" + mock_reader, manifest_data, _ = mock_zip_reader + manifest_content = json.dumps({"test": "manifest"}) + + reader = TDFReader(io.BytesIO(b"fake tdf data")) + + # Get manifest content + result = reader.manifest() + + # Verify result + assert result == manifest_content + mock_reader.read.assert_called_with(TDF_MANIFEST_FILE_NAME) + + def test_read_payload_bytes(self, mock_zip_reader): + """Test reading bytes from the payload.""" + mock_reader, _, payload_data = mock_zip_reader + + reader = TDFReader(io.BytesIO(b"fake tdf data")) + + # Create buffer and read data + buffer = bytearray(len(payload_data)) + bytes_read = reader.read_payload_bytes(buffer) + + # Verify result + assert bytes_read == len(payload_data) + assert bytes(buffer) == payload_data + mock_reader.read.assert_called_with(TDF_PAYLOAD_FILE_NAME) + + @patch("otdf_python.tdf_reader.Manifest") + def test_read_policy_object(self, mock_manifest, mock_zip_reader): + """Test reading the policy object from the manifest.""" + mock_reader, manifest_data, _ = mock_zip_reader + + # Create a realistic manifest with a base64 encoded policy + import base64 + import json + + # Create a test policy object + test_policy = { + "uuid": "test-uuid-123", + "body": { + "dataAttributes": [ + { + "attribute": "test.attr", + "displayName": "Test Attribute", + "isDefault": False, + "pubKey": "test-key", + "kasUrl": "https://kas.example.com", + } + ], + "dissem": ["user1", "user2"], + }, + } + + # Encode the policy as base64 + policy_json = json.dumps(test_policy) + policy_base64 = base64.b64encode(policy_json.encode("utf-8")).decode("utf-8") + + # Create a mock manifest object with the encoded policy + mock_manifest_obj = MagicMock() + mock_manifest_obj.encryptionInformation.policy = policy_base64 + mock_manifest.from_json.return_value = mock_manifest_obj + + reader = TDFReader(io.BytesIO(b"fake tdf data")) + + # Read policy object + result = reader.read_policy_object() + + # Verify result + assert isinstance(result, PolicyObject) + assert result.uuid == "test-uuid-123" + assert len(result.body.data_attributes) == 1 + assert result.body.data_attributes[0].attribute == "test.attr" + assert result.body.dissem == ["user1", "user2"] + mock_reader.read.assert_called_with(TDF_MANIFEST_FILE_NAME) + mock_manifest.from_json.assert_called_once() diff --git a/tests/test_tdf_writer.py b/tests/test_tdf_writer.py new file mode 100644 index 0000000..4d6ff79 --- /dev/null +++ b/tests/test_tdf_writer.py @@ -0,0 +1,60 @@ +import io +import unittest +import zipfile + +from otdf_python.tdf_writer import TDFWriter + + +class TestTDFWriter(unittest.TestCase): + def test_append_manifest_and_payload(self): + out = io.BytesIO() + writer = TDFWriter(out) + manifest = '{"foo": "bar"}' + writer.append_manifest(manifest) + with writer.payload() as f: + f.write(b"payload data") + size = writer.finish() + self.assertGreater(size, 0) + out.seek(0) + with zipfile.ZipFile(out, "r") as z: + self.assertEqual(z.read("0.manifest.json"), manifest.encode("utf-8")) + self.assertEqual(z.read("0.payload"), b"payload data") + + def test_getvalue(self): + writer = TDFWriter() + writer.append_manifest("{}") + with writer.payload() as f: 
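+            # payload() is assumed to return a file-like context manager
+            # that streams bytes into the archive's 0.payload entry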
+ f.write(b"abc") + writer.finish() + data = writer.getvalue() + with zipfile.ZipFile(io.BytesIO(data), "r") as z: + self.assertEqual(z.read("0.manifest.json"), b"{}") + self.assertEqual(z.read("0.payload"), b"abc") + + def test_large_payload_chunks(self): + out = io.BytesIO() + writer = TDFWriter(out) + writer.append_manifest('{"test": true}') + chunk = b"x" * 1024 * 1024 # 1MB + with writer.payload() as f: + for _ in range(5): + f.write(chunk) + writer.finish() + out.seek(0) + with zipfile.ZipFile(out, "r") as z: + self.assertEqual(z.read("0.payload"), chunk * 5) + + def test_error_on_write_after_finish(self): + out = io.BytesIO() + writer = TDFWriter(out) + writer.append_manifest("{}") + with writer.payload() as f: + f.write(b"abc") + writer.finish() + # After finish, writing should raise ValueError + with self.assertRaises(ValueError), writer.payload() as f: + f.write(b"should fail") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_token_source.py b/tests/test_token_source.py new file mode 100644 index 0000000..5bc9e28 --- /dev/null +++ b/tests/test_token_source.py @@ -0,0 +1,42 @@ +""" +Unit tests for TokenSource. +""" + +import time +from unittest.mock import MagicMock, patch + +from otdf_python.token_source import TokenSource + + +def test_token_source_returns_token_and_caches(): + with patch("httpx.post") as mock_post: + mock_resp = MagicMock() + mock_resp.json.return_value = {"access_token": "abc", "expires_in": 100} + mock_resp.raise_for_status.return_value = None + mock_post.return_value = mock_resp + + ts = TokenSource("http://token", "id", "secret") + token1 = ts() + assert token1 == "abc" + # Should use cache + token2 = ts() + assert token2 == "abc" + assert mock_post.call_count == 1 + + +@patch("httpx.post") +def test_token_source_refreshes_token(mock_post): + mock_resp1 = MagicMock() + mock_resp1.json.return_value = {"access_token": "abc", "expires_in": 1} + mock_resp1.raise_for_status.return_value = None + mock_resp2 = MagicMock() + mock_resp2.json.return_value = {"access_token": "def", "expires_in": 100} + mock_resp2.raise_for_status.return_value = None + mock_post.side_effect = [mock_resp1, mock_resp2] + + ts = TokenSource("http://token", "id", "secret") + token1 = ts() + time.sleep(2) + token2 = ts() + assert token1 == "abc" + assert token2 == "def" diff --git a/tests/test_url_normalization.py b/tests/test_url_normalization.py new file mode 100644 index 0000000..6c6eab9 --- /dev/null +++ b/tests/test_url_normalization.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +""" +Test script to verify URL normalization functionality is working correctly. + +This script tests the _normalize_kas_url method to ensure it properly respects +the use_plaintext setting when converting URLs. 
+""" + +# Allow importing from src directory +import os +import sys + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) + +from src.otdf_python.kas_client import KASClient + + +def test_url_normalization(): + print("Testing with use_plaintext=True:") + client_plaintext = KASClient(use_plaintext=True) + + test_urls = [ + "example.com:8080", + "example.com", + "https://example.com:8080/kas", + "http://example.com:8080/kas", + ] + + for url in test_urls: + normalized = client_plaintext._normalize_kas_url(url) + print(f" {url} -> {normalized}") + # With plaintext=True, all URLs should use http:// + assert "http://" in normalized + assert "https://" not in normalized + + print("\nTesting with use_plaintext=False:") + client_https = KASClient(use_plaintext=False) + + for url in test_urls: + normalized = client_https._normalize_kas_url(url) + print(f" {url} -> {normalized}") + # With plaintext=False, all URLs should use https:// + assert "https://" in normalized + assert "http://" not in normalized + + print("\nAll tests passed!") + + +if __name__ == "__main__": + test_url_normalization() diff --git a/tests/test_use_plaintext_flow.py b/tests/test_use_plaintext_flow.py new file mode 100644 index 0000000..3019bed --- /dev/null +++ b/tests/test_use_plaintext_flow.py @@ -0,0 +1,103 @@ +""" +Test to verify that the use_plaintext parameter flows correctly from SDKBuilder to KASClient. +""" + +from unittest.mock import MagicMock, patch + +from otdf_python.sdk_builder import SDKBuilder + + +def test_use_plaintext_flows_through_sdk_builder_to_kas_client(): + """Test that use_plaintext parameter flows from SDKBuilder through to KASClient.""" + + with patch("otdf_python.kas_client.KASClient") as mock_kas_client: + # Mock the KASClient constructor to capture the arguments + mock_kas_instance = MagicMock() + mock_kas_client.return_value = mock_kas_instance + + # Create SDK with plaintext connection enabled + builder = ( + SDKBuilder.new_builder() + .set_platform_endpoint("platform.example.com") + .use_insecure_plaintext_connection(True) + ) + + sdk = builder.build() + + # Access the KAS service to trigger KASClient creation + sdk.get_services().kas() + + # Verify that KASClient was called with use_plaintext=True + mock_kas_client.assert_called_once() + call_args = mock_kas_client.call_args + + # Check that use_plaintext was passed as True + assert call_args.kwargs.get("use_plaintext") is True + + # Also verify the platform URL was set correctly for plaintext + assert call_args.kwargs.get("kas_url") == "http://platform.example.com" + + +def test_use_plaintext_false_flows_through_sdk_builder_to_kas_client(): + """Test that use_plaintext=False flows from SDKBuilder through to KASClient.""" + + with patch("otdf_python.kas_client.KASClient") as mock_kas_client: + # Mock the KASClient constructor to capture the arguments + mock_kas_instance = MagicMock() + mock_kas_client.return_value = mock_kas_instance + + # Create SDK with plaintext connection disabled (default) + builder = ( + SDKBuilder.new_builder() + .set_platform_endpoint("platform.example.com") + .use_insecure_plaintext_connection(False) + ) + + sdk = builder.build() + + # Access the KAS service to trigger KASClient creation + sdk.get_services().kas() + + # Verify that KASClient was called with use_plaintext=False + mock_kas_client.assert_called_once() + call_args = mock_kas_client.call_args + + # Check that use_plaintext was passed as False + assert call_args.kwargs.get("use_plaintext") is False + + # Also verify the platform URL was set 
correctly for HTTPS + assert call_args.kwargs.get("kas_url") == "https://platform.example.com" + + +def test_use_plaintext_default_value(): + """Test that the default use_plaintext value is False.""" + + with patch("otdf_python.kas_client.KASClient") as mock_kas_client: + # Mock the KASClient constructor to capture the arguments + mock_kas_instance = MagicMock() + mock_kas_client.return_value = mock_kas_instance + + # Create SDK without explicitly setting plaintext connection + builder = SDKBuilder.new_builder().set_platform_endpoint("platform.example.com") + + sdk = builder.build() + + # Access the KAS service to trigger KASClient creation + sdk.get_services().kas() + + # Verify that KASClient was called with use_plaintext=False by default + mock_kas_client.assert_called_once() + call_args = mock_kas_client.call_args + + # Check that use_plaintext defaults to False + assert call_args.kwargs.get("use_plaintext") is False + + # Also verify the platform URL defaults to HTTPS + assert call_args.kwargs.get("kas_url") == "https://platform.example.com" + + +if __name__ == "__main__": + test_use_plaintext_flows_through_sdk_builder_to_kas_client() + test_use_plaintext_false_flows_through_sdk_builder_to_kas_client() + test_use_plaintext_default_value() + print("All tests passed!") diff --git a/tests/test_validate_otdf_python.py b/tests/test_validate_otdf_python.py new file mode 100644 index 0000000..10b3860 --- /dev/null +++ b/tests/test_validate_otdf_python.py @@ -0,0 +1,191 @@ +""" +This file is effectively the same test coverage as: +https://github.com/b-long/opentdf-python-sdk/blob/v0.2.17/validate_otdf_python.py + +Execute using: + uv run pytest tests/test_validate_otdf_python.py +""" + +import logging +import sys +import tempfile +from pathlib import Path + +import pytest + +from otdf_python.tdf import TDFReaderConfig +from tests.integration.support_sdk import get_sdk + +# Set up detailed logging +logging.basicConfig(level=logging.INFO, format="%(levelname)s:%(name)s:%(message)s") + +_test_attributes = [] + + +def _get_sdk_and_tdf_config() -> tuple: + sdk = get_sdk() + + # Let the SDK create the default KAS info from the platform URL + # This will automatically append /kas to the platform URL + tdf_config = sdk.new_tdf_config( + attributes=_test_attributes, + # Don't override kas_info_list - let SDK use platform_url + /kas + ) + return sdk, tdf_config + + +def encrypt_file(input_path: Path) -> Path: + """Encrypt a file and return the path to the encrypted file.""" + + # Build the SDK + sdk, tdf_config = _get_sdk_and_tdf_config() + + output_path = input_path.with_suffix(input_path.suffix + ".tdf") + with open(input_path, "rb") as infile, open(output_path, "wb") as outfile: + sdk.create_tdf(infile.read(), tdf_config, output_stream=outfile) + return output_path + + +def decrypt_file(encrypted_path: Path) -> Path: + """Decrypt a file and return the path to the decrypted file.""" + sdk = get_sdk() + + output_path = encrypted_path.with_suffix(".decrypted") + with open(encrypted_path, "rb") as infile, open(output_path, "wb") as outfile: + # Include attributes for policy enforcement + reader_config = TDFReaderConfig( + attributes=_test_attributes # Same attributes used in encrypt_file + ) + + # Use KAS client for key unwrapping + reader = sdk.load_tdf_with_config(infile.read(), reader_config) + # TDFReader is a dataclass with payload attribute + outfile.write(reader.payload) + return output_path + + +def verify_encrypt_str() -> None: + print("Validating string encryption (local TDF)") + try: + sdk = 
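get_sdk()
+        # (get_sdk() comes from tests/integration/support_sdk and is assumed
+        # to return an SDK wired to a locally provisioned platform)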
+
+        payload = b"Hello from Python"
+
+        # Let the SDK create the default KAS info from the platform URL
+        # This will automatically append /kas to the platform URL
+        tdf_config = sdk.new_tdf_config(
+            attributes=_test_attributes,
+            # Don't override kas_info_list - let SDK use platform_url + /kas
+        )
+
+        # Use BytesIO to mimic file-like API
+        from io import BytesIO
+
+        output = BytesIO()
+        sdk.create_tdf(payload, tdf_config, output_stream=output)
+        tdf_bytes = output.getvalue()
+        print(f"TDF bytes returned: {tdf_bytes[:60]}... (truncated)")
+        assert tdf_bytes
+        assert len(tdf_bytes) > 0
+    except Exception as e:
+        import traceback
+
+        traceback.print_exc()
+        raise RuntimeError(
+            f"An unexpected error occurred testing otdf_python string encryption: {e}"
+        ) from e
+
+
+@pytest.mark.integration
+def test_verify_encrypt_str():
+    """Run the string encryption verification test."""
+    verify_encrypt_str()
+
+
+def verify_encrypt_file() -> None:
+    print("Validating file encryption (local TDF)")
+    try:
+        with tempfile.TemporaryDirectory() as tmpDir:
+            print("Created temporary directory", tmpDir)
+            some_plaintext_file = Path(tmpDir) / "new-file.txt"
+            some_plaintext_file.write_text("Hello world")
+            encrypted_path = encrypt_file(some_plaintext_file)
+            print(f"Encrypted file at: {encrypted_path}")
+            # Optionally, check the file exists and is not empty
+            assert encrypted_path.exists()
+            assert encrypted_path.stat().st_size > 0
+    except Exception as e:
+        raise RuntimeError(
+            f"An unexpected error occurred testing otdf_python file encryption: {e}"
+        ) from e
+
+
+@pytest.mark.integration
+def test_verify_encrypt_file():
+    """Run the file encryption verification test."""
+    verify_encrypt_file()
+
+
+def verify_encrypt_decrypt_file() -> None:
+    print("Validating encrypt/decrypt roundtrip (local TDF)")
+    try:
+        with tempfile.TemporaryDirectory() as tmpDir:
+            tmpDir = Path(tmpDir)
+            input_file = tmpDir / "plain.txt"
+            input_file.write_text("Secret message")
+
+            try:
+                encrypted_path = encrypt_file(input_file)
+                print(f"Encrypted file at: {encrypted_path}")
+
+                # Decrypt the file using the same SDK
+                decrypted_path = decrypt_file(encrypted_path)
+                print(f"Decrypted file at: {decrypted_path}")
+
+                # Verify the result
+                assert decrypted_path.exists()
+
+                # Validate content
+                with open(input_file, "rb") as f:
+                    original = f.read()
+                with open(decrypted_path, "rb") as f:
+                    decrypted = f.read()
+                assert original == decrypted, "Decrypted content doesn't match original"
+
+            except Exception as e:
+                import traceback
+
+                traceback.print_exc()
+                print(f"Error during encryption/decryption test: {e}")
+                raise
+    except Exception as e:
+        raise RuntimeError(
+            f"An unexpected error occurred testing otdf_python encrypt/decrypt: {e}"
+        ) from e
+
+
+@pytest.mark.integration
+def test_verify_encrypt_decrypt_file():
+    """Run the encrypt/decrypt verification test."""
+    verify_encrypt_decrypt_file()
+
+
+if __name__ == "__main__":
+    if len(sys.argv) > 1:
+        tdf_file = Path(sys.argv[1])
+        print(f"Decrypting provided TDF file: {tdf_file}")
+        output = decrypt_file(tdf_file)
+        print(f"Decrypted file written to: {output}")
+    else:
+        print("Attempting string encryption")
+        verify_encrypt_str()
+
+        print("Attempting file encryption")
+        verify_encrypt_file()
+
+        print("Attempting encrypt/decrypt roundtrip")
+        verify_encrypt_decrypt_file()
+
+        print("All tests have passed 👍")
diff --git a/tests/test_version.py b/tests/test_version.py
new file mode 100644
index 0000000..c4740cf
--- /dev/null
+++ 
b/tests/test_version.py @@ -0,0 +1,39 @@ +import unittest + +from otdf_python.version import Version + + +class TestVersion(unittest.TestCase): + def test_parse_and_str(self): + v = Version("1.2.3-alpha") + self.assertEqual(v.major, 1) + self.assertEqual(v.minor, 2) + self.assertEqual(v.patch, 3) + self.assertEqual(v.prerelease_and_metadata, "-alpha") + self.assertIn("Version{major=1, minor=2, patch=3", str(v)) + + def test_compare(self): + v1 = Version("1.2.3") + v2 = Version("1.2.4") + v3 = Version("1.3.0") + v4 = Version("2.0.0") + self.assertTrue(v1 < v2) + self.assertTrue(v2 < v3) + self.assertTrue(v3 < v4) + self.assertTrue(v4 > v1) + self.assertEqual(v1, Version(1, 2, 3)) + + def test_hash(self): + v1 = Version("1.2.3") + v2 = Version(1, 2, 3) + self.assertEqual(hash(v1), hash(v2)) + s = {v1, v2} + self.assertEqual(len(s), 1) + + def test_invalid(self): + with self.assertRaises(ValueError): + Version("not.a.version") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_zip_reader.py b/tests/test_zip_reader.py new file mode 100644 index 0000000..11af01c --- /dev/null +++ b/tests/test_zip_reader.py @@ -0,0 +1,70 @@ +import io +import random +import unittest + +from otdf_python.zip_reader import ZipReader +from otdf_python.zip_writer import ZipWriter + + +class TestZipReader(unittest.TestCase): + def test_read_and_namelist(self): + # Create a zip in memory + writer = ZipWriter() + writer.data("foo.txt", b"foo") + writer.data("bar.txt", b"bar") + writer.finish() + data = writer.getvalue() + # Read it back + reader = ZipReader(io.BytesIO(data)) + names = reader.namelist() + self.assertIn("foo.txt", names) + self.assertIn("bar.txt", names) + self.assertEqual(reader.read("foo.txt"), b"foo") + self.assertEqual(reader.read("bar.txt"), b"bar") + reader.close() + + def test_extract(self): + import tempfile + + writer = ZipWriter() + writer.data("baz.txt", b"baz") + writer.finish() + data = writer.getvalue() + reader = ZipReader(io.BytesIO(data)) + with tempfile.TemporaryDirectory() as tmpdir: + out_path = reader.extract("baz.txt", tmpdir) + with open(out_path, "rb") as f: + self.assertEqual(f.read(), b"baz") + reader.close() + + def test_entry_interface_and_random_files(self): + # Create a zip with many random files + r = random.Random(42) + num_entries = r.randint(10, 20) # Use a smaller number for test speed + test_data = [] + for _ in range(num_entries): + file_name = "".join( + r.choices("abcdefghijklmnopqrstuvwxyz0123456789", k=r.randint(5, 15)) + ) + file_content = bytes(r.getrandbits(8) for _ in range(r.randint(10, 100))) + test_data.append((file_name, file_content)) + writer = ZipWriter() + for name, content in test_data: + writer.data(name, content) + writer.finish() + data = writer.getvalue() + # Read back using the Entry interface + reader = ZipReader(io.BytesIO(data)) + names_to_data = dict(test_data) + found_names = set() + for entry in reader.get_entries(): + name = entry.get_name() + self.assertIn(name, names_to_data) + self.assertEqual(entry.get_data(), names_to_data[name]) + found_names.add(name) + self.assertEqual(found_names, set(names_to_data.keys())) + reader.close() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_zip_writer.py b/tests/test_zip_writer.py new file mode 100644 index 0000000..bf7dccd --- /dev/null +++ b/tests/test_zip_writer.py @@ -0,0 +1,35 @@ +import io +import unittest +import zipfile + +from otdf_python.zip_writer import ZipWriter + + +class TestZipWriter(unittest.TestCase): + def test_data_and_stream(self): 
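+        """Exercise both write paths (data() and a streamed entry), then
+        validate the archive with the standard-library zipfile module."""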
+ out = io.BytesIO() + writer = ZipWriter(out) + # Write using data + writer.data("foo.txt", b"hello world") + # Write using stream + with writer.stream("bar.txt") as f: + f.write(b"bar contents") + size = writer.finish() + self.assertGreater(size, 0) + # Validate zip contents + out.seek(0) + with zipfile.ZipFile(out, "r") as z: + self.assertEqual(z.read("foo.txt"), b"hello world") + self.assertEqual(z.read("bar.txt"), b"bar contents") + + def test_getvalue(self): + writer = ZipWriter() + writer.data("a.txt", b"A") + writer.finish() + data = writer.getvalue() + with zipfile.ZipFile(io.BytesIO(data), "r") as z: + self.assertEqual(z.read("a.txt"), b"A") + + +if __name__ == "__main__": + unittest.main() diff --git a/uv.lock b/uv.lock index ec2312a..a2c1930 100644 --- a/uv.lock +++ b/uv.lock @@ -1,7 +1,1274 @@ version = 1 +revision = 3 requires-python = ">=3.10" +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.12.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/dc/ef9394bde9080128ad401ac7ede185267ed637df03b51f05d14d1c99ad67/aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc", size = 703921, upload-time = "2025-07-29T05:49:43.584Z" }, + { url = "https://files.pythonhosted.org/packages/8f/42/63fccfc3a7ed97eb6e1a71722396f409c46b60a0552d8a56d7aad74e0df5/aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af", size = 480288, upload-time = "2025-07-29T05:49:47.851Z" }, + { url = "https://files.pythonhosted.org/packages/9c/a2/7b8a020549f66ea2a68129db6960a762d2393248f1994499f8ba9728bbed/aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421", size = 468063, upload-time = "2025-07-29T05:49:49.789Z" }, + { url = "https://files.pythonhosted.org/packages/8f/f5/d11e088da9176e2ad8220338ae0000ed5429a15f3c9dfd983f39105399cd/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79", size = 1650122, upload-time = "2025-07-29T05:49:51.874Z" }, + { 
url = "https://files.pythonhosted.org/packages/b0/6b/b60ce2757e2faed3d70ed45dafee48cee7bfb878785a9423f7e883f0639c/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77", size = 1624176, upload-time = "2025-07-29T05:49:53.805Z" }, + { url = "https://files.pythonhosted.org/packages/dd/de/8c9fde2072a1b72c4fadecf4f7d4be7a85b1d9a4ab333d8245694057b4c6/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c", size = 1696583, upload-time = "2025-07-29T05:49:55.338Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ad/07f863ca3d895a1ad958a54006c6dafb4f9310f8c2fdb5f961b8529029d3/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4", size = 1738896, upload-time = "2025-07-29T05:49:57.045Z" }, + { url = "https://files.pythonhosted.org/packages/20/43/2bd482ebe2b126533e8755a49b128ec4e58f1a3af56879a3abdb7b42c54f/aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6", size = 1643561, upload-time = "2025-07-29T05:49:58.762Z" }, + { url = "https://files.pythonhosted.org/packages/23/40/2fa9f514c4cf4cbae8d7911927f81a1901838baf5e09a8b2c299de1acfe5/aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2", size = 1583685, upload-time = "2025-07-29T05:50:00.375Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c3/94dc7357bc421f4fb978ca72a201a6c604ee90148f1181790c129396ceeb/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d", size = 1627533, upload-time = "2025-07-29T05:50:02.306Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3f/1f8911fe1844a07001e26593b5c255a685318943864b27b4e0267e840f95/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb", size = 1638319, upload-time = "2025-07-29T05:50:04.282Z" }, + { url = "https://files.pythonhosted.org/packages/4e/46/27bf57a99168c4e145ffee6b63d0458b9c66e58bb70687c23ad3d2f0bd17/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5", size = 1613776, upload-time = "2025-07-29T05:50:05.863Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7e/1d2d9061a574584bb4ad3dbdba0da90a27fdc795bc227def3a46186a8bc1/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b", size = 1693359, upload-time = "2025-07-29T05:50:07.563Z" }, + { url = "https://files.pythonhosted.org/packages/08/98/bee429b52233c4a391980a5b3b196b060872a13eadd41c3a34be9b1469ed/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065", size = 1716598, upload-time = "2025-07-29T05:50:09.33Z" }, + { url = "https://files.pythonhosted.org/packages/57/39/b0314c1ea774df3392751b686104a3938c63ece2b7ce0ba1ed7c0b4a934f/aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1", size = 1644940, upload-time = "2025-07-29T05:50:11.334Z" }, + { url = "https://files.pythonhosted.org/packages/1b/83/3dacb8d3f8f512c8ca43e3fa8a68b20583bd25636ffa4e56ee841ffd79ae/aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a", size = 429239, upload-time = "2025-07-29T05:50:12.803Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f9/470b5daba04d558c9673ca2034f28d067f3202a40e17804425f0c331c89f/aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830", size = 452297, upload-time = "2025-07-29T05:50:14.266Z" }, + { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" }, + { url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, + { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, + { url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, + { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, + { url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, + { url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" }, + { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, + { url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, + { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, + { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" }, + { url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, + { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, + { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, + { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, + { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, + { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" }, + { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, + { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, + { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, + { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, + { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = "https://files.pythonhosted.org/packages/f2/33/918091abcf102e39d15aba2476ad9e7bd35ddb190dcdd43a854000d3da0d/aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315", size = 696741, upload-time = "2025-07-29T05:51:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/7495a81e39a998e400f3ecdd44a62107254803d1681d9189be5c2e4530cd/aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd", size = 474407, upload-time = "2025-07-29T05:51:21.165Z" }, + { url = "https://files.pythonhosted.org/packages/49/fc/a9576ab4be2dcbd0f73ee8675d16c707cfc12d5ee80ccf4015ba543480c9/aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4", size = 466703, upload-time = "2025-07-29T05:51:22.948Z" }, + { url = "https://files.pythonhosted.org/packages/09/2f/d4bcc8448cf536b2b54eed48f19682031ad182faa3a3fee54ebe5b156387/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7", size = 1705532, upload-time = "2025-07-29T05:51:25.211Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f3/59406396083f8b489261e3c011aa8aee9df360a96ac8fa5c2e7e1b8f0466/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d", size = 1686794, upload-time = "2025-07-29T05:51:27.145Z" }, + { url = "https://files.pythonhosted.org/packages/dc/71/164d194993a8d114ee5656c3b7ae9c12ceee7040d076bf7b32fb98a8c5c6/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b", size = 1738865, upload-time = "2025-07-29T05:51:29.366Z" }, + { url = "https://files.pythonhosted.org/packages/1c/00/d198461b699188a93ead39cb458554d9f0f69879b95078dce416d3209b54/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d", size = 1788238, upload-time = "2025-07-29T05:51:31.285Z" }, + { url = "https://files.pythonhosted.org/packages/85/b8/9e7175e1fa0ac8e56baa83bf3c214823ce250d0028955dfb23f43d5e61fd/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d", size = 1710566, upload-time = "2025-07-29T05:51:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/59/e4/16a8eac9df39b48ae102ec030fa9f726d3570732e46ba0c592aeeb507b93/aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645", size = 1624270, upload-time = "2025-07-29T05:51:35.195Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f8/cd84dee7b6ace0740908fd0af170f9fab50c2a41ccbc3806aabcb1050141/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461", size = 1677294, upload-time = "2025-07-29T05:51:37.215Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/d0f1f85e50d401eccd12bf85c46ba84f947a84839c8a1c2c5f6e8ab1eb50/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9", size = 1708958, upload-time = "2025-07-29T05:51:39.328Z" }, + { url = "https://files.pythonhosted.org/packages/d5/6b/f6fa6c5790fb602538483aa5a1b86fcbad66244997e5230d88f9412ef24c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d", size = 1651553, upload-time = "2025-07-29T05:51:41.356Z" }, + { url = "https://files.pythonhosted.org/packages/04/36/a6d36ad545fa12e61d11d1932eef273928b0495e6a576eb2af04297fdd3c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693", size = 1727688, upload-time = "2025-07-29T05:51:43.452Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c8/f195e5e06608a97a4e52c5d41c7927301bf757a8e8bb5bbf8cef6c314961/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64", size = 1761157, upload-time = "2025-07-29T05:51:45.643Z" }, + { url = "https://files.pythonhosted.org/packages/05/6a/ea199e61b67f25ba688d3ce93f63b49b0a4e3b3d380f03971b4646412fc6/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51", size = 1710050, upload-time = "2025-07-29T05:51:48.203Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2e/ffeb7f6256b33635c29dbed29a22a723ff2dd7401fff42ea60cf2060abfb/aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0", size = 422647, upload-time = "2025-07-29T05:51:50.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < 
'3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = 
"sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191, upload-time = "2024-09-04T20:43:30.027Z" }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592, upload-time = "2024-09-04T20:43:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024, upload-time = "2024-09-04T20:43:34.186Z" }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188, upload-time = "2024-09-04T20:43:36.286Z" }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571, upload-time = "2024-09-04T20:43:38.586Z" }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687, upload-time = "2024-09-04T20:43:40.084Z" }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211, 
upload-time = "2024-09-04T20:43:41.526Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325, upload-time = "2024-09-04T20:43:43.117Z" }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784, upload-time = "2024-09-04T20:43:45.256Z" }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564, upload-time = "2024-09-04T20:43:46.779Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804, upload-time = "2024-09-04T20:43:48.186Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299, upload-time = "2024-09-04T20:43:49.812Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, + { url 
= "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "connect-python" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "multidict" }, + { name = "protobuf" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/3c/3ab5feddb807e2c6cab38e2b81dd3f82d853c4ba1b0dd66b1fd3d29f664e/connect_python-0.4.2.tar.gz", hash = "sha256:8b0a49b9c5caf82776e8577fbb2a3acae1d0b6b9ab925dace43418ad1fb6002d", size = 178600, upload-time = "2025-07-01T06:44:49.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/af/ec2756b81573441fe8c706e27188cbebce437912d3bdc650e8d5d3856c1a/connect_python-0.4.2-py3-none-any.whl", hash = "sha256:8d76089f4e2bf97513eccf1f594d10fb2275bae3042f6f945ad8dea884a4f2c4", size = 42033, upload-time = "2025-07-01T06:44:47.657Z" }, +] + +[package.optional-dependencies] +compiler = [ + { name = "protogen" }, +] + +[[package]] +name = "cryptography" +version = "45.0.6" +source 
= { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949, upload-time = "2025-08-05T23:59:27.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/29/2793d178d0eda1ca4a09a7c4e09a5185e75738cc6d526433e8663b460ea6/cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74", size = 7042702, upload-time = "2025-08-05T23:58:23.464Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483, upload-time = "2025-08-05T23:58:27.132Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679, upload-time = "2025-08-05T23:58:29.152Z" }, + { url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553, upload-time = "2025-08-05T23:58:30.596Z" }, + { url = "https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499, upload-time = "2025-08-05T23:58:32.03Z" }, + { url = "https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484, upload-time = "2025-08-05T23:58:33.526Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281, upload-time = "2025-08-05T23:58:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890, upload-time = "2025-08-05T23:58:36.923Z" }, + { url = "https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247, upload-time = "2025-08-05T23:58:38.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045, upload-time = "2025-08-05T23:58:40.415Z" }, + { url = "https://files.pythonhosted.org/packages/31/c3/77722446b13fa71dddd820a5faab4ce6db49e7e0bf8312ef4192a3f78e2f/cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159", size = 2928923, upload-time = "2025-08-05T23:58:41.919Z" }, + { url = "https://files.pythonhosted.org/packages/38/63/a025c3225188a811b82932a4dcc8457a26c3729d81578ccecbcce2cb784e/cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec", size = 3403805, upload-time = "2025-08-05T23:58:43.792Z" }, + { url = "https://files.pythonhosted.org/packages/5b/af/bcfbea93a30809f126d51c074ee0fac5bd9d57d068edf56c2a73abedbea4/cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0", size = 7020111, upload-time = "2025-08-05T23:58:45.316Z" }, + { url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169, upload-time = "2025-08-05T23:58:47.121Z" }, + { url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273, upload-time = "2025-08-05T23:58:48.557Z" }, + { url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211, upload-time = "2025-08-05T23:58:50.139Z" }, + { url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732, upload-time = "2025-08-05T23:58:52.253Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655, upload-time = "2025-08-05T23:58:53.848Z" }, + { url = "https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956, upload-time = "2025-08-05T23:58:55.209Z" }, + { url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859, upload-time = "2025-08-05T23:58:56.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254, upload-time = "2025-08-05T23:58:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815, upload-time = "2025-08-05T23:59:00.283Z" }, + { url = "https://files.pythonhosted.org/packages/7e/01/aa2f4940262d588a8fdf4edabe4cda45854d00ebc6eaac12568b3a491a16/cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02", size = 2912147, upload-time = "2025-08-05T23:59:01.716Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bc/16e0276078c2de3ceef6b5a34b965f4436215efac45313df90d55f0ba2d2/cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b", size = 3390459, upload-time = "2025-08-05T23:59:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/56/d2/4482d97c948c029be08cb29854a91bd2ae8da7eb9c4152461f1244dcea70/cryptography-45.0.6-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:705bb7c7ecc3d79a50f236adda12ca331c8e7ecfbea51edd931ce5a7a7c4f012", size = 3576812, upload-time = "2025-08-05T23:59:04.833Z" }, + { url = "https://files.pythonhosted.org/packages/ec/24/55fc238fcaa122855442604b8badb2d442367dfbd5a7ca4bb0bd346e263a/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d", size = 4141694, upload-time = "2025-08-05T23:59:06.66Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/3ea4fa6fbe51baf3903806a0241c666b04c73d2358a3ecce09ebee8b9622/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d", size = 4375010, upload-time = "2025-08-05T23:59:08.14Z" }, + { url = "https://files.pythonhosted.org/packages/50/42/ec5a892d82d2a2c29f80fc19ced4ba669bca29f032faf6989609cff1f8dc/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da", size = 4141377, upload-time = "2025-08-05T23:59:09.584Z" }, + { url = "https://files.pythonhosted.org/packages/e7/d7/246c4c973a22b9c2931999da953a2c19cae7c66b9154c2d62ffed811225e/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db", size = 4374609, upload-time = "2025-08-05T23:59:11.923Z" }, + { url = "https://files.pythonhosted.org/packages/78/6d/c49ccf243f0a1b0781c2a8de8123ee552f0c8a417c6367a24d2ecb7c11b3/cryptography-45.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2384f2ab18d9be88a6e4f8972923405e2dbb8d3e16c6b43f15ca491d7831bd18", size = 3322156, upload-time = "2025-08-05T23:59:13.597Z" }, + { url = "https://files.pythonhosted.org/packages/61/69/c252de4ec047ba2f567ecb53149410219577d408c2aea9c989acae7eafce/cryptography-45.0.6-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fc022c1fa5acff6def2fc6d7819bbbd31ccddfe67d075331a65d9cfb28a20983", size = 3584669, upload-time = "2025-08-05T23:59:15.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/fe/deea71e9f310a31fe0a6bfee670955152128d309ea2d1c79e2a5ae0f0401/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427", size = 4153022, upload-time = "2025-08-05T23:59:16.954Z" }, + { url = "https://files.pythonhosted.org/packages/60/45/a77452f5e49cb580feedba6606d66ae7b82c128947aa754533b3d1bd44b0/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b", size = 4386802, upload-time = "2025-08-05T23:59:18.55Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b9/a2f747d2acd5e3075fdf5c145c7c3568895daaa38b3b0c960ef830db6cdc/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c", size = 4152706, upload-time = "2025-08-05T23:59:20.044Z" }, + { url = "https://files.pythonhosted.org/packages/81/ec/381b3e8d0685a3f3f304a382aa3dfce36af2d76467da0fd4bb21ddccc7b2/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385", size = 4386740, upload-time = "2025-08-05T23:59:21.525Z" }, + { url = "https://files.pythonhosted.org/packages/0a/76/cf8d69da8d0b5ecb0db406f24a63a3f69ba5e791a11b782aeeefef27ccbb/cryptography-45.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:629127cfdcdc6806dfe234734d7cb8ac54edaf572148274fa377a7d3405b0043", size = 3331874, upload-time = "2025-08-05T23:59:23.017Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/36/0da0a49409f6b47cc2d060dc8c9040b897b5902a8a4e37d9bc1deb11f680/frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a", size = 81304, upload-time = "2025-06-09T22:59:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/77/f0/77c11d13d39513b298e267b22eb6cb559c103d56f155aa9a49097221f0b6/frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61", size = 47735, upload-time = "2025-06-09T22:59:48.133Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/12/9d07fa18971a44150593de56b2f2947c46604819976784bcf6ea0d5db43b/frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d", size = 46775, upload-time = "2025-06-09T22:59:49.564Z" }, + { url = "https://files.pythonhosted.org/packages/70/34/f73539227e06288fcd1f8a76853e755b2b48bca6747e99e283111c18bcd4/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e", size = 224644, upload-time = "2025-06-09T22:59:51.35Z" }, + { url = "https://files.pythonhosted.org/packages/fb/68/c1d9c2f4a6e438e14613bad0f2973567586610cc22dcb1e1241da71de9d3/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9", size = 222125, upload-time = "2025-06-09T22:59:52.884Z" }, + { url = "https://files.pythonhosted.org/packages/b9/d0/98e8f9a515228d708344d7c6986752be3e3192d1795f748c24bcf154ad99/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c", size = 233455, upload-time = "2025-06-09T22:59:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/79/df/8a11bcec5600557f40338407d3e5bea80376ed1c01a6c0910fcfdc4b8993/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981", size = 227339, upload-time = "2025-06-09T22:59:56.187Z" }, + { url = "https://files.pythonhosted.org/packages/50/82/41cb97d9c9a5ff94438c63cc343eb7980dac4187eb625a51bdfdb7707314/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615", size = 212969, upload-time = "2025-06-09T22:59:57.604Z" }, + { url = "https://files.pythonhosted.org/packages/13/47/f9179ee5ee4f55629e4f28c660b3fdf2775c8bfde8f9c53f2de2d93f52a9/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50", size = 222862, upload-time = "2025-06-09T22:59:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/df81e41ec6b953902c8b7e3a83bee48b195cb0e5ec2eabae5d8330c78038/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa", size = 222492, upload-time = "2025-06-09T23:00:01.026Z" }, + { url = "https://files.pythonhosted.org/packages/84/17/30d6ea87fa95a9408245a948604b82c1a4b8b3e153cea596421a2aef2754/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577", size = 238250, upload-time = "2025-06-09T23:00:03.401Z" }, + { url = "https://files.pythonhosted.org/packages/8f/00/ecbeb51669e3c3df76cf2ddd66ae3e48345ec213a55e3887d216eb4fbab3/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59", size = 218720, upload-time = "2025-06-09T23:00:05.282Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c0/c224ce0e0eb31cc57f67742071bb470ba8246623c1823a7530be0e76164c/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e", size = 232585, upload-time = "2025-06-09T23:00:07.962Z" }, + { url = "https://files.pythonhosted.org/packages/55/3c/34cb694abf532f31f365106deebdeac9e45c19304d83cf7d51ebbb4ca4d1/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd", size = 234248, upload-time = "2025-06-09T23:00:09.428Z" }, + { url = "https://files.pythonhosted.org/packages/98/c0/2052d8b6cecda2e70bd81299e3512fa332abb6dcd2969b9c80dfcdddbf75/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718", size = 221621, upload-time = "2025-06-09T23:00:11.32Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bf/7dcebae315436903b1d98ffb791a09d674c88480c158aa171958a3ac07f0/frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e", size = 39578, upload-time = "2025-06-09T23:00:13.526Z" }, + { url = "https://files.pythonhosted.org/packages/8f/5f/f69818f017fa9a3d24d1ae39763e29b7f60a59e46d5f91b9c6b21622f4cd/frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464", size = 43830, upload-time = "2025-06-09T23:00:14.98Z" }, + { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, + { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, + { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, + { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, + { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, + { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, + { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, + { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, + { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, + { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, + { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, + { url = "https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, + { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, + { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, upload-time = "2025-06-09T23:01:19.649Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, + { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, + { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, + { url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, 
upload-time = "2025-06-09T23:01:35.503Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, + { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, + { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, + { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, + { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = "2025-06-09T23:02:00.493Z" }, + { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = 
"2025-04-14T10:17:01.271Z" }, +] + +[[package]] +name = "grpcio" +version = "1.74.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/54/68e51a90797ad7afc5b0a7881426c337f6a9168ebab73c3210b76aa7c90d/grpcio-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907", size = 5481935, upload-time = "2025-07-24T18:52:43.756Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/af817c7e9843929e93e54d09c9aee2555c2e8d81b93102a9426b36e91833/grpcio-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb", size = 10986796, upload-time = "2025-07-24T18:52:47.219Z" }, + { url = "https://files.pythonhosted.org/packages/d5/94/d67756638d7bb07750b07d0826c68e414124574b53840ba1ff777abcd388/grpcio-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486", size = 5983663, upload-time = "2025-07-24T18:52:49.463Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/c5e4853bf42148fea8532d49e919426585b73eafcf379a712934652a8de9/grpcio-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11", size = 6653765, upload-time = "2025-07-24T18:52:51.094Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/a1991dd64b331d199935e096cc9daa3415ee5ccbe9f909aa48eded7bba34/grpcio-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9", size = 6215172, upload-time = "2025-07-24T18:52:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/7cef3dbb3b073d0ce34fd507efc44ac4c9442a0ef9fba4fb3f5c551efef5/grpcio-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc", size = 6329142, upload-time = "2025-07-24T18:52:54.927Z" }, + { url = "https://files.pythonhosted.org/packages/bf/d3/587920f882b46e835ad96014087054655312400e2f1f1446419e5179a383/grpcio-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e", size = 7018632, upload-time = "2025-07-24T18:52:56.523Z" }, + { url = "https://files.pythonhosted.org/packages/1f/95/c70a3b15a0bc83334b507e3d2ae20ee8fa38d419b8758a4d838f5c2a7d32/grpcio-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82", size = 6509641, upload-time = "2025-07-24T18:52:58.495Z" }, + { url = "https://files.pythonhosted.org/packages/4b/06/2e7042d06247d668ae69ea6998eca33f475fd4e2855f94dcb2aa5daef334/grpcio-1.74.0-cp310-cp310-win32.whl", hash = "sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7", size = 3817478, upload-time = "2025-07-24T18:53:00.128Z" }, + { url = "https://files.pythonhosted.org/packages/93/20/e02b9dcca3ee91124060b65bbf5b8e1af80b3b76a30f694b44b964ab4d71/grpcio-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5", size = 4493971, upload-time = 
"2025-07-24T18:53:02.068Z" }, + { url = "https://files.pythonhosted.org/packages/e7/77/b2f06db9f240a5abeddd23a0e49eae2b6ac54d85f0e5267784ce02269c3b/grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31", size = 5487368, upload-time = "2025-07-24T18:53:03.548Z" }, + { url = "https://files.pythonhosted.org/packages/48/99/0ac8678a819c28d9a370a663007581744a9f2a844e32f0fa95e1ddda5b9e/grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4", size = 10999804, upload-time = "2025-07-24T18:53:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/45/c6/a2d586300d9e14ad72e8dc211c7aecb45fe9846a51e558c5bca0c9102c7f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce", size = 5987667, upload-time = "2025-07-24T18:53:07.157Z" }, + { url = "https://files.pythonhosted.org/packages/c9/57/5f338bf56a7f22584e68d669632e521f0de460bb3749d54533fc3d0fca4f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3", size = 6655612, upload-time = "2025-07-24T18:53:09.244Z" }, + { url = "https://files.pythonhosted.org/packages/82/ea/a4820c4c44c8b35b1903a6c72a5bdccec92d0840cf5c858c498c66786ba5/grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182", size = 6219544, upload-time = "2025-07-24T18:53:11.221Z" }, + { url = "https://files.pythonhosted.org/packages/a4/17/0537630a921365928f5abb6d14c79ba4dcb3e662e0dbeede8af4138d9dcf/grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d", size = 6334863, upload-time = "2025-07-24T18:53:12.925Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a6/85ca6cb9af3f13e1320d0a806658dca432ff88149d5972df1f7b51e87127/grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f", size = 7019320, upload-time = "2025-07-24T18:53:15.002Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a7/fe2beab970a1e25d2eff108b3cf4f7d9a53c185106377a3d1989216eba45/grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4", size = 6514228, upload-time = "2025-07-24T18:53:16.999Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c2/2f9c945c8a248cebc3ccda1b7a1bf1775b9d7d59e444dbb18c0014e23da6/grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b", size = 3817216, upload-time = "2025-07-24T18:53:20.564Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d1/a9cf9c94b55becda2199299a12b9feef0c79946b0d9d34c989de6d12d05d/grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11", size = 4495380, upload-time = "2025-07-24T18:53:22.058Z" }, + { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" }, + { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 6621763, upload-time = "2025-07-24T18:53:29.193Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" }, + { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" }, + { url = "https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" }, + { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d8/1004a5f468715221450e66b051c839c2ce9a985aa3ee427422061fcbb6aa/grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89", size = 5449488, upload-time = "2025-07-24T18:53:41.174Z" }, + { url = "https://files.pythonhosted.org/packages/94/0e/33731a03f63740d7743dced423846c831d8e6da808fcd02821a4416df7fa/grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01", size = 10974059, upload-time = "2025-07-24T18:53:43.066Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/c6/3d2c14d87771a421205bdca991467cfe473ee4c6a1231c1ede5248c62ab8/grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e", size = 5945647, upload-time = "2025-07-24T18:53:45.269Z" }, + { url = "https://files.pythonhosted.org/packages/c5/83/5a354c8aaff58594eef7fffebae41a0f8995a6258bbc6809b800c33d4c13/grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91", size = 6626101, upload-time = "2025-07-24T18:53:47.015Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ca/4fdc7bf59bf6994aa45cbd4ef1055cd65e2884de6113dbd49f75498ddb08/grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249", size = 6182562, upload-time = "2025-07-24T18:53:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/fd/48/2869e5b2c1922583686f7ae674937986807c2f676d08be70d0a541316270/grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362", size = 6303425, upload-time = "2025-07-24T18:53:50.847Z" }, + { url = "https://files.pythonhosted.org/packages/a6/0e/bac93147b9a164f759497bc6913e74af1cb632c733c7af62c0336782bd38/grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f", size = 6996533, upload-time = "2025-07-24T18:53:52.747Z" }, + { url = "https://files.pythonhosted.org/packages/84/35/9f6b2503c1fd86d068b46818bbd7329db26a87cdd8c01e0d1a9abea1104c/grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20", size = 6491489, upload-time = "2025-07-24T18:53:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/75/33/a04e99be2a82c4cbc4039eb3a76f6c3632932b9d5d295221389d10ac9ca7/grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa", size = 3805811, upload-time = "2025-07-24T18:53:56.798Z" }, + { url = "https://files.pythonhosted.org/packages/34/80/de3eb55eb581815342d097214bed4c59e806b05f1b3110df03b2280d6dfd/grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24", size = 4489214, upload-time = "2025-07-24T18:53:59.771Z" }, +] + +[[package]] +name = "grpcio-status" +version = "1.74.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/22/238c5f01e6837df54494deb08d5c772bc3f5bf5fb80a15dce254892d1a81/grpcio_status-1.74.0.tar.gz", hash = "sha256:c58c1b24aa454e30f1fc6a7e0dbbc194c54a408143971a94b5f4e40bb5831432", size = 13662, upload-time = "2025-07-24T19:01:56.874Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/aa/1b1fe7d8ab699e1ec26d3a36b91d3df9f83a30abc07d4c881d0296b17b67/grpcio_status-1.74.0-py3-none-any.whl", hash = "sha256:52cdbd759a6760fc8f668098a03f208f493dd5c76bf8e02598bbbaf1f6fc2876", size = 14425, upload-time = "2025-07-24T19:01:19.963Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.74.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { 
url = "https://files.pythonhosted.org/packages/90/c8/bca79cb8c14bb63027831039919c801db9f593c7504c09433934f5dff6a4/grpcio_tools-1.74.0.tar.gz", hash = "sha256:88ab9eb18b6ac1b4872add6b394073bd8d44eee7c32e4dc60a022e25ffaffb95", size = 5390007, upload-time = "2025-07-24T18:57:23.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/9e/8bbf4670f079d584b6f59a66b992791dc1ff08228e9b1256e72edb5196ff/grpcio_tools-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:796796b4d7e83a9cdd03bb95c6774fca060fd209d83fb9af5f043e9c6f06a1fa", size = 2545411, upload-time = "2025-07-24T18:55:54.457Z" }, + { url = "https://files.pythonhosted.org/packages/86/00/b483ade4e5a939c7890b8bd4041554172ad5cc2987b435e73f438086ffa0/grpcio_tools-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d576b7786207359b63c2c2e3c387639b4177cf53b1e43d020b005deead32049e", size = 5841662, upload-time = "2025-07-24T18:55:57.363Z" }, + { url = "https://files.pythonhosted.org/packages/43/70/e6d306bd3e885a0c417da27b40bb6ccdec6b2fd3081cb78f31ab4f13a73f/grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d73686934bfdd868be0dbfbfcba2a5f50a8b0b71362e86a133e8efcbdc5cad5d", size = 2516224, upload-time = "2025-07-24T18:55:58.763Z" }, + { url = "https://files.pythonhosted.org/packages/bd/99/42092932ce8802d481d41d4294b611f4269eafb2c016833f5115d804aeba/grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:187f99fd22de6e63fbf4f30b2e054a2e3c4fb80beec73b1f4716ea86192050f5", size = 2904894, upload-time = "2025-07-24T18:56:00.138Z" }, + { url = "https://files.pythonhosted.org/packages/63/04/2c2f5b933a717ff8b9da24d852f224ed4031f39fd75f182fbf36df267040/grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bef8a16c34e68aaa2d246cd358629f8103730cb96cfc521f720378995f218282", size = 2656144, upload-time = "2025-07-24T18:56:01.589Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f6/fe326c5e009541fe5e6d285c7f8c17f444990ce94d0722c22d590d919e52/grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e41084adbae7176097aa9d08a13d98c189895ec8c967f5461975750d3537625a", size = 3052117, upload-time = "2025-07-24T18:56:03.303Z" }, + { url = "https://files.pythonhosted.org/packages/d9/4d/0ced9b543bbd2df39c8b66116ac7a15faff37be4466580329e917ed12bf0/grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6b61337b47d981b4d270e3caa83607a900169617478c034e6f6baf16ab22d333", size = 3501738, upload-time = "2025-07-24T18:56:05.993Z" }, + { url = "https://files.pythonhosted.org/packages/22/b8/b81de7f416aa386f0c6a39301af5efb65f8fa74ab83d5f622914262a65db/grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7e920982b4eaab253affbd45ec6d5ec12d895f5c143374ef4c3eadef49162373", size = 3125555, upload-time = "2025-07-24T18:56:07.64Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cf695ebd5562a8b633114d0ca5084b908b17a528c4fa844a752c1fddf6a7/grpcio_tools-1.74.0-cp310-cp310-win32.whl", hash = "sha256:b966f3b93f9d24151591d096ecf9c3fdb419a50d486761f7d28a9a69b028b627", size = 992982, upload-time = "2025-07-24T18:56:09.391Z" }, + { url = "https://files.pythonhosted.org/packages/f3/01/e315fc3941e7f48d29aa4d0335081de4b9ac909c5092dab1d3263a191c0f/grpcio_tools-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:03787990b56f5c3b3f72c722a7e74fbc5a3b769bbc31ad426e2c6f6a28a9d7c8", size = 1157424, upload-time = "2025-07-24T18:56:10.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/50/7bafe168b4b3494e7b96d4838b0d35eab62e5c74bf9c91e8f14233c94f60/grpcio_tools-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:9d9e28fbbab9b9e923c3d286949e8ff81ebbb402458698f0a2b1183b539779db", size = 2545457, upload-time = "2025-07-24T18:56:12.589Z" }, + { url = "https://files.pythonhosted.org/packages/8b/1c/8a0eb4e101f2fe8edc12851ddfccf4f2498d5f23d444ea73d09c94202b46/grpcio_tools-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:41040eb1b5d1e582687f6f19cf2efc4c191b6eab56b16f6fba50ac085c5ca4dd", size = 5842973, upload-time = "2025-07-24T18:56:14.063Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f2/eb1bac2dd6397f5ca271e6cb2566b61d4a4bf8df07db0988bc55200f254d/grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:1fdc013118e4e9054b6e1a64d16a0d4a17a4071042e674ada8673406ddb26e59", size = 2515918, upload-time = "2025-07-24T18:56:15.572Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fe/d270fd30ccd04d5faa9c3f2796ce56a0597eddf327a0fc746ccbb273cdd9/grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f037414c527a2c4a3af15451d9e58d7856d0a62b3f6dd3f5b969ecba82f5e843", size = 2904944, upload-time = "2025-07-24T18:56:17.091Z" }, + { url = "https://files.pythonhosted.org/packages/91/9f/3adb6e1ae826d9097745f4ad38a84c8c2edb4d768871222c95aa541f8e54/grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536f53a6a8d1ba1c469d085066cfa0dd3bb51f07013b71857bc3ad1eabe3ab49", size = 2656300, upload-time = "2025-07-24T18:56:18.51Z" }, + { url = "https://files.pythonhosted.org/packages/3f/15/e532439218674c9e451e7f965a0a6bcd53344c4178c62dc1acd66ed93797/grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1e23ff54dea7f6e9543dcebd2c0f4b7c9af39812966c05e1c5289477cb2bf2f7", size = 3051857, upload-time = "2025-07-24T18:56:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/ca/06/a63aeb1a16ab1508f2ed349faafb4e2e1fb2b048168a033e7392adab14c7/grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76072dee9fa99b33eb0c334a16e70d694df762df705c7a2481f702af33d81a28", size = 3501682, upload-time = "2025-07-24T18:56:21.65Z" }, + { url = "https://files.pythonhosted.org/packages/47/1f/81da8c39874d9152fba5fa2bf3b6708c29ea3621fde30667509b9124ef06/grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bdf91eb722f2990085b1342c277e212ec392e37bd493a2a21d9eb9238f28c3e", size = 3125364, upload-time = "2025-07-24T18:56:23.095Z" }, + { url = "https://files.pythonhosted.org/packages/a3/64/a23256ecd34ceebe8aac8adedd4f65ed240572662899acb779cfcf5e0277/grpcio_tools-1.74.0-cp311-cp311-win32.whl", hash = "sha256:a036cd2a4223901e7a9f6a9b394326a9352a4ad70bdd3f1d893f1b231fcfdf7e", size = 993385, upload-time = "2025-07-24T18:56:25.054Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b8/a0d7359d93f0a2bbaf3b0d43eb8fa3e9f315e03ef4a4ebe05b4315a64644/grpcio_tools-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:d1fdf245178158a92a2dc78e3545b6d13b6c917d9b80931fc85cfb3e9534a07d", size = 1157908, upload-time = "2025-07-24T18:56:27.042Z" }, + { url = "https://files.pythonhosted.org/packages/5e/9c/08a4018e19c937af14bfa052ad3d7826a1687da984992d31d15139c7c8d3/grpcio_tools-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:61d84f6050d7170712600f7ee1dac8849f5dc0bfe0044dd71132ee1e7aa2b373", size = 2546097, upload-time = "2025-07-24T18:56:28.565Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/7b/b2985b1b8aa295d745b2e105c99401ad674fcdc2f5a9c8eb3ec0f57ad397/grpcio_tools-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:f0129a62711dbc1f1efd51d069d2ce0631d69e033bf3a046606c623acf935e08", size = 5839819, upload-time = "2025-07-24T18:56:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/de/40/de0fe696d50732c8b1f0f9271b05a3082f2a91e77e28d70dd3ffc1e4aaa5/grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:5ec661f3bb41f0d2a30125ea382f4d5c874bf4f26d4d8e3839bb7e3b3c037b3e", size = 2517611, upload-time = "2025-07-24T18:56:32.371Z" }, + { url = "https://files.pythonhosted.org/packages/a0/6d/949d3b339c3ff3c631168b355ce7be937f10feb894fdabe66c48ebd82394/grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7970a9cf3002bec2eff5a449ac7398b77e5d171cbb534c47258c72409d0aea74", size = 2905274, upload-time = "2025-07-24T18:56:33.872Z" }, + { url = "https://files.pythonhosted.org/packages/06/6b/f9b2e7b15c147ad6164e9ac7b20ee208435ca3243bcc97feb1ab74dcb902/grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f56d67b04790f84e216353341c6b298f1aeb591e1797fe955f606516c640936", size = 2656414, upload-time = "2025-07-24T18:56:35.47Z" }, + { url = "https://files.pythonhosted.org/packages/bd/de/621dde431314f49668c25b26a12f624c3da8748ac29df9db7d0a2596e575/grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3d0c33cc984d21525f190cb1af479f8da46370df5f2ced1a4e50769ababd0c0", size = 3052690, upload-time = "2025-07-24T18:56:37.799Z" }, + { url = "https://files.pythonhosted.org/packages/40/82/d43c9484174feea5a153371a011e06eabe508b97519a1e9a338b7ebdf43b/grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:88e535c1cf349e57e371529ea9918f811c5eff88161f322bbc06d6222bad6d50", size = 3501214, upload-time = "2025-07-24T18:56:39.493Z" }, + { url = "https://files.pythonhosted.org/packages/30/fc/195b90e4571f6c70665a25c7b748e13c2087025660d6d5aead9093f28b18/grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3cf9401ce72bc49582c2d80e0a2ee0e573e1c3c998c8bc5f739db8845e8e148", size = 3125689, upload-time = "2025-07-24T18:56:41.555Z" }, + { url = "https://files.pythonhosted.org/packages/cb/81/fe8980e5fb768090ffc531902ec1b7e5bf1d92108ecf8b7305405b297475/grpcio_tools-1.74.0-cp312-cp312-win32.whl", hash = "sha256:b63e250da44b15c67b9a34c5c30c81059bde528fc8af092d7f43194469f7c719", size = 993069, upload-time = "2025-07-24T18:56:43.088Z" }, + { url = "https://files.pythonhosted.org/packages/63/a9/7b081924d655787d56d2b409f703f0bf457b3dac10a67ad04dc7338e9aae/grpcio_tools-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:519d7cae085ae6695a8031bb990bf7766a922332b0a531e51342abc5431b78b5", size = 1157502, upload-time = "2025-07-24T18:56:44.814Z" }, + { url = "https://files.pythonhosted.org/packages/2f/65/307a72cf4bfa553a25e284bd1f27b94a53816ac01ddf432c398117b91b2a/grpcio_tools-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e2e22460355adbd0f25fdd7ed8b9ae53afb3875b9d5f34cdf1cf12559418245e", size = 2545750, upload-time = "2025-07-24T18:56:46.386Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8e/9b2217c15baadc7cfca3eba9f980e147452ca82f41767490f619edea3489/grpcio_tools-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:0cab5a2c6ae75b555fee8a1a9a9b575205171e1de392fe2d4139a29e67d8f5bb", size = 5838169, upload-time = "2025-07-24T18:56:48.057Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/42/a6a158b7e91c0a358cddf3f9088b004c2bfa42d1f96154b9b8eb17e16d73/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:9b18afca48b55832402a716ea4634ef2b68927a8a17ddf4038f51812299255c9", size = 2517140, upload-time = "2025-07-24T18:56:49.696Z" }, + { url = "https://files.pythonhosted.org/packages/05/db/d4576a07b2d1211822a070f76a99a9f4f4cb63496a02964ce77c88df8a28/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85f442a9e89e276bf89a0c9c76ea71647a927d967759333c1fa40300c27f7bd", size = 2905214, upload-time = "2025-07-24T18:56:51.768Z" }, + { url = "https://files.pythonhosted.org/packages/77/dc/3713e75751f862d8c84f823ba935d486c0aac0b6f789fa61fbde04ad5019/grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051ce925b0b99ae2daf61b3cba19962b8655cc2a72758ce4081b89272206f5a3", size = 2656245, upload-time = "2025-07-24T18:56:53.877Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e4/01f9e8e0401d8e11a70ae8aff6899eb8c16536f69a0a9ffb25873588721c/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:98c7b8eb0de6984cd7fa7335ce3383b3bb9a1559edc238c811df88008d5d3593", size = 3052327, upload-time = "2025-07-24T18:56:55.535Z" }, + { url = "https://files.pythonhosted.org/packages/28/c2/264b4e705375a834c9c7462847ae435c0be1644f03a705d3d7464af07bd5/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f8f7d17b7573b9a2a6b4183fa4a56a2ab17370c8d0541e1424cf0c9c6f863434", size = 3500706, upload-time = "2025-07-24T18:56:57.245Z" }, + { url = "https://files.pythonhosted.org/packages/ee/c0/cc034cec5871a1918e7888e8ce700e06fab5bbb328f998a2f2750cd603b5/grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:db08b91ea0cd66dc4b1b929100e7aa84c9c10c51573c8282ec1ba05b41f887ef", size = 3125098, upload-time = "2025-07-24T18:56:59.02Z" }, + { url = "https://files.pythonhosted.org/packages/69/55/5792b681af82b3ff1e50ce0ccfbb6d52fc68a13932ed3da57e58d7dfb67b/grpcio_tools-1.74.0-cp313-cp313-win32.whl", hash = "sha256:4b6c5efb331ae9e5f614437f4a5938459a8a5a1ab3dfe133d2bbdeaba39b894d", size = 992431, upload-time = "2025-07-24T18:57:00.618Z" }, + { url = "https://files.pythonhosted.org/packages/94/9f/626f0fe6bfc1c6917785c6a5ee2eb8c07b5a30771e4bf4cff3c1ab5b431b/grpcio_tools-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8324cd67f61f7900d227b36913ee5f0302ba3ba8777c8bc705afa8174098d28", size = 1157064, upload-time = "2025-07-24T18:57:02.579Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "multidict" +version = "6.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/67/414933982bce2efce7cbcb3169eaaf901e0f25baec69432b4874dfb1f297/multidict-6.6.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a2be5b7b35271f7fff1397204ba6708365e3d773579fe2a30625e16c4b4ce817", size = 77017, 
upload-time = "2025-06-30T15:50:58.931Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fe/d8a3ee1fad37dc2ef4f75488b0d9d4f25bf204aad8306cbab63d97bff64a/multidict-6.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12f4581d2930840295c461764b9a65732ec01250b46c6b2c510d7ee68872b140", size = 44897, upload-time = "2025-06-30T15:51:00.999Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e0/265d89af8c98240265d82b8cbcf35897f83b76cd59ee3ab3879050fd8c45/multidict-6.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dd7793bab517e706c9ed9d7310b06c8672fd0aeee5781bfad612f56b8e0f7d14", size = 44574, upload-time = "2025-06-30T15:51:02.449Z" }, + { url = "https://files.pythonhosted.org/packages/e6/05/6b759379f7e8e04ccc97cfb2a5dcc5cdbd44a97f072b2272dc51281e6a40/multidict-6.6.3-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:72d8815f2cd3cf3df0f83cac3f3ef801d908b2d90409ae28102e0553af85545a", size = 225729, upload-time = "2025-06-30T15:51:03.794Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f5/8d5a15488edd9a91fa4aad97228d785df208ed6298580883aa3d9def1959/multidict-6.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:531e331a2ee53543ab32b16334e2deb26f4e6b9b28e41f8e0c87e99a6c8e2d69", size = 242515, upload-time = "2025-06-30T15:51:05.002Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b5/a8f317d47d0ac5bb746d6d8325885c8967c2a8ce0bb57be5399e3642cccb/multidict-6.6.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:42ca5aa9329a63be8dc49040f63817d1ac980e02eeddba763a9ae5b4027b9c9c", size = 222224, upload-time = "2025-06-30T15:51:06.148Z" }, + { url = "https://files.pythonhosted.org/packages/76/88/18b2a0d5e80515fa22716556061189c2853ecf2aa2133081ebbe85ebea38/multidict-6.6.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:208b9b9757060b9faa6f11ab4bc52846e4f3c2fb8b14d5680c8aac80af3dc751", size = 253124, upload-time = "2025-06-30T15:51:07.375Z" }, + { url = "https://files.pythonhosted.org/packages/62/bf/ebfcfd6b55a1b05ef16d0775ae34c0fe15e8dab570d69ca9941073b969e7/multidict-6.6.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:acf6b97bd0884891af6a8b43d0f586ab2fcf8e717cbd47ab4bdddc09e20652d8", size = 251529, upload-time = "2025-06-30T15:51:08.691Z" }, + { url = "https://files.pythonhosted.org/packages/44/11/780615a98fd3775fc309d0234d563941af69ade2df0bb82c91dda6ddaea1/multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:68e9e12ed00e2089725669bdc88602b0b6f8d23c0c95e52b95f0bc69f7fe9b55", size = 241627, upload-time = "2025-06-30T15:51:10.605Z" }, + { url = "https://files.pythonhosted.org/packages/28/3d/35f33045e21034b388686213752cabc3a1b9d03e20969e6fa8f1b1d82db1/multidict-6.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:05db2f66c9addb10cfa226e1acb363450fab2ff8a6df73c622fefe2f5af6d4e7", size = 239351, upload-time = "2025-06-30T15:51:12.18Z" }, + { url = "https://files.pythonhosted.org/packages/6e/cc/ff84c03b95b430015d2166d9aae775a3985d757b94f6635010d0038d9241/multidict-6.6.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:0db58da8eafb514db832a1b44f8fa7906fdd102f7d982025f816a93ba45e3dcb", size = 233429, upload-time = "2025-06-30T15:51:13.533Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/f0/8cd49a0b37bdea673a4b793c2093f2f4ba8e7c9d6d7c9bd672fd6d38cd11/multidict-6.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:14117a41c8fdb3ee19c743b1c027da0736fdb79584d61a766da53d399b71176c", size = 243094, upload-time = "2025-06-30T15:51:14.815Z" }, + { url = "https://files.pythonhosted.org/packages/96/19/5d9a0cfdafe65d82b616a45ae950975820289069f885328e8185e64283c2/multidict-6.6.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:877443eaaabcd0b74ff32ebeed6f6176c71850feb7d6a1d2db65945256ea535c", size = 248957, upload-time = "2025-06-30T15:51:16.076Z" }, + { url = "https://files.pythonhosted.org/packages/e6/dc/c90066151da87d1e489f147b9b4327927241e65f1876702fafec6729c014/multidict-6.6.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:70b72e749a4f6e7ed8fb334fa8d8496384840319512746a5f42fa0aec79f4d61", size = 243590, upload-time = "2025-06-30T15:51:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/ec/39/458afb0cccbb0ee9164365273be3e039efddcfcb94ef35924b7dbdb05db0/multidict-6.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43571f785b86afd02b3855c5ac8e86ec921b760298d6f82ff2a61daf5a35330b", size = 237487, upload-time = "2025-06-30T15:51:19.039Z" }, + { url = "https://files.pythonhosted.org/packages/35/38/0016adac3990426610a081787011177e661875546b434f50a26319dc8372/multidict-6.6.3-cp310-cp310-win32.whl", hash = "sha256:20c5a0c3c13a15fd5ea86c42311859f970070e4e24de5a550e99d7c271d76318", size = 41390, upload-time = "2025-06-30T15:51:20.362Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d2/17897a8f3f2c5363d969b4c635aa40375fe1f09168dc09a7826780bfb2a4/multidict-6.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab0a34a007704c625e25a9116c6770b4d3617a071c8a7c30cd338dfbadfe6485", size = 45954, upload-time = "2025-06-30T15:51:21.383Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5f/d4a717c1e457fe44072e33fa400d2b93eb0f2819c4d669381f925b7cba1f/multidict-6.6.3-cp310-cp310-win_arm64.whl", hash = "sha256:769841d70ca8bdd140a715746199fc6473414bd02efd678d75681d2d6a8986c5", size = 42981, upload-time = "2025-06-30T15:51:22.809Z" }, + { url = "https://files.pythonhosted.org/packages/08/f0/1a39863ced51f639c81a5463fbfa9eb4df59c20d1a8769ab9ef4ca57ae04/multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c", size = 76445, upload-time = "2025-06-30T15:51:24.01Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0e/a7cfa451c7b0365cd844e90b41e21fab32edaa1e42fc0c9f68461ce44ed7/multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df", size = 44610, upload-time = "2025-06-30T15:51:25.158Z" }, + { url = "https://files.pythonhosted.org/packages/c6/bb/a14a4efc5ee748cc1904b0748be278c31b9295ce5f4d2ef66526f410b94d/multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d", size = 44267, upload-time = "2025-06-30T15:51:26.326Z" }, + { url = "https://files.pythonhosted.org/packages/c2/f8/410677d563c2d55e063ef74fe578f9d53fe6b0a51649597a5861f83ffa15/multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539", size = 230004, upload-time = "2025-06-30T15:51:27.491Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/df/2b787f80059314a98e1ec6a4cc7576244986df3e56b3c755e6fc7c99e038/multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462", size = 247196, upload-time = "2025-06-30T15:51:28.762Z" }, + { url = "https://files.pythonhosted.org/packages/05/f2/f9117089151b9a8ab39f9019620d10d9718eec2ac89e7ca9d30f3ec78e96/multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9", size = 225337, upload-time = "2025-06-30T15:51:30.025Z" }, + { url = "https://files.pythonhosted.org/packages/93/2d/7115300ec5b699faa152c56799b089a53ed69e399c3c2d528251f0aeda1a/multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7", size = 257079, upload-time = "2025-06-30T15:51:31.716Z" }, + { url = "https://files.pythonhosted.org/packages/15/ea/ff4bab367623e39c20d3b07637225c7688d79e4f3cc1f3b9f89867677f9a/multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9", size = 255461, upload-time = "2025-06-30T15:51:33.029Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/2c9246cda322dfe08be85f1b8739646f2c4c5113a1422d7a407763422ec4/multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821", size = 246611, upload-time = "2025-06-30T15:51:34.47Z" }, + { url = "https://files.pythonhosted.org/packages/a8/62/279c13d584207d5697a752a66ffc9bb19355a95f7659140cb1b3cf82180e/multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d", size = 243102, upload-time = "2025-06-30T15:51:36.525Z" }, + { url = "https://files.pythonhosted.org/packages/69/cc/e06636f48c6d51e724a8bc8d9e1db5f136fe1df066d7cafe37ef4000f86a/multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6", size = 238693, upload-time = "2025-06-30T15:51:38.278Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/66c9d8fb9acf3b226cdd468ed009537ac65b520aebdc1703dd6908b19d33/multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430", size = 246582, upload-time = "2025-06-30T15:51:39.709Z" }, + { url = "https://files.pythonhosted.org/packages/cf/01/c69e0317be556e46257826d5449feb4e6aa0d18573e567a48a2c14156f1f/multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b", size = 253355, upload-time = "2025-06-30T15:51:41.013Z" }, + { url = "https://files.pythonhosted.org/packages/c0/da/9cc1da0299762d20e626fe0042e71b5694f9f72d7d3f9678397cbaa71b2b/multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56", size = 247774, upload-time = "2025-06-30T15:51:42.291Z" }, + { url = "https://files.pythonhosted.org/packages/e6/91/b22756afec99cc31105ddd4a52f95ab32b1a4a58f4d417979c570c4a922e/multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183", size = 242275, upload-time = "2025-06-30T15:51:43.642Z" }, + { url = "https://files.pythonhosted.org/packages/be/f1/adcc185b878036a20399d5be5228f3cbe7f823d78985d101d425af35c800/multidict-6.6.3-cp311-cp311-win32.whl", hash = "sha256:9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5", size = 41290, upload-time = "2025-06-30T15:51:45.264Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d4/27652c1c6526ea6b4f5ddd397e93f4232ff5de42bea71d339bc6a6cc497f/multidict-6.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2", size = 45942, upload-time = "2025-06-30T15:51:46.377Z" }, + { url = "https://files.pythonhosted.org/packages/16/18/23f4932019804e56d3c2413e237f866444b774b0263bcb81df2fdecaf593/multidict-6.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb", size = 42880, upload-time = "2025-06-30T15:51:47.561Z" }, + { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = "2025-06-30T15:51:48.728Z" }, + { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" }, + { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, upload-time = "2025-06-30T15:51:51.331Z" }, + { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" }, + { url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" }, + { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = "2025-06-30T15:51:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" }, + { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" }, + { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, upload-time = "2025-06-30T15:52:06.002Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603, upload-time = "2025-06-30T15:52:09.58Z" }, + { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" }, + { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", size = 75843, upload-time = "2025-06-30T15:52:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", size = 45053, upload-time = "2025-06-30T15:52:17.429Z" }, + { url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", size = 43273, upload-time = "2025-06-30T15:52:19.346Z" }, + { url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", size = 237124, upload-time = "2025-06-30T15:52:20.773Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", size = 256892, upload-time = "2025-06-30T15:52:22.242Z" }, + { url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", size = 240547, upload-time = "2025-06-30T15:52:23.736Z" }, + { url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", size = 266223, upload-time = "2025-06-30T15:52:25.185Z" }, + { url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", size = 267262, upload-time = "2025-06-30T15:52:26.969Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", size = 254345, upload-time = "2025-06-30T15:52:28.467Z" }, + { url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", size = 252248, upload-time = "2025-06-30T15:52:29.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", size = 250115, upload-time = "2025-06-30T15:52:31.416Z" }, + { url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", size = 249649, upload-time = "2025-06-30T15:52:32.996Z" }, + { url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", size = 261203, upload-time = "2025-06-30T15:52:34.521Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1", size = 258051, upload-time = "2025-06-30T15:52:35.999Z" }, + { url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", size = 249601, upload-time = "2025-06-30T15:52:37.473Z" }, + { url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", size = 41683, upload-time = "2025-06-30T15:52:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", size = 45811, upload-time = "2025-06-30T15:52:40.207Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", size = 43056, upload-time = "2025-06-30T15:52:41.575Z" }, + { url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", size = 82811, upload-time = "2025-06-30T15:52:43.281Z" }, + { url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", size = 48304, upload-time = "2025-06-30T15:52:45.026Z" }, + { url = "https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", size = 46775, upload-time = "2025-06-30T15:52:46.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", size = 229773, upload-time = "2025-06-30T15:52:47.88Z" }, + { url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", size = 250083, upload-time = "2025-06-30T15:52:49.366Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", size = 228980, upload-time = "2025-06-30T15:52:50.903Z" }, + { url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", size = 257776, upload-time = "2025-06-30T15:52:52.764Z" }, + { url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", size = 256882, upload-time = "2025-06-30T15:52:54.596Z" }, + { url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", size = 247816, upload-time = "2025-06-30T15:52:56.175Z" }, + { url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", size = 245341, upload-time = "2025-06-30T15:52:57.752Z" }, + { url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", size = 235854, upload-time = "2025-06-30T15:52:59.74Z" }, + { url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", size = 243432, upload-time = "2025-06-30T15:53:01.602Z" }, + { url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", size = 252731, upload-time = "2025-06-30T15:53:03.517Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", size = 247086, upload-time = "2025-06-30T15:53:05.48Z" }, + { url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", size = 243338, upload-time = "2025-06-30T15:53:07.522Z" }, + { url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", size = 47812, upload-time = "2025-06-30T15:53:09.263Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", size = 53011, upload-time = "2025-06-30T15:53:11.038Z" }, + { url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", size = 45254, upload-time = "2025-06-30T15:53:12.421Z" }, + { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" }, +] + [[package]] name = "otdf-python" -version = "0.2.20" +version = "0.3.1" source = { editable = "." 
} +dependencies = [ + { name = "connect-python", extra = ["compiler"] }, + { name = "cryptography" }, + { name = "grpcio" }, + { name = "grpcio-status" }, + { name = "grpcio-tools" }, + { name = "httpx" }, + { name = "protobuf" }, + { name = "protoc-gen-openapiv2" }, + { name = "pyjwt" }, + { name = "typing-extensions" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pydantic-settings" }, + { name = "pytest" }, + { name = "respx" }, + { name = "ruff" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] + +[package.metadata] +requires-dist = [ + { name = "connect-python", extras = ["compiler"], specifier = ">=0.4.2" }, + { name = "cryptography", specifier = ">=45.0.4" }, + { name = "grpcio", specifier = ">=1.74.0" }, + { name = "grpcio-status", specifier = ">=1.74.0" }, + { name = "grpcio-tools", specifier = ">=1.74.0" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "protobuf", specifier = ">=6.31.1" }, + { name = "protoc-gen-openapiv2", specifier = ">=0.0.1" }, + { name = "pyjwt", specifier = ">=2.10.1" }, + { name = "typing-extensions", specifier = ">=4.14.1" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pydantic-settings", specifier = ">=2.10.1" }, + { name = "pytest", specifier = ">=8.4.1" }, + { name = "respx", specifier = ">=0.21.1" }, + { name = "ruff", specifier = ">=0.12.10" }, + { name = "tomli", marker = "python_full_version < '3.11'", specifier = ">=2.2.1" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770", size = 73178, upload-time = 
"2025-06-09T22:53:40.126Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4e/ad52a7925ff01c1325653a730c7ec3175a23f948f08626a534133427dcff/propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3", size = 43133, upload-time = "2025-06-09T22:53:41.965Z" }, + { url = "https://files.pythonhosted.org/packages/63/7c/e9399ba5da7780871db4eac178e9c2e204c23dd3e7d32df202092a1ed400/propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3", size = 43039, upload-time = "2025-06-09T22:53:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/22/e1/58da211eb8fdc6fc854002387d38f415a6ca5f5c67c1315b204a5d3e9d7a/propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e", size = 201903, upload-time = "2025-06-09T22:53:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0a/550ea0f52aac455cb90111c8bab995208443e46d925e51e2f6ebdf869525/propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220", size = 213362, upload-time = "2025-06-09T22:53:46.707Z" }, + { url = "https://files.pythonhosted.org/packages/5a/af/9893b7d878deda9bb69fcf54600b247fba7317761b7db11fede6e0f28bd0/propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb", size = 210525, upload-time = "2025-06-09T22:53:48.547Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614", size = 198283, upload-time = "2025-06-09T22:53:50.067Z" }, + { url = "https://files.pythonhosted.org/packages/78/8c/9fe55bd01d362bafb413dfe508c48753111a1e269737fa143ba85693592c/propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50", size = 191872, upload-time = "2025-06-09T22:53:51.438Z" }, + { url = "https://files.pythonhosted.org/packages/54/14/4701c33852937a22584e08abb531d654c8bcf7948a8f87ad0a4822394147/propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339", size = 199452, upload-time = "2025-06-09T22:53:53.229Z" }, + { url = "https://files.pythonhosted.org/packages/16/44/447f2253d859602095356007657ee535e0093215ea0b3d1d6a41d16e5201/propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0", size = 191567, upload-time = "2025-06-09T22:53:54.541Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b3/e4756258749bb2d3b46defcff606a2f47410bab82be5824a67e84015b267/propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2", size = 193015, upload-time = "2025-06-09T22:53:56.44Z" }, + { url = "https://files.pythonhosted.org/packages/1e/df/e6d3c7574233164b6330b9fd697beeac402afd367280e6dc377bb99b43d9/propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7", size = 204660, upload-time = "2025-06-09T22:53:57.839Z" }, + { url = "https://files.pythonhosted.org/packages/b2/53/e4d31dd5170b4a0e2e6b730f2385a96410633b4833dc25fe5dffd1f73294/propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b", size = 206105, upload-time = "2025-06-09T22:53:59.638Z" }, + { url = "https://files.pythonhosted.org/packages/7f/fe/74d54cf9fbe2a20ff786e5f7afcfde446588f0cf15fb2daacfbc267b866c/propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c", size = 196980, upload-time = "2025-06-09T22:54:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70", size = 37679, upload-time = "2025-06-09T22:54:03.003Z" }, + { url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9", size = 41459, upload-time = "2025-06-09T22:54:04.134Z" }, + { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, + { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", 
size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = "2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, 
upload-time = "2025-06-09T22:55:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +] + +[[package]] +name = "protobuf" +version = "6.31.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/f3/b9655a711b32c19720253f6f06326faf90580834e2e83f840472d752bc8b/protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a", size = 441797, upload-time = "2025-05-28T19:25:54.947Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/6f/6ab8e4bf962fd5570d3deaa2d5c38f0a363f57b4501047b5ebeb83ab1125/protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9", size = 423603, upload-time = "2025-05-28T19:25:41.198Z" }, + { url = "https://files.pythonhosted.org/packages/44/3a/b15c4347dd4bf3a1b0ee882f384623e2063bb5cf9fa9d57990a4f7df2fb6/protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447", size = 435283, upload-time = "2025-05-28T19:25:44.275Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c9/b9689a2a250264a84e66c46d8862ba788ee7a641cdca39bccf64f59284b7/protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402", size = 425604, upload-time = "2025-05-28T19:25:45.702Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/7a5a94032c83375e4fe7e7f56e3976ea6ac90c5e85fac8576409e25c39c3/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39", size = 322115, upload-time = "2025-05-28T19:25:47.128Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/b59d405d64d31999244643d88c45c8241c58f17cc887e73bcb90602327f8/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6", size = 321070, upload-time = "2025-05-28T19:25:50.036Z" }, + { url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e", size = 168724, upload-time = "2025-05-28T19:25:53.926Z" }, +] + +[[package]] +name = "protoc-gen-openapiv2" +version 
= "0.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/d2/84fecd8df61640226c726c12ad7ddd2a7666a7cd7f898b9a5b72e3a66d44/protoc-gen-openapiv2-0.0.1.tar.gz", hash = "sha256:6f79188d842c13177c9c0558845442c340b43011bf67dfef1dfc3bc067506409", size = 7323, upload-time = "2022-12-02T01:40:57.306Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/ac/bd8961859d8f3f81530465d2ce9b165627e961c00348939009bac2700cc6/protoc_gen_openapiv2-0.0.1-py3-none-any.whl", hash = "sha256:18090c8be3877c438e7da0f7eb7cace45a9a210306bca4707708dbad367857be", size = 7883, upload-time = "2022-12-02T01:40:55.244Z" }, +] + +[[package]] +name = "protogen" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/fc/2e68784a06e46fe799dd375b732c13f99559a2c3b2164100607ec8b5cccf/protogen-0.3.1.tar.gz", hash = "sha256:1e55405f6c94476c45c400b069dbdb0274f065e3109fee28122e96dbba075dcd", size = 23018, upload-time = "2023-11-20T15:34:48.288Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/a0/c3f3a2e2fa866547d82190ec5c0cd55580bc29c7894221bd793003a578a1/protogen-0.3.1-py3-none-any.whl", hash = "sha256:65b60b284d20ee4899d515b1959882d8c7504b271552de36f4ebfe77f6b07331", size = 21425, upload-time = "2023-11-20T15:34:45.958Z" }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, + { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, + { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, + { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size 
= 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, + { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, + { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, + { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" 
}, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "respx" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" }, +] + +[[package]] +name = "ruff" +version = "0.12.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/eb/8c073deb376e46ae767f4961390d17545e8535921d2f65101720ed8bd434/ruff-0.12.10.tar.gz", hash = "sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9", size = 5310076, upload-time = "2025-08-21T18:23:22.595Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/e7/560d049d15585d6c201f9eeacd2fd130def3741323e5ccf123786e0e3c95/ruff-0.12.10-py3-none-linux_armv6l.whl", hash = "sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b", size = 11935161, upload-time = "2025-08-21T18:22:26.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b0/ad2464922a1113c365d12b8f80ed70fcfb39764288ac77c995156080488d/ruff-0.12.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1", size = 12660884, upload-time = "2025-08-21T18:22:30.925Z" }, + { url = "https://files.pythonhosted.org/packages/d7/f1/97f509b4108d7bae16c48389f54f005b62ce86712120fd8b2d8e88a7cb49/ruff-0.12.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839", size = 11872754, upload-time = "2025-08-21T18:22:34.035Z" }, + { url = "https://files.pythonhosted.org/packages/12/ad/44f606d243f744a75adc432275217296095101f83f966842063d78eee2d3/ruff-0.12.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844", size = 12092276, upload-time = "2025-08-21T18:22:36.764Z" }, + { url = "https://files.pythonhosted.org/packages/06/1f/ed6c265e199568010197909b25c896d66e4ef2c5e1c3808caf461f6f3579/ruff-0.12.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db", size = 11734700, upload-time = "2025-08-21T18:22:39.822Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/c5/b21cde720f54a1d1db71538c0bc9b73dee4b563a7dd7d2e404914904d7f5/ruff-0.12.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e", size = 13468783, upload-time = "2025-08-21T18:22:42.559Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/39369e6ac7f2a1848f22fb0b00b690492f20811a1ac5c1fd1d2798329263/ruff-0.12.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559", size = 14436642, upload-time = "2025-08-21T18:22:45.612Z" }, + { url = "https://files.pythonhosted.org/packages/e3/03/5da8cad4b0d5242a936eb203b58318016db44f5c5d351b07e3f5e211bb89/ruff-0.12.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf", size = 13859107, upload-time = "2025-08-21T18:22:48.886Z" }, + { url = "https://files.pythonhosted.org/packages/19/19/dd7273b69bf7f93a070c9cec9494a94048325ad18fdcf50114f07e6bf417/ruff-0.12.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b", size = 12886521, upload-time = "2025-08-21T18:22:51.567Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1d/b4207ec35e7babaee62c462769e77457e26eb853fbdc877af29417033333/ruff-0.12.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9", size = 13097528, upload-time = "2025-08-21T18:22:54.609Z" }, + { url = "https://files.pythonhosted.org/packages/ff/00/58f7b873b21114456e880b75176af3490d7a2836033779ca42f50de3b47a/ruff-0.12.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a", size = 13080443, upload-time = "2025-08-21T18:22:57.413Z" }, + { url = "https://files.pythonhosted.org/packages/12/8c/9e6660007fb10189ccb78a02b41691288038e51e4788bf49b0a60f740604/ruff-0.12.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60", size = 11896759, upload-time = "2025-08-21T18:23:00.473Z" }, + { url = "https://files.pythonhosted.org/packages/67/4c/6d092bb99ea9ea6ebda817a0e7ad886f42a58b4501a7e27cd97371d0ba54/ruff-0.12.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56", size = 11701463, upload-time = "2025-08-21T18:23:03.211Z" }, + { url = "https://files.pythonhosted.org/packages/59/80/d982c55e91df981f3ab62559371380616c57ffd0172d96850280c2b04fa8/ruff-0.12.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9", size = 12691603, upload-time = "2025-08-21T18:23:06.935Z" }, + { url = "https://files.pythonhosted.org/packages/ad/37/63a9c788bbe0b0850611669ec6b8589838faf2f4f959647f2d3e320383ae/ruff-0.12.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b", size = 13164356, upload-time = "2025-08-21T18:23:10.225Z" }, + { url = "https://files.pythonhosted.org/packages/47/d4/1aaa7fb201a74181989970ebccd12f88c0fc074777027e2a21de5a90657e/ruff-0.12.10-py3-none-win32.whl", hash = "sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266", size = 11896089, upload-time = "2025-08-21T18:23:14.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/14/2ad38fd4037daab9e023456a4a40ed0154e9971f8d6aed41bdea390aabd9/ruff-0.12.10-py3-none-win_amd64.whl", hash = "sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e", size = 13004616, upload-time = "2025-08-21T18:23:17.422Z" }, + { url = "https://files.pythonhosted.org/packages/24/3c/21cf283d67af33a8e6ed242396863af195a8a6134ec581524fd22b9811b6/ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc", size = 12074225, upload-time = "2025-08-21T18:23:20.137Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", 
size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/65/7fed0d774abf47487c64be14e9223749468922817b5e8792b8a64792a1bb/yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4", size = 132910, upload-time = "2025-06-10T00:42:31.108Z" }, + { url = "https://files.pythonhosted.org/packages/8a/7b/988f55a52da99df9e56dc733b8e4e5a6ae2090081dc2754fc8fd34e60aa0/yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a", size = 90644, upload-time = "2025-06-10T00:42:33.851Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/30d98f03e95d30c7e3cc093759982d038c8833ec2451001d45ef4854edc1/yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed", size = 89322, upload-time = "2025-06-10T00:42:35.688Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7a/f2f314f5ebfe9200724b0b748de2186b927acb334cf964fd312eb86fc286/yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e", size = 323786, upload-time = "2025-06-10T00:42:37.817Z" }, + { url = "https://files.pythonhosted.org/packages/15/3f/718d26f189db96d993d14b984ce91de52e76309d0fd1d4296f34039856aa/yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73", size = 319627, upload-time = "2025-06-10T00:42:39.937Z" }, + { url = "https://files.pythonhosted.org/packages/a5/76/8fcfbf5fa2369157b9898962a4a7d96764b287b085b5b3d9ffae69cdefd1/yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e", size = 339149, upload-time = "2025-06-10T00:42:42.627Z" }, + { url = "https://files.pythonhosted.org/packages/3c/95/d7fc301cc4661785967acc04f54a4a42d5124905e27db27bb578aac49b5c/yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8", size = 333327, upload-time = "2025-06-10T00:42:44.842Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23", size = 326054, upload-time = "2025-06-10T00:42:47.149Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/ae/8616d1f07853704523519f6131d21f092e567c5af93de7e3e94b38d7f065/yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70", size = 315035, upload-time = "2025-06-10T00:42:48.852Z" }, + { url = "https://files.pythonhosted.org/packages/48/aa/0ace06280861ef055855333707db5e49c6e3a08840a7ce62682259d0a6c0/yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb", size = 338962, upload-time = "2025-06-10T00:42:51.024Z" }, + { url = "https://files.pythonhosted.org/packages/20/52/1e9d0e6916f45a8fb50e6844f01cb34692455f1acd548606cbda8134cd1e/yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2", size = 335399, upload-time = "2025-06-10T00:42:53.007Z" }, + { url = "https://files.pythonhosted.org/packages/f2/65/60452df742952c630e82f394cd409de10610481d9043aa14c61bf846b7b1/yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30", size = 338649, upload-time = "2025-06-10T00:42:54.964Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f5/6cd4ff38dcde57a70f23719a838665ee17079640c77087404c3d34da6727/yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309", size = 358563, upload-time = "2025-06-10T00:42:57.28Z" }, + { url = "https://files.pythonhosted.org/packages/d1/90/c42eefd79d0d8222cb3227bdd51b640c0c1d0aa33fe4cc86c36eccba77d3/yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24", size = 357609, upload-time = "2025-06-10T00:42:59.055Z" }, + { url = "https://files.pythonhosted.org/packages/03/c8/cea6b232cb4617514232e0f8a718153a95b5d82b5290711b201545825532/yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13", size = 350224, upload-time = "2025-06-10T00:43:01.248Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8", size = 81753, upload-time = "2025-06-10T00:43:03.486Z" }, + { url = "https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16", size = 86817, upload-time = "2025-06-10T00:43:05.231Z" }, + { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, + { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = "2025-06-10T00:43:09.538Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, + { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, + { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, + { url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = "2025-06-10T00:43:28.96Z" }, + { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = 
"2025-06-10T00:43:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, upload-time = "2025-06-10T00:43:36.489Z" }, + { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, + { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, + { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size = 86686, upload-time = "2025-06-10T00:43:42.692Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, + { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = 
"2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +] diff --git a/validate_otdf_python.py b/validate_otdf_python.py deleted file mode 100644 index 8344738..0000000 --- a/validate_otdf_python.py +++ /dev/null @@ -1,127 +0,0 @@ -""" -This file serves as a test of otdf_python. -""" - -import tempfile -from pathlib import Path -from zipfile import is_zipfile -from os import environ - -from otdf_python.gotdf_python import OpentdfConfig - - -def _get_configuration() -> OpentdfConfig: - platformEndpoint = "http://localhost:8080" - - config: OpentdfConfig = OpentdfConfig( - ClientId=environ.get("OPENTDF_CLIENT_ID", "opentdf-sdk"), - ClientSecret=environ.get("OPENTDF_CLIENT_SECRET", "secret"), - PlatformEndpoint=environ.get("OPENTDF_HOSTNAME", platformEndpoint), - TokenEndpoint=environ.get( - "OIDC_TOKEN_ENDPOINT", - "http://localhost:8888/auth/realms/opentdf/protocol/openid-connect/token", - ), - KasUrl=environ.get("OPENTDF_KAS_URL", f"http://{platformEndpoint}/kas"), - ) - - # NOTE: Structs from golang can be printed, like below - # This should print a string like - # gotdf_python.OpentdfConfig{ClientId=opentdf-sdk, ClientSecret=secret, KasUrl=http://localhost:8080/kas, PlatformEndpoint=localhost:8080, TokenEndpoint=http://localhost:8888/auth/realms/opentdf/protocol/openid-connect/token, handle=1} - print(config) - - return config - - -def verify_encrypt_str() -> None: - print("Validating string encryption") - try: - from otdf_python.gotdf_python import EncryptString - - config: OpentdfConfig = _get_configuration() - - from otdf_python.go import Slice_string - - # da = Slice_string( - # [ - # "https://example.com/attr/attr1/value/value1", - # "https://example.com/attr/attr1/value/value2", - # ] - # ) - da = Slice_string([]) - - tdf_manifest_json = EncryptString( - inputText="Hello from Python", - config=config, - dataAttributes=da, - authScopes=Slice_string(["email"]), - ) - - print(tdf_manifest_json) - # breakpoint() - except Exception as e: - raise RuntimeError("An unexpected error occurred testing otdf_python") from e - - -def verify_encrypt_file() -> None: - print("Validating file encryption") - try: - from otdf_python.gotdf_python import EncryptFile - - with tempfile.TemporaryDirectory() as tmpDir: - print("Created temporary directory", tmpDir) - - config: OpentdfConfig = _get_configuration() - - SOME_ENCRYPTED_FILE = Path(tmpDir) / "some-file.tdf" - - if SOME_ENCRYPTED_FILE.exists(): - SOME_ENCRYPTED_FILE.unlink() - - if SOME_ENCRYPTED_FILE.exists(): - raise ValueError( - "The output path should not exist before calling 'EncryptFile()'." 
- ) - - SOME_PLAINTEXT_FILE = Path(tmpDir) / "new-file.txt" - SOME_PLAINTEXT_FILE.write_text("Hello world") - - from otdf_python.go import Slice_string - - # da = Slice_string( - # [ - # "https://example.com/attr/attr1/value/value1", - # "https://example.com/attr/attr1/value/value2", - # ] - # ) - da = Slice_string([]) - outputFilePath = EncryptFile( - inputFilePath=str(SOME_PLAINTEXT_FILE), - outputFilePath=str(SOME_ENCRYPTED_FILE), - config=config, - dataAttributes=da, - authScopes=Slice_string(["email"]), - ) - - print(f"The output file was written to destination path: {outputFilePath}") - if not SOME_ENCRYPTED_FILE.exists(): - raise ValueError("The output file does not exist!") - - encrypted_file_size = SOME_ENCRYPTED_FILE.stat().st_size - print(f"The encrypted file size is {encrypted_file_size}") - - if not (encrypted_file_size > 1500 and is_zipfile(SOME_ENCRYPTED_FILE)): - raise ValueError("The output file has unexpected content!") - - # breakpoint() - except Exception as e: - raise RuntimeError("An unexpected error occurred testing otdf_python") from e - - -if __name__ == "__main__": - print("Attempting string encryption") - verify_encrypt_str() - - print("Attempting file encryption") - verify_encrypt_file() - - print("All tests have passed 👍")
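Note on the removed validator: validate_otdf_python.py was the only end-to-end exercise of the gopy-generated bindings (OpentdfConfig, EncryptString, EncryptFile, Slice_string). For reference, a minimal smoke test covering the same EncryptString flow might look like the sketch below. It is a sketch, not the project's actual replacement test: it assumes the bindings still expose OpentdfConfig, EncryptString, and Slice_string under the same import paths shown in the removed file, that the platform brought up by .github/start_opentdf_docker.sh is reachable, and the function name smoke_test_encrypt_string is illustrative.

# Sketch: a minimal smoke test mirroring the removed validator.
# Assumptions as noted above; only API names shown in the deleted
# script are used here.
from os import environ

from otdf_python.go import Slice_string
from otdf_python.gotdf_python import EncryptString, OpentdfConfig


def smoke_test_encrypt_string() -> None:
    # Same environment-variable defaults the removed script used,
    # except KasUrl (see the note after this sketch).
    config = OpentdfConfig(
        ClientId=environ.get("OPENTDF_CLIENT_ID", "opentdf-sdk"),
        ClientSecret=environ.get("OPENTDF_CLIENT_SECRET", "secret"),
        PlatformEndpoint=environ.get("OPENTDF_HOSTNAME", "localhost:8080"),
        TokenEndpoint=environ.get(
            "OIDC_TOKEN_ENDPOINT",
            "http://localhost:8888/auth/realms/opentdf/protocol/openid-connect/token",
        ),
        KasUrl=environ.get("OPENTDF_KAS_URL", "http://localhost:8080/kas"),
    )
    manifest_json = EncryptString(
        inputText="Hello from Python",
        config=config,
        dataAttributes=Slice_string([]),  # no data attributes, as in the removed script
        authScopes=Slice_string(["email"]),
    )
    # A non-empty manifest string is the minimal signal that the call
    # made it through the Go SDK and back.
    if not manifest_json:
        raise ValueError("EncryptString returned an empty manifest!")
    print("Smoke test passed 👍")


if __name__ == "__main__":
    smoke_test_encrypt_string()

One deliberate difference from the removed script: its KasUrl default was f"http://{platformEndpoint}/kas" with platformEndpoint already set to "http://localhost:8080", which yields a doubled scheme ("http://http://localhost:8080/kas") whenever OPENTDF_KAS_URL is unset. The sketch passes the fully formed "http://localhost:8080/kas" (the value the script's own example output shows) instead.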