diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5df761bb6..872c85930 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,13 +4,15 @@ on: push: branches: [main, release/*] pull_request: - branches: [main] + branches: + - "main" + - "release/*" jobs: build: strategy: matrix: - runner: [ubuntu-latest, ubicloud] + runner: [ubicloud-standard-8] runs-on: ${{ matrix.runner }} timeout-minutes: 30 services: @@ -31,7 +33,6 @@ jobs: - uses: actions/checkout@v3 with: submodules: recursive - token: ${{ secrets.SUBMODULE_CHECKOUT }} - uses: dtolnay/rust-toolchain@stable @@ -46,7 +47,7 @@ jobs: with: name: "bq_service_account.json" json: ${{ secrets.GCP_GH_CI_PKEY }} - dir: 'nexus/server/tests/assets/' + dir: "nexus/server/tests/assets/" - name: setup snowflake credentials id: sf-credentials @@ -54,7 +55,7 @@ jobs: with: name: "snowflake_creds.json" json: ${{ secrets.SNOWFLAKE_GH_CI_PKEY }} - dir: 'nexus/server/tests/assets/' + dir: "nexus/server/tests/assets/" - name: cargo check run: cargo check diff --git a/.github/workflows/customer-docker.yml b/.github/workflows/customer-docker.yml new file mode 100644 index 000000000..026e70c7f --- /dev/null +++ b/.github/workflows/customer-docker.yml @@ -0,0 +1,52 @@ +name: Customer Docker images + +on: + push: + branches: + - 'customer-*' + pull_request: + branches: + - 'customer-*' + +jobs: + docker-build: + strategy: + matrix: + runner: [ubicloud] + runs-on: ${{ matrix.runner }} + permissions: + contents: read + packages: write + steps: + - name: checkout + uses: actions/checkout@v3 + with: + submodules: recursive + + - uses: depot/setup-action@v1 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v2.1.0 + with: + registry: ghcr.io + username: ${{github.actor}} + password: ${{secrets.GITHUB_TOKEN}} + + - name: Set Short Commit Hash + id: vars + run: echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" + + - name: extract branch name suffix for customer + 
id: branch + run: | + echo "::set-output name=branch::$(echo $GITHUB_REF | sed -e 's/.*customer-//')" + + - name: Build (optionally publish) PeerDB Images + uses: depot/bake-action@v1 + with: + token: ${{ secrets.DEPOT_TOKEN }} + files: ./docker-bake.hcl + push: ${{ startsWith(github.ref, 'refs/heads/customer-') }} + env: + SHA_SHORT: ${{ steps.branch.outputs.branch }}-${{ steps.vars.outputs.sha_short }} + TAG: latest-${{ steps.branch.outputs.branch }} diff --git a/.github/workflows/dev-debian.yml b/.github/workflows/dev-debian.yml deleted file mode 100644 index acc2994aa..000000000 --- a/.github/workflows/dev-debian.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: Dev Debian package - -on: - push: - branches: [main] - pull_request: - branches: [main] - -jobs: - release: - name: build and release - strategy: - matrix: - runner: [ubuntu-latest, ubicloud] - runs-on: ${{ matrix.runner }} - steps: - - name: checkout sources - uses: actions/checkout@v3 - with: - submodules: recursive - token: ${{ secrets.SUBMODULE_CHECKOUT }} - - - name: install system tools - run: | - sudo apt-get update - sudo apt-get install -y musl-tools protobuf-compiler gcc-multilib \ - protobuf-compiler libssl-dev pkg-config build-essential - - - name: install cargo binstall - run: | - curl -L --proto '=https' --tlsv1.2 -sSf\ - https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash - - - name: install cargo binaries - run: | - cargo binstall --no-confirm --no-symlinks cargo-deb - - - uses: dtolnay/rust-toolchain@stable - with: - targets: x86_64-unknown-linux-musl - - - name: build project release - working-directory: ./nexus - run: cargo build --release --target=x86_64-unknown-linux-musl - - - name: create peerdb-server deb package - working-directory: ./nexus/ - run: cargo deb --target=x86_64-unknown-linux-musl -p peerdb-server --no-build diff --git a/.github/workflows/dev-docker.yml b/.github/workflows/dev-docker.yml index 0bc3f3353..1a8e64a87 100644 
--- a/.github/workflows/dev-docker.yml +++ b/.github/workflows/dev-docker.yml @@ -10,7 +10,7 @@ jobs: docker-build: strategy: matrix: - runner: [ubuntu-latest, ubicloud] + runner: [ubicloud] runs-on: ${{ matrix.runner }} permissions: contents: read @@ -20,7 +20,6 @@ jobs: uses: actions/checkout@v3 with: submodules: recursive - token: ${{ secrets.SUBMODULE_CHECKOUT }} - uses: depot/setup-action@v1 @@ -35,46 +34,12 @@ jobs: id: vars run: echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" - - name: Build (optionally publish) PeerDB Dev Image - uses: depot/build-push-action@v1 + - name: Build (optionally publish) PeerDB Images + uses: depot/bake-action@v1 with: token: ${{ secrets.DEPOT_TOKEN }} - platforms: linux/amd64,linux/arm64 - context: . - file: stacks/nexus.Dockerfile + files: ./docker-bake.hcl push: ${{ github.ref == 'refs/heads/main' }} - tags: | - ghcr.io/peerdb-io/peerdb-server:dev-${{ steps.vars.outputs.sha_short }} - - - name: Build (optionally publish) Flow API Dev Image - uses: depot/build-push-action@v1 - with: - token: ${{ secrets.DEPOT_TOKEN }} - platforms: linux/amd64,linux/arm64 - context: . - file: stacks/flow-api.Dockerfile - push: ${{ github.ref == 'refs/heads/main' }} - tags: | - ghcr.io/peerdb-io/flow-api:dev-${{ steps.vars.outputs.sha_short }} - - - name: Build (optionally publish) Flow Worker Dev Image - uses: depot/build-push-action@v1 - with: - token: ${{ secrets.DEPOT_TOKEN }} - platforms: linux/amd64,linux/arm64 - context: . - file: stacks/flow-worker.Dockerfile - push: ${{ github.ref == 'refs/heads/main' }} - tags: | - ghcr.io/peerdb-io/flow-worker:dev-${{ steps.vars.outputs.sha_short }} - - - name: Build (optionally publish) Flow Snapshot Worker Dev Image - uses: depot/build-push-action@v1 - with: - token: ${{ secrets.DEPOT_TOKEN }} - platforms: linux/amd64,linux/arm64 - context: . 
- file: stacks/flow-snapshot-worker.Dockerfile - push: ${{ github.ref == 'refs/heads/main' }} - tags: | - ghcr.io/peerdb-io/flow-snapshot-worker:dev-${{ steps.vars.outputs.sha_short }} + env: + SHA_SHORT: dev-${{ steps.vars.outputs.sha_short }} + TAG: latest-dev diff --git a/.github/workflows/flow.yml b/.github/workflows/flow.yml index 80b536489..66cb68493 100644 --- a/.github/workflows/flow.yml +++ b/.github/workflows/flow.yml @@ -2,7 +2,8 @@ name: Flow build and test on: pull_request: - branches: [main] + branches: + - "main" push: branches: [main] @@ -15,7 +16,7 @@ jobs: timeout-minutes: 30 services: pg_cdc: - image: postgres:15.4-alpine + image: postgis/postgis:15-3.4-alpine ports: - 7132:5432 env: @@ -34,12 +35,17 @@ jobs: - uses: actions/setup-go@v3 with: - go-version: '>=1.19.0' + go-version: ">=1.19.0" - name: install gotestsum run: | go install gotest.tools/gotestsum@latest - + + - name: install lib-geos + run: | + sudo apt-get update + sudo apt-get install libgeos-dev + - name: download go modules run: | go mod download @@ -58,6 +64,20 @@ jobs: with: name: "snowflake_creds.json" json: ${{ secrets.SNOWFLAKE_GH_CI_PKEY }} + + - name: setup S3 credentials + id: s3-credentials + uses: jsdaniell/create-json@v1.2.2 + with: + name: "s3_creds.json" + json: ${{ secrets.S3_CREDS }} + + - name: setup GCS credentials + id: gcs-credentials + uses: jsdaniell/create-json@v1.2.2 + with: + name: "gcs_creds.json" + json: ${{ secrets.GCS_CREDS }} - name: create hstore extension and increase logical replication limits run: | @@ -73,7 +93,7 @@ jobs: - name: run tests run: | - gotestsum --format testname -- -p 4 ./... -timeout 1200s + gotestsum --format testname -- -p 4 ./... 
-timeout 2400s working-directory: ./flow env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} @@ -81,6 +101,8 @@ jobs: AWS_REGION: ${{ secrets.AWS_REGION }} TEST_BQ_CREDS: ${{ github.workspace }}/bq_service_account.json TEST_SF_CREDS: ${{ github.workspace }}/snowflake_creds.json + TEST_S3_CREDS: ${{ github.workspace }}/s3_creds.json + TEST_GCS_CREDS: ${{ github.workspace }}/gcs_creds.json AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} diff --git a/.github/workflows/golang-lint.yml b/.github/workflows/golang-lint.yml index 4b1821b7f..fd685d8c4 100644 --- a/.github/workflows/golang-lint.yml +++ b/.github/workflows/golang-lint.yml @@ -1,6 +1,9 @@ name: GolangCI-Lint -on: [pull_request] +on: + pull_request: + branches: + - "main" jobs: golangci-lint: @@ -10,14 +13,13 @@ jobs: pull-requests: write strategy: matrix: - runner: [ubuntu-latest, ubicloud] + runner: [ubicloud] runs-on: ${{ matrix.runner }} steps: - name: checkout uses: actions/checkout@v3 with: submodules: recursive - token: ${{ secrets.SUBMODULE_CHECKOUT }} - name: golangci-lint uses: reviewdog/action-golangci-lint@v2 diff --git a/.github/workflows/rust-lint.yml b/.github/workflows/rust-lint.yml index 029160274..01c3dde17 100644 --- a/.github/workflows/rust-lint.yml +++ b/.github/workflows/rust-lint.yml @@ -1,5 +1,10 @@ name: clippy-action -on: [pull_request] + +on: + pull_request: + branches: + - "main" + jobs: clippy: permissions: @@ -8,23 +13,18 @@ jobs: pull-requests: write strategy: matrix: - runner: [ubuntu-latest, ubicloud] + runner: [ubicloud-standard-4] runs-on: ${{ matrix.runner }} steps: - - name: checkout - uses: actions/checkout@v3 - with: - submodules: recursive - token: ${{ secrets.SUBMODULE_CHECKOUT }} + - name: checkout + uses: actions/checkout@v3 + with: + submodules: recursive - - uses: dtolnay/rust-toolchain@stable - with: - components: clippy + - uses: 
dtolnay/rust-toolchain@stable + with: + components: clippy - - uses: giraffate/clippy-action@v1 - with: - reporter: 'github-pr-review' - github_token: ${{ secrets.GITHUB_TOKEN }} - workdir: ./nexus - env: - REVIEWDOG_TOKEN: ${{ secrets.REVIEWDOG_TOKEN }} + - name: clippy + run: cargo clippy -- -D warnings + working-directory: ./nexus diff --git a/.github/workflows/stable-debian.yml b/.github/workflows/stable-debian.yml deleted file mode 100644 index b58afce10..000000000 --- a/.github/workflows/stable-debian.yml +++ /dev/null @@ -1,62 +0,0 @@ -name: Stable Debian package - -on: - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+' - -jobs: - release: - name: build and release - strategy: - matrix: - runner: [ubuntu-latest, ubicloud] - runs-on: ${{ matrix.runner }} - permissions: - contents: write - steps: - - name: checkout sources - uses: actions/checkout@v3 - with: - submodules: recursive - token: ${{ secrets.SUBMODULE_CHECKOUT }} - - - name: install system tools - run: | - sudo apt-get update - sudo apt-get install -y musl-tools protobuf-compiler gcc-multilib \ - protobuf-compiler libssl-dev pkg-config build-essential - - - name: install cargo binstall - run: | - curl -L --proto '=https' --tlsv1.2 -sSf\ - https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash - - - name: install cargo binaries - run: | - cargo binstall --no-confirm --no-symlinks cargo-deb - - - uses: dtolnay/rust-toolchain@stable - with: - targets: x86_64-unknown-linux-musl - - - name: Set Cargo version as Git tag - working-directory: ./nexus/server - run: | - export VERSION=$(echo "${{ github.ref_name }}" | sed 's/^v//') - sed -i "s/0.1.0/$VERSION/g" Cargo.toml - - - name: build project release - working-directory: ./nexus - run: cargo build --release --target=x86_64-unknown-linux-musl - - - name: create peerdb-server deb package - working-directory: ./nexus/ - run: cargo deb --target=x86_64-unknown-linux-musl -p peerdb-server --no-build - - - name: upload 
release artifacts - uses: softprops/action-gh-release@v1 - if: startsWith(github.ref, 'refs/tags/') - with: - files: | - nexus/target/x86_64-unknown-linux-musl/debian/peerdb-server*.deb diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml index a4b68f37c..360a50f6c 100644 --- a/.github/workflows/stable-docker.yml +++ b/.github/workflows/stable-docker.yml @@ -9,7 +9,7 @@ jobs: docker-build: strategy: matrix: - runner: [ubuntu-latest, ubicloud] + runner: [ubicloud] runs-on: ${{ matrix.runner }} permissions: contents: read @@ -19,7 +19,6 @@ jobs: uses: actions/checkout@v3 with: submodules: recursive - token: ${{ secrets.SUBMODULE_CHECKOUT }} - uses: depot/setup-action@v1 @@ -30,46 +29,12 @@ jobs: username: ${{github.actor}} password: ${{secrets.GITHUB_TOKEN}} - - name: Publish PeerDB Stable Image - uses: depot/build-push-action@v1 + - name: Build (optionally publish) PeerDB Images + uses: depot/bake-action@v1 with: - platforms: linux/amd64,linux/arm64 token: ${{ secrets.DEPOT_TOKEN }} - context: . - file: stacks/nexus.Dockerfile - push: true - tags: | - ghcr.io/peerdb-io/peerdb-server:${{ github.ref_name }} - - - name: Publish Flow API Image - uses: depot/build-push-action@v1 - with: - platforms: linux/amd64,linux/arm64 - token: ${{ secrets.DEPOT_TOKEN }} - context: . - file: stacks/flow-api.Dockerfile - push: true - tags: | - ghcr.io/peerdb-io/flow-api:${{ github.ref_name }} - - - name: Publish Flow Worker Stable Image - uses: depot/build-push-action@v1 - with: - platforms: linux/amd64,linux/arm64 - token: ${{ secrets.DEPOT_TOKEN }} - context: . - file: stacks/flow-worker.Dockerfile - push: true - tags: | - ghcr.io/peerdb-io/flow-worker:${{ github.ref_name }} - - - name: Publish Flow Snapshot Worker Stable Image - uses: depot/build-push-action@v1 - with: - platforms: linux/amd64,linux/arm64 - token: ${{ secrets.DEPOT_TOKEN }} - context: . 
- file: stacks/flow-snapshot-worker.Dockerfile - push: true - tags: | - ghcr.io/peerdb-io/flow-snapshot-worker:${{ github.ref_name }} + files: ./docker-bake.hcl + push: ${{ github.ref == 'refs/heads/stable' }} + env: + SHA_SHORT: stable-${{ github.ref_name }} + TAG: latest-stable diff --git a/.github/workflows/ui-build.yml b/.github/workflows/ui-build.yml index b1acc0de4..41a4e3395 100644 --- a/.github/workflows/ui-build.yml +++ b/.github/workflows/ui-build.yml @@ -13,7 +13,7 @@ jobs: name: Build & Test UI strategy: matrix: - runner: [ubuntu-latest, ubicloud] + runner: [ubicloud] runs-on: ${{ matrix.runner }} steps: - name: checkout @@ -25,4 +25,4 @@ jobs: - name: Build working-directory: ui - run: yarn build \ No newline at end of file + run: yarn build diff --git a/.github/workflows/ui-lint.yml b/.github/workflows/ui-lint.yml index d8f2e5853..5c697aa3b 100644 --- a/.github/workflows/ui-lint.yml +++ b/.github/workflows/ui-lint.yml @@ -17,7 +17,7 @@ jobs: name: Run UI linters strategy: matrix: - runner: [ubuntu-latest, ubicloud] + runner: [ubicloud] runs-on: ${{ matrix.runner }} steps: - name: checkout @@ -33,4 +33,4 @@ jobs: eslint: true prettier: true eslint_dir: ui - prettier_dir: ui \ No newline at end of file + prettier_dir: ui diff --git a/.gitignore b/.gitignore index d997e9644..8df46502d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ .vscode .env tmp/ +.envrc diff --git a/README.md b/README.md index d35b8ddee..bca6cc82d 100644 --- a/README.md +++ b/README.md @@ -27,11 +27,12 @@ You can use PeerDB for any of the below use-cases: git clone --recursive git@github.com:PeerDB-io/peerdb.git cd peerdb -# Run docker containers: peerdb-server, postgres as catalog, temporal -export COMPOSE_PROJECT_NAME=peerdb-stack -docker compose up +# Run docker containers: postgres as catalog, temporal, PeerDB server, PeerDB flow API + workers, PeerDB UI +bash ./run-peerdb.sh +# OR for local development, images will be built locally: +bash ./dev-peerdb.sh -# connect to 
peerdb and query away +# connect to peerdb and query away (Use psql version >=14.0, <16.0) psql "port=9900 host=localhost password=peerdb" ``` diff --git a/buf.gen.yaml b/buf.gen.yaml index 4ba8ac285..bbdcfef5a 100644 --- a/buf.gen.yaml +++ b/buf.gen.yaml @@ -3,6 +3,8 @@ managed: enabled: true go_package_prefix: default: generated/protos + except: + - buf.build/googleapis/googleapis plugins: - plugin: buf.build/protocolbuffers/go:v1.31.0 out: flow/generated/protos @@ -27,3 +29,7 @@ plugins: opt: - esModuleInterop=true - outputServices=grpc-js + - plugin: buf.build/grpc-ecosystem/gateway:v2.18.0 + out: flow/generated/protos + opt: + - paths=source_relative diff --git a/dev-peerdb.sh b/dev-peerdb.sh new file mode 100755 index 000000000..09e93defe --- /dev/null +++ b/dev-peerdb.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -Eeuo pipefail + +if ! command -v docker &> /dev/null +then + echo "docker could not be found on PATH" + exit 1 +fi + +docker compose -f docker-compose-dev.yml up --build\ + --no-attach temporal --no-attach pyroscope --no-attach temporal-ui diff --git a/docker-bake.hcl b/docker-bake.hcl new file mode 100644 index 000000000..ec3b7cd46 --- /dev/null +++ b/docker-bake.hcl @@ -0,0 +1,89 @@ +variable SHA_SHORT { + default = "123456" +} + +variable TAG { + default = "latest-dev" +} + +variable REGISTRY { + default = "ghcr.io/peerdb-io" +} + +group "default" { + targets = [ + "peerdb", + "flow-worker", + "flow-api", + "flow-snapshot-worker", + "peerdb-ui" + ] +} + +target "flow-api" { + context = "." + dockerfile = "stacks/flow.Dockerfile" + target = "flow-api" + platforms = [ + "linux/amd64", + "linux/arm64", + ] + tags = [ + "${REGISTRY}/flow-api:${TAG}", + "${REGISTRY}/flow-api:${SHA_SHORT}", + ] +} + +target "flow-snapshot-worker" { + context = "." 
+ dockerfile = "stacks/flow.Dockerfile" + target = "flow-snapshot-worker" + platforms = [ + "linux/amd64", + "linux/arm64", + ] + tags = [ + "${REGISTRY}/flow-snapshot-worker:${TAG}", + "${REGISTRY}/flow-snapshot-worker:${SHA_SHORT}", + ] +} + +target "flow-worker" { + context = "." + dockerfile = "stacks/flow.Dockerfile" + target = "flow-worker" + platforms = [ + "linux/amd64", + "linux/arm64", + ] + tags = [ + "${REGISTRY}/flow-worker:${TAG}", + "${REGISTRY}/flow-worker:${SHA_SHORT}", + ] +} + +target "peerdb" { + context = "." + dockerfile = "stacks/peerdb-server.Dockerfile" + platforms = [ + "linux/amd64", + "linux/arm64", + ] + tags = [ + "${REGISTRY}/peerdb-server:${TAG}", + "${REGISTRY}/peerdb-server:${SHA_SHORT}", + ] +} + +target "peerdb-ui" { + context = "." + dockerfile = "stacks/peerdb-ui.Dockerfile" + platforms = [ + "linux/amd64", + "linux/arm64", + ] + tags = [ + "${REGISTRY}/peerdb-ui:${TAG}", + "${REGISTRY}/peerdb-ui:${SHA_SHORT}", + ] +} diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml new file mode 100644 index 000000000..c6e4de2fa --- /dev/null +++ b/docker-compose-dev.yml @@ -0,0 +1,243 @@ +version: "3.9" + +x-catalog-config: &catalog-config + PEERDB_CATALOG_HOST: catalog + PEERDB_CATALOG_PORT: 5432 + PEERDB_CATALOG_USER: postgres + PEERDB_CATALOG_PASSWORD: postgres + PEERDB_CATALOG_DATABASE: postgres + +x-flow-worker-env: &flow-worker-env + TEMPORAL_HOST_PORT: temporal:7233 + # For GCS, these will be your HMAC keys instead + # For more information: + # https://cloud.google.com/storage/docs/authentication/managing-hmackeys + AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID:-} + AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY:-} + # For GCS, set this to "auto" without the quotes + AWS_REGION: ${AWS_REGION:-} + # For GCS, set this as: https://storage.googleapis.com + AWS_ENDPOINT: ${AWS_ENDPOINT:-} + # enables worker profiling using Grafana Pyroscope + ENABLE_PROFILING: "true" + # enables exporting of mirror metrics to Prometheus for 
visualization using Grafana + ENABLE_METRICS: "true" + PYROSCOPE_SERVER_ADDRESS: http://pyroscope:4040 + +services: + catalog: + container_name: catalog + image: debezium/postgres:16-alpine + ports: + - 9901:5432 + environment: + PGUSER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + extra_hosts: + - "host.docker.internal:host-gateway" + volumes: + - pgdata:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready", "-d", "postgres", "-U", "postgres"] + interval: 10s + timeout: 30s + retries: 5 + start_period: 60s + + temporal: + container_name: temporal + depends_on: + catalog: + condition: service_healthy + environment: + - DB=postgresql + - DB_PORT=5432 + - POSTGRES_USER=postgres + - POSTGRES_PWD=postgres + - POSTGRES_SEEDS=catalog + - DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development-sql.yaml + image: temporalio/auto-setup:1.22.1 + ports: + - 7233:7233 + volumes: + - ./temporal-dynamicconfig:/etc/temporal/config/dynamicconfig + labels: + kompose.volume.type: configMap + + pyroscope: + container_name: pyroscope + image: grafana/pyroscope:latest + ports: + - 4040:4040 + + temporal-admin-tools: + container_name: temporal-admin-tools + depends_on: + - temporal + environment: + - TEMPORAL_ADDRESS=temporal:7233 + - TEMPORAL_CLI_ADDRESS=temporal:7233 + image: temporalio/admin-tools:1.22.1 + stdin_open: true + tty: true + healthcheck: + test: ["CMD", "tctl", "workflow", "list"] + interval: 1s + timeout: 5s + retries: 30 + + temporal-ui: + container_name: temporal-ui + depends_on: + - temporal + environment: + - TEMPORAL_ADDRESS=temporal:7233 + - TEMPORAL_CORS_ORIGINS=http://localhost:3000 + - TEMPORAL_CSRF_COOKIE_INSECURE=true + image: temporalio/ui:2.17.2 + ports: + - 8085:8080 + + flow-api: + container_name: flow_api + build: + context: . 
+ dockerfile: stacks/flow.Dockerfile + target: flow-api + ports: + - 8112:8112 + - 8113:8113 + environment: + <<: [*catalog-config] + TEMPORAL_HOST_PORT: temporal:7233 + depends_on: + temporal-admin-tools: + condition: service_healthy + + flow-snapshot-worker: + container_name: flow-snapshot-worker + build: + context: . + dockerfile: stacks/flow.Dockerfile + target: flow-snapshot-worker + environment: + TEMPORAL_HOST_PORT: temporal:7233 + depends_on: + temporal-admin-tools: + condition: service_healthy + + flow-worker1: + container_name: flow-worker1 + build: + context: . + dockerfile: stacks/flow.Dockerfile + target: flow-worker + environment: + <<: [*catalog-config, *flow-worker-env] + METRICS_SERVER: 0.0.0.0:6061 + ports: + - 6060:6060 + - 6061:6061 + depends_on: + temporal-admin-tools: + condition: service_healthy + + flow-worker2: + container_name: flow-worker2 + build: + context: . + dockerfile: stacks/flow.Dockerfile + target: flow-worker + environment: + <<: [*catalog-config, *flow-worker-env] + METRICS_SERVER: 0.0.0.0:6063 + ports: + - 6062:6062 + - 6063:6063 + profiles: + - multi + - multi-metrics + depends_on: + temporal-admin-tools: + condition: service_healthy + + flow-worker3: + container_name: flow-worker3 + build: + context: . + dockerfile: stacks/flow.Dockerfile + target: flow-worker + environment: + <<: [*catalog-config, *flow-worker-env] + METRICS_SERVER: 0.0.0.0:6065 + ports: + - 6064:6064 + - 6065:6065 + profiles: + - multi + - multi-metrics + depends_on: + temporal-admin-tools: + condition: service_healthy + + peerdb: + container_name: peerdb-server + build: + context: . 
+ dockerfile: stacks/peerdb-server.Dockerfile + environment: + <<: *catalog-config + PEERDB_LOG_DIR: /var/log/peerdb + PEERDB_PASSWORD: peerdb + PEERDB_FLOW_SERVER_ADDRESS: grpc://flow_api:8112 + RUST_LOG: info + RUST_BACKTRACE: 1 + ports: + - 9900:9900 + depends_on: + catalog: + condition: service_healthy + + peerdb-prometheus: + container_name: peerdb-prometheus + build: + context: . + dockerfile: stacks/prometheus.Dockerfile + volumes: + - prometheusdata:/prometheus + ports: + - 9090:9090 + profiles: + - multi-metrics + - metrics + + peerdb-grafana: + container_name: peerdb-grafana + build: + context: . + dockerfile: stacks/grafana.Dockerfile + ports: + - 3000:3000 + environment: + GF_SECURITY_ADMIN_USER: admin + GF_SECURITY_ADMIN_PASSWORD: peerdb + profiles: + - multi-metrics + - metrics + + peerdb-ui: + container_name: peerdb-ui + build: + context: . + dockerfile: stacks/peerdb-ui.Dockerfile + ports: + - 3001:3000 + environment: + <<: *catalog-config + DATABASE_URL: postgres://postgres:postgres@catalog:5432/postgres + PEERDB_FLOW_SERVER_HTTP: http://flow_api:8113 + +volumes: + pgdata: + prometheusdata: diff --git a/docker-compose.yml b/docker-compose.yml index a5c527b05..d5d4dbbff 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,21 +9,25 @@ x-catalog-config: &catalog-config x-flow-worker-env: &flow-worker-env TEMPORAL_HOST_PORT: temporal:7233 - AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID:-""} - AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY:-""} - AWS_REGION: ${AWS_REGION:-""} - # enables worker profiling using Go's pprof - ENABLE_PROFILING: "true" + # For GCS, these will be your HMAC keys instead + # For more information: + # https://cloud.google.com/storage/docs/authentication/managing-hmackeys + AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID:-} + AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY:-} + # For GCS, set this to "auto" without the quotes + AWS_REGION: ${AWS_REGION:-} + # For GCS, set this as: https://storage.googleapis.com + AWS_ENDPOINT: 
${AWS_ENDPOINT:-} # enables exporting of mirror metrics to Prometheus for visualization using Grafana ENABLE_METRICS: "true" - # enables exporting of mirror metrics to Catalog in the PEERDB_STATS schema. - ENABLE_STATS: "true" - PYROSCOPE_SERVER_ADDRESS: http://pyroscope:4040 + +x-peerdb-temporal-namespace: &peerdb-temporal-namespace + PEERDB_TEMPORAL_NAMESPACE: default services: catalog: container_name: catalog - image: debezium/postgres:15-alpine + image: debezium/postgres:16-alpine ports: - 9901:5432 environment: @@ -53,7 +57,7 @@ services: - POSTGRES_PWD=postgres - POSTGRES_SEEDS=catalog - DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development-sql.yaml - image: temporalio/auto-setup:1.21.3 + image: temporalio/auto-setup:1.22.1 ports: - 7233:7233 volumes: @@ -61,12 +65,6 @@ services: labels: kompose.volume.type: configMap - pyroscope: - container_name: pyroscope - image: grafana/pyroscope:latest - ports: - - 4040:4040 - temporal-admin-tools: container_name: temporal-admin-tools depends_on: @@ -74,7 +72,7 @@ services: environment: - TEMPORAL_ADDRESS=temporal:7233 - TEMPORAL_CLI_ADDRESS=temporal:7233 - image: temporalio/admin-tools:1.21.3 + image: temporalio/admin-tools:1.22.1 stdin_open: true tty: true healthcheck: @@ -95,38 +93,34 @@ services: ports: - 8085:8080 - flow_api: + flow-api: container_name: flow_api - build: - context: . - dockerfile: stacks/flow-api.Dockerfile + image: ghcr.io/peerdb-io/flow-api:latest-dev ports: - 8112:8112 + - 8113:8113 environment: - <<: [*catalog-config] + <<: [*catalog-config, *peerdb-temporal-namespace] TEMPORAL_HOST_PORT: temporal:7233 depends_on: temporal-admin-tools: condition: service_healthy - flow_snapshot_worker: - container_name: flow_snapshot_worker - build: - context: . 
- dockerfile: stacks/flow-snapshot-worker.Dockerfile + flow-snapshot-worker: + container_name: flow-snapshot-worker + image: ghcr.io/peerdb-io/flow-snapshot-worker:latest-dev environment: + <<: [*peerdb-temporal-namespace] TEMPORAL_HOST_PORT: temporal:7233 depends_on: temporal-admin-tools: condition: service_healthy - flow_worker1: - container_name: flow_worker1 - build: - context: . - dockerfile: stacks/flow-worker.Dockerfile + flow-worker1: + container_name: flow-worker1 + image: ghcr.io/peerdb-io/flow-worker:latest-dev environment: - <<: [*catalog-config, *flow-worker-env] + <<: [*catalog-config, *flow-worker-env, *peerdb-temporal-namespace] METRICS_SERVER: 0.0.0.0:6061 ports: - 6060:6060 @@ -135,13 +129,11 @@ services: temporal-admin-tools: condition: service_healthy - flow_worker2: - container_name: flow_worker2 - build: - context: . - dockerfile: stacks/flow-worker.Dockerfile + flow-worker2: + container_name: flow-worker2 + image: ghcr.io/peerdb-io/flow-worker:latest-dev environment: - <<: [*catalog-config, *flow-worker-env] + <<: [*catalog-config, *flow-worker-env, *peerdb-temporal-namespace] METRICS_SERVER: 0.0.0.0:6063 ports: - 6062:6062 @@ -153,13 +145,11 @@ services: temporal-admin-tools: condition: service_healthy - flow_worker3: - container_name: flow_worker3 - build: - context: . - dockerfile: stacks/flow-worker.Dockerfile + flow-worker3: + container_name: flow-worker3 + image: ghcr.io/peerdb-io/flow-worker:latest-dev environment: - <<: [*catalog-config, *flow-worker-env] + <<: [*catalog-config, *flow-worker-env, *peerdb-temporal-namespace] METRICS_SERVER: 0.0.0.0:6065 ports: - 6064:6064 @@ -172,10 +162,8 @@ services: condition: service_healthy peerdb: - container_name: peerdb_server - build: - context: . 
- dockerfile: stacks/nexus.Dockerfile + container_name: peerdb-server + image: ghcr.io/peerdb-io/peerdb-server:latest-dev environment: <<: *catalog-config PEERDB_LOG_DIR: /var/log/peerdb @@ -189,8 +177,8 @@ services: catalog: condition: service_healthy - peerdb_prometheus: - container_name: peerdb_prometheus + peerdb-prometheus: + container_name: peerdb-prometheus build: context: . dockerfile: stacks/prometheus.Dockerfile @@ -202,8 +190,8 @@ services: - multi-metrics - metrics - peerdb_grafana: - container_name: peerdb_grafana + peerdb-grafana: + container_name: peerdb-grafana build: context: . dockerfile: stacks/grafana.Dockerfile @@ -216,16 +204,15 @@ services: - multi-metrics - metrics - peerdb_ui: - container_name: peerdb_ui - build: - context: . - dockerfile: stacks/ui.Dockerfile + peerdb-ui: + container_name: peerdb-ui + image: ghcr.io/peerdb-io/peerdb-ui:latest-dev ports: - - 3000:3000 + - 3001:3000 environment: <<: *catalog-config - PEERDB_FLOW_SERVER_ADDRESS: flow_api:8112 + DATABASE_URL: postgres://postgres:postgres@catalog:5432/postgres + PEERDB_FLOW_SERVER_HTTP: http://flow_api:8113 volumes: pgdata: diff --git a/flow/activities/flowable.go b/flow/activities/flowable.go index 5011845a2..6d209a364 100644 --- a/flow/activities/flowable.go +++ b/flow/activities/flowable.go @@ -9,6 +9,7 @@ import ( "github.com/PeerDB-io/peer-flow/connectors" connpostgres "github.com/PeerDB-io/peer-flow/connectors/postgres" + connsnowflake "github.com/PeerDB-io/peer-flow/connectors/snowflake" "github.com/PeerDB-io/peer-flow/connectors/utils" "github.com/PeerDB-io/peer-flow/connectors/utils/monitoring" "github.com/PeerDB-io/peer-flow/generated/protos" @@ -17,6 +18,7 @@ import ( "github.com/jackc/pglogrepl" log "github.com/sirupsen/logrus" "go.temporal.io/sdk/activity" + "golang.org/x/sync/errgroup" ) // CheckConnectionResult is the result of a CheckConnection call. 
@@ -163,11 +165,6 @@ func (a *FlowableActivity) StartFlow(ctx context.Context, ctx = context.WithValue(ctx, shared.EnableMetricsKey, a.EnableMetrics) ctx = context.WithValue(ctx, shared.CDCMirrorMonitorKey, a.CatalogMirrorMonitor) - srcConn, err := connectors.GetCDCPullConnector(ctx, conn.Source) - if err != nil { - return nil, fmt.Errorf("failed to get source connector: %w", err) - } - defer connectors.CloseConnector(srcConn) dstConn, err := connectors.GetCDCSyncConnector(ctx, conn.Destination) if err != nil { return nil, fmt.Errorf("failed to get destination connector: %w", err) @@ -187,28 +184,47 @@ func (a *FlowableActivity) StartFlow(ctx context.Context, "flowName": input.FlowConnectionConfigs.FlowJobName, }).Info("pulling records...") + tblNameMapping := make(map[string]string) + for _, v := range input.FlowConnectionConfigs.TableMappings { + tblNameMapping[v.SourceTableIdentifier] = v.DestinationTableIdentifier + } + + idleTimeout := utils.GetEnvInt("PEERDB_CDC_IDLE_TIMEOUT_SECONDS", 10) + + recordBatch := model.NewCDCRecordStream() + startTime := time.Now() - recordsWithTableSchemaDelta, err := srcConn.PullRecords(&model.PullRecordsRequest{ - FlowJobName: input.FlowConnectionConfigs.FlowJobName, - SrcTableIDNameMapping: input.FlowConnectionConfigs.SrcTableIdNameMapping, - TableNameMapping: input.FlowConnectionConfigs.TableNameMapping, - LastSyncState: input.LastSyncState, - MaxBatchSize: uint32(input.SyncFlowOptions.BatchSize), - IdleTimeout: 10 * time.Second, - TableNameSchemaMapping: input.FlowConnectionConfigs.TableNameSchemaMapping, - OverridePublicationName: input.FlowConnectionConfigs.PublicationName, - OverrideReplicationSlotName: input.FlowConnectionConfigs.ReplicationSlotName, - RelationMessageMapping: input.RelationMessageMapping, - }) + + errGroup, errCtx := errgroup.WithContext(ctx) + srcConn, err := connectors.GetCDCPullConnector(errCtx, conn.Source) if err != nil { - return nil, fmt.Errorf("failed to pull records: %w", err) + return nil, 
fmt.Errorf("failed to get source connector: %w", err) } - recordBatch := recordsWithTableSchemaDelta.RecordBatch + defer connectors.CloseConnector(srcConn) + + // start a goroutine to pull records from the source + errGroup.Go(func() error { + return srcConn.PullRecords(&model.PullRecordsRequest{ + FlowJobName: input.FlowConnectionConfigs.FlowJobName, + SrcTableIDNameMapping: input.FlowConnectionConfigs.SrcTableIdNameMapping, + TableNameMapping: tblNameMapping, + LastSyncState: input.LastSyncState, + MaxBatchSize: uint32(input.SyncFlowOptions.BatchSize), + IdleTimeout: time.Duration(idleTimeout) * time.Second, + TableNameSchemaMapping: input.FlowConnectionConfigs.TableNameSchemaMapping, + OverridePublicationName: input.FlowConnectionConfigs.PublicationName, + OverrideReplicationSlotName: input.FlowConnectionConfigs.ReplicationSlotName, + RelationMessageMapping: input.RelationMessageMapping, + RecordStream: recordBatch, + }) + }) - pullRecordWithCount := fmt.Sprintf("pulled %d records", len(recordBatch.Records)) - activity.RecordHeartbeat(ctx, pullRecordWithCount) + hasRecords := !recordBatch.WaitAndCheckEmpty() + log.WithFields(log.Fields{ + "flowName": input.FlowConnectionConfigs.FlowJobName, + }).Infof("the current sync flow has records: %v", hasRecords) - if a.CatalogMirrorMonitor.IsActive() && len(recordBatch.Records) > 0 { + if a.CatalogMirrorMonitor.IsActive() && hasRecords { syncBatchID, err := dstConn.GetLastSyncBatchID(input.FlowConnectionConfigs.FlowJobName) if err != nil && conn.Destination.Type != protos.DBType_EVENTHUB { return nil, err @@ -217,9 +233,9 @@ func (a *FlowableActivity) StartFlow(ctx context.Context, err = a.CatalogMirrorMonitor.AddCDCBatchForFlow(ctx, input.FlowConnectionConfigs.FlowJobName, monitoring.CDCBatchInfo{ BatchID: syncBatchID + 1, - RowsInBatch: uint32(len(recordBatch.Records)), - BatchStartLSN: pglogrepl.LSN(recordBatch.FirstCheckPointID), - BatchEndlSN: pglogrepl.LSN(recordBatch.LastCheckPointID), + RowsInBatch: 0, + 
BatchStartLSN: pglogrepl.LSN(recordBatch.GetFirstCheckpoint()), + BatchEndlSN: 0, StartTime: startTime, }) if err != nil { @@ -227,21 +243,18 @@ func (a *FlowableActivity) StartFlow(ctx context.Context, } } - pullDuration := time.Since(startTime) - numRecords := len(recordBatch.Records) - log.WithFields(log.Fields{ - "flowName": input.FlowConnectionConfigs.FlowJobName, - }).Infof("pulled %d records in %d seconds\n", numRecords, int(pullDuration.Seconds())) - activity.RecordHeartbeat(ctx, fmt.Sprintf("pulled %d records", numRecords)) + if !hasRecords { + // wait for the pull goroutine to finish + err = errGroup.Wait() + if err != nil { + return nil, fmt.Errorf("failed to pull records: %w", err) + } - if numRecords == 0 { - log.WithFields(log.Fields{ - "flowName": input.FlowConnectionConfigs.FlowJobName, - }).Info("no records to push") - return &model.SyncResponse{ - RelationMessageMapping: recordsWithTableSchemaDelta.RelationMessageMapping, - TableSchemaDelta: recordsWithTableSchemaDelta.TableSchemaDelta, - }, nil + log.WithFields(log.Fields{"flowName": input.FlowConnectionConfigs.FlowJobName}).Info("no records to push") + syncResponse := &model.SyncResponse{} + syncResponse.RelationMessageMapping = <-recordBatch.RelationMessageMapping + syncResponse.TableSchemaDeltas = recordBatch.WaitForSchemaDeltas() + return syncResponse, nil } shutdown := utils.HeartbeatRoutine(ctx, 10*time.Second, func() string { @@ -267,14 +280,35 @@ func (a *FlowableActivity) StartFlow(ctx context.Context, return nil, fmt.Errorf("failed to push records: %w", err) } + err = errGroup.Wait() + if err != nil { + return nil, fmt.Errorf("failed to pull records: %w", err) + } + + numRecords := res.NumRecordsSynced syncDuration := time.Since(syncStartTime) log.WithFields(log.Fields{ "flowName": input.FlowConnectionConfigs.FlowJobName, }).Infof("pushed %d records in %d seconds\n", numRecords, int(syncDuration.Seconds())) + lastCheckpoint, err := recordBatch.GetLastCheckpoint() + if err != nil { + 
return nil, fmt.Errorf("failed to get last checkpoint: %w", err) + } + + err = a.CatalogMirrorMonitor.UpdateNumRowsAndEndLSNForCDCBatch( + ctx, + input.FlowConnectionConfigs.FlowJobName, + res.CurrentSyncBatchID, + uint32(numRecords), + pglogrepl.LSN(lastCheckpoint), + ) + if err != nil { + return nil, err + } + err = a.CatalogMirrorMonitor. - UpdateLatestLSNAtTargetForCDCFlow(ctx, input.FlowConnectionConfigs.FlowJobName, - pglogrepl.LSN(recordBatch.LastCheckPointID)) + UpdateLatestLSNAtTargetForCDCFlow(ctx, input.FlowConnectionConfigs.FlowJobName, pglogrepl.LSN(lastCheckpoint)) if err != nil { return nil, err } @@ -288,8 +322,8 @@ func (a *FlowableActivity) StartFlow(ctx context.Context, if err != nil { return nil, err } - res.TableSchemaDelta = recordsWithTableSchemaDelta.TableSchemaDelta - res.RelationMessageMapping = recordsWithTableSchemaDelta.RelationMessageMapping + res.TableSchemaDeltas = recordBatch.WaitForSchemaDeltas() + res.RelationMessageMapping = <-recordBatch.RelationMessageMapping pushedRecordsWithCount := fmt.Sprintf("pushed %d records", numRecords) activity.RecordHeartbeat(ctx, pushedRecordsWithCount) @@ -317,8 +351,9 @@ func (a *FlowableActivity) StartNormalize( return nil, fmt.Errorf("failed to get last sync batch ID: %v", err) } - return nil, a.CatalogMirrorMonitor.UpdateEndTimeForCDCBatch(ctx, input.FlowConnectionConfigs.FlowJobName, + err = a.CatalogMirrorMonitor.UpdateEndTimeForCDCBatch(ctx, input.FlowConnectionConfigs.FlowJobName, lastSyncBatchID) + return nil, err } else if err != nil { return nil, err } @@ -345,10 +380,13 @@ func (a *FlowableActivity) StartNormalize( return nil, fmt.Errorf("failed to normalized records: %w", err) } - err = a.CatalogMirrorMonitor.UpdateEndTimeForCDCBatch(ctx, input.FlowConnectionConfigs.FlowJobName, - res.EndBatchID) - if err != nil { - return nil, err + // normalize flow did not run due to no records, no need to update end time. 
+ if res.Done { + err = a.CatalogMirrorMonitor.UpdateEndTimeForCDCBatch(ctx, input.FlowConnectionConfigs.FlowJobName, + res.EndBatchID) + if err != nil { + return nil, err + } } // log the number of batches normalized @@ -359,7 +397,7 @@ func (a *FlowableActivity) StartNormalize( return res, nil } -func (a *FlowableActivity) ReplayTableSchemaDelta( +func (a *FlowableActivity) ReplayTableSchemaDeltas( ctx context.Context, input *protos.ReplayTableSchemaDeltaInput, ) error { @@ -371,7 +409,7 @@ func (a *FlowableActivity) ReplayTableSchemaDelta( } defer connectors.CloseConnector(dest) - return dest.ReplayTableSchemaDelta(input.FlowConnectionConfigs.FlowJobName, input.TableSchemaDelta) + return dest.ReplayTableSchemaDeltas(input.FlowConnectionConfigs.FlowJobName, input.TableSchemaDeltas) } // SetupQRepMetadataTables sets up the metadata tables for QReplication. @@ -393,7 +431,7 @@ func (a *FlowableActivity) GetQRepPartitions(ctx context.Context, ) (*protos.QRepParitionResult, error) { srcConn, err := connectors.GetQRepPullConnector(ctx, config.SourcePeer) if err != nil { - return nil, fmt.Errorf("failed to get connector: %w", err) + return nil, fmt.Errorf("failed to get qrep pull connector: %w", err) } defer connectors.CloseConnector(srcConn) @@ -405,14 +443,17 @@ func (a *FlowableActivity) GetQRepPartitions(ctx context.Context, shutdown <- true }() - startTime := time.Now() partitions, err := srcConn.GetQRepPartitions(config, last) if err != nil { return nil, fmt.Errorf("failed to get partitions from source: %w", err) } if len(partitions) > 0 { - err = a.CatalogMirrorMonitor.InitializeQRepRun(ctx, config.FlowJobName, - runUUID, startTime) + err = a.CatalogMirrorMonitor.InitializeQRepRun( + ctx, + config, + runUUID, + partitions, + ) if err != nil { return nil, err } @@ -429,6 +470,11 @@ func (a *FlowableActivity) ReplicateQRepPartitions(ctx context.Context, partitions *protos.QRepPartitionBatch, runUUID string, ) error { + err := 
a.CatalogMirrorMonitor.UpdateStartTimeForQRepRun(ctx, runUUID) + if err != nil { + return fmt.Errorf("failed to update start time for qrep run: %w", err) + } + numPartitions := len(partitions.Partitions) log.Infof("replicating partitions for job - %s - batch %d - size: %d\n", config.FlowJobName, partitions.BatchId, numPartitions) @@ -451,16 +497,21 @@ func (a *FlowableActivity) replicateQRepPartition(ctx context.Context, partition *protos.QRepPartition, runUUID string, ) error { + err := a.CatalogMirrorMonitor.UpdateStartTimeForPartition(ctx, runUUID, partition) + if err != nil { + return fmt.Errorf("failed to update start time for partition: %w", err) + } + ctx = context.WithValue(ctx, shared.EnableMetricsKey, a.EnableMetrics) srcConn, err := connectors.GetQRepPullConnector(ctx, config.SourcePeer) if err != nil { - return fmt.Errorf("failed to get source connector: %w", err) + return fmt.Errorf("failed to get qrep source connector: %w", err) } defer connectors.CloseConnector(srcConn) dstConn, err := connectors.GetQRepSyncConnector(ctx, config.DestinationPeer) if err != nil { - return fmt.Errorf("failed to get destination connector: %w", err) + return fmt.Errorf("failed to get qrep destination connector: %w", err) } defer connectors.CloseConnector(dstConn) @@ -471,10 +522,6 @@ func (a *FlowableActivity) replicateQRepPartition(ctx context.Context, var wg sync.WaitGroup var numRecords int64 - err = a.CatalogMirrorMonitor.AddPartitionToQRepRun(ctx, config.FlowJobName, runUUID, partition) - if err != nil { - return err - } var goroutineErr error = nil if config.SourcePeer.Type == protos.DBType_POSTGRES { stream = model.NewQRecordStream(bufferSize) @@ -537,16 +584,16 @@ func (a *FlowableActivity) replicateQRepPartition(ctx context.Context, log.WithFields(log.Fields{ "flowName": config.FlowJobName, }).Infof("no records to push for partition %s\n", partition.PartitionId) - return nil + } else { + wg.Wait() + if goroutineErr != nil { + return goroutineErr + } + 
log.WithFields(log.Fields{ + "flowName": config.FlowJobName, + }).Infof("pushed %d records\n", res) } - wg.Wait() - if goroutineErr != nil { - return goroutineErr - } - log.WithFields(log.Fields{ - "flowName": config.FlowJobName, - }).Infof("pushed %d records\n", res) err = a.CatalogMirrorMonitor.UpdateEndTimeForPartition(ctx, runUUID, partition) if err != nil { return err @@ -560,7 +607,7 @@ func (a *FlowableActivity) ConsolidateQRepPartitions(ctx context.Context, config ctx = context.WithValue(ctx, shared.EnableMetricsKey, a.EnableMetrics) dstConn, err := connectors.GetQRepConsolidateConnector(ctx, config.DestinationPeer) if errors.Is(err, connectors.ErrUnsupportedFunctionality) { - return nil + return a.CatalogMirrorMonitor.UpdateEndTimeForQRepRun(ctx, runUUID) } else if err != nil { return err } @@ -574,13 +621,11 @@ func (a *FlowableActivity) ConsolidateQRepPartitions(ctx context.Context, config }() err = dstConn.ConsolidateQRepPartitions(config) - if errors.Is(err, connectors.ErrUnsupportedFunctionality) { - return nil - } else if err != nil { + if err != nil { return err } - err = a.CatalogMirrorMonitor.UpdateEndTimeForQRepRun(ctx, runUUID) - return err + + return a.CatalogMirrorMonitor.UpdateEndTimeForQRepRun(ctx, runUUID) } func (a *FlowableActivity) CleanupQRepFlow(ctx context.Context, config *protos.QRepConfig) error { @@ -617,3 +662,83 @@ func (a *FlowableActivity) DropFlow(ctx context.Context, config *protos.Shutdown } return nil } + +func (a *FlowableActivity) SendWALHeartbeat(ctx context.Context, config *protos.FlowConnectionConfigs) error { + srcConn, err := connectors.GetCDCPullConnector(ctx, config.Source) + if err != nil { + return fmt.Errorf("failed to get destination connector: %w", err) + } + defer connectors.CloseConnector(srcConn) + + err = srcConn.SendWALHeartbeat() + if err != nil { + return fmt.Errorf("failed to send WAL heartbeat: %w", err) + } + + return nil +} + +func (a *FlowableActivity) QRepWaitUntilNewRows(ctx context.Context, + 
config *protos.QRepConfig, last *protos.QRepPartition) error { + if config.SourcePeer.Type != protos.DBType_POSTGRES { + return nil + } + waitBetweenBatches := 5 * time.Second + if config.WaitBetweenBatchesSeconds > 0 { + waitBetweenBatches = time.Duration(config.WaitBetweenBatchesSeconds) * time.Second + } + + if config.WatermarkColumn == "xmin" { + // for xmin we ignore the wait between batches, as seq scan time is + // extremely slow. + waitBetweenBatches = 10 * time.Second + } + + srcConn, err := connectors.GetQRepPullConnector(ctx, config.SourcePeer) + if err != nil { + return fmt.Errorf("failed to get qrep source connector: %w", err) + } + defer connectors.CloseConnector(srcConn) + pgSrcConn := srcConn.(*connpostgres.PostgresConnector) + + log.WithFields(log.Fields{ + "flowName": config.FlowJobName, + }).Infof("current last partition value is %v\n", last) + attemptCount := 1 + for { + activity.RecordHeartbeat(ctx, fmt.Sprintf("no new rows yet, attempt #%d", attemptCount)) + time.Sleep(waitBetweenBatches) + + result, err := pgSrcConn.CheckForUpdatedMaxValue(config, last) + if err != nil { + return fmt.Errorf("failed to check for new rows: %w", err) + } + if result { + break + } + + attemptCount += 1 + } + + return nil +} + +func (a *FlowableActivity) RenameTables(ctx context.Context, config *protos.RenameTablesInput) (*protos.RenameTablesOutput, error) { + dstConn, err := connectors.GetCDCSyncConnector(ctx, config.Peer) + if err != nil { + return nil, fmt.Errorf("failed to get connector: %w", err) + } + defer connectors.CloseConnector(dstConn) + + // check if destination is snowflake, if not error out + if config.Peer.Type != protos.DBType_SNOWFLAKE { + return nil, fmt.Errorf("rename tables is only supported for snowflake") + } + + sfConn, ok := dstConn.(*connsnowflake.SnowflakeConnector) + if !ok { + return nil, fmt.Errorf("failed to cast connector to snowflake connector") + } + + return sfConn.RenameTables(config) +} diff --git a/flow/cmd/api.go 
b/flow/cmd/api.go index d5c0946c0..9126dc2c0 100644 --- a/flow/cmd/api.go +++ b/flow/cmd/api.go @@ -4,11 +4,15 @@ import ( "context" "fmt" "net" + "net/http" + "time" utils "github.com/PeerDB-io/peer-flow/connectors/utils/catalog" "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" log "github.com/sirupsen/logrus" "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" "google.golang.org/grpc/reflection" "go.temporal.io/sdk/client" @@ -17,16 +21,46 @@ import ( ) type APIServerParams struct { - ctx context.Context - Port uint - TemporalHostPort string + ctx context.Context + Port uint + GatewayPort uint + TemporalHostPort string + TemporalNamespace string +} + +// setupGRPCGatewayServer sets up the grpc-gateway mux +func setupGRPCGatewayServer(args *APIServerParams) (*http.Server, error) { + conn, err := grpc.DialContext( + context.Background(), + fmt.Sprintf("0.0.0.0:%d", args.Port), + grpc.WithBlock(), + grpc.WithTransportCredentials(insecure.NewCredentials()), + ) + + if err != nil { + return nil, fmt.Errorf("unable to dial grpc server: %w", err) + } + + gwmux := runtime.NewServeMux() + err = protos.RegisterFlowServiceHandler(context.Background(), gwmux, conn) + if err != nil { + return nil, fmt.Errorf("unable to register gateway: %w", err) + } + + server := &http.Server{ + Addr: fmt.Sprintf(":%d", args.GatewayPort), + Handler: gwmux, + ReadHeaderTimeout: 5 * time.Minute, + } + return server, nil } func APIMain(args *APIServerParams) error { ctx := args.ctx tc, err := client.Dial(client.Options{ - HostPort: args.TemporalHostPort, + HostPort: args.TemporalHostPort, + Namespace: args.TemporalNamespace, }) if err != nil { return fmt.Errorf("unable to create Temporal client: %w", err) @@ -58,6 +92,18 @@ func APIMain(args *APIServerParams) error { } }() + gateway, err := setupGRPCGatewayServer(args) + if err != nil { + return fmt.Errorf("unable to setup gateway server: %w", err) + } + + 
log.Infof("Starting API gateway on port %d", args.GatewayPort) + go func() { + if err := gateway.ListenAndServe(); err != nil { + log.Fatalf("failed to serve http: %v", err) + } + }() + <-ctx.Done() grpcServer.GracefulStop() diff --git a/flow/cmd/handler.go b/flow/cmd/handler.go index 0365c76a8..99c105e96 100644 --- a/flow/cmd/handler.go +++ b/flow/cmd/handler.go @@ -3,10 +3,14 @@ package main import ( "context" "fmt" + "strings" + "time" + "github.com/PeerDB-io/peer-flow/connectors" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/shared" peerflow "github.com/PeerDB-io/peer-flow/workflows" + backoff "github.com/cenkalti/backoff/v4" "github.com/google/uuid" "github.com/jackc/pgx/v5/pgxpool" log "github.com/sirupsen/logrus" @@ -28,6 +32,87 @@ func NewFlowRequestHandler(temporalClient client.Client, pool *pgxpool.Pool) *Fl } } +func (h *FlowRequestHandler) getPeerID(ctx context.Context, peerName string) (int32, int32, error) { + var id int32 + var peerType int32 + err := h.pool.QueryRow(ctx, "SELECT id,type FROM peers WHERE name = $1", peerName).Scan(&id, &peerType) + if err != nil { + log.Errorf("unable to query peer id for peer %s: %s", peerName, err.Error()) + return -1, -1, fmt.Errorf("unable to query peer id for peer %s: %s", peerName, err) + } + return id, peerType, nil +} + +func schemaForTableIdentifier(tableIdentifier string, peerDBType int32) string { + tableIdentifierParts := strings.Split(tableIdentifier, ".") + if len(tableIdentifierParts) == 1 && peerDBType != int32(protos.DBType_BIGQUERY) { + tableIdentifierParts = append([]string{"public"}, tableIdentifierParts...) 
+ } + return strings.Join(tableIdentifierParts, ".") +} + +func (h *FlowRequestHandler) createCdcJobEntry(ctx context.Context, + req *protos.CreateCDCFlowRequest, workflowID string) error { + sourcePeerID, sourePeerType, srcErr := h.getPeerID(ctx, req.ConnectionConfigs.Source.Name) + if srcErr != nil { + return fmt.Errorf("unable to get peer id for source peer %s: %w", + req.ConnectionConfigs.Source.Name, srcErr) + } + + destinationPeerID, destinationPeerType, dstErr := h.getPeerID(ctx, req.ConnectionConfigs.Destination.Name) + if dstErr != nil { + return fmt.Errorf("unable to get peer id for target peer %s: %w", + req.ConnectionConfigs.Destination.Name, srcErr) + } + + for _, v := range req.ConnectionConfigs.TableMappings { + _, err := h.pool.Exec(ctx, ` + INSERT INTO flows (workflow_id, name, source_peer, destination_peer, description, + source_table_identifier, destination_table_identifier) VALUES ($1, $2, $3, $4, $5, $6, $7) + `, workflowID, req.ConnectionConfigs.FlowJobName, sourcePeerID, destinationPeerID, + "Mirror created via GRPC", + schemaForTableIdentifier(v.SourceTableIdentifier, sourePeerType), + schemaForTableIdentifier(v.DestinationTableIdentifier, destinationPeerType)) + if err != nil { + return fmt.Errorf("unable to insert into flows table for flow %s with source table %s: %w", + req.ConnectionConfigs.FlowJobName, v.SourceTableIdentifier, err) + } + } + + return nil +} + +func (h *FlowRequestHandler) createQrepJobEntry(ctx context.Context, + req *protos.CreateQRepFlowRequest, workflowID string) error { + sourcePeerName := req.QrepConfig.SourcePeer.Name + sourcePeerID, _, srcErr := h.getPeerID(ctx, sourcePeerName) + if srcErr != nil { + return fmt.Errorf("unable to get peer id for source peer %s: %w", + sourcePeerName, srcErr) + } + + destinationPeerName := req.QrepConfig.DestinationPeer.Name + destinationPeerID, _, dstErr := h.getPeerID(ctx, destinationPeerName) + if dstErr != nil { + return fmt.Errorf("unable to get peer id for target peer %s: 
%w", + destinationPeerName, srcErr) + } + flowName := req.QrepConfig.FlowJobName + _, err := h.pool.Exec(ctx, `INSERT INTO flows (workflow_id,name, source_peer, destination_peer, description, + destination_table_identifier, query_string) VALUES ($1, $2, $3, $4, $5, $6, $7) + `, workflowID, flowName, sourcePeerID, destinationPeerID, + "Mirror created via GRPC", + req.QrepConfig.DestinationTableIdentifier, + req.QrepConfig.Query, + ) + if err != nil { + return fmt.Errorf("unable to insert into flows table for flow %s with source table %s: %w", + flowName, req.QrepConfig.WatermarkTable, err) + } + + return nil +} + // Close closes the connection pool func (h *FlowRequestHandler) Close() { if h.pool != nil { @@ -56,8 +141,21 @@ func (h *FlowRequestHandler) CreateCDCFlow( MaxBatchSize: maxBatchSize, } + if req.CreateCatalogEntry { + err := h.createCdcJobEntry(ctx, req, workflowID) + if err != nil { + return nil, fmt.Errorf("unable to create flow job entry: %w", err) + } + } + + var err error + err = h.updateFlowConfigInCatalog(cfg) + if err != nil { + return nil, fmt.Errorf("unable to update flow config in catalog: %w", err) + } + state := peerflow.NewCDCFlowState() - _, err := h.temporalClient.ExecuteWorkflow( + _, err = h.temporalClient.ExecuteWorkflow( ctx, // context workflowOptions, // workflow start options peerflow.CDCFlowWorkflowWithConfig, // workflow function @@ -74,6 +172,40 @@ func (h *FlowRequestHandler) CreateCDCFlow( }, nil } +func (h *FlowRequestHandler) updateFlowConfigInCatalog( + cfg *protos.FlowConnectionConfigs, +) error { + var cfgBytes []byte + var err error + + cfgBytes, err = proto.Marshal(cfg) + if err != nil { + return fmt.Errorf("unable to marshal flow config: %w", err) + } + + _, err = h.pool.Exec(context.Background(), + "UPDATE flows SET config_proto = $1 WHERE name = $2", + cfgBytes, cfg.FlowJobName) + if err != nil { + return fmt.Errorf("unable to update flow config in catalog: %w", err) + } + + return nil +} + +func (h 
*FlowRequestHandler) removeFlowEntryInCatalog( + flowName string, +) error { + _, err := h.pool.Exec(context.Background(), + "DELETE FROM flows WHERE name = $1", + flowName) + if err != nil { + return fmt.Errorf("unable to remove flow entry in catalog: %w", err) + } + + return nil +} + func (h *FlowRequestHandler) CreateQRepFlow( ctx context.Context, req *protos.CreateQRepFlowRequest) (*protos.CreateQRepFlowResponse, error) { lastPartition := &protos.QRepPartition{ @@ -87,7 +219,12 @@ func (h *FlowRequestHandler) CreateQRepFlow( ID: workflowID, TaskQueue: shared.PeerFlowTaskQueue, } - + if req.CreateCatalogEntry { + err := h.createQrepJobEntry(ctx, req, workflowID) + if err != nil { + return nil, fmt.Errorf("unable to create flow job entry: %w", err) + } + } numPartitionsProcessed := 0 _, err := h.temporalClient.ExecuteWorkflow( ctx, // context @@ -101,13 +238,42 @@ func (h *FlowRequestHandler) CreateQRepFlow( return nil, fmt.Errorf("unable to start QRepFlow workflow: %w", err) } + err = h.updateQRepConfigInCatalog(cfg) + if err != nil { + return nil, fmt.Errorf("unable to update qrep config in catalog: %w", err) + } + return &protos.CreateQRepFlowResponse{ WorflowId: workflowID, }, nil } +// updateQRepConfigInCatalog updates the qrep config in the catalog +func (h *FlowRequestHandler) updateQRepConfigInCatalog( + cfg *protos.QRepConfig, +) error { + var cfgBytes []byte + var err error + + cfgBytes, err = proto.Marshal(cfg) + if err != nil { + return fmt.Errorf("unable to marshal qrep config: %w", err) + } + + _, err = h.pool.Exec(context.Background(), + "UPDATE flows SET config_proto = $1 WHERE name = $2", + cfgBytes, cfg.FlowJobName) + if err != nil { + return fmt.Errorf("unable to update qrep config in catalog: %w", err) + } + + return nil +} + func (h *FlowRequestHandler) ShutdownFlow( - ctx context.Context, req *protos.ShutdownRequest) (*protos.ShutdownResponse, error) { + ctx context.Context, + req *protos.ShutdownRequest, +) (*protos.ShutdownResponse, error) 
{ err := h.temporalClient.SignalWorkflow( ctx, req.WorkflowId, @@ -116,7 +282,18 @@ func (h *FlowRequestHandler) ShutdownFlow( shared.ShutdownSignal, ) if err != nil { - return nil, fmt.Errorf("unable to signal PeerFlow workflow: %w", err) + return &protos.ShutdownResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("unable to signal PeerFlow workflow: %v", err), + }, fmt.Errorf("unable to signal PeerFlow workflow: %w", err) + } + + err = h.waitForWorkflowClose(ctx, req.WorkflowId) + if err != nil { + return &protos.ShutdownResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("unable to wait for PeerFlow workflow to close: %v", err), + }, fmt.Errorf("unable to wait for PeerFlow workflow to close: %w", err) } workflowID := fmt.Sprintf("%s-dropflow-%s", req.FlowJobName, uuid.New()) @@ -131,11 +308,44 @@ func (h *FlowRequestHandler) ShutdownFlow( req, // workflow input ) if err != nil { - return nil, fmt.Errorf("unable to start DropFlow workflow: %w", err) + return &protos.ShutdownResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("unable to start DropFlow workflow: %v", err), + }, fmt.Errorf("unable to start DropFlow workflow: %w", err) } - if err = dropFlowHandle.Get(ctx, nil); err != nil { - return nil, fmt.Errorf("DropFlow workflow did not execute successfully: %w", err) + cancelCtx, cancel := context.WithTimeout(ctx, 2*time.Minute) + defer cancel() + + errChan := make(chan error, 1) + go func() { + errChan <- dropFlowHandle.Get(cancelCtx, nil) + }() + + select { + case err := <-errChan: + if err != nil { + return &protos.ShutdownResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("DropFlow workflow did not execute successfully: %v", err), + }, fmt.Errorf("DropFlow workflow did not execute successfully: %w", err) + } + case <-time.After(1 * time.Minute): + err := h.handleWorkflowNotClosed(ctx, workflowID, "") + if err != nil { + return &protos.ShutdownResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("unable to wait for DropFlow workflow to close: %v", err), + }, 
fmt.Errorf("unable to wait for DropFlow workflow to close: %w", err) + } + } + + delErr := h.removeFlowEntryInCatalog(req.FlowJobName) + if delErr != nil { + return &protos.ShutdownResponse{ + Ok: false, + ErrorMessage: err.Error(), + }, err } return &protos.ShutdownResponse{ @@ -143,92 +353,238 @@ func (h *FlowRequestHandler) ShutdownFlow( }, nil } -func (h *FlowRequestHandler) ListPeers( - ctx context.Context, - req *protos.ListPeersRequest, -) (*protos.ListPeersResponse, error) { - rows, err := h.pool.Query(ctx, "SELECT * FROM peers") - if err != nil { - return nil, fmt.Errorf("unable to query peers: %w", err) - } - defer rows.Close() - - peers := []*protos.Peer{} - for rows.Next() { - var id int - var name string - var peerType int - var options []byte - if err := rows.Scan(&id, &name, &peerType, &options); err != nil { - return nil, fmt.Errorf("unable to scan peer row: %w", err) +func (h *FlowRequestHandler) waitForWorkflowClose(ctx context.Context, workflowID string) error { + expBackoff := backoff.NewExponentialBackOff() + expBackoff.InitialInterval = 3 * time.Second + expBackoff.MaxInterval = 10 * time.Second + expBackoff.MaxElapsedTime = 1 * time.Minute + + // empty will terminate the latest run + runID := "" + + operation := func() error { + workflowRes, err := h.temporalClient.DescribeWorkflowExecution(ctx, workflowID, runID) + if err != nil { + // Permanent error will stop the retries + return backoff.Permanent(fmt.Errorf("unable to describe PeerFlow workflow: %w", err)) } - dbtype := protos.DBType(peerType) - var peer *protos.Peer - switch dbtype { - case protos.DBType_POSTGRES: - var pgOptions protos.PostgresConfig - err := proto.Unmarshal(options, &pgOptions) - if err != nil { - return nil, fmt.Errorf("unable to unmarshal postgres options: %w", err) - } - peer = &protos.Peer{ - Name: name, - Type: dbtype, - Config: &protos.Peer_PostgresConfig{PostgresConfig: &pgOptions}, - } - case protos.DBType_BIGQUERY: - var bqOptions protos.BigqueryConfig - err 
:= proto.Unmarshal(options, &bqOptions) - if err != nil { - return nil, fmt.Errorf("unable to unmarshal bigquery options: %w", err) - } - peer = &protos.Peer{ - Name: name, - Type: dbtype, - Config: &protos.Peer_BigqueryConfig{BigqueryConfig: &bqOptions}, - } - case protos.DBType_SNOWFLAKE: - var sfOptions protos.SnowflakeConfig - err := proto.Unmarshal(options, &sfOptions) - if err != nil { - return nil, fmt.Errorf("unable to unmarshal snowflake options: %w", err) - } - peer = &protos.Peer{ - Name: name, - Type: dbtype, - Config: &protos.Peer_SnowflakeConfig{SnowflakeConfig: &sfOptions}, - } - case protos.DBType_EVENTHUB: - var ehOptions protos.EventHubConfig - err := proto.Unmarshal(options, &ehOptions) - if err != nil { - return nil, fmt.Errorf("unable to unmarshal eventhub options: %w", err) - } - peer = &protos.Peer{ - Name: name, - Type: dbtype, - Config: &protos.Peer_EventhubConfig{EventhubConfig: &ehOptions}, - } - case protos.DBType_SQLSERVER: - var ssOptions protos.SqlServerConfig - err := proto.Unmarshal(options, &ssOptions) - if err != nil { - return nil, fmt.Errorf("unable to unmarshal sqlserver options: %w", err) - } - peer = &protos.Peer{ - Name: name, - Type: dbtype, - Config: &protos.Peer_SqlserverConfig{SqlserverConfig: &ssOptions}, + if workflowRes.WorkflowExecutionInfo.CloseTime != nil { + return nil + } + + return fmt.Errorf("workflow - %s not closed yet: %v", workflowID, workflowRes) + } + + err := backoff.Retry(operation, expBackoff) + if err != nil { + return h.handleWorkflowNotClosed(ctx, workflowID, runID) + } + + return nil +} + +func (h *FlowRequestHandler) handleWorkflowNotClosed(ctx context.Context, workflowID, runID string) error { + errChan := make(chan error, 1) + + // Create a new context with timeout for CancelWorkflow + ctxWithTimeout, cancel := context.WithTimeout(ctx, 2*time.Minute) + defer cancel() + + // Call CancelWorkflow in a goroutine + go func() { + err := h.temporalClient.CancelWorkflow(ctxWithTimeout, workflowID, 
runID) + errChan <- err + }() + + select { + case err := <-errChan: + if err != nil { + log.Errorf("unable to cancel PeerFlow workflow: %s. Attempting to terminate.", err.Error()) + terminationReason := fmt.Sprintf("workflow %s did not cancel in time.", workflowID) + if err = h.temporalClient.TerminateWorkflow(ctx, workflowID, runID, terminationReason); err != nil { + return fmt.Errorf("unable to terminate PeerFlow workflow: %w", err) } - default: - log.Errorf("unsupported peer type for peer '%s': %v", name, dbtype) } + case <-time.After(1 * time.Minute): + // If 1 minute has passed and we haven't received an error, terminate the workflow + log.Errorf("Timeout reached while trying to cancel PeerFlow workflow. Attempting to terminate.") + terminationReason := fmt.Sprintf("workflow %s did not cancel in time.", workflowID) + if err := h.temporalClient.TerminateWorkflow(ctx, workflowID, runID, terminationReason); err != nil { + return fmt.Errorf("unable to terminate PeerFlow workflow: %w", err) + } + } + + return nil +} + +func (h *FlowRequestHandler) ValidatePeer( + ctx context.Context, + req *protos.ValidatePeerRequest, +) (*protos.ValidatePeerResponse, error) { + if req.Peer == nil { + return &protos.ValidatePeerResponse{ + Status: protos.ValidatePeerStatus_INVALID, + Message: "no peer provided", + }, nil + } + + if len(req.Peer.Name) == 0 { + return &protos.ValidatePeerResponse{ + Status: protos.ValidatePeerStatus_INVALID, + Message: "no peer name provided", + }, nil + } + + conn, err := connectors.GetConnector(ctx, req.Peer) + if err != nil { + return &protos.ValidatePeerResponse{ + Status: protos.ValidatePeerStatus_INVALID, + Message: fmt.Sprintf("peer type is missing or "+ + "your requested configuration for %s peer %s was invalidated: %s", + req.Peer.Type, req.Peer.Name, err), + }, nil + } + + status := conn.ConnectionActive() + if !status { + return &protos.ValidatePeerResponse{ + Status: protos.ValidatePeerStatus_INVALID, + Message: fmt.Sprintf("failed to 
establish active connection to %s peer %s.", + req.Peer.Type, req.Peer.Name), + }, nil + } + + return &protos.ValidatePeerResponse{ + Status: protos.ValidatePeerStatus_VALID, + Message: fmt.Sprintf("%s peer %s is valid", + req.Peer.Type, req.Peer.Name), + }, nil +} + +func (h *FlowRequestHandler) CreatePeer( + ctx context.Context, + req *protos.CreatePeerRequest, +) (*protos.CreatePeerResponse, error) { + status, validateErr := h.ValidatePeer(ctx, &protos.ValidatePeerRequest{Peer: req.Peer}) + if validateErr != nil { + return nil, validateErr + } + if status.Status != protos.ValidatePeerStatus_VALID { + return &protos.CreatePeerResponse{ + Status: protos.CreatePeerStatus_FAILED, + Message: status.Message, + }, nil + } + + config := req.Peer.Config + wrongConfigResponse := &protos.CreatePeerResponse{ + Status: protos.CreatePeerStatus_FAILED, + Message: fmt.Sprintf("invalid config for %s peer %s", + req.Peer.Type, req.Peer.Name), + } + var encodedConfig []byte + var encodingErr error + peerType := req.Peer.Type + switch peerType { + case protos.DBType_POSTGRES: + pgConfigObject, ok := config.(*protos.Peer_PostgresConfig) + if !ok { + return wrongConfigResponse, nil + } + pgConfig := pgConfigObject.PostgresConfig + + encodedConfig, encodingErr = proto.Marshal(pgConfig) + + case protos.DBType_SNOWFLAKE: + sfConfigObject, ok := config.(*protos.Peer_SnowflakeConfig) + if !ok { + return wrongConfigResponse, nil + } + sfConfig := sfConfigObject.SnowflakeConfig + encodedConfig, encodingErr = proto.Marshal(sfConfig) + case protos.DBType_SQLSERVER: + sqlServerConfigObject, ok := config.(*protos.Peer_SqlserverConfig) + if !ok { + return wrongConfigResponse, nil + } + sqlServerConfig := sqlServerConfigObject.SqlserverConfig + encodedConfig, encodingErr = proto.Marshal(sqlServerConfig) + + default: + return wrongConfigResponse, nil + } + if encodingErr != nil { + log.Errorf("failed to encode peer configuration for %s peer %s : %v", + req.Peer.Type, req.Peer.Name, encodingErr) + 
return nil, encodingErr + } + + _, err := h.pool.Exec(ctx, "INSERT INTO peers (name, type, options) VALUES ($1, $2, $3)", + req.Peer.Name, peerType, encodedConfig, + ) + if err != nil { + return &protos.CreatePeerResponse{ + Status: protos.CreatePeerStatus_FAILED, + Message: fmt.Sprintf("failed to insert into peers table for %s peer %s: %s", + req.Peer.Type, req.Peer.Name, err.Error()), + }, nil + } + + return &protos.CreatePeerResponse{ + Status: protos.CreatePeerStatus_CREATED, + Message: "", + }, nil +} + +func (h *FlowRequestHandler) DropPeer( + ctx context.Context, + req *protos.DropPeerRequest, +) (*protos.DropPeerResponse, error) { + if req.PeerName == "" { + return &protos.DropPeerResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("Peer %s not found", req.PeerName), + }, fmt.Errorf("peer %s not found", req.PeerName) + } - peers = append(peers, peer) + // Check if peer name is in flows table + peerID, _, err := h.getPeerID(ctx, req.PeerName) + if err != nil { + return &protos.DropPeerResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("Failed to obtain peer ID for peer %s: %v", req.PeerName, err), + }, fmt.Errorf("failed to obtain peer ID for peer %s: %v", req.PeerName, err) + } + + var inMirror int64 + queryErr := h.pool.QueryRow(ctx, + "SELECT COUNT(*) FROM flows WHERE source_peer=$1 or destination_peer=$2", + peerID, peerID).Scan(&inMirror) + if queryErr != nil { + return &protos.DropPeerResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("Failed to check for existing mirrors with peer %s: %v", req.PeerName, queryErr), + }, fmt.Errorf("failed to check for existing mirrors with peer %s", req.PeerName) + } + + if inMirror != 0 { + return &protos.DropPeerResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("Peer %s is currently involved in an ongoing mirror.", req.PeerName), + }, nil } - return &protos.ListPeersResponse{ - Peers: peers, + _, delErr := h.pool.Exec(ctx, "DELETE FROM peers WHERE name = $1", req.PeerName) + if delErr != nil { + return 
&protos.DropPeerResponse{ + Ok: false, + ErrorMessage: fmt.Sprintf("failed to delete peer %s from metadata table: %v", req.PeerName, delErr), + }, fmt.Errorf("failed to delete peer %s from metadata table: %v", req.PeerName, delErr) + } + + return &protos.DropPeerResponse{ + Ok: true, }, nil + } diff --git a/flow/cmd/main.go b/flow/cmd/main.go index e2c0a6a41..66e96996c 100644 --- a/flow/cmd/main.go +++ b/flow/cmd/main.go @@ -44,13 +44,6 @@ func main() { EnvVars: []string{"ENABLE_METRICS"}, } - monitoringFlag := &cli.BoolFlag{ - Name: "enable-monitoring", - Value: false, // Default is off - Usage: "Enable mirror monitoring for the application", - EnvVars: []string{"ENABLE_STATS"}, - } - pyroscopeServerFlag := &cli.StringFlag{ Name: "pyroscope-server-address", Value: "http://pyroscope:4040", @@ -65,6 +58,13 @@ func main() { EnvVars: []string{"METRICS_SERVER"}, } + temporalNamespaceFlag := &cli.StringFlag{ + Name: "temporal-namespace", + Value: "default", + Usage: "Temporal namespace to use for workflow orchestration", + EnvVars: []string{"PEERDB_TEMPORAL_NAMESPACE"}, + } + app := &cli.App{ Name: "PeerDB Flows CLI", Commands: []*cli.Command{ @@ -73,21 +73,21 @@ func main() { Action: func(ctx *cli.Context) error { temporalHostPort := ctx.String("temporal-host-port") return WorkerMain(&WorkerOptions{ - TemporalHostPort: temporalHostPort, - EnableProfiling: ctx.Bool("enable-profiling"), - EnableMetrics: ctx.Bool("enable-metrics"), - EnableMonitoring: ctx.Bool("enable-monitoring"), - PyroscopeServer: ctx.String("pyroscope-server-address"), - MetricsServer: ctx.String("metrics-server"), + TemporalHostPort: temporalHostPort, + EnableProfiling: ctx.Bool("enable-profiling"), + EnableMetrics: ctx.Bool("enable-metrics"), + PyroscopeServer: ctx.String("pyroscope-server-address"), + MetricsServer: ctx.String("metrics-server"), + TemporalNamespace: ctx.String("temporal-namespace"), }) }, Flags: []cli.Flag{ temporalHostPortFlag, profilingFlag, metricsFlag, - monitoringFlag, 
pyroscopeServerFlag, metricsServerFlag, + temporalNamespaceFlag, }, }, { @@ -95,11 +95,13 @@ func main() { Action: func(ctx *cli.Context) error { temporalHostPort := ctx.String("temporal-host-port") return SnapshotWorkerMain(&SnapshotWorkerOptions{ - TemporalHostPort: temporalHostPort, + TemporalHostPort: temporalHostPort, + TemporalNamespace: ctx.String("temporal-namespace"), }) }, Flags: []cli.Flag{ temporalHostPortFlag, + temporalNamespaceFlag, }, }, { @@ -110,15 +112,23 @@ func main() { Aliases: []string{"p"}, Value: 8110, }, + // gateway port is the port that the grpc-gateway listens on + &cli.UintFlag{ + Name: "gateway-port", + Value: 8111, + }, temporalHostPortFlag, + temporalNamespaceFlag, }, Action: func(ctx *cli.Context) error { temporalHostPort := ctx.String("temporal-host-port") return APIMain(&APIServerParams{ - ctx: appCtx, - Port: ctx.Uint("port"), - TemporalHostPort: temporalHostPort, + ctx: appCtx, + Port: ctx.Uint("port"), + TemporalHostPort: temporalHostPort, + GatewayPort: ctx.Uint("gateway-port"), + TemporalNamespace: ctx.String("temporal-namespace"), }) }, }, diff --git a/flow/cmd/mirror_status.go b/flow/cmd/mirror_status.go new file mode 100644 index 000000000..73b2b444a --- /dev/null +++ b/flow/cmd/mirror_status.go @@ -0,0 +1,275 @@ +package main + +import ( + "context" + "database/sql" + "fmt" + + "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/jackc/pgx/v5/pgtype" + "github.com/sirupsen/logrus" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" +) + +func (h *FlowRequestHandler) MirrorStatus( + ctx context.Context, + req *protos.MirrorStatusRequest, +) (*protos.MirrorStatusResponse, error) { + cdcFlow, err := h.isCDCFlow(ctx, req.FlowJobName) + if err != nil { + return &protos.MirrorStatusResponse{ + ErrorMessage: fmt.Sprintf("unable to query flow: %s", err.Error()), + }, nil + } + + if cdcFlow { + cdcStatus, err := h.CDCFlowStatus(ctx, req) + if err != nil { + return 
&protos.MirrorStatusResponse{ + ErrorMessage: fmt.Sprintf("unable to query flow: %s", err.Error()), + }, nil + } + + return &protos.MirrorStatusResponse{ + FlowJobName: req.FlowJobName, + Status: &protos.MirrorStatusResponse_CdcStatus{ + CdcStatus: cdcStatus, + }, + }, nil + } else { + qrepStatus, err := h.QRepFlowStatus(ctx, req) + if err != nil { + return &protos.MirrorStatusResponse{ + ErrorMessage: fmt.Sprintf("unable to query flow: %s", err.Error()), + }, nil + } + + return &protos.MirrorStatusResponse{ + FlowJobName: req.FlowJobName, + Status: &protos.MirrorStatusResponse_QrepStatus{ + QrepStatus: qrepStatus, + }, + }, nil + } +} + +func (h *FlowRequestHandler) CDCFlowStatus( + ctx context.Context, + req *protos.MirrorStatusRequest, +) (*protos.CDCMirrorStatus, error) { + config, err := h.getFlowConfigFromCatalog(req.FlowJobName) + if err != nil { + return nil, err + } + + var initialCopyStatus *protos.SnapshotStatus + if config.DoInitialCopy { + cloneJobNames, err := h.getCloneTableFlowNames(ctx, req.FlowJobName) + if err != nil { + return nil, err + } + + cloneStatuses := []*protos.QRepMirrorStatus{} + for _, cloneJobName := range cloneJobNames { + cloneStatus, err := h.QRepFlowStatus(ctx, &protos.MirrorStatusRequest{ + FlowJobName: cloneJobName, + }) + if err != nil { + return nil, err + } + cloneStatuses = append(cloneStatuses, cloneStatus) + } + + initialCopyStatus = &protos.SnapshotStatus{ + Clones: cloneStatuses, + } + } + + return &protos.CDCMirrorStatus{ + Config: config, + SnapshotStatus: initialCopyStatus, + }, nil +} + +func (h *FlowRequestHandler) QRepFlowStatus( + ctx context.Context, + req *protos.MirrorStatusRequest, +) (*protos.QRepMirrorStatus, error) { + parts, err := h.getPartitionUUIDs(ctx, req.FlowJobName) + if err != nil { + return nil, err + } + + partitionStatuses := []*protos.PartitionStatus{} + for _, part := range parts { + partitionStatus, err := h.getPartitionStatus(ctx, part) + if err != nil { + return nil, err + } + + 
partitionStatuses = append(partitionStatuses, partitionStatus) + } + + return &protos.QRepMirrorStatus{ + // The clone table jobs that are children of the CDC snapshot flow + // do not have a config entry, so allow this to be nil. + Config: h.getQRepConfigFromCatalog(req.FlowJobName), + Partitions: partitionStatuses, + }, nil +} + +// getPartitionStatus returns the status of a partition uuid. +func (h *FlowRequestHandler) getPartitionStatus( + ctx context.Context, + partitionUUID string, +) (*protos.PartitionStatus, error) { + partitionStatus := &protos.PartitionStatus{ + PartitionId: partitionUUID, + } + + var startTime pgtype.Timestamp + var endTime pgtype.Timestamp + var numRows pgtype.Int4 + + q := "SELECT start_time, end_time, rows_in_partition FROM peerdb_stats.qrep_partitions WHERE partition_uuid = $1" + err := h.pool.QueryRow(ctx, q, partitionUUID).Scan(&startTime, &endTime, &numRows) + if err != nil { + return nil, fmt.Errorf("unable to query qrep partition - %s: %w", partitionUUID, err) + } + + if startTime.Valid { + partitionStatus.StartTime = timestamppb.New(startTime.Time) + } + + if endTime.Valid { + partitionStatus.EndTime = timestamppb.New(endTime.Time) + } + + if numRows.Valid { + partitionStatus.NumRows = numRows.Int32 + } + + return partitionStatus, nil +} + +func (h *FlowRequestHandler) getPartitionUUIDs( + ctx context.Context, + flowJobName string, +) ([]string, error) { + rows, err := h.pool.Query(ctx, + "SELECT partition_uuid FROM peerdb_stats.qrep_partitions WHERE flow_name = $1", flowJobName) + if err != nil { + return nil, fmt.Errorf("unable to query qrep partitions: %w", err) + } + defer rows.Close() + + partitionUUIDs := []string{} + for rows.Next() { + var partitionUUID pgtype.Text + if err := rows.Scan(&partitionUUID); err != nil { + return nil, fmt.Errorf("unable to scan partition row: %w", err) + } + partitionUUIDs = append(partitionUUIDs, partitionUUID.String) + } + + return partitionUUIDs, nil +} + +func (h *FlowRequestHandler) 
getFlowConfigFromCatalog( + flowJobName string, +) (*protos.FlowConnectionConfigs, error) { + var configBytes sql.RawBytes + var err error + var config protos.FlowConnectionConfigs + + err = h.pool.QueryRow(context.Background(), + "SELECT config_proto FROM flows WHERE name = $1", flowJobName).Scan(&configBytes) + if err != nil { + return nil, fmt.Errorf("unable to query flow config from catalog: %w", err) + } + + err = proto.Unmarshal(configBytes, &config) + if err != nil { + return nil, fmt.Errorf("unable to unmarshal flow config: %w", err) + } + + return &config, nil +} + +func (h *FlowRequestHandler) getQRepConfigFromCatalog(flowJobName string) *protos.QRepConfig { + var configBytes []byte + var config protos.QRepConfig + + queryInfos := []struct { + Query string + Warning string + }{ + { + Query: "SELECT config_proto FROM flows WHERE name = $1", + Warning: "unable to query qrep config from catalog", + }, + { + Query: "SELECT config_proto FROM peerdb_stats.qrep_runs WHERE flow_name = $1", + Warning: "unable to query qrep config from qrep_runs", + }, + } + + // Iterate over queries and attempt to fetch the config + for _, qInfo := range queryInfos { + err := h.pool.QueryRow(context.Background(), qInfo.Query, flowJobName).Scan(&configBytes) + if err == nil { + break + } + logrus.Warnf("%s - %s: %s", qInfo.Warning, flowJobName, err.Error()) + } + + // If no config was fetched, return nil + if len(configBytes) == 0 { + return nil + } + + // Try unmarshaling + if err := proto.Unmarshal(configBytes, &config); err != nil { + logrus.Warnf("failed to unmarshal config for %s: %s", flowJobName, err.Error()) + return nil + } + + return &config +} + +func (h *FlowRequestHandler) isCDCFlow(ctx context.Context, flowJobName string) (bool, error) { + var query pgtype.Text + err := h.pool.QueryRow(ctx, "SELECT query_string FROM flows WHERE name = $1", flowJobName).Scan(&query) + if err != nil { + return false, fmt.Errorf("unable to query flow: %w", err) + } + + if !query.Valid || 
len(query.String) == 0 { + return true, nil + } + + return false, nil +} + +func (h *FlowRequestHandler) getCloneTableFlowNames(ctx context.Context, flowJobName string) ([]string, error) { + q := "SELECT flow_name FROM peerdb_stats.qrep_runs WHERE flow_name ILIKE $1" + rows, err := h.pool.Query(ctx, q, "clone_"+flowJobName+"_%") + if err != nil { + return nil, fmt.Errorf("unable to getCloneTableFlowNames: %w", err) + } + defer rows.Close() + + flowNames := []string{} + for rows.Next() { + var name pgtype.Text + if err := rows.Scan(&name); err != nil { + return nil, fmt.Errorf("unable to scan flow row: %w", err) + } + if name.Valid { + flowNames = append(flowNames, name.String) + } + } + + return flowNames, nil +} diff --git a/flow/cmd/peer_data.go b/flow/cmd/peer_data.go new file mode 100644 index 000000000..11b5761c1 --- /dev/null +++ b/flow/cmd/peer_data.go @@ -0,0 +1,238 @@ +package main + +import ( + "context" + "database/sql" + "fmt" + + "github.com/PeerDB-io/peer-flow/connectors/utils" + "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/jackc/pgx/v5/pgxpool" + "google.golang.org/protobuf/proto" +) + +func (h *FlowRequestHandler) getPoolForPGPeer(ctx context.Context, peerName string) (*pgxpool.Pool, string, error) { + var pgPeerOptions sql.RawBytes + var pgPeerConfig protos.PostgresConfig + err := h.pool.QueryRow(ctx, + "SELECT options FROM peers WHERE name = $1 AND type=3", peerName).Scan(&pgPeerOptions) + if err != nil { + return nil, "", err + } + + unmarshalErr := proto.Unmarshal(pgPeerOptions, &pgPeerConfig) + if err != nil { + return nil, "", unmarshalErr + } + + connStr := utils.GetPGConnectionString(&pgPeerConfig) + peerPool, err := pgxpool.New(ctx, connStr) + if err != nil { + return nil, "", err + } + return peerPool, pgPeerConfig.User, nil +} + +func (h *FlowRequestHandler) GetSchemas( + ctx context.Context, + req *protos.PostgresPeerActivityInfoRequest, +) (*protos.PeerSchemasResponse, error) { + peerPool, _, err := 
h.getPoolForPGPeer(ctx, req.PeerName) + if err != nil { + return &protos.PeerSchemasResponse{Schemas: nil}, err + } + + defer peerPool.Close() + rows, err := peerPool.Query(ctx, "SELECT schema_name"+ + " FROM information_schema.schemata;") + if err != nil { + return &protos.PeerSchemasResponse{Schemas: nil}, err + } + + defer rows.Close() + var schemas []string + for rows.Next() { + var schema string + err := rows.Scan(&schema) + if err != nil { + return &protos.PeerSchemasResponse{Schemas: nil}, err + } + + schemas = append(schemas, schema) + } + return &protos.PeerSchemasResponse{Schemas: schemas}, nil +} + +func (h *FlowRequestHandler) GetTablesInSchema( + ctx context.Context, + req *protos.SchemaTablesRequest, +) (*protos.SchemaTablesResponse, error) { + peerPool, _, err := h.getPoolForPGPeer(ctx, req.PeerName) + if err != nil { + return &protos.SchemaTablesResponse{Tables: nil}, err + } + + defer peerPool.Close() + rows, err := peerPool.Query(ctx, "SELECT table_name "+ + "FROM information_schema.tables "+ + "WHERE table_schema = $1 AND table_type = 'BASE TABLE';", req.SchemaName) + if err != nil { + return &protos.SchemaTablesResponse{Tables: nil}, err + } + + defer rows.Close() + var tables []string + for rows.Next() { + var table string + err := rows.Scan(&table) + if err != nil { + return &protos.SchemaTablesResponse{Tables: nil}, err + } + + tables = append(tables, table) + } + return &protos.SchemaTablesResponse{Tables: tables}, nil +} + +func (h *FlowRequestHandler) GetColumns( + ctx context.Context, + req *protos.TableColumnsRequest, +) (*protos.TableColumnsResponse, error) { + peerPool, _, err := h.getPoolForPGPeer(ctx, req.PeerName) + if err != nil { + return &protos.TableColumnsResponse{Columns: nil}, err + } + + defer peerPool.Close() + rows, err := peerPool.Query(ctx, "SELECT column_name, data_type"+ + " FROM information_schema.columns"+ + " WHERE table_schema = $1 AND table_name = $2;", req.SchemaName, req.TableName) + if err != nil { + return 
&protos.TableColumnsResponse{Columns: nil}, err + } + + defer rows.Close() + var columns []string + for rows.Next() { + var columnName string + var datatype string + err := rows.Scan(&columnName, &datatype) + if err != nil { + return &protos.TableColumnsResponse{Columns: nil}, err + } + column := fmt.Sprintf("%s:%s", columnName, datatype) + columns = append(columns, column) + } + return &protos.TableColumnsResponse{Columns: columns}, nil +} + +func (h *FlowRequestHandler) GetSlotInfo( + ctx context.Context, + req *protos.PostgresPeerActivityInfoRequest, +) (*protos.PeerSlotResponse, error) { + peerPool, _, err := h.getPoolForPGPeer(ctx, req.PeerName) + if err != nil { + return &protos.PeerSlotResponse{SlotData: nil}, err + } + defer peerPool.Close() + rows, err := peerPool.Query(ctx, "SELECT slot_name, redo_lsn::Text,restart_lsn::text,active,"+ + "round((redo_lsn-restart_lsn) / 1024 / 1024 , 2) AS MB_Behind"+ + " FROM pg_control_checkpoint(), pg_replication_slots;") + if err != nil { + return &protos.PeerSlotResponse{SlotData: nil}, err + } + defer rows.Close() + var slotInfoRows []*protos.SlotInfo + for rows.Next() { + var redoLSN string + var slotName string + var restartLSN string + var active bool + var lagInMB float32 + err := rows.Scan(&slotName, &redoLSN, &restartLSN, &active, &lagInMB) + if err != nil { + return &protos.PeerSlotResponse{SlotData: nil}, err + } + + slotInfoRows = append(slotInfoRows, &protos.SlotInfo{ + RedoLSN: redoLSN, + RestartLSN: restartLSN, + SlotName: slotName, + Active: active, + LagInMb: lagInMB, + }) + } + return &protos.PeerSlotResponse{ + SlotData: slotInfoRows, + }, nil +} + +func (h *FlowRequestHandler) GetStatInfo( + ctx context.Context, + req *protos.PostgresPeerActivityInfoRequest, +) (*protos.PeerStatResponse, error) { + peerPool, peerUser, err := h.getPoolForPGPeer(ctx, req.PeerName) + if err != nil { + return &protos.PeerStatResponse{StatData: nil}, err + } + defer peerPool.Close() + rows, err := peerPool.Query(ctx, 
"SELECT pid, wait_event, wait_event_type, query_start::text, query,"+ + "EXTRACT(epoch FROM(now()-query_start)) AS dur"+ + " FROM pg_stat_activity WHERE "+ + "usename=$1 AND state != 'idle';", peerUser) + if err != nil { + return &protos.PeerStatResponse{StatData: nil}, err + } + defer rows.Close() + var statInfoRows []*protos.StatInfo + for rows.Next() { + var pid int64 + var waitEvent sql.NullString + var waitEventType sql.NullString + var queryStart sql.NullString + var query sql.NullString + var duration sql.NullFloat64 + + err := rows.Scan(&pid, &waitEvent, &waitEventType, &queryStart, &query, &duration) + if err != nil { + return &protos.PeerStatResponse{StatData: nil}, err + } + + we := waitEvent.String + if !waitEvent.Valid { + we = "" + } + + wet := waitEventType.String + if !waitEventType.Valid { + wet = "" + } + + q := query.String + if !query.Valid { + q = "" + } + + qs := queryStart.String + if !queryStart.Valid { + qs = "" + } + + d := duration.Float64 + if !duration.Valid { + d = -1 + } + + statInfoRows = append(statInfoRows, &protos.StatInfo{ + Pid: pid, + WaitEvent: we, + WaitEventType: wet, + QueryStart: qs, + Query: q, + Duration: float32(d), + }) + } + return &protos.PeerStatResponse{ + StatData: statInfoRows, + }, nil +} diff --git a/flow/cmd/snapshot_worker.go b/flow/cmd/snapshot_worker.go index 043d5069c..2b09bbc0f 100644 --- a/flow/cmd/snapshot_worker.go +++ b/flow/cmd/snapshot_worker.go @@ -12,12 +12,14 @@ import ( ) type SnapshotWorkerOptions struct { - TemporalHostPort string + TemporalHostPort string + TemporalNamespace string } func SnapshotWorkerMain(opts *SnapshotWorkerOptions) error { clientOptions := client.Options{ - HostPort: opts.TemporalHostPort, + HostPort: opts.TemporalHostPort, + Namespace: opts.TemporalNamespace, } c, err := client.Dial(clientOptions) diff --git a/flow/cmd/worker.go b/flow/cmd/worker.go index b1631e68d..2e9746b12 100644 --- a/flow/cmd/worker.go +++ b/flow/cmd/worker.go @@ -8,9 +8,6 @@ import ( "syscall" 
"time" - //nolint:gosec - _ "net/http/pprof" - "github.com/PeerDB-io/peer-flow/activities" utils "github.com/PeerDB-io/peer-flow/connectors/utils/catalog" "github.com/PeerDB-io/peer-flow/connectors/utils/monitoring" @@ -28,12 +25,12 @@ import ( ) type WorkerOptions struct { - TemporalHostPort string - EnableProfiling bool - EnableMetrics bool - EnableMonitoring bool - PyroscopeServer string - MetricsServer string + TemporalHostPort string + EnableProfiling bool + EnableMetrics bool + PyroscopeServer string + MetricsServer string + TemporalNamespace string } func setupPyroscope(opts *WorkerOptions) { @@ -94,31 +91,24 @@ func WorkerMain(opts *WorkerOptions) error { } }() - var clientOptions client.Options + clientOptions := client.Options{ + HostPort: opts.TemporalHostPort, + Namespace: opts.TemporalNamespace, + } if opts.EnableMetrics { - clientOptions = client.Options{ - HostPort: opts.TemporalHostPort, - MetricsHandler: sdktally.NewMetricsHandler(newPrometheusScope( - prometheus.Configuration{ - ListenAddress: opts.MetricsServer, - TimerType: "histogram", - }, - )), - } - } else { - clientOptions = client.Options{ - HostPort: opts.TemporalHostPort, - } + clientOptions.MetricsHandler = sdktally.NewMetricsHandler(newPrometheusScope( + prometheus.Configuration{ + ListenAddress: opts.MetricsServer, + TimerType: "histogram", + }, + )) } - catalogMirrorMonitor := monitoring.NewCatalogMirrorMonitor(nil) - if opts.EnableMonitoring { - conn, err := utils.GetCatalogConnectionPoolFromEnv() - if err != nil { - return fmt.Errorf("unable to create catalog connection pool: %w", err) - } - catalogMirrorMonitor = monitoring.NewCatalogMirrorMonitor(conn) + conn, err := utils.GetCatalogConnectionPoolFromEnv() + if err != nil { + return fmt.Errorf("unable to create catalog connection pool: %w", err) } + catalogMirrorMonitor := monitoring.NewCatalogMirrorMonitor(conn) defer catalogMirrorMonitor.Close() c, err := client.Dial(clientOptions) @@ -137,7 +127,7 @@ func WorkerMain(opts 
*WorkerOptions) error { w.RegisterWorkflow(peerflow.DropFlowWorkflow) w.RegisterActivity(&activities.FlowableActivity{ EnableMetrics: opts.EnableMetrics, - CatalogMirrorMonitor: &catalogMirrorMonitor, + CatalogMirrorMonitor: catalogMirrorMonitor, }) err = w.Run(worker.InterruptCh()) diff --git a/flow/connectors/bigquery/bigquery.go b/flow/connectors/bigquery/bigquery.go index 8f6df0f5b..7383bac2a 100644 --- a/flow/connectors/bigquery/bigquery.go +++ b/flow/connectors/bigquery/bigquery.go @@ -13,7 +13,6 @@ import ( "cloud.google.com/go/bigquery" "cloud.google.com/go/storage" "github.com/PeerDB-io/peer-flow/connectors/utils" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" "github.com/PeerDB-io/peer-flow/model/qvalue" @@ -205,38 +204,29 @@ func (c *BigQueryConnector) InitializeTableSchema(req map[string]*protos.TableSc return nil } -// ReplayTableSchemaDelta changes a destination table to match the schema at source +// ReplayTableSchemaDeltas changes a destination table to match the schema at source // This could involve adding or dropping multiple columns. 
-func (c *BigQueryConnector) ReplayTableSchemaDelta(flowJobName string, - schemaDelta *protos.TableSchemaDelta) error { - if (schemaDelta == nil) || (len(schemaDelta.AddedColumns) == 0 && len(schemaDelta.DroppedColumns) == 0) { - return nil - } - - for _, droppedColumn := range schemaDelta.DroppedColumns { - _, err := c.client.Query(fmt.Sprintf("ALTER TABLE %s.%s DROP COLUMN %s", c.datasetID, - schemaDelta.DstTableName, droppedColumn)).Read(c.ctx) - if err != nil { - return fmt.Errorf("failed to drop column %s for table %s: %w", droppedColumn, - schemaDelta.SrcTableName, err) +func (c *BigQueryConnector) ReplayTableSchemaDeltas(flowJobName string, + schemaDeltas []*protos.TableSchemaDelta) error { + for _, schemaDelta := range schemaDeltas { + if schemaDelta == nil || len(schemaDelta.AddedColumns) == 0 { + return nil } - log.WithFields(log.Fields{ - "flowName": flowJobName, - "tableName": schemaDelta.SrcTableName, - }).Infof("[schema delta replay] dropped column %s", droppedColumn) - } - for _, addedColumn := range schemaDelta.AddedColumns { - _, err := c.client.Query(fmt.Sprintf("ALTER TABLE %s.%s ADD COLUMN %s %s", c.datasetID, - schemaDelta.DstTableName, addedColumn.ColumnName, addedColumn.ColumnType)).Read(c.ctx) - if err != nil { - return fmt.Errorf("failed to add column %s for table %s: %w", addedColumn.ColumnName, - schemaDelta.SrcTableName, err) + + for _, addedColumn := range schemaDelta.AddedColumns { + _, err := c.client.Query(fmt.Sprintf("ALTER TABLE %s.%s ADD COLUMN `%s` %s", c.datasetID, + schemaDelta.DstTableName, addedColumn.ColumnName, + qValueKindToBigQueryType(addedColumn.ColumnType))).Read(c.ctx) + if err != nil { + return fmt.Errorf("failed to add column %s for table %s: %w", addedColumn.ColumnName, + schemaDelta.SrcTableName, err) + } + log.WithFields(log.Fields{ + "flowName": flowJobName, + "tableName": schemaDelta.SrcTableName, + }).Infof("[schema delta replay] added column %s with data type %s", addedColumn.ColumnName, + 
addedColumn.ColumnType) } - log.WithFields(log.Fields{ - "flowName": flowJobName, - "tableName": schemaDelta.SrcTableName, - }).Infof("[schema delta replay] added column %s with data type %s", addedColumn.ColumnName, - addedColumn.ColumnType) } return nil @@ -298,7 +288,8 @@ func (c *BigQueryConnector) GetLastOffset(jobName string) (*protos.LastSyncState } func (c *BigQueryConnector) GetLastSyncBatchID(jobName string) (int64, error) { - query := fmt.Sprintf("SELECT sync_batch_id FROM %s.%s WHERE mirror_job_name = '%s'", c.datasetID, MirrorJobsTable, jobName) + query := fmt.Sprintf("SELECT sync_batch_id FROM %s.%s WHERE mirror_job_name = '%s'", + c.datasetID, MirrorJobsTable, jobName) q := c.client.Query(query) it, err := q.Read(c.ctx) if err != nil { @@ -322,7 +313,8 @@ func (c *BigQueryConnector) GetLastSyncBatchID(jobName string) (int64, error) { } func (c *BigQueryConnector) GetLastNormalizeBatchID(jobName string) (int64, error) { - query := fmt.Sprintf("SELECT normalize_batch_id FROM %s.%s WHERE mirror_job_name = '%s'", c.datasetID, MirrorJobsTable, jobName) + query := fmt.Sprintf("SELECT normalize_batch_id FROM %s.%s WHERE mirror_job_name = '%s'", + c.datasetID, MirrorJobsTable, jobName) q := c.client.Query(query) it, err := q.Read(c.ctx) if err != nil { @@ -439,17 +431,9 @@ func (r StagingBQRecord) Save() (map[string]bigquery.Value, string, error) { // currently only supports inserts,updates and deletes // more record types will be added in the future. 
func (c *BigQueryConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.SyncResponse, error) { - if len(req.Records.Records) == 0 { - return &model.SyncResponse{ - FirstSyncedCheckPointID: 0, - LastSyncedCheckPointID: 0, - NumRecordsSynced: 0, - }, nil - } - rawTableName := c.getRawTableName(req.FlowJobName) - log.Printf("pushing %d records to %s.%s", len(req.Records.Records), c.datasetID, rawTableName) + log.Printf("pushing records to %s.%s...", c.datasetID, rawTableName) // generate a sequential number for the last synced batch // this sequence will be used to keep track of records that are normalized @@ -493,9 +477,9 @@ func (c *BigQueryConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, tableNameRowsMapping := make(map[string]uint32) first := true var firstCP int64 = 0 - lastCP := req.Records.LastCheckPointID + // loop over req.Records - for _, record := range req.Records.Records { + for record := range req.Records.GetRecords() { switch r := record.(type) { case *model.InsertRecord: // create the 3 required fields @@ -519,7 +503,7 @@ func (c *BigQueryConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, matchData: "", batchID: syncBatchID, stagingBatchID: stagingBatchID, - unchangedToastColumns: utils.KeysToString(r.UnchangedToastColumns), + unchangedToastColumns: "", }) tableNameRowsMapping[r.DestinationTableName] += 1 case *model.UpdateRecord: @@ -578,7 +562,7 @@ func (c *BigQueryConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, matchData: itemsJSON, batchID: syncBatchID, stagingBatchID: stagingBatchID, - unchangedToastColumns: utils.KeysToString(r.UnchangedToastColumns), + unchangedToastColumns: "", }) tableNameRowsMapping[r.DestinationTableName] += 1 @@ -593,14 +577,6 @@ func (c *BigQueryConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, } numRecords := len(records) - if numRecords == 0 { - return &model.SyncResponse{ - FirstSyncedCheckPointID: 0, - LastSyncedCheckPointID: 0, - NumRecordsSynced: 0, - }, nil - } - // 
insert the records into the staging table stagingInserter := stagingTable.Inserter() stagingInserter.IgnoreUnknownValues = true @@ -620,6 +596,11 @@ func (c *BigQueryConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, } } + lastCP, err := req.Records.GetLastCheckpoint() + if err != nil { + return nil, fmt.Errorf("failed to get last checkpoint: %v", err) + } + // we have to do the following things in a transaction // 1. append the records in the staging table to the raw table. // 2. execute the update metadata query to store the last committed watermark. @@ -636,13 +617,11 @@ func (c *BigQueryConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, stmts = append(stmts, appendStmt) stmts = append(stmts, updateMetadataStmt) stmts = append(stmts, "COMMIT TRANSACTION;") - startTime := time.Now() _, err = c.client.Query(strings.Join(stmts, "\n")).Read(c.ctx) if err != nil { return nil, fmt.Errorf("failed to execute statements in a transaction: %v", err) } - metrics.LogSyncMetrics(c.ctx, req.FlowJobName, int64(numRecords), time.Since(startTime)) log.Printf("pushed %d records to %s.%s", numRecords, c.datasetID, rawTableName) return &model.SyncResponse{ @@ -654,13 +633,15 @@ func (c *BigQueryConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, }, nil } -func (c *BigQueryConnector) syncRecordsViaAvro(req *model.SyncRecordsRequest, - rawTableName string, syncBatchID int64) (*model.SyncResponse, error) { +func (c *BigQueryConnector) syncRecordsViaAvro( + req *model.SyncRecordsRequest, + rawTableName string, + syncBatchID int64, +) (*model.SyncResponse, error) { tableNameRowsMapping := make(map[string]uint32) first := true var firstCP int64 = 0 - lastCP := req.Records.LastCheckPointID - recordStream := model.NewQRecordStream(len(req.Records.Records)) + recordStream := model.NewQRecordStream(1 << 20) err := recordStream.SetSchema(&model.QRecordSchema{ Fields: []*model.QField{ { @@ -720,7 +701,7 @@ func (c *BigQueryConnector) syncRecordsViaAvro(req 
*model.SyncRecordsRequest, } // loop over req.Records - for _, record := range req.Records.Records { + for record := range req.Records.GetRecords() { var entries [10]qvalue.QValue switch r := record.(type) { case *model.InsertRecord: @@ -748,7 +729,7 @@ func (c *BigQueryConnector) syncRecordsViaAvro(req *model.SyncRecordsRequest, } entries[9] = qvalue.QValue{ Kind: qvalue.QValueKindString, - Value: utils.KeysToString(r.UnchangedToastColumns), + Value: "", } tableNameRowsMapping[r.DestinationTableName] += 1 @@ -809,7 +790,7 @@ func (c *BigQueryConnector) syncRecordsViaAvro(req *model.SyncRecordsRequest, } entries[9] = qvalue.QValue{ Kind: qvalue.QValueKindString, - Value: utils.KeysToString(r.UnchangedToastColumns), + Value: "", } tableNameRowsMapping[r.DestinationTableName] += 1 @@ -850,7 +831,6 @@ func (c *BigQueryConnector) syncRecordsViaAvro(req *model.SyncRecordsRequest, } } - startTime := time.Now() close(recordStream.Records) avroSync := NewQRepAvroSyncMethod(c, req.StagingPath) rawTableMetadata, err := c.client.Dataset(c.datasetID).Table(rawTableName).Metadata(c.ctx) @@ -858,13 +838,17 @@ func (c *BigQueryConnector) syncRecordsViaAvro(req *model.SyncRecordsRequest, return nil, fmt.Errorf("failed to get metadata of destination table: %v", err) } + lastCP, err := req.Records.GetLastCheckpoint() + if err != nil { + return nil, fmt.Errorf("failed to get last checkpoint: %v", err) + } + numRecords, err := avroSync.SyncRecords(rawTableName, req.FlowJobName, lastCP, rawTableMetadata, syncBatchID, recordStream) if err != nil { return nil, fmt.Errorf("failed to sync records via avro : %v", err) } - metrics.LogSyncMetrics(c.ctx, req.FlowJobName, int64(numRecords), time.Since(startTime)) log.Printf("pushed %d records to %s.%s", numRecords, c.datasetID, rawTableName) return &model.SyncResponse{ @@ -900,7 +884,7 @@ func (c *BigQueryConnector) NormalizeRecords(req *model.NormalizeRecordsRequest) if !hasJob || normalizeBatchID == syncBatchID { log.Printf("waiting for sync 
to catch up for job %s, so finishing", req.FlowJobName) return &model.NormalizeResponse{ - Done: true, + Done: false, StartBatchID: normalizeBatchID, EndBatchID: syncBatchID, }, nil @@ -980,7 +964,7 @@ func (c *BigQueryConnector) CreateRawTable(req *protos.CreateRawTableInput) (*pr {Name: "_peerdb_unchanged_toast_columns", Type: bigquery.StringFieldType}, } - staging_schema := bigquery.Schema{ + stagingSchema := bigquery.Schema{ {Name: "_peerdb_uid", Type: bigquery.StringFieldType}, {Name: "_peerdb_timestamp", Type: bigquery.TimestampFieldType}, {Name: "_peerdb_timestamp_nanos", Type: bigquery.IntegerFieldType}, @@ -1021,7 +1005,7 @@ func (c *BigQueryConnector) CreateRawTable(req *protos.CreateRawTableInput) (*pr stagingTableName := c.getStagingTableName(req.FlowJobName) stagingTable := c.client.Dataset(c.datasetID).Table(stagingTableName) err = stagingTable.Create(c.ctx, &bigquery.TableMetadata{ - Schema: staging_schema, + Schema: stagingSchema, }) if err != nil { return nil, fmt.Errorf("failed to create table %s.%s: %w", c.datasetID, stagingTableName, err) @@ -1033,7 +1017,8 @@ func (c *BigQueryConnector) CreateRawTable(req *protos.CreateRawTableInput) (*pr } // getUpdateMetadataStmt updates the metadata tables for a given job. 
-func (c *BigQueryConnector) getUpdateMetadataStmt(jobName string, lastSyncedCheckpointID int64, batchID int64) (string, error) { +func (c *BigQueryConnector) getUpdateMetadataStmt(jobName string, lastSyncedCheckpointID int64, + batchID int64) (string, error) { hasJob, err := c.metadataHasJob(jobName) if err != nil { return "", fmt.Errorf("failed to check if job exists: %w", err) @@ -1041,7 +1026,7 @@ func (c *BigQueryConnector) getUpdateMetadataStmt(jobName string, lastSyncedChec // create the job in the metadata table jobStatement := fmt.Sprintf( - "INSERT INTO %s.%s (mirror_job_name, offset,sync_batch_id) VALUES ('%s',%d,%d);", + "INSERT INTO %s.%s (mirror_job_name,offset,sync_batch_id) VALUES ('%s',%d,%d);", c.datasetID, MirrorJobsTable, jobName, lastSyncedCheckpointID, batchID) if hasJob { jobStatement = fmt.Sprintf( @@ -1294,14 +1279,13 @@ func (m *MergeStmtGenerator) generateDeDupedCTE() string { ) _peerdb_ranked WHERE _peerdb_rank = 1 ) SELECT * FROM _peerdb_de_duplicated_data_res` - pkey := m.NormalizedTableSchema.PrimaryKeyColumn - return fmt.Sprintf(cte, pkey) + pkeyColsStr := fmt.Sprintf("(CONCAT(%s))", strings.Join(m.NormalizedTableSchema.PrimaryKeyColumns, + ", '_peerdb_concat_', ")) + return fmt.Sprintf(cte, pkeyColsStr) } // generateMergeStmt generates a merge statement. 
func (m *MergeStmtGenerator) generateMergeStmt(tempTable string) string { - pkey := m.NormalizedTableSchema.PrimaryKeyColumn - // comma separated list of column names backtickColNames := make([]string, 0) pureColNames := make([]string, 0) @@ -1311,18 +1295,26 @@ func (m *MergeStmtGenerator) generateMergeStmt(tempTable string) string { } csep := strings.Join(backtickColNames, ", ") - udateStatementsforToastCols := m.generateUpdateStatement(pureColNames, m.UnchangedToastColumns) - updateStringToastCols := strings.Join(udateStatementsforToastCols, " ") + updateStatementsforToastCols := m.generateUpdateStatements(pureColNames, m.UnchangedToastColumns) + updateStringToastCols := strings.Join(updateStatementsforToastCols, " ") + + pkeySelectSQLArray := make([]string, 0, len(m.NormalizedTableSchema.PrimaryKeyColumns)) + for _, pkeyColName := range m.NormalizedTableSchema.PrimaryKeyColumns { + pkeySelectSQLArray = append(pkeySelectSQLArray, fmt.Sprintf("_peerdb_target.%s = _peerdb_deduped.%s", + pkeyColName, pkeyColName)) + } + // _peerdb_target. = _peerdb_deduped. AND _peerdb_target. = _peerdb_deduped. ... + pkeySelectSQL := strings.Join(pkeySelectSQLArray, " AND ") return fmt.Sprintf(` MERGE %s.%s _peerdb_target USING %s _peerdb_deduped - ON _peerdb_target.%s = _peerdb_deduped.%s + ON %s WHEN NOT MATCHED and (_peerdb_deduped._peerdb_record_type != 2) THEN INSERT (%s) VALUES (%s) %s WHEN MATCHED AND (_peerdb_deduped._peerdb_record_type = 2) THEN DELETE; - `, m.Dataset, m.NormalizedTable, tempTable, pkey, pkey, csep, csep, updateStringToastCols) + `, m.Dataset, m.NormalizedTable, tempTable, pkeySelectSQL, csep, csep, updateStringToastCols) } /* @@ -1340,7 +1332,7 @@ and updating the other columns (not the unchanged toast columns) 6. Repeat steps 1-5 for each unique unchanged toast column group. 7. Return the list of generated update statements. 
*/ -func (m *MergeStmtGenerator) generateUpdateStatement(allCols []string, unchangedToastCols []string) []string { +func (m *MergeStmtGenerator) generateUpdateStatements(allCols []string, unchangedToastCols []string) []string { updateStmts := make([]string, 0) for _, cols := range unchangedToastCols { diff --git a/flow/connectors/bigquery/merge_stmt_generator_test.go b/flow/connectors/bigquery/merge_stmt_generator_test.go index 732063968..3d8892d4c 100644 --- a/flow/connectors/bigquery/merge_stmt_generator_test.go +++ b/flow/connectors/bigquery/merge_stmt_generator_test.go @@ -30,7 +30,7 @@ func TestGenerateUpdateStatement_WithUnchangedToastCols(t *testing.T) { " `col2` = _peerdb_deduped.col2", } - result := m.generateUpdateStatement(allCols, unchangedToastCols) + result := m.generateUpdateStatements(allCols, unchangedToastCols) for i := range expected { expected[i] = removeSpacesTabsNewlines(expected[i]) @@ -56,7 +56,7 @@ func TestGenerateUpdateStatement_NoUnchangedToastCols(t *testing.T) { " `col3` = _peerdb_deduped.col3", } - result := m.generateUpdateStatement(allCols, unchangedToastCols) + result := m.generateUpdateStatements(allCols, unchangedToastCols) for i := range expected { expected[i] = removeSpacesTabsNewlines(expected[i]) diff --git a/flow/connectors/bigquery/qrep_avro_sync.go b/flow/connectors/bigquery/qrep_avro_sync.go index a4cd92a82..8cb8af79c 100644 --- a/flow/connectors/bigquery/qrep_avro_sync.go +++ b/flow/connectors/bigquery/qrep_avro_sync.go @@ -10,7 +10,6 @@ import ( "cloud.google.com/go/bigquery" "github.com/PeerDB-io/peer-flow/connectors/utils" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" "github.com/PeerDB-io/peer-flow/model/qvalue" @@ -92,8 +91,7 @@ func (s *QRepAvroSyncMethod) SyncRecords( }).Errorf("failed to delete staging table %s: %v", stagingTable, err) } - log.Printf("loaded stage into %s.%s", - datasetID, dstTableName) + 
log.Printf("loaded stage into %s.%s", datasetID, dstTableName) return numRecords, nil } @@ -150,13 +148,10 @@ func (s *QRepAvroSyncMethod) SyncQRepRecords( stmts = append(stmts, insertMetadataStmt) stmts = append(stmts, "COMMIT TRANSACTION;") // Execute the statements in a transaction - syncRecordsStartTime := time.Now() _, err = bqClient.Query(strings.Join(stmts, "\n")).Read(s.connector.ctx) if err != nil { return -1, fmt.Errorf("failed to execute statements in a transaction: %v", err) } - metrics.LogQRepSyncMetrics(s.connector.ctx, flowJobName, - int64(numRecords), time.Since(syncRecordsStartTime)) // drop the staging table if err := bqClient.Dataset(datasetID).Table(stagingTable).Delete(s.connector.ctx); err != nil { @@ -387,7 +382,6 @@ func (s *QRepAvroSyncMethod) writeToStage( return 0, fmt.Errorf("failed to write record to OCF file: %w", err) } numRecords++ - } activity.RecordHeartbeat(s.connector.ctx, fmt.Sprintf( "Writing OCF contents to BigQuery for partition/batch ID %s", diff --git a/flow/connectors/bigquery/qrep_sync_method.go b/flow/connectors/bigquery/qrep_sync_method.go index 2a8eb0f5c..8a9e42a3b 100644 --- a/flow/connectors/bigquery/qrep_sync_method.go +++ b/flow/connectors/bigquery/qrep_sync_method.go @@ -7,7 +7,6 @@ import ( "time" "cloud.google.com/go/bigquery" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" log "github.com/sirupsen/logrus" @@ -106,8 +105,6 @@ func (s *QRepStagingTableSync) SyncQRepRecords( if err != nil { return -1, fmt.Errorf("failed to insert records into staging table: %v", err) } - metrics.LogQRepSyncMetrics(s.connector.ctx, flowJobName, int64(len(valueSaverRecords)), - time.Since(startTime)) // Copy the records into the destination table in a transaction. 
// append all the statements to one list diff --git a/flow/connectors/core.go b/flow/connectors/core.go index 9810fe667..0612514dd 100644 --- a/flow/connectors/core.go +++ b/flow/connectors/core.go @@ -3,6 +3,9 @@ package connectors import ( "context" "errors" + "fmt" + + log "github.com/sirupsen/logrus" connbigquery "github.com/PeerDB-io/peer-flow/connectors/bigquery" conneventhub "github.com/PeerDB-io/peer-flow/connectors/eventhub" @@ -35,10 +38,13 @@ type CDCPullConnector interface { // PullRecords pulls records from the source, and returns a RecordBatch. // This method should be idempotent, and should be able to be called multiple times with the same request. - PullRecords(req *model.PullRecordsRequest) (*model.RecordsWithTableSchemaDelta, error) + PullRecords(req *model.PullRecordsRequest) error // PullFlowCleanup drops both the Postgres publication and replication slot, as a part of DROP MIRROR PullFlowCleanup(jobName string) error + + // SendWALHeartbeat allows for activity to progress restart_lsn on postgres. + SendWALHeartbeat() error } type CDCSyncConnector interface { @@ -86,7 +92,7 @@ type CDCNormalizeConnector interface { // ReplayTableSchemaDelta changes a destination table to match the schema at source // This could involve adding or dropping multiple columns. 
- ReplayTableSchemaDelta(flowJobName string, schemaDelta *protos.TableSchemaDelta) error + ReplayTableSchemaDeltas(flowJobName string, schemaDeltas []*protos.TableSchemaDelta) error } type QRepPullConnector interface { @@ -141,7 +147,11 @@ func GetCDCSyncConnector(ctx context.Context, config *protos.Peer) (CDCSyncConne case *protos.Peer_SnowflakeConfig: return connsnowflake.NewSnowflakeConnector(ctx, config.GetSnowflakeConfig()) case *protos.Peer_EventhubConfig: - return conneventhub.NewEventHubConnector(ctx, config.GetEventhubConfig()) + return nil, fmt.Errorf("use eventhub group config instead") + case *protos.Peer_EventhubGroupConfig: + return conneventhub.NewEventHubConnector(ctx, config.GetEventhubGroupConfig()) + case *protos.Peer_S3Config: + return conns3.NewS3Connector(ctx, config.GetS3Config()) default: return nil, ErrUnsupportedFunctionality } @@ -190,6 +200,45 @@ func GetQRepSyncConnector(ctx context.Context, config *protos.Peer) (QRepSyncCon } } +func GetConnector(ctx context.Context, peer *protos.Peer) (Connector, error) { + inner := peer.Type + switch inner { + case protos.DBType_POSTGRES: + pgConfig := peer.GetPostgresConfig() + + if pgConfig == nil { + return nil, fmt.Errorf("missing postgres config for %s peer %s", peer.Type.String(), peer.Name) + } + return connpostgres.NewPostgresConnector(ctx, pgConfig) + case protos.DBType_BIGQUERY: + bqConfig := peer.GetBigqueryConfig() + if bqConfig == nil { + return nil, fmt.Errorf("missing bigquery config for %s peer %s", peer.Type.String(), peer.Name) + } + return connbigquery.NewBigQueryConnector(ctx, bqConfig) + + case protos.DBType_SNOWFLAKE: + sfConfig := peer.GetSnowflakeConfig() + if sfConfig == nil { + return nil, fmt.Errorf("missing snowflake config for %s peer %s", peer.Type.String(), peer.Name) + } + return connsnowflake.NewSnowflakeConnector(ctx, sfConfig) + + case protos.DBType_SQLSERVER: + sqlServerConfig := peer.GetSqlserverConfig() + if sqlServerConfig == nil { + return nil, 
fmt.Errorf("missing sqlserver config for %s peer %s", peer.Type.String(), peer.Name) + } + return connsqlserver.NewSQLServerConnector(ctx, sqlServerConfig) + // case protos.DBType_S3: + // return conns3.NewS3Connector(ctx, config.GetS3Config()) + // case protos.DBType_EVENTHUB: + // return connsqlserver.NewSQLServerConnector(ctx, config.GetSqlserverConfig()) + default: + return nil, fmt.Errorf("unsupported peer type %s", peer.Type.String()) + } +} + func GetQRepConsolidateConnector(ctx context.Context, config *protos.Peer) (QRepConsolidateConnector, error) { inner := config.Config @@ -207,5 +256,8 @@ func CloseConnector(conn Connector) { return } - conn.Close() + err := conn.Close() + if err != nil { + log.Errorf("error closing connector: %v", err) + } } diff --git a/flow/connectors/eventhub/eventhub.go b/flow/connectors/eventhub/eventhub.go index ef0f65824..30be24eba 100644 --- a/flow/connectors/eventhub/eventhub.go +++ b/flow/connectors/eventhub/eventhub.go @@ -8,34 +8,29 @@ import ( "sync/atomic" "time" - "github.com/Azure/azure-amqp-common-go/v4/aad" - "github.com/Azure/azure-amqp-common-go/v4/auth" - eventhub "github.com/Azure/azure-event-hubs-go/v3" "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub" + azeventhubs "github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs" + metadataStore "github.com/PeerDB-io/peer-flow/connectors/external_metadata" "github.com/PeerDB-io/peer-flow/connectors/utils" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" cmap "github.com/orcaman/concurrent-map/v2" log "github.com/sirupsen/logrus" - "go.temporal.io/sdk/activity" ) type EventHubConnector struct { - ctx context.Context - config *protos.EventHubConfig - pgMetadata *PostgresMetadataStore - tableSchemas map[string]*protos.TableSchema - creds *azidentity.DefaultAzureCredential - tokenProvider 
auth.TokenProvider - hubs cmap.ConcurrentMap[string, *eventhub.Hub] + ctx context.Context + config *protos.EventHubGroupConfig + pgMetadata *metadataStore.PostgresMetadataStore + tableSchemas map[string]*protos.TableSchema + creds *azidentity.DefaultAzureCredential + hubManager *EventHubManager } // NewEventHubConnector creates a new EventHubConnector. func NewEventHubConnector( ctx context.Context, - config *protos.EventHubConfig, + config *protos.EventHubGroupConfig, ) (*EventHubConnector, error) { defaultAzureCreds, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { @@ -43,47 +38,27 @@ func NewEventHubConnector( return nil, err } - jwtTokenProvider, err := aad.NewJWTProvider(aad.JWTProviderWithEnvironmentVars()) - if err != nil { - log.Errorf("failed to get jwt token provider: %v", err) - return nil, err - } - - pgMetadata, err := NewPostgresMetadataStore(ctx, config.GetMetadataDb()) + hubManager := NewEventHubManager(ctx, defaultAzureCreds, config) + metadataSchemaName := "peerdb_eventhub_metadata" // #nosec G101 + pgMetadata, err := metadataStore.NewPostgresMetadataStore(ctx, config.GetMetadataDb(), + metadataSchemaName) if err != nil { log.Errorf("failed to create postgres metadata store: %v", err) return nil, err } return &EventHubConnector{ - ctx: ctx, - config: config, - pgMetadata: pgMetadata, - creds: defaultAzureCreds, - tokenProvider: jwtTokenProvider, - hubs: cmap.New[*eventhub.Hub](), + ctx: ctx, + config: config, + pgMetadata: pgMetadata, + creds: defaultAzureCreds, + hubManager: hubManager, }, nil } func (c *EventHubConnector) Close() error { var allErrors error - // close all the event hub connections. 
- for _, hubName := range c.hubs.Keys() { - hub, ok := c.hubs.Get(hubName) - if !ok { - log.Errorf("failed to get event hub connection: %v", hubName) - allErrors = errors.Join(allErrors, fmt.Errorf("failed to get event hub connection: %v", hubName)) - continue - } - - err := hub.Close(c.ctx) - if err != nil { - log.Errorf("failed to close event hub connection: %v", err) - allErrors = errors.Join(allErrors, err) - } - } - // close the postgres metadata store. err := c.pgMetadata.Close() if err != nil { @@ -103,6 +78,121 @@ func (c *EventHubConnector) InitializeTableSchema(req map[string]*protos.TableSc return nil } +func (c *EventHubConnector) NeedsSetupMetadataTables() bool { + return c.pgMetadata.NeedsSetupMetadata() +} + +func (c *EventHubConnector) SetupMetadataTables() error { + err := c.pgMetadata.SetupMetadata() + if err != nil { + log.Errorf("failed to setup metadata tables: %v", err) + return err + } + + return nil +} + +func (c *EventHubConnector) GetLastSyncBatchID(jobName string) (int64, error) { + syncBatchID, err := c.pgMetadata.GetLastBatchID(jobName) + if err != nil { + return 0, err + } + + return syncBatchID, nil +} + +func (c *EventHubConnector) GetLastOffset(jobName string) (*protos.LastSyncState, error) { + res, err := c.pgMetadata.FetchLastOffset(jobName) + if err != nil { + return nil, err + } + + return res, nil +} + +func (c *EventHubConnector) updateLastOffset(jobName string, offset int64) error { + err := c.pgMetadata.UpdateLastOffset(jobName, offset) + if err != nil { + log.Errorf("failed to update last offset: %v", err) + return err + } + + return nil +} + +// returns the number of records synced +func (c *EventHubConnector) processBatch( + flowJobName string, + batch *model.CDCRecordStream, + eventsPerBatch int, + maxParallelism int64, +) (uint32, error) { + ctx := context.Background() + + tableNameRowsMapping := cmap.New[uint32]() + batchPerTopic := NewHubBatches(c.hubManager) + toJSONOpts := 
model.NewToJSONOptions(c.config.UnnestColumns) + + numRecords := 0 + for record := range batch.GetRecords() { + numRecords++ + json, err := record.GetItems().ToJSONWithOpts(toJSONOpts) + if err != nil { + log.WithFields(log.Fields{ + "flowName": flowJobName, + }).Infof("failed to convert record to json: %v", err) + return 0, err + } + + flushBatch := func() error { + err := c.sendEventBatch(ctx, batchPerTopic, maxParallelism, flowJobName, tableNameRowsMapping) + if err != nil { + log.WithFields(log.Fields{ + "flowName": flowJobName, + }).Infof("failed to send event batch: %v", err) + return err + } + batchPerTopic.Clear() + return nil + } + + topicName, err := NewScopedEventhub(record.GetTableName()) + if err != nil { + log.WithFields(log.Fields{ + "flowName": flowJobName, + }).Infof("failed to get topic name: %v", err) + return 0, err + } + + err = batchPerTopic.AddEvent(ctx, topicName, json) + if err != nil { + log.WithFields(log.Fields{ + "flowName": flowJobName, + }).Infof("failed to add event to batch: %v", err) + return 0, err + } + + if (numRecords)%eventsPerBatch == 0 { + err := flushBatch() + if err != nil { + return 0, err + } + } + } + + if batchPerTopic.Len() > 0 { + err := c.sendEventBatch(ctx, batchPerTopic, maxParallelism, flowJobName, tableNameRowsMapping) + if err != nil { + return 0, err + } + } + + log.WithFields(log.Fields{ + "flowName": flowJobName, + }).Infof("[total] successfully sent %d records to event hub", numRecords) + return uint32(numRecords), nil +} + func (c *EventHubConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.SyncResponse, error) { shutdown := utils.HeartbeatRoutine(c.ctx, 10*time.Second, func() string { return fmt.Sprintf("syncing records to eventhub with"+ @@ -112,9 +202,7 @@ func (c *EventHubConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S defer func() { shutdown <- true }() - tableNameRowsMapping := cmap.New[uint32]() - batch := req.Records - eventsPerHeartBeat := 1000 + eventsPerBatch := 
int(req.PushBatchSize) if eventsPerBatch <= 0 { eventsPerBatch = 10000 @@ -124,90 +212,67 @@ func (c *EventHubConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S maxParallelism = 10 } - batchPerTopic := make(map[string][]*eventhub.Event) - startTime := time.Now() - for i, record := range batch.Records { - json, err := record.GetItems().ToJSON() - if err != nil { - log.WithFields(log.Fields{ - "flowName": req.FlowJobName, - }).Infof("failed to convert record to json: %v", err) - return nil, err - } - - // TODO (kaushik): this is a hack to get the table name. - topicName := record.GetTableName() - - if _, ok := batchPerTopic[topicName]; !ok { - batchPerTopic[topicName] = make([]*eventhub.Event, 0) - } - - batchPerTopic[topicName] = append(batchPerTopic[topicName], eventhub.NewEventFromString(json)) - - if i%eventsPerHeartBeat == 0 { - activity.RecordHeartbeat(c.ctx, fmt.Sprintf("sent %d records to hub: %s", i, topicName)) - } - - if (i+1)%eventsPerBatch == 0 { - err := c.sendEventBatch(batchPerTopic, maxParallelism, - req.FlowJobName, tableNameRowsMapping) + var err error + batch := req.Records + var numRecords uint32 + + // if env var PEERDB_BETA_EVENTHUB_PUSH_ASYNC=true + // we kick off processBatch in a goroutine and return immediately. + // otherwise, we block until processBatch is done. + if utils.GetEnvBool("PEERDB_BETA_EVENTHUB_PUSH_ASYNC", false) { + go func() { + numRecords, err = c.processBatch(req.FlowJobName, batch, eventsPerBatch, maxParallelism) if err != nil { - return nil, err + log.Errorf("[async] failed to process batch: %v", err) } - - batchPerTopic = make(map[string][]*eventhub.Event) - } - } - - // send the remaining events. 
- if len(batchPerTopic) > 0 { - err := c.sendEventBatch(batchPerTopic, maxParallelism, - req.FlowJobName, tableNameRowsMapping) + }() + } else { + numRecords, err = c.processBatch(req.FlowJobName, batch, eventsPerBatch, maxParallelism) if err != nil { + log.Errorf("failed to process batch: %v", err) return nil, err } } - rowsSynced := len(batch.Records) - log.WithFields(log.Fields{ - "flowName": req.FlowJobName, - }).Infof("[total] successfully sent %d records to event hub", rowsSynced) - err := c.updateLastOffset(req.FlowJobName, batch.LastCheckPointID) + lastCheckpoint, err := req.Records.GetLastCheckpoint() + if err != nil { + log.Errorf("failed to get last checkpoint: %v", err) + return nil, err + } + + err = c.updateLastOffset(req.FlowJobName, lastCheckpoint) if err != nil { log.Errorf("failed to update last offset: %v", err) return nil, err } - err = c.incrementSyncBatchID(req.FlowJobName) + err = c.pgMetadata.IncrementID(req.FlowJobName) if err != nil { log.Errorf("%v", err) return nil, err } - metrics.LogSyncMetrics(c.ctx, req.FlowJobName, int64(rowsSynced), time.Since(startTime)) - metrics.LogNormalizeMetrics(c.ctx, req.FlowJobName, int64(rowsSynced), - time.Since(startTime), int64(rowsSynced)) + rowsSynced := int64(numRecords) return &model.SyncResponse{ - FirstSyncedCheckPointID: batch.FirstCheckPointID, - LastSyncedCheckPointID: batch.LastCheckPointID, - NumRecordsSynced: int64(len(batch.Records)), - TableNameRowsMapping: tableNameRowsMapping.Items(), + FirstSyncedCheckPointID: batch.GetFirstCheckpoint(), + LastSyncedCheckPointID: lastCheckpoint, + NumRecordsSynced: rowsSynced, + TableNameRowsMapping: make(map[string]uint32), }, nil } -func (c *EventHubConnector) sendEventBatch(events map[string][]*eventhub.Event, +func (c *EventHubConnector) sendEventBatch( + ctx context.Context, + events *HubBatches, maxParallelism int64, flowName string, tableNameRowsMapping cmap.ConcurrentMap[string, uint32]) error { - if len(events) == 0 { + if events.Len() == 0 { 
log.WithFields(log.Fields{ "flowName": flowName, }).Infof("no events to send") return nil } - subCtx, cancel := context.WithTimeout(c.ctx, 5*time.Minute) - defer cancel() - var numEventsPushed int32 var wg sync.WaitGroup var once sync.Once @@ -215,43 +280,34 @@ func (c *EventHubConnector) sendEventBatch(events map[string][]*eventhub.Event, // Limiting concurrent sends guard := make(chan struct{}, maxParallelism) - for tblName, eventBatch := range events { + events.ForEach(func(tblName ScopedEventhub, eventBatch *azeventhubs.EventDataBatch) { guard <- struct{}{} wg.Add(1) - go func(tblName string, eventBatch []*eventhub.Event) { + go func(tblName ScopedEventhub, eventBatch *azeventhubs.EventDataBatch) { defer func() { <-guard wg.Done() }() - hub, err := c.getOrCreateHubConnection(tblName) - if err != nil { - once.Do(func() { firstErr = err }) - return - } - log.WithFields(log.Fields{ - "flowName": flowName, - }).Infof("obtained hub connection and now sending %d events to event hub: %s", - len(eventBatch), tblName) - err = hub.SendBatch(subCtx, eventhub.NewEventBatchIterator(eventBatch...)) + numEvents := eventBatch.NumEvents() + err := c.sendBatch(ctx, tblName, eventBatch) if err != nil { once.Do(func() { firstErr = err }) return } - atomic.AddInt32(&numEventsPushed, int32(len(eventBatch))) + atomic.AddInt32(&numEventsPushed, numEvents) log.WithFields(log.Fields{ "flowName": flowName, - }).Infof("pushed %d events to event hub: %s", - numEventsPushed, tblName) - rowCount, ok := tableNameRowsMapping.Get(tblName) + }).Infof("pushed %d events to event hub: %s", numEvents, tblName) + rowCount, ok := tableNameRowsMapping.Get(tblName.ToString()) if !ok { rowCount = uint32(0) } - rowCount += uint32(len(eventBatch)) - tableNameRowsMapping.Set(tblName, rowCount) + rowCount += uint32(numEvents) + tableNameRowsMapping.Set(tblName.ToString(), rowCount) }(tblName, eventBatch) - } + }) wg.Wait() @@ -264,91 +320,58 @@ func (c *EventHubConnector) sendEventBatch(events 
map[string][]*eventhub.Event, return nil } -func (c *EventHubConnector) getOrCreateHubConnection(name string) (*eventhub.Hub, error) { - hub, ok := c.hubs.Get(name) - if !ok { - hub, err := eventhub.NewHub(c.config.GetNamespace(), name, c.tokenProvider) - if err != nil { - log.Errorf("failed to create event hub connection: %v", err) - return nil, err - } - c.hubs.Set(name, hub) - return hub, nil +func (c *EventHubConnector) sendBatch( + ctx context.Context, + tblName ScopedEventhub, + events *azeventhubs.EventDataBatch, +) error { + subCtx, cancel := context.WithTimeout(ctx, 5*time.Minute) + defer cancel() + + hub, err := c.hubManager.GetOrCreateHubClient(tblName) + if err != nil { + return err + } + + opts := &azeventhubs.SendEventDataBatchOptions{} + err = hub.SendEventDataBatch(subCtx, events, opts) + if err != nil { + return err } - return hub, nil + log.Infof("successfully sent %d events to event hub topic - %s", events.NumEvents(), tblName.ToString()) + return nil } func (c *EventHubConnector) CreateRawTable(req *protos.CreateRawTableInput) (*protos.CreateRawTableOutput, error) { // create topics for each table - // key is the source table and value is the destination topic name. + // key is the source table and value is the "eh_peer.eh_topic" that ought to be used. tableMap := req.GetTableNameMapping() for _, table := range tableMap { - err := c.ensureEventHub(c.ctx, table, req.FlowJobName) + // parse peer name and topic name. 
+ name, err := NewScopedEventhub(table) if err != nil { log.WithFields(log.Fields{ "flowName": req.FlowJobName, "table": table, - }).Errorf("failed to get event hub properties: %v", err) + }).Errorf("failed to parse peer and topic name: %v", err) return nil, err } - } - - return nil, nil -} - -func (c *EventHubConnector) ensureEventHub(ctx context.Context, name string, flowName string) error { - hubClient, err := c.getEventHubMgmtClient() - if err != nil { - return err - } - - namespace := c.config.GetNamespace() - resourceGroup := c.config.GetResourceGroup() - _, err = hubClient.Get(ctx, resourceGroup, namespace, name, nil) - - // TODO (kaushik): make these configurable. - partitionCount := int64(3) - retention := int64(1) - if err != nil { - opts := armeventhub.Eventhub{ - Properties: &armeventhub.Properties{ - PartitionCount: &partitionCount, - MessageRetentionInDays: &retention, - }, - } - _, err := hubClient.CreateOrUpdate(ctx, resourceGroup, namespace, name, opts, nil) + err = c.hubManager.EnsureEventHubExists(c.ctx, name) if err != nil { - log.Errorf("failed to create event hub: %v", err) - return err + log.WithFields(log.Fields{ + "flowName": req.FlowJobName, + "table": table, + }).Errorf("failed to ensure event hub exists: %v", err) + return nil, err } - - log.WithFields(log.Fields{ - "flowName": flowName, - }).Infof("event hub %s created", name) - } else { - log.Infof("event hub %s already exists", name) - } - - return nil -} - -func (c *EventHubConnector) getEventHubMgmtClient() (*armeventhub.EventHubsClient, error) { - subID, err := utils.GetAzureSubscriptionID() - if err != nil { - log.Errorf("failed to get azure subscription id: %v", err) - return nil, err } - hubClient, err := armeventhub.NewEventHubsClient(subID, c.creds, nil) - if err != nil { - log.Errorf("failed to get event hub client: %v", err) - return nil, err - } - - return hubClient, nil + return &protos.CreateRawTableOutput{ + TableIdentifier: "n/a", + }, nil } func (c *EventHubConnector) 
SetupNormalizedTables( @@ -361,7 +384,9 @@ func (c *EventHubConnector) SetupNormalizedTables( } func (c *EventHubConnector) SyncFlowCleanup(jobName string) error { - _, err := c.pgMetadata.pool.Exec(c.ctx, fmt.Sprintf("DROP SCHEMA IF EXISTS %s CASCADE", - metadataSchema)) - return err + err := c.pgMetadata.DropMetadata(jobName) + if err != nil { + return err + } + return nil } diff --git a/flow/connectors/eventhub/hub_batches.go b/flow/connectors/eventhub/hub_batches.go new file mode 100644 index 000000000..652e10d45 --- /dev/null +++ b/flow/connectors/eventhub/hub_batches.go @@ -0,0 +1,97 @@ +package conneventhub + +import ( + "context" + "fmt" + "strings" + + azeventhubs "github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs" +) + +// multimap from ScopedEventhub to *azeventhubs.EventDataBatch +type HubBatches struct { + batches map[ScopedEventhub][]*azeventhubs.EventDataBatch + manager *EventHubManager +} + +func NewHubBatches(manager *EventHubManager) *HubBatches { + return &HubBatches{ + batches: make(map[ScopedEventhub][]*azeventhubs.EventDataBatch), + manager: manager, + } +} + +func (h *HubBatches) AddEvent(ctx context.Context, name ScopedEventhub, event string) error { + batches, ok := h.batches[name] + if !ok { + batches = []*azeventhubs.EventDataBatch{} + } + + if len(batches) == 0 { + newBatch, err := h.manager.CreateEventDataBatch(ctx, name) + if err != nil { + return err + } + batches = append(batches, newBatch) + } + + if err := tryAddEventToBatch(event, batches[len(batches)-1]); err != nil { + if strings.Contains(err.Error(), "too large for the batch") { + overflowBatch, err := h.handleBatchOverflow(ctx, name, event) + if err != nil { + return fmt.Errorf("failed to handle batch overflow: %v", err) + } + batches = append(batches, overflowBatch) + } else { + return fmt.Errorf("failed to add event data: %v", err) + } + } + + h.batches[name] = batches + return nil +} + +func (h *HubBatches) handleBatchOverflow( + ctx context.Context, + name 
ScopedEventhub, + event string, +) (*azeventhubs.EventDataBatch, error) { + newBatch, err := h.manager.CreateEventDataBatch(ctx, name) + if err != nil { + return nil, err + } + if err := tryAddEventToBatch(event, newBatch); err != nil { + return nil, fmt.Errorf("failed to add event data to new batch: %v", err) + } + return newBatch, nil +} + +func (h *HubBatches) Len() int { + return len(h.batches) +} + +// ForEach calls the given function for each ScopedEventhub and batch pair +func (h *HubBatches) ForEach(fn func(ScopedEventhub, *azeventhubs.EventDataBatch)) { + for name, batches := range h.batches { + for _, batch := range batches { + fn(name, batch) + } + } +} + +// Clear removes all batches from the HubBatches +func (h *HubBatches) Clear() { + h.batches = make(map[ScopedEventhub][]*azeventhubs.EventDataBatch) +} + +func tryAddEventToBatch(event string, batch *azeventhubs.EventDataBatch) error { + eventData := eventDataFromString(event) + opts := &azeventhubs.AddEventDataOptions{} + return batch.AddEventData(eventData, opts) +} + +func eventDataFromString(s string) *azeventhubs.EventData { + return &azeventhubs.EventData{ + Body: []byte(s), + } +} diff --git a/flow/connectors/eventhub/hubmanager.go b/flow/connectors/eventhub/hubmanager.go new file mode 100644 index 000000000..1241c0ec7 --- /dev/null +++ b/flow/connectors/eventhub/hubmanager.go @@ -0,0 +1,143 @@ +package conneventhub + +import ( + "context" + "fmt" + "strings" + "sync" + + "github.com/Azure/azure-sdk-for-go/sdk/azidentity" + "github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs" + "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub" + "github.com/PeerDB-io/peer-flow/connectors/utils" + "github.com/PeerDB-io/peer-flow/generated/protos" + cmap "github.com/orcaman/concurrent-map/v2" + log "github.com/sirupsen/logrus" +) + +type EventHubManager struct { + creds *azidentity.DefaultAzureCredential + // eventhub peer name -> config + peerConfig cmap.ConcurrentMap[string, 
*protos.EventHubConfig] + // eventhub name -> client + hubs sync.Map +} + +func NewEventHubManager( + ctx context.Context, + creds *azidentity.DefaultAzureCredential, + groupConfig *protos.EventHubGroupConfig, +) *EventHubManager { + peerConfig := cmap.New[*protos.EventHubConfig]() + + for name, config := range groupConfig.Eventhubs { + peerConfig.Set(name, config) + } + + return &EventHubManager{ + creds: creds, + peerConfig: peerConfig, + } +} + +func (m *EventHubManager) GetOrCreateHubClient(name ScopedEventhub) (*azeventhubs.ProducerClient, error) { + ehConfig, ok := m.peerConfig.Get(name.PeerName) + if !ok { + return nil, fmt.Errorf("eventhub '%s' not configured", name) + } + + namespace := ehConfig.Namespace + // if the namespace isn't fully qualified, add the `.servicebus.windows.net` + // check by counting the number of '.' in the namespace + if strings.Count(namespace, ".") < 2 { + namespace = fmt.Sprintf("%s.servicebus.windows.net", namespace) + } + + hub, ok := m.hubs.Load(name) + if !ok { + opts := &azeventhubs.ProducerClientOptions{} + hub, err := azeventhubs.NewProducerClient(namespace, name.Eventhub, m.creds, opts) + if err != nil { + return nil, fmt.Errorf("failed to create eventhub client: %v", err) + } + m.hubs.Store(name, hub) + return hub, nil + } + + return hub.(*azeventhubs.ProducerClient), nil +} + +func (m *EventHubManager) CreateEventDataBatch(ctx context.Context, name ScopedEventhub) (*azeventhubs.EventDataBatch, error) { + hub, err := m.GetOrCreateHubClient(name) + if err != nil { + return nil, err + } + + opts := &azeventhubs.EventDataBatchOptions{} + batch, err := hub.NewEventDataBatch(ctx, opts) + if err != nil { + return nil, fmt.Errorf("failed to create event data batch: %v", err) + } + + return batch, nil +} + +// EnsureEventHubExists ensures that the eventhub exists. 
+func (m *EventHubManager) EnsureEventHubExists(ctx context.Context, name ScopedEventhub) error { + cfg, ok := m.peerConfig.Get(name.PeerName) + if !ok { + return fmt.Errorf("eventhub peer '%s' not configured", name.PeerName) + } + + hubClient, err := m.getEventHubMgmtClient(cfg.SubscriptionId) + if err != nil { + return fmt.Errorf("failed to get event hub client: %v", err) + } + + namespace := cfg.Namespace + resourceGroup := cfg.ResourceGroup + + _, err = hubClient.Get(ctx, resourceGroup, namespace, name.Eventhub, nil) + + partitionCount := int64(cfg.PartitionCount) + retention := int64(cfg.MessageRetentionInDays) + if err != nil { + opts := armeventhub.Eventhub{ + Properties: &armeventhub.Properties{ + PartitionCount: &partitionCount, + MessageRetentionInDays: &retention, + }, + } + + _, err := hubClient.CreateOrUpdate(ctx, resourceGroup, namespace, name.Eventhub, opts, nil) + if err != nil { + log.Errorf("failed to create event hub: %v", err) + return err + } + + log.Infof("event hub %s created", name) + } else { + log.Infof("event hub %s already exists", name) + } + + return nil +} + +func (m *EventHubManager) getEventHubMgmtClient(subID string) (*armeventhub.EventHubsClient, error) { + if subID == "" { + envSubID, err := utils.GetAzureSubscriptionID() + if err != nil { + log.Errorf("failed to get azure subscription id: %v", err) + return nil, err + } + subID = envSubID + } + + hubClient, err := armeventhub.NewEventHubsClient(subID, m.creds, nil) + if err != nil { + log.Errorf("failed to get event hub client: %v", err) + return nil, err + } + + return hubClient, nil +} diff --git a/flow/connectors/eventhub/scoped_eventhub.go b/flow/connectors/eventhub/scoped_eventhub.go new file mode 100644 index 000000000..048ce595c --- /dev/null +++ b/flow/connectors/eventhub/scoped_eventhub.go @@ -0,0 +1,38 @@ +package conneventhub + +import ( + "fmt" + "strings" +) + +type ScopedEventhub struct { + PeerName string + Eventhub string + Identifier string +} + +func 
NewScopedEventhub(raw string) (ScopedEventhub, error) { + // split by dot, the model is peername.eventhub.identifier + parts := strings.Split(raw, ".") + + if len(parts) != 3 { + return ScopedEventhub{}, fmt.Errorf("invalid scoped eventhub '%s'", raw) + } + + return ScopedEventhub{ + PeerName: parts[0], + Eventhub: parts[1], + Identifier: parts[2], + }, nil +} + +func (s ScopedEventhub) Equals(other ScopedEventhub) bool { + return s.PeerName == other.PeerName && + s.Eventhub == other.Eventhub && + s.Identifier == other.Identifier +} + +// ToString returns the string representation of the ScopedEventhub +func (s ScopedEventhub) ToString() string { + return fmt.Sprintf("%s.%s.%s", s.PeerName, s.Eventhub, s.Identifier) +} diff --git a/flow/connectors/eventhub/metadata.go b/flow/connectors/external_metadata/store.go similarity index 66% rename from flow/connectors/eventhub/metadata.go rename to flow/connectors/external_metadata/store.go index 72410bce6..6ec7c0714 100644 --- a/flow/connectors/eventhub/metadata.go +++ b/flow/connectors/external_metadata/store.go @@ -1,4 +1,4 @@ -package conneventhub +package connmetadata import ( "context" @@ -10,18 +10,18 @@ import ( ) const ( - // schema for the peerdb metadata - metadataSchema = "peerdb_eventhub_metadata" - // The name of the table that stores the last sync state. 
lastSyncStateTableName = "last_sync_state" ) type PostgresMetadataStore struct { - config *protos.PostgresConfig - pool *pgxpool.Pool + ctx context.Context + config *protos.PostgresConfig + pool *pgxpool.Pool + schemaName string } -func NewPostgresMetadataStore(ctx context.Context, pgConfig *protos.PostgresConfig) (*PostgresMetadataStore, error) { +func NewPostgresMetadataStore(ctx context.Context, pgConfig *protos.PostgresConfig, + schemaName string) (*PostgresMetadataStore, error) { connectionString := utils.GetPGConnectionString(pgConfig) pool, err := pgxpool.New(ctx, connectionString) @@ -29,11 +29,13 @@ func NewPostgresMetadataStore(ctx context.Context, pgConfig *protos.PostgresConf log.Errorf("failed to create connection pool: %v", err) return nil, err } - log.Info("created connection pool for eventhub metadata store") + log.Info("created connection pool for metadata store") return &PostgresMetadataStore{ - config: pgConfig, - pool: pool, + ctx: ctx, + config: pgConfig, + pool: pool, + schemaName: schemaName, }, nil } @@ -45,11 +47,9 @@ func (p *PostgresMetadataStore) Close() error { return nil } -func (c *EventHubConnector) NeedsSetupMetadataTables() bool { - ms := c.pgMetadata - +func (p *PostgresMetadataStore) NeedsSetupMetadata() bool { // check if schema exists - rows := ms.pool.QueryRow(c.ctx, "SELECT count(*) FROM pg_catalog.pg_namespace WHERE nspname = $1", metadataSchema) + rows := p.pool.QueryRow(p.ctx, "SELECT count(*) FROM pg_catalog.pg_namespace WHERE nspname = $1", p.schemaName) var exists int64 err := rows.Scan(&exists) @@ -65,26 +65,24 @@ func (c *EventHubConnector) NeedsSetupMetadataTables() bool { return true } -func (c *EventHubConnector) SetupMetadataTables() error { - ms := c.pgMetadata - +func (p *PostgresMetadataStore) SetupMetadata() error { // start a transaction - tx, err := ms.pool.Begin(c.ctx) + tx, err := p.pool.Begin(p.ctx) if err != nil { log.Errorf("failed to start transaction: %v", err) return err } // create the schema - _, 
err = tx.Exec(c.ctx, "CREATE SCHEMA IF NOT EXISTS "+metadataSchema) + _, err = tx.Exec(p.ctx, "CREATE SCHEMA IF NOT EXISTS "+p.schemaName) if err != nil { log.Errorf("failed to create schema: %v", err) return err } // create the last sync state table - _, err = tx.Exec(c.ctx, ` - CREATE TABLE IF NOT EXISTS `+metadataSchema+`.`+lastSyncStateTableName+` ( + _, err = tx.Exec(p.ctx, ` + CREATE TABLE IF NOT EXISTS `+p.schemaName+`.`+lastSyncStateTableName+` ( job_name TEXT PRIMARY KEY NOT NULL, last_offset BIGINT NOT NULL, updated_at TIMESTAMP NOT NULL DEFAULT NOW(), @@ -95,9 +93,10 @@ func (c *EventHubConnector) SetupMetadataTables() error { log.Errorf("failed to create last sync state table: %v", err) return err } + log.Infof("created external metadata table %s.%s", p.schemaName, lastSyncStateTableName) // commit the transaction - err = tx.Commit(c.ctx) + err = tx.Commit(p.ctx) if err != nil { log.Errorf("failed to commit transaction: %v", err) return err @@ -106,15 +105,12 @@ func (c *EventHubConnector) SetupMetadataTables() error { return nil } -func (c *EventHubConnector) GetLastOffset(jobName string) (*protos.LastSyncState, error) { - ms := c.pgMetadata - - rows := ms.pool.QueryRow(c.ctx, ` +func (p *PostgresMetadataStore) FetchLastOffset(jobName string) (*protos.LastSyncState, error) { + rows := p.pool.QueryRow(p.ctx, ` SELECT last_offset - FROM `+metadataSchema+`.`+lastSyncStateTableName+` + FROM `+p.schemaName+`.`+lastSyncStateTableName+` WHERE job_name = $1 `, jobName) - var offset int64 err := rows.Scan(&offset) if err != nil { @@ -138,12 +134,10 @@ func (c *EventHubConnector) GetLastOffset(jobName string) (*protos.LastSyncState }, nil } -func (c *EventHubConnector) GetLastSyncBatchID(jobName string) (int64, error) { - ms := c.pgMetadata - - rows := ms.pool.QueryRow(c.ctx, ` +func (p *PostgresMetadataStore) GetLastBatchID(jobName string) (int64, error) { + rows := p.pool.QueryRow(p.ctx, ` SELECT sync_batch_id - FROM 
`+metadataSchema+`.`+lastSyncStateTableName+` + FROM `+p.schemaName+`.`+lastSyncStateTableName+` WHERE job_name = $1 `, jobName) @@ -167,11 +161,9 @@ func (c *EventHubConnector) GetLastSyncBatchID(jobName string) (int64, error) { } // update offset for a job -func (c *EventHubConnector) updateLastOffset(jobName string, offset int64) error { - ms := c.pgMetadata - +func (p *PostgresMetadataStore) UpdateLastOffset(jobName string, offset int64) error { // start a transaction - tx, err := ms.pool.Begin(c.ctx) + tx, err := p.pool.Begin(p.ctx) if err != nil { log.Errorf("failed to start transaction: %v", err) return err @@ -181,8 +173,8 @@ func (c *EventHubConnector) updateLastOffset(jobName string, offset int64) error log.WithFields(log.Fields{ "flowName": jobName, }).Infof("updating last offset for job `%s` to `%d`", jobName, offset) - _, err = tx.Exec(c.ctx, ` - INSERT INTO `+metadataSchema+`.`+lastSyncStateTableName+` (job_name, last_offset, sync_batch_id) + _, err = tx.Exec(p.ctx, ` + INSERT INTO `+p.schemaName+`.`+lastSyncStateTableName+` (job_name, last_offset, sync_batch_id) VALUES ($1, $2, $3) ON CONFLICT (job_name) DO UPDATE SET last_offset = $2, updated_at = NOW() @@ -196,7 +188,7 @@ func (c *EventHubConnector) updateLastOffset(jobName string, offset int64) error } // commit the transaction - err = tx.Commit(c.ctx) + err = tx.Commit(p.ctx) if err != nil { log.Errorf("failed to commit transaction: %v", err) return err @@ -206,14 +198,12 @@ func (c *EventHubConnector) updateLastOffset(jobName string, offset int64) error } // update offset for a job -func (c *EventHubConnector) incrementSyncBatchID(jobName string) error { - ms := c.pgMetadata - +func (p *PostgresMetadataStore) IncrementID(jobName string) error { log.WithFields(log.Fields{ "flowName": jobName, }).Infof("incrementing sync batch id for job `%s`", jobName) - _, err := ms.pool.Exec(c.ctx, ` - UPDATE `+metadataSchema+`.`+lastSyncStateTableName+` + _, err := p.pool.Exec(p.ctx, ` + UPDATE 
`+p.schemaName+`.`+lastSyncStateTableName+` SET sync_batch_id=sync_batch_id+1 WHERE job_name=$1 `, jobName) @@ -226,3 +216,11 @@ func (c *EventHubConnector) incrementSyncBatchID(jobName string) error { return nil } + +func (p *PostgresMetadataStore) DropMetadata(jobName string) error { + _, err := p.pool.Exec(p.ctx, ` + DELETE FROM `+p.schemaName+`.`+lastSyncStateTableName+` + WHERE job_name = $1 + `, jobName) + return err +} diff --git a/flow/connectors/postgres/cdc.go b/flow/connectors/postgres/cdc.go index 641d5f854..533b4ee0c 100644 --- a/flow/connectors/postgres/cdc.go +++ b/flow/connectors/postgres/cdc.go @@ -2,21 +2,23 @@ package connpostgres import ( "context" + "crypto/sha256" "fmt" "reflect" "time" + "github.com/PeerDB-io/peer-flow/connectors/utils" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" "github.com/PeerDB-io/peer-flow/model/qvalue" "github.com/jackc/pglogrepl" + "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgconn" "github.com/jackc/pgx/v5/pgproto3" "github.com/jackc/pgx/v5/pgtype" "github.com/jackc/pgx/v5/pgxpool" "github.com/lib/pq/oid" log "github.com/sirupsen/logrus" - "go.temporal.io/sdk/activity" ) type PostgresCDCSource struct { @@ -29,6 +31,11 @@ type PostgresCDCSource struct { relationMessageMapping model.RelationMessageMapping typeMap *pgtype.Map startLSN pglogrepl.LSN + commitLock bool + customTypeMapping map[uint32]string + + // for partitioned tables, maps child relid to parent relid + chIdToParRelId map[uint32]uint32 } type PostgresCDCConfig struct { @@ -42,7 +49,12 @@ type PostgresCDCConfig struct { } // Create a new PostgresCDCSource -func NewPostgresCDCSource(cdcConfig *PostgresCDCConfig) (*PostgresCDCSource, error) { +func NewPostgresCDCSource(cdcConfig *PostgresCDCConfig, customTypeMap map[uint32]string) (*PostgresCDCSource, error) { + childToParentRelIdMap, err := getChildToParentRelIdMap(cdcConfig.AppContext, cdcConfig.Connection) + if err != nil { + return nil, fmt.Errorf("error 
getting child to parent relid map: %w", err) + } + return &PostgresCDCSource{ ctx: cdcConfig.AppContext, replPool: cdcConfig.Connection, @@ -52,12 +64,46 @@ func NewPostgresCDCSource(cdcConfig *PostgresCDCConfig) (*PostgresCDCSource, err publication: cdcConfig.Publication, relationMessageMapping: cdcConfig.RelationMessageMapping, typeMap: pgtype.NewMap(), + chIdToParRelId: childToParentRelIdMap, + commitLock: false, + customTypeMapping: customTypeMap, }, nil } +func getChildToParentRelIdMap(ctx context.Context, pool *pgxpool.Pool) (map[uint32]uint32, error) { + query := ` + SELECT + parent.oid AS parentrelid, + child.oid AS childrelid + FROM pg_inherits + JOIN pg_class parent ON pg_inherits.inhparent = parent.oid + JOIN pg_class child ON pg_inherits.inhrelid = child.oid + WHERE parent.relkind='p'; + ` + + rows, err := pool.Query(ctx, query, pgx.QueryExecModeSimpleProtocol) + if err != nil { + return nil, fmt.Errorf("error querying for child to parent relid map: %w", err) + } + + defer rows.Close() + + childToParentRelIdMap := make(map[uint32]uint32) + for rows.Next() { + var parentRelId uint32 + var childRelId uint32 + err := rows.Scan(&parentRelId, &childRelId) + if err != nil { + return nil, fmt.Errorf("error scanning child to parent relid map: %w", err) + } + childToParentRelIdMap[childRelId] = parentRelId + } + + return childToParentRelIdMap, nil +} + // PullRecords pulls records from the cdc stream -func (p *PostgresCDCSource) PullRecords(req *model.PullRecordsRequest) ( - *model.RecordsWithTableSchemaDelta, error) { +func (p *PostgresCDCSource) PullRecords(req *model.PullRecordsRequest) error { // setup options pluginArguments := []string{ "proto_version '1'", @@ -66,6 +112,8 @@ func (p *PostgresCDCSource) PullRecords(req *model.PullRecordsRequest) ( if p.publication != "" { pubOpt := fmt.Sprintf("publication_names '%s'", p.publication) pluginArguments = append(pluginArguments, pubOpt) + } else { + return fmt.Errorf("publication name is not set") } 
replicationOpts := pglogrepl.StartReplicationOptions{PluginArgs: pluginArguments} @@ -74,7 +122,7 @@ func (p *PostgresCDCSource) PullRecords(req *model.PullRecordsRequest) ( // create replication connection replicationConn, err := p.replPool.Acquire(p.ctx) if err != nil { - return nil, fmt.Errorf("error acquiring connection for replication: %w", err) + return fmt.Errorf("error acquiring connection for replication: %w", err) } defer replicationConn.Release() @@ -86,7 +134,7 @@ func (p *PostgresCDCSource) PullRecords(req *model.PullRecordsRequest) ( sysident, err := pglogrepl.IdentifySystem(p.ctx, pgConn) if err != nil { - return nil, fmt.Errorf("IdentifySystem failed: %w", err) + return fmt.Errorf("IdentifySystem failed: %w", err) } log.Debugf("SystemID: %s, Timeline: %d, XLogPos: %d, DBName: %s", sysident.SystemID, sysident.Timeline, sysident.XLogPos, sysident.DBName) @@ -100,13 +148,13 @@ func (p *PostgresCDCSource) PullRecords(req *model.PullRecordsRequest) ( err = pglogrepl.StartReplication(p.ctx, pgConn, replicationSlot, p.startLSN, replicationOpts) if err != nil { - return nil, fmt.Errorf("error starting replication at startLsn - %d: %w", p.startLSN, err) + return fmt.Errorf("error starting replication at startLsn - %d: %w", p.startLSN, err) } log.WithFields(log.Fields{ "flowName": req.FlowJobName, }).Infof("started replication on slot %s at startLSN: %d", p.slot, p.startLSN) - return p.consumeStream(pgConn, req, p.startLSN) + return p.consumeStream(pgConn, req, p.startLSN, req.RecordStream) } // start consuming the cdc stream @@ -114,22 +162,10 @@ func (p *PostgresCDCSource) consumeStream( conn *pgconn.PgConn, req *model.PullRecordsRequest, clientXLogPos pglogrepl.LSN, -) (*model.RecordsWithTableSchemaDelta, error) { - // TODO (kaushik): take into consideration the MaxBatchSize - // parameters in the original request. 
- records := &model.RecordBatch{ - Records: make([]model.Record, 0), - TablePKeyLastSeen: make(map[model.TableWithPkey]int), - } - result := &model.RecordsWithTableSchemaDelta{ - RecordBatch: records, - TableSchemaDelta: nil, - RelationMessageMapping: p.relationMessageMapping, - } - + records *model.CDCRecordStream, +) error { standbyMessageTimeout := req.IdleTimeout nextStandbyMessageDeadline := time.Now().Add(standbyMessageTimeout) - earlyReturn := false defer func() { err := conn.Close(p.ctx) @@ -140,41 +176,83 @@ func (p *PostgresCDCSource) consumeStream( } }() + // clientXLogPos is the last checkpoint id + 1, we need to ack that we have processed + // until clientXLogPos - 1 each time we send a standby status update. + // consumedXLogPos is the lsn that has been committed on the destination. + consumedXLogPos := pglogrepl.LSN(0) + if clientXLogPos > 0 { + consumedXLogPos = clientXLogPos - 1 + } + + var standByLastLogged time.Time + localRecords := make([]model.Record, 0) + defer func() { + if len(localRecords) == 0 { + records.SignalAsEmpty() + } + records.RelationMessageMapping <- &p.relationMessageMapping + }() + + shutdown := utils.HeartbeatRoutine(p.ctx, 10*time.Second, func() string { + jobName := req.FlowJobName + currRecords := len(localRecords) + return fmt.Sprintf("pulling records for job - %s, currently have %d records", jobName, currRecords) + }) + + defer func() { + shutdown <- true + }() + + tablePKeyLastSeen := make(map[model.TableWithPkey]int) + + addRecord := func(rec model.Record) { + records.AddRecord(rec) + localRecords = append(localRecords, rec) + + if len(localRecords) == 1 { + records.SignalAsNotEmpty() + } + } + for { - if time.Now().After(nextStandbyMessageDeadline) { - // update the WALWritePosition to be clientXLogPos - 1 - // as the clientXLogPos is the last checkpoint id + 1 - // and we want to send the last checkpoint id as the last - // checkpoint id that we have processed. 
- lastProcessedXLogPos := clientXLogPos - if clientXLogPos > 0 { - lastProcessedXLogPos = clientXLogPos - 1 - } + if time.Now().After(nextStandbyMessageDeadline) || + (len(localRecords) >= int(req.MaxBatchSize)) { + // Update XLogPos to the last processed position, we can only confirm + // that this is the last row committed on the destination. err := pglogrepl.SendStandbyStatusUpdate(p.ctx, conn, - pglogrepl.StandbyStatusUpdate{WALWritePosition: lastProcessedXLogPos}) + pglogrepl.StandbyStatusUpdate{WALWritePosition: consumedXLogPos}) if err != nil { - return nil, fmt.Errorf("SendStandbyStatusUpdate failed: %w", err) + return fmt.Errorf("SendStandbyStatusUpdate failed: %w", err) + } + + numRowsProcessedMessage := fmt.Sprintf("processed %d rows", len(localRecords)) + + if time.Since(standByLastLogged) > 10*time.Second { + log.Infof("Sent Standby status message. %s", numRowsProcessedMessage) + standByLastLogged = time.Now() } - numRowsProcessedMessage := fmt.Sprintf("processed %d rows", len(records.Records)) - activity.RecordHeartbeat(p.ctx, numRowsProcessedMessage) - log.Infof("Sent Standby status message. 
%s", numRowsProcessedMessage) nextStandbyMessageDeadline = time.Now().Add(standbyMessageTimeout) + + if !p.commitLock && (len(localRecords) >= int(req.MaxBatchSize)) { + return nil + } } ctx, cancel := context.WithDeadline(p.ctx, nextStandbyMessageDeadline) rawMsg, err := conn.ReceiveMessage(ctx) cancel() - if err != nil { + if err != nil && !p.commitLock { if pgconn.Timeout(err) { - log.Infof("Idle timeout reached, returning currently accumulated records") - return result, nil + log.Infof("Idle timeout reached, returning currently accumulated records - %d", len(localRecords)) + return nil + } else { + return fmt.Errorf("ReceiveMessage failed: %w", err) } - return nil, fmt.Errorf("ReceiveMessage failed: %w", err) } if errMsg, ok := rawMsg.(*pgproto3.ErrorResponse); ok { - return nil, fmt.Errorf("received Postgres WAL error: %+v", errMsg) + return fmt.Errorf("received Postgres WAL error: %+v", errMsg) } msg, ok := rawMsg.(*pgproto3.CopyData) @@ -183,18 +261,19 @@ func (p *PostgresCDCSource) consumeStream( continue } - firstProcessed := false - switch msg.Data[0] { case pglogrepl.PrimaryKeepaliveMessageByteID: pkm, err := pglogrepl.ParsePrimaryKeepaliveMessage(msg.Data[1:]) if err != nil { - return nil, fmt.Errorf("ParsePrimaryKeepaliveMessage failed: %w", err) + return fmt.Errorf("ParsePrimaryKeepaliveMessage failed: %w", err) } log.Debugf("Primary Keepalive Message => ServerWALEnd: %s ServerTime: %s ReplyRequested: %t", pkm.ServerWALEnd, pkm.ServerTime, pkm.ReplyRequested) + if pkm.ServerWALEnd > clientXLogPos { + clientXLogPos = pkm.ServerWALEnd + } if pkm.ReplyRequested { nextStandbyMessageDeadline = time.Time{} } @@ -202,7 +281,7 @@ func (p *PostgresCDCSource) consumeStream( case pglogrepl.XLogDataByteID: xld, err := pglogrepl.ParseXLogData(msg.Data[1:]) if err != nil { - return nil, fmt.Errorf("ParseXLogData failed: %w", err) + return fmt.Errorf("ParseXLogData failed: %w", err) } log.Debugf("XLogData => WALStart %s ServerWALEnd %s ServerTime %s\n", @@ -210,13 
+289,9 @@ func (p *PostgresCDCSource) consumeStream( rec, err := p.processMessage(records, xld) if err != nil { - return nil, fmt.Errorf("error processing message: %w", err) + return fmt.Errorf("error processing message: %w", err) } - if !firstProcessed { - firstProcessed = true - records.FirstCheckPointID = int64(xld.WALStart) - } if rec != nil { tableName := rec.GetTableName() switch r := rec.(type) { @@ -226,76 +301,77 @@ func (p *PostgresCDCSource) consumeStream( // will change in future isFullReplica := req.TableNameSchemaMapping[tableName].IsReplicaIdentityFull if isFullReplica { - records.Records = append(records.Records, rec) + addRecord(rec) } else { - pkeyCol := req.TableNameSchemaMapping[tableName].PrimaryKeyColumn - pkeyColVal, err := rec.GetItems().GetValueByColName(pkeyCol) + compositePKeyString, err := p.compositePKeyToString(req, rec) if err != nil { - return nil, fmt.Errorf("error getting pkey column value: %w", err) + return err } tablePkeyVal := model.TableWithPkey{ TableName: tableName, - PkeyColVal: *pkeyColVal, + PkeyColVal: compositePKeyString, } - _, ok := records.TablePKeyLastSeen[tablePkeyVal] + _, ok := tablePKeyLastSeen[tablePkeyVal] if !ok { - records.Records = append(records.Records, rec) - records.TablePKeyLastSeen[tablePkeyVal] = len(records.Records) - 1 + addRecord(rec) + tablePKeyLastSeen[tablePkeyVal] = len(localRecords) - 1 } else { - oldRec := records.Records[records.TablePKeyLastSeen[tablePkeyVal]] + oldRec := localRecords[tablePKeyLastSeen[tablePkeyVal]] // iterate through unchanged toast cols and set them in new record updatedCols := r.NewItems.UpdateIfNotExists(oldRec.GetItems()) for _, col := range updatedCols { delete(r.UnchangedToastColumns, col) } - records.Records = append(records.Records, rec) - records.TablePKeyLastSeen[tablePkeyVal] = len(records.Records) - 1 + addRecord(rec) + tablePKeyLastSeen[tablePkeyVal] = len(localRecords) - 1 } } case *model.InsertRecord: isFullReplica := 
req.TableNameSchemaMapping[tableName].IsReplicaIdentityFull if isFullReplica { - records.Records = append(records.Records, rec) + addRecord(rec) } else { - pkeyCol := req.TableNameSchemaMapping[tableName].PrimaryKeyColumn - pkeyColVal, err := rec.GetItems().GetValueByColName(pkeyCol) + compositePKeyString, err := p.compositePKeyToString(req, rec) if err != nil { - return nil, fmt.Errorf("error getting pkey column value: %w", err) + return err } + tablePkeyVal := model.TableWithPkey{ TableName: tableName, - PkeyColVal: *pkeyColVal, + PkeyColVal: compositePKeyString, } - records.Records = append(records.Records, rec) + addRecord(rec) // all columns will be set in insert record, so add it to the map - records.TablePKeyLastSeen[tablePkeyVal] = len(records.Records) - 1 + tablePKeyLastSeen[tablePkeyVal] = len(localRecords) - 1 } case *model.DeleteRecord: - records.Records = append(records.Records, rec) + addRecord(rec) case *model.RelationRecord: - tableSchemaDelta := rec.(*model.RelationRecord).TableSchemaDelta - if len(tableSchemaDelta.AddedColumns) > 0 || len(tableSchemaDelta.DroppedColumns) > 0 { - result.TableSchemaDelta = tableSchemaDelta - log.Infof("Detected schema change for table %s, returning currently accumulated records", - result.TableSchemaDelta.SrcTableName) - earlyReturn = true + tableSchemaDelta := r.TableSchemaDelta + if len(tableSchemaDelta.AddedColumns) > 0 { + log.Infof("Detected schema change for table %s, addedColumns: %v", + tableSchemaDelta.SrcTableName, tableSchemaDelta.AddedColumns) + records.SchemaDeltas <- tableSchemaDelta } } } - currentPos := xld.WALStart + pglogrepl.LSN(len(xld.WALData)) - records.LastCheckPointID = int64(currentPos) + if xld.WALStart > clientXLogPos { + clientXLogPos = xld.WALStart + } - if records.Records != nil && - ((len(records.Records) == int(req.MaxBatchSize)) || earlyReturn) { - return result, nil + if len(localRecords) == 0 { + // given that we have no records it is safe to update the flush wal position + // to 
the clientXLogPos. clientXLogPos can be moved forward due to PKM messages. + consumedXLogPos = clientXLogPos + records.UpdateLatestCheckpoint(int64(clientXLogPos)) } } } } -func (p *PostgresCDCSource) processMessage(batch *model.RecordBatch, xld pglogrepl.XLogData) (model.Record, error) { +func (p *PostgresCDCSource) processMessage(batch *model.CDCRecordStream, xld pglogrepl.XLogData) (model.Record, error) { logicalMsg, err := pglogrepl.Parse(xld.WALData) if err != nil { return nil, fmt.Errorf("error parsing logical message: %w", err) @@ -303,7 +379,9 @@ func (p *PostgresCDCSource) processMessage(batch *model.RecordBatch, xld pglogre switch msg := logicalMsg.(type) { case *pglogrepl.BeginMessage: - log.Debugf("Ignoring BeginMessage") + log.Debugf("BeginMessage => FinalLSN: %v, XID: %v", msg.FinalLSN, msg.Xid) + log.Debugf("Locking PullRecords at BeginMessage, awaiting CommitMessage") + p.commitLock = true case *pglogrepl.InsertMessage: return p.processInsertMessage(xld.WALStart, msg) case *pglogrepl.UpdateMessage: @@ -312,8 +390,14 @@ func (p *PostgresCDCSource) processMessage(batch *model.RecordBatch, xld pglogre return p.processDeleteMessage(xld.WALStart, msg) case *pglogrepl.CommitMessage: // for a commit message, update the last checkpoint id for the record batch. - batch.LastCheckPointID = int64(xld.WALStart) + log.Debugf("CommitMessage => CommitLSN: %v, TransactionEndLSN: %v", + msg.CommitLSN, msg.TransactionEndLSN) + batch.UpdateLatestCheckpoint(int64(msg.CommitLSN)) + p.commitLock = false case *pglogrepl.RelationMessage: + // treat all relation messages as corresponding to parent if partitioned. + msg.RelationID = p.getParentRelIdIfPartitioned(msg.RelationID) + + // TODO (kaushik): consider persistent state for a mirror job + // to be stored somewhere in temporal state. 
We might need to persist // the state of the relation message somewhere @@ -339,31 +423,32 @@ func (p *PostgresCDCSource) processInsertMessage( lsn pglogrepl.LSN, msg *pglogrepl.InsertMessage, ) (model.Record, error) { - tableName, exists := p.SrcTableIDNameMapping[msg.RelationID] + relId := p.getParentRelIdIfPartitioned(msg.RelationID) + + tableName, exists := p.SrcTableIDNameMapping[relId] if !exists { return nil, nil } // log lsn and relation id for debugging - log.Debugf("InsertMessage => LSN: %d, RelationID: %d, Relation Name: %s", lsn, msg.RelationID, tableName) + log.Debugf("InsertMessage => LSN: %d, RelationID: %d, Relation Name: %s", lsn, relId, tableName) - rel, ok := p.relationMessageMapping[msg.RelationID] + rel, ok := p.relationMessageMapping[relId] if !ok { - return nil, fmt.Errorf("unknown relation id: %d", msg.RelationID) + return nil, fmt.Errorf("unknown relation id: %d", relId) } // create empty map of string to interface{} - items, unchangedToastColumns, err := p.convertTupleToMap(msg.Tuple, rel) + items, _, err := p.convertTupleToMap(msg.Tuple, rel) if err != nil { return nil, fmt.Errorf("error converting tuple to map: %w", err) } return &model.InsertRecord{ - CheckPointID: int64(lsn), - Items: items, - DestinationTableName: p.TableNameMapping[tableName], - SourceTableName: tableName, - UnchangedToastColumns: unchangedToastColumns, + CheckPointID: int64(lsn), + Items: items, + DestinationTableName: p.TableNameMapping[tableName], + SourceTableName: tableName, }, nil } @@ -372,17 +457,19 @@ func (p *PostgresCDCSource) processUpdateMessage( lsn pglogrepl.LSN, msg *pglogrepl.UpdateMessage, ) (model.Record, error) { - tableName, exists := p.SrcTableIDNameMapping[msg.RelationID] + relID := p.getParentRelIdIfPartitioned(msg.RelationID) + + tableName, exists := p.SrcTableIDNameMapping[relID] if !exists { return nil, nil } // log lsn and relation id for debugging - log.Debugf("UpdateMessage => LSN: %d, RelationID: %d, Relation Name: %s", lsn, 
msg.RelationID, tableName) + log.Debugf("UpdateMessage => LSN: %d, RelationID: %d, Relation Name: %s", lsn, relID, tableName) - rel, ok := p.relationMessageMapping[msg.RelationID] + rel, ok := p.relationMessageMapping[relID] if !ok { - return nil, fmt.Errorf("unknown relation id: %d", msg.RelationID) + return nil, fmt.Errorf("unknown relation id: %d", relID) } // create empty map of string to interface{} @@ -411,31 +498,32 @@ func (p *PostgresCDCSource) processDeleteMessage( lsn pglogrepl.LSN, msg *pglogrepl.DeleteMessage, ) (model.Record, error) { - tableName, exists := p.SrcTableIDNameMapping[msg.RelationID] + relID := p.getParentRelIdIfPartitioned(msg.RelationID) + + tableName, exists := p.SrcTableIDNameMapping[relID] if !exists { return nil, nil } // log lsn and relation id for debugging - log.Debugf("DeleteMessage => LSN: %d, RelationID: %d, Relation Name: %s", lsn, msg.RelationID, tableName) + log.Debugf("DeleteMessage => LSN: %d, RelationID: %d, Relation Name: %s", lsn, relID, tableName) - rel, ok := p.relationMessageMapping[msg.RelationID] + rel, ok := p.relationMessageMapping[relID] if !ok { - return nil, fmt.Errorf("unknown relation id: %d", msg.RelationID) + return nil, fmt.Errorf("unknown relation id: %d", relID) } // create empty map of string to interface{} - items, unchangedToastColumns, err := p.convertTupleToMap(msg.OldTuple, rel) + items, _, err := p.convertTupleToMap(msg.OldTuple, rel) if err != nil { return nil, fmt.Errorf("error converting tuple to map: %w", err) } return &model.DeleteRecord{ - CheckPointID: int64(lsn), - Items: items, - DestinationTableName: p.TableNameMapping[tableName], - SourceTableName: tableName, - UnchangedToastColumns: unchangedToastColumns, + CheckPointID: int64(lsn), + Items: items, + DestinationTableName: p.TableNameMapping[tableName], + SourceTableName: tableName, }, nil } @@ -449,15 +537,15 @@ It takes a tuple and a relation message as input and returns func (p *PostgresCDCSource) convertTupleToMap( tuple 
*pglogrepl.TupleData, rel *protos.RelationMessage, -) (*model.RecordItems, map[string]bool, error) { +) (*model.RecordItems, map[string]struct{}, error) { // if the tuple is nil, return an empty map if tuple == nil { - return model.NewRecordItems(), make(map[string]bool), nil + return model.NewRecordItems(), make(map[string]struct{}), nil } // create empty map of string to interface{} items := model.NewRecordItems() - unchangedToastColumns := make(map[string]bool) + unchangedToastColumns := make(map[string]struct{}) for idx, col := range tuple.Columns { colName := rel.Columns[idx].Name @@ -479,7 +567,7 @@ func (p *PostgresCDCSource) convertTupleToMap( } items.AddColumn(colName, data) case 'u': // unchanged toast - unchangedToastColumns[colName] = true + unchangedToastColumns[colName] = struct{}{} default: return nil, nil, fmt.Errorf("unknown column data type: %s", string(col.DataType)) } @@ -512,6 +600,31 @@ func (p *PostgresCDCSource) decodeColumnData(data []byte, dataType uint32, forma } return retVal, nil } + + typeName, ok := p.customTypeMapping[dataType] + if ok { + customQKind := customTypeToQKind(typeName) + if customQKind == qvalue.QValueKindGeography || customQKind == qvalue.QValueKindGeometry { + wkt, err := GeoValidate(string(data)) + if err != nil { + return &qvalue.QValue{ + Kind: customQKind, + Value: nil, + }, nil + } else { + return &qvalue.QValue{ + Kind: customQKind, + Value: wkt, + }, nil + } + } else { + return &qvalue.QValue{ + Kind: customQKind, + Value: string(data), + }, nil + } + } + return &qvalue.QValue{Kind: qvalue.QValueKindString, Value: string(data)}, nil } @@ -539,36 +652,52 @@ func (p *PostgresCDCSource) processRelationMessage( // retrieve initial RelationMessage for table changed. 
prevRel := p.relationMessageMapping[currRel.RelationId] // creating maps for lookup later - prevRelMap := make(map[string]bool) - currRelMap := make(map[string]bool) + prevRelMap := make(map[string]*protos.PostgresTableIdentifier) + currRelMap := make(map[string]*protos.PostgresTableIdentifier) for _, column := range prevRel.Columns { - prevRelMap[column.Name] = true + prevRelMap[column.Name] = &protos.PostgresTableIdentifier{ + RelId: column.DataType, + } } for _, column := range currRel.Columns { - currRelMap[column.Name] = true + currRelMap[column.Name] = &protos.PostgresTableIdentifier{ + RelId: column.DataType, + } } schemaDelta := &protos.TableSchemaDelta{ // set it to the source table for now, so we can update the schema on the source side // then at the Workflow level we set it t - SrcTableName: p.SrcTableIDNameMapping[currRel.RelationId], - DstTableName: p.TableNameMapping[p.SrcTableIDNameMapping[currRel.RelationId]], - AddedColumns: make([]*protos.DeltaAddedColumn, 0), - DroppedColumns: make([]string, 0), + SrcTableName: p.SrcTableIDNameMapping[currRel.RelationId], + DstTableName: p.TableNameMapping[p.SrcTableIDNameMapping[currRel.RelationId]], + AddedColumns: make([]*protos.DeltaAddedColumn, 0), } for _, column := range currRel.Columns { // not present in previous relation message, but in current one, so added. - if !prevRelMap[column.Name] { + if prevRelMap[column.Name] == nil { + qKind := postgresOIDToQValueKind(column.DataType) + if qKind == qvalue.QValueKindInvalid { + typeName, ok := p.customTypeMapping[column.DataType] + if ok { + qKind = customTypeToQKind(typeName) + } + } schemaDelta.AddedColumns = append(schemaDelta.AddedColumns, &protos.DeltaAddedColumn{ ColumnName: column.Name, - ColumnType: string(postgresOIDToQValueKind(column.DataType)), + ColumnType: string(qKind), }) + // present in previous and current relation messages, but data types have changed. 
+ // so we add it to AddedColumns and DroppedColumns, knowing that we process DroppedColumns first. + } else if prevRelMap[column.Name].RelId != currRelMap[column.Name].RelId { + log.Warnf("Detected dropped column %s in table %s, but not propagating", column, + schemaDelta.SrcTableName) } } for _, column := range prevRel.Columns { // present in previous relation message, but not in current one, so dropped. - if !currRelMap[column.Name] { - schemaDelta.DroppedColumns = append(schemaDelta.DroppedColumns, column.Name) + if currRelMap[column.Name] == nil { + log.Warnf("Detected dropped column %s in table %s, but not propagating", column, + schemaDelta.SrcTableName) } } @@ -578,3 +707,29 @@ func (p *PostgresCDCSource) processRelationMessage( CheckPointID: int64(lsn), }, nil } + +func (p *PostgresCDCSource) compositePKeyToString(req *model.PullRecordsRequest, rec model.Record) (string, error) { + tableName := rec.GetTableName() + pkeyColsMerged := make([]byte, 0) + + for _, pkeyCol := range req.TableNameSchemaMapping[tableName].PrimaryKeyColumns { + pkeyColVal, err := rec.GetItems().GetValueByColName(pkeyCol) + if err != nil { + return "", fmt.Errorf("error getting pkey column value: %w", err) + } + pkeyColsMerged = append(pkeyColsMerged, []byte(fmt.Sprintf("%v", pkeyColVal.Value))...) 
+ } + + hasher := sha256.New() + hasher.Write(pkeyColsMerged) + return fmt.Sprintf("%x", hasher.Sum(nil)), nil +} + +func (p *PostgresCDCSource) getParentRelIdIfPartitioned(relId uint32) uint32 { + parentRelId, ok := p.chIdToParRelId[relId] + if ok { + return parentRelId + } + + return relId +} diff --git a/flow/connectors/postgres/client.go b/flow/connectors/postgres/client.go index f50687ff9..c9bed66d2 100644 --- a/flow/connectors/postgres/client.go +++ b/flow/connectors/postgres/client.go @@ -12,6 +12,7 @@ import ( "github.com/jackc/pgx/v5" log "github.com/sirupsen/logrus" "golang.org/x/exp/maps" + "golang.org/x/exp/slices" ) //nolint:stylecheck @@ -26,6 +27,8 @@ const ( _peerdb_timestamp BIGINT NOT NULL,_peerdb_destination_table_name TEXT NOT NULL,_peerdb_data JSONB NOT NULL, _peerdb_record_type INTEGER NOT NULL, _peerdb_match_data JSONB,_peerdb_batch_id INTEGER, _peerdb_unchanged_toast_columns TEXT)` + createRawTableBatchIDIndexSQL = "CREATE INDEX IF NOT EXISTS %s_batchid_idx ON %s.%s(_peerdb_batch_id)" + createRawTableDstTableIndexSQL = "CREATE INDEX IF NOT EXISTS %s_dst_table_idx ON %s.%s(_peerdb_destination_table_name)" getLastOffsetSQL = "SELECT lsn_offset FROM %s.%s WHERE mirror_job_name=$1" getLastSyncBatchID_SQL = "SELECT sync_batch_id FROM %s.%s WHERE mirror_job_name=$1" @@ -48,7 +51,7 @@ const ( ) MERGE INTO %s dst USING (SELECT %s,_peerdb_record_type,_peerdb_unchanged_toast_columns FROM src_rank WHERE _peerdb_rank=1) src - ON dst.%s=src.%s + ON %s WHEN NOT MATCHED AND src._peerdb_record_type!=2 THEN INSERT (%s) VALUES (%s) %s @@ -66,19 +69,19 @@ const ( RANK() OVER (PARTITION BY %s ORDER BY _peerdb_timestamp DESC) AS _peerdb_rank FROM %s.%s WHERE _peerdb_batch_id>$1 AND _peerdb_batch_id<=$2 AND _peerdb_destination_table_name=$3 ) - DELETE FROM %s USING src_rank WHERE %s.%s=%s AND src_rank._peerdb_rank=1 AND src_rank._peerdb_record_type=2` + DELETE FROM %s USING src_rank WHERE %s AND src_rank._peerdb_rank=1 AND src_rank._peerdb_record_type=2` 
dropTableIfExistsSQL = "DROP TABLE IF EXISTS %s.%s" deleteJobMetadataSQL = "DELETE FROM %s.%s WHERE MIRROR_JOB_NAME=$1" ) // getRelIDForTable returns the relation ID for a table. -func (c *PostgresConnector) getRelIDForTable(schemaTable *SchemaTable) (uint32, error) { +func (c *PostgresConnector) getRelIDForTable(schemaTable *utils.SchemaTable) (uint32, error) { var relID uint32 err := c.pool.QueryRow(c.ctx, `SELECT c.oid FROM pg_class c JOIN pg_namespace n - ON n.oid = c.relnamespace WHERE n.nspname = $1 AND c.relname = $2`, - strings.ToLower(schemaTable.Schema), strings.ToLower(schemaTable.Table)).Scan(&relID) + ON n.oid = c.relnamespace WHERE n.nspname=$1 AND c.relname=$2`, + schemaTable.Schema, schemaTable.Table).Scan(&relID) if err != nil { return 0, fmt.Errorf("error getting relation ID for table %s: %w", schemaTable, err) } @@ -87,7 +90,7 @@ func (c *PostgresConnector) getRelIDForTable(schemaTable *SchemaTable) (uint32, } // getReplicaIdentity returns the replica identity for a table. -func (c *PostgresConnector) isTableFullReplica(schemaTable *SchemaTable) (bool, error) { +func (c *PostgresConnector) isTableFullReplica(schemaTable *utils.SchemaTable) (bool, error) { relID, relIDErr := c.getRelIDForTable(schemaTable) if relIDErr != nil { return false, fmt.Errorf("failed to get relation id for table %s: %w", schemaTable, relIDErr) @@ -103,42 +106,45 @@ func (c *PostgresConnector) isTableFullReplica(schemaTable *SchemaTable) (bool, return string(replicaIdentity) == "f", nil } -// getPrimaryKeyColumn for table returns the primary key column for a given table +// getPrimaryKeyColumns for table returns the primary key column for a given table // errors if there is no primary key column or if there is more than one primary key column. 
-func (c *PostgresConnector) getPrimaryKeyColumn(schemaTable *SchemaTable) (string, error) { +func (c *PostgresConnector) getPrimaryKeyColumns(schemaTable *utils.SchemaTable) ([]string, error) { relID, err := c.getRelIDForTable(schemaTable) if err != nil { - return "", fmt.Errorf("failed to get relation id for table %s: %w", schemaTable, err) + return nil, fmt.Errorf("failed to get relation id for table %s: %w", schemaTable, err) } // Get the primary key column name var pkCol string + pkCols := make([]string, 0) rows, err := c.pool.Query(c.ctx, `SELECT a.attname FROM pg_index i JOIN pg_attribute a ON a.attrelid = i.indrelid AND a.attnum = ANY(i.indkey) - WHERE i.indrelid = $1 AND i.indisprimary`, + WHERE i.indrelid = $1 AND i.indisprimary ORDER BY a.attname ASC`, relID) if err != nil { - return "", fmt.Errorf("error getting primary key column for table %s: %w", schemaTable, err) + return nil, fmt.Errorf("error getting primary key column for table %s: %w", schemaTable, err) } defer rows.Close() // 0 rows returned, table has no primary keys if !rows.Next() { - return "", fmt.Errorf("table %s has no primary keys", schemaTable) + return nil, fmt.Errorf("table %s has no primary keys", schemaTable) } - err = rows.Scan(&pkCol) - if err != nil { - return "", fmt.Errorf("error scanning primary key column for table %s: %w", schemaTable, err) - } - // more than 1 row returned, table has more than 1 primary key - if rows.Next() { - return "", fmt.Errorf("table %s has more than one primary key", schemaTable) + for { + err = rows.Scan(&pkCol) + if err != nil { + return nil, fmt.Errorf("error scanning primary key column for table %s: %w", schemaTable, err) + } + pkCols = append(pkCols, pkCol) + if !rows.Next() { + break + } } - return pkCol, nil + return pkCols, nil } -func (c *PostgresConnector) tableExists(schemaTable *SchemaTable) (bool, error) { +func (c *PostgresConnector) tableExists(schemaTable *utils.SchemaTable) (bool, error) { var exists bool err := 
c.pool.QueryRow(c.ctx, `SELECT EXISTS ( @@ -210,10 +216,11 @@ func (c *PostgresConnector) createSlotAndPublication( */ srcTableNames := make([]string, 0, len(tableNameMapping)) for srcTableName := range tableNameMapping { - if len(strings.Split(srcTableName, ".")) != 2 { - return fmt.Errorf("source tables identifier is invalid: %v", srcTableName) + parsedSrcTableName, err := utils.ParseSchemaTable(srcTableName) + if err != nil { + return fmt.Errorf("source table identifier %s is invalid", srcTableName) } - srcTableNames = append(srcTableNames, srcTableName) + srcTableNames = append(srcTableNames, parsedSrcTableName.String()) } tableNameString := strings.Join(srcTableNames, ", ") @@ -223,6 +230,7 @@ func (c *PostgresConnector) createSlotAndPublication( _, err := c.pool.Exec(c.ctx, stmt) if err != nil { log.Warnf("Error creating publication '%s': %v", publication, err) + return fmt.Errorf("error creating publication '%s' : %w", publication, err) } } @@ -301,14 +309,19 @@ func generateCreateTableSQLForNormalizedTable(sourceTableIdentifier string, sourceTableSchema *protos.TableSchema) string { createTableSQLArray := make([]string, 0, len(sourceTableSchema.Columns)) for columnName, genericColumnType := range sourceTableSchema.Columns { - if sourceTableSchema.PrimaryKeyColumn == strings.ToLower(columnName) { - createTableSQLArray = append(createTableSQLArray, fmt.Sprintf("\"%s\" %s PRIMARY KEY,", - columnName, qValueKindToPostgresType(genericColumnType))) - } else { - createTableSQLArray = append(createTableSQLArray, fmt.Sprintf("\"%s\" %s,", columnName, - qValueKindToPostgresType(genericColumnType))) - } + createTableSQLArray = append(createTableSQLArray, fmt.Sprintf("\"%s\" %s,", columnName, + qValueKindToPostgresType(genericColumnType))) + } + + // add composite primary key to the table + primaryKeyColsQuoted := make([]string, 0) + for _, primaryKeyCol := range sourceTableSchema.PrimaryKeyColumns { + primaryKeyColsQuoted = append(primaryKeyColsQuoted, + 
fmt.Sprintf(`"%s"`, primaryKeyCol)) } + createTableSQLArray = append(createTableSQLArray, fmt.Sprintf("PRIMARY KEY(%s),", + strings.TrimSuffix(strings.Join(primaryKeyColsQuoted, ","), ","))) + return fmt.Sprintf(createNormalizedTableSQL, sourceTableIdentifier, strings.TrimSuffix(strings.Join(createTableSQLArray, ""), ",")) } @@ -462,11 +475,10 @@ func (c *PostgresConnector) generateNormalizeStatements(destinationTableIdentifi unchangedToastColumns []string, rawTableIdentifier string, supportsMerge bool) []string { if supportsMerge { return []string{c.generateMergeStatement(destinationTableIdentifier, unchangedToastColumns, rawTableIdentifier)} - } else { - log.Warnf("Postgres version is not high enough to support MERGE, falling back to UPSERT + DELETE") - log.Warnf("TOAST columns will not be updated properly, use REPLICA IDENTITY FULL or upgrade Postgres") - return c.generateFallbackStatements(destinationTableIdentifier, rawTableIdentifier) } + log.Warnf("Postgres version is not high enough to support MERGE, falling back to UPSERT + DELETE") + log.Warnf("TOAST columns will not be updated properly, use REPLICA IDENTITY FULL or upgrade Postgres") + return c.generateFallbackStatements(destinationTableIdentifier, rawTableIdentifier) } func (c *PostgresConnector) generateFallbackStatements(destinationTableIdentifier string, @@ -474,14 +486,14 @@ func (c *PostgresConnector) generateFallbackStatements(destinationTableIdentifie normalizedTableSchema := c.tableSchemaMapping[destinationTableIdentifier] columnNames := make([]string, 0, len(normalizedTableSchema.Columns)) flattenedCastsSQLArray := make([]string, 0, len(normalizedTableSchema.Columns)) - var primaryKeyColumnCast string + primaryKeyColumnCasts := make(map[string]string) for columnName, genericColumnType := range normalizedTableSchema.Columns { columnNames = append(columnNames, fmt.Sprintf("\"%s\"", columnName)) pgType := qValueKindToPostgresType(genericColumnType) flattenedCastsSQLArray = 
append(flattenedCastsSQLArray, fmt.Sprintf("(_peerdb_data->>'%s')::%s AS \"%s\"", columnName, pgType, columnName)) - if normalizedTableSchema.PrimaryKeyColumn == columnName { - primaryKeyColumnCast = fmt.Sprintf("(_peerdb_data->>'%s')::%s", columnName, pgType) + if slices.Contains(normalizedTableSchema.PrimaryKeyColumns, columnName) { + primaryKeyColumnCasts[columnName] = fmt.Sprintf("(_peerdb_data->>'%s')::%s", columnName, pgType) } } flattenedCastsSQL := strings.TrimSuffix(strings.Join(flattenedCastsSQLArray, ","), ",") @@ -492,12 +504,20 @@ func (c *PostgresConnector) generateFallbackStatements(destinationTableIdentifie updateColumnsSQLArray = append(updateColumnsSQLArray, fmt.Sprintf("%s=EXCLUDED.%s", columnName, columnName)) } updateColumnsSQL := strings.TrimSuffix(strings.Join(updateColumnsSQLArray, ","), ",") - fallbackUpsertStatement := fmt.Sprintf(fallbackUpsertStatementSQL, primaryKeyColumnCast, internalSchema, + deleteWhereClauseArray := make([]string, 0, len(normalizedTableSchema.PrimaryKeyColumns)) + for columnName, columnCast := range primaryKeyColumnCasts { + deleteWhereClauseArray = append(deleteWhereClauseArray, fmt.Sprintf("%s.%s=%s AND ", + destinationTableIdentifier, columnName, columnCast)) + } + deleteWhereClauseSQL := strings.TrimSuffix(strings.Join(deleteWhereClauseArray, ""), "AND ") + + fallbackUpsertStatement := fmt.Sprintf(fallbackUpsertStatementSQL, + strings.TrimSuffix(strings.Join(maps.Values(primaryKeyColumnCasts), ","), ","), internalSchema, rawTableIdentifier, destinationTableIdentifier, insertColumnsSQL, flattenedCastsSQL, - normalizedTableSchema.PrimaryKeyColumn, updateColumnsSQL) - fallbackDeleteStatement := fmt.Sprintf(fallbackDeleteStatementSQL, primaryKeyColumnCast, internalSchema, - rawTableIdentifier, destinationTableIdentifier, destinationTableIdentifier, - normalizedTableSchema.PrimaryKeyColumn, primaryKeyColumnCast) + strings.Join(normalizedTableSchema.PrimaryKeyColumns, ","), updateColumnsSQL) + fallbackDeleteStatement 
:= fmt.Sprintf(fallbackDeleteStatementSQL, + strings.Join(maps.Values(primaryKeyColumnCasts), ","), internalSchema, + rawTableIdentifier, destinationTableIdentifier, deleteWhereClauseSQL) return []string{fallbackUpsertStatement, fallbackDeleteStatement} } @@ -511,19 +531,22 @@ func (c *PostgresConnector) generateMergeStatement(destinationTableIdentifier st } flattenedCastsSQLArray := make([]string, 0, len(normalizedTableSchema.Columns)) - var primaryKeyColumnCast string + primaryKeyColumnCasts := make(map[string]string) + primaryKeySelectSQLArray := make([]string, 0, len(normalizedTableSchema.PrimaryKeyColumns)) for columnName, genericColumnType := range normalizedTableSchema.Columns { pgType := qValueKindToPostgresType(genericColumnType) if strings.Contains(genericColumnType, "array") { flattenedCastsSQLArray = append(flattenedCastsSQLArray, - fmt.Sprintf("ARRAY(SELECT * FROM JSON_ARRAY_ELEMENTS_TEXT((_peerdb_data->>'%s')::JSON))::%s AS %s", + fmt.Sprintf("ARRAY(SELECT * FROM JSON_ARRAY_ELEMENTS_TEXT((_peerdb_data->>'%s')::JSON))::%s AS \"%s\"", strings.Trim(columnName, "\""), pgType, columnName)) } else { - flattenedCastsSQLArray = append(flattenedCastsSQLArray, fmt.Sprintf("(_peerdb_data->>'%s')::%s AS %s", + flattenedCastsSQLArray = append(flattenedCastsSQLArray, fmt.Sprintf("(_peerdb_data->>'%s')::%s AS \"%s\"", strings.Trim(columnName, "\""), pgType, columnName)) } - if normalizedTableSchema.PrimaryKeyColumn == columnName { - primaryKeyColumnCast = fmt.Sprintf("(_peerdb_data->>'%s')::%s", strings.Trim(columnName, "\""), pgType) + if slices.Contains(normalizedTableSchema.PrimaryKeyColumns, columnName) { + primaryKeyColumnCasts[columnName] = fmt.Sprintf("(_peerdb_data->>'%s')::%s", columnName, pgType) + primaryKeySelectSQLArray = append(primaryKeySelectSQLArray, fmt.Sprintf("src.%s=dst.%s", + columnName, columnName)) } } flattenedCastsSQL := strings.TrimSuffix(strings.Join(flattenedCastsSQLArray, ","), ",") @@ -536,18 +559,18 @@ func (c *PostgresConnector) 
generateMergeStatement(destinationTableIdentifier st insertValuesSQL := strings.TrimSuffix(strings.Join(insertValuesSQLArray, ","), ",") updateStatements := c.generateUpdateStatement(columnNames, unchangedToastColumns) - return fmt.Sprintf(mergeStatementSQL, primaryKeyColumnCast, internalSchema, rawTableIdentifier, - destinationTableIdentifier, flattenedCastsSQL, normalizedTableSchema.PrimaryKeyColumn, - normalizedTableSchema.PrimaryKeyColumn, insertColumnsSQL, insertValuesSQL, updateStatements) + return fmt.Sprintf(mergeStatementSQL, strings.Join(maps.Values(primaryKeyColumnCasts), ","), + internalSchema, rawTableIdentifier, destinationTableIdentifier, flattenedCastsSQL, + strings.Join(primaryKeySelectSQLArray, " AND "), insertColumnsSQL, insertValuesSQL, updateStatements) } func (c *PostgresConnector) generateUpdateStatement(allCols []string, unchangedToastColsLists []string) string { updateStmts := make([]string, 0) for _, cols := range unchangedToastColsLists { - unchangedColsArray := strings.Split(cols, ",") - for i, col := range unchangedColsArray { - unchangedColsArray[i] = fmt.Sprintf("\"%s\"", col) + unchangedColsArray := make([]string, 0) + for _, unchangedToastCol := range strings.Split(cols, ",") { + unchangedColsArray = append(unchangedColsArray, fmt.Sprintf(`"%s"`, unchangedToastCol)) } otherCols := utils.ArrayMinus(allCols, unchangedColsArray) tmpArray := make([]string, 0) @@ -555,10 +578,9 @@ func (c *PostgresConnector) generateUpdateStatement(allCols []string, unchangedT tmpArray = append(tmpArray, fmt.Sprintf("%s=src.%s", colName, colName)) } ssep := strings.Join(tmpArray, ",") - quotedCols := strings.Join(unchangedColsArray, ",") updateStmt := fmt.Sprintf(`WHEN MATCHED AND - src._peerdb_record_type=1 AND _peerdb_unchanged_toast_columns='%s' - THEN UPDATE SET %s `, quotedCols, ssep) + src._peerdb_record_type=1 AND _peerdb_unchanged_toast_columns='%s' + THEN UPDATE SET %s `, cols, ssep) updateStmts = append(updateStmts, updateStmt) } return 
strings.Join(updateStmts, "\n") @@ -568,13 +590,14 @@ func (c *PostgresConnector) getApproxTableCounts(tables []string) (int64, error) countTablesBatch := &pgx.Batch{} totalCount := int64(0) for _, table := range tables { - _, err := parseSchemaTable(table) + parsedTable, err := utils.ParseSchemaTable(table) if err != nil { log.Errorf("error while parsing table %s: %v", table, err) return 0, fmt.Errorf("error while parsing table %s: %w", table, err) } countTablesBatch.Queue( - fmt.Sprintf("SELECT reltuples::bigint AS estimate FROM pg_class WHERE oid = '%s'::regclass;", table)). + fmt.Sprintf("SELECT reltuples::bigint AS estimate FROM pg_class WHERE oid = '%s'::regclass;", + parsedTable.String())). QueryRow(func(row pgx.Row) error { var count int64 err := row.Scan(&count) diff --git a/flow/connectors/postgres/postgres.go b/flow/connectors/postgres/postgres.go index 20ebe0958..6c06cb60c 100644 --- a/flow/connectors/postgres/postgres.go +++ b/flow/connectors/postgres/postgres.go @@ -5,11 +5,9 @@ import ( "database/sql" "fmt" "regexp" - "strings" "time" "github.com/PeerDB-io/peer-flow/connectors/utils" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" "github.com/PeerDB-io/peer-flow/connectors/utils/monitoring" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" @@ -20,8 +18,6 @@ import ( "github.com/jackc/pgx/v5/pgconn" "github.com/jackc/pgx/v5/pgxpool" log "github.com/sirupsen/logrus" - "go.temporal.io/sdk/activity" - "golang.org/x/exp/maps" ) // PostgresConnector is a Connector implementation for Postgres. @@ -32,18 +28,7 @@ type PostgresConnector struct { pool *pgxpool.Pool replPool *pgxpool.Pool tableSchemaMapping map[string]*protos.TableSchema -} - -// SchemaTable is a table in a schema. 
-type SchemaTable struct { - Schema string - Table string -} - -func (t *SchemaTable) String() string { - quotedSchema := fmt.Sprintf(`"%s"`, t.Schema) - quotedTable := fmt.Sprintf(`"%s"`, t.Table) - return fmt.Sprintf("%s.%s", quotedSchema, quotedTable) + customTypesMapping map[uint32]string } // NewPostgresConnector creates a new instance of PostgresConnector. @@ -52,32 +37,48 @@ func NewPostgresConnector(ctx context.Context, pgConfig *protos.PostgresConfig) // create a separate connection pool for non-replication queries as replication connections cannot // be used for extended query protocol, i.e. prepared statements - pool, err := pgxpool.New(ctx, connectionString) + connConfig, err := pgxpool.ParseConfig(connectionString) + if err != nil { + return nil, fmt.Errorf("failed to parse connection string: %w", err) + } + + runtimeParams := connConfig.ConnConfig.RuntimeParams + runtimeParams["application_name"] = "peerdb_query_executor" + runtimeParams["idle_in_transaction_session_timeout"] = "0" + runtimeParams["statement_timeout"] = "0" + + pool, err := pgxpool.NewWithConfig(ctx, connConfig) if err != nil { return nil, fmt.Errorf("failed to create connection pool: %w", err) } + customTypeMap, err := utils.GetCustomDataTypes(ctx, pool) + if err != nil { + return nil, fmt.Errorf("failed to get custom type map: %w", err) + } + // ensure that replication is set to database - connConfig, err := pgxpool.ParseConfig(connectionString) + replConnConfig, err := pgxpool.ParseConfig(connectionString) if err != nil { return nil, fmt.Errorf("failed to parse connection string: %w", err) } - connConfig.ConnConfig.RuntimeParams["replication"] = "database" - connConfig.ConnConfig.RuntimeParams["bytea_output"] = "hex" - connConfig.MaxConns = 1 + replConnConfig.ConnConfig.RuntimeParams["replication"] = "database" + replConnConfig.ConnConfig.RuntimeParams["bytea_output"] = "hex" + replConnConfig.MaxConns = 1 - replPool, err := pgxpool.NewWithConfig(ctx, connConfig) + replPool, err := 
pgxpool.NewWithConfig(ctx, replConnConfig) if err != nil { return nil, fmt.Errorf("failed to create connection pool: %w", err) } return &PostgresConnector{ - connStr: connectionString, - ctx: ctx, - config: pgConfig, - pool: pool, - replPool: replPool, + connStr: connectionString, + ctx: ctx, + config: pgConfig, + pool: pool, + replPool: replPool, + customTypesMapping: customTypeMap, }, nil } @@ -104,7 +105,7 @@ func (c *PostgresConnector) ConnectionActive() bool { // NeedsSetupMetadataTables returns true if the metadata tables need to be set up. func (c *PostgresConnector) NeedsSetupMetadataTables() bool { - result, err := c.tableExists(&SchemaTable{ + result, err := c.tableExists(&utils.SchemaTable{ Schema: internalSchema, Table: mirrorJobsTableIdentifier, }) @@ -173,7 +174,11 @@ func (c *PostgresConnector) GetLastOffset(jobName string) (*protos.LastSyncState } // PullRecords pulls records from the source. -func (c *PostgresConnector) PullRecords(req *model.PullRecordsRequest) (*model.RecordsWithTableSchemaDelta, error) { +func (c *PostgresConnector) PullRecords(req *model.PullRecordsRequest) error { + defer func() { + req.RecordStream.Close() + }() + // Slotname would be the job name prefixed with "peerflow_slot_" slotName := fmt.Sprintf("peerflow_slot_%s", req.FlowJobName) if req.OverrideReplicationSlotName != "" { @@ -189,7 +194,7 @@ func (c *PostgresConnector) PullRecords(req *model.PullRecordsRequest) (*model.R // Check if the replication slot and publication exist exists, err := c.checkSlotAndPublication(slotName, publicationName) if err != nil { - return nil, fmt.Errorf("error checking for replication slot and publication: %w", err) + return fmt.Errorf("error checking for replication slot and publication: %w", err) } if !exists.PublicationExists { @@ -203,7 +208,7 @@ func (c *PostgresConnector) PullRecords(req *model.PullRecordsRequest) (*model.R log.WithFields(log.Fields{ "flowName": req.FlowJobName, }).Warnf("slot %s does not exist", slotName) - return 
nil, fmt.Errorf("replication slot %s does not exist", slotName) + return fmt.Errorf("replication slot %s does not exist", slotName) } log.WithFields(log.Fields{ @@ -218,35 +223,29 @@ func (c *PostgresConnector) PullRecords(req *model.PullRecordsRequest) (*model.R Publication: publicationName, TableNameMapping: req.TableNameMapping, RelationMessageMapping: req.RelationMessageMapping, - }) + }, c.customTypesMapping) if err != nil { - return nil, fmt.Errorf("failed to create cdc source: %w", err) + return fmt.Errorf("failed to create cdc source: %w", err) } - recordsWithSchemaDelta, err := cdc.PullRecords(req) + err = cdc.PullRecords(req) if err != nil { - return nil, err + return err } - if len(recordsWithSchemaDelta.RecordBatch.Records) > 0 { - totalRecordsAtSource, err := c.getApproxTableCounts(maps.Keys(req.TableNameMapping)) + + cdcMirrorMonitor, ok := c.ctx.Value(shared.CDCMirrorMonitorKey).(*monitoring.CatalogMirrorMonitor) + if ok { + latestLSN, err := c.getCurrentLSN() if err != nil { - return nil, err + return fmt.Errorf("failed to get current LSN: %w", err) } - metrics.LogPullMetrics(c.ctx, req.FlowJobName, recordsWithSchemaDelta.RecordBatch, totalRecordsAtSource) - cdcMirrorMonitor, ok := c.ctx.Value(shared.CDCMirrorMonitorKey).(*monitoring.CatalogMirrorMonitor) - if ok { - latestLSN, err := c.getCurrentLSN() - if err != nil { - return nil, err - } - err = cdcMirrorMonitor.UpdateLatestLSNAtSourceForCDCFlow(c.ctx, req.FlowJobName, latestLSN) - if err != nil { - return nil, err - } + err = cdcMirrorMonitor.UpdateLatestLSNAtSourceForCDCFlow(c.ctx, req.FlowJobName, latestLSN) + if err != nil { + return fmt.Errorf("failed to update latest LSN at source for CDC flow: %w", err) } } - return recordsWithSchemaDelta, nil + return nil } // SyncRecords pushes records to the destination. 
@@ -254,7 +253,7 @@ func (c *PostgresConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S rawTableIdentifier := getRawTableIdentifier(req.FlowJobName) log.WithFields(log.Fields{ "flowName": req.FlowJobName, - }).Printf("pushing %d records to Postgres table %s via COPY", len(req.Records.Records), rawTableIdentifier) + }).Printf("pushing records to Postgres table %s via COPY", rawTableIdentifier) syncBatchID, err := c.GetLastSyncBatchID(req.FlowJobName) if err != nil { @@ -266,9 +265,8 @@ func (c *PostgresConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S first := true var firstCP int64 = 0 - lastCP := req.Records.LastCheckPointID - for _, record := range req.Records.Records { + for record := range req.Records.GetRecords() { switch typedRecord := record.(type) { case *model.InsertRecord: itemsJSON, err := typedRecord.Items.ToJSON() @@ -284,7 +282,7 @@ func (c *PostgresConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S 0, "{}", syncBatchID, - utils.KeysToString(typedRecord.UnchangedToastColumns), + "", }) tableNameRowsMapping[typedRecord.DestinationTableName] += 1 case *model.UpdateRecord: @@ -322,7 +320,7 @@ func (c *PostgresConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S 2, itemsJSON, syncBatchID, - utils.KeysToString(typedRecord.UnchangedToastColumns), + "", }) tableNameRowsMapping[typedRecord.DestinationTableName] += 1 default: @@ -356,7 +354,6 @@ func (c *PostgresConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S } }() - startTime := time.Now() syncedRecordsCount, err := syncRecordsTx.CopyFrom(c.ctx, pgx.Identifier{internalSchema, rawTableIdentifier}, []string{"_peerdb_uid", "_peerdb_timestamp", "_peerdb_destination_table_name", "_peerdb_data", "_peerdb_record_type", "_peerdb_match_data", "_peerdb_batch_id", "_peerdb_unchanged_toast_columns"}, @@ -368,12 +365,16 @@ func (c *PostgresConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S return nil, fmt.Errorf("error syncing records: 
expected %d records to be synced, but %d were synced", len(records), syncedRecordsCount) } - metrics.LogSyncMetrics(c.ctx, req.FlowJobName, syncedRecordsCount, time.Since(startTime)) log.WithFields(log.Fields{ "flowName": req.FlowJobName, }).Printf("synced %d records to Postgres table %s via COPY", syncedRecordsCount, rawTableIdentifier) + lastCP, err := req.Records.GetLastCheckpoint() + if err != nil { + return nil, fmt.Errorf("error getting last checkpoint: %w", err) + } + // updating metadata with new offset and syncBatchID err = c.updateSyncMetadata(req.FlowJobName, lastCP, syncBatchID, syncRecordsTx) if err != nil { @@ -414,7 +415,7 @@ func (c *PostgresConnector) NormalizeRecords(req *model.NormalizeRecordsRequest) "flowName": req.FlowJobName, }).Printf("no records to normalize: syncBatchID %d, normalizeBatchID %d", syncBatchID, normalizeBatchID) return &model.NormalizeResponse{ - Done: true, + Done: false, StartBatchID: normalizeBatchID, EndBatchID: syncBatchID, }, nil @@ -455,7 +456,6 @@ func (c *PostgresConnector) NormalizeRecords(req *model.NormalizeRecordsRequest) }) } } - startTime := time.Now() if mergeStatementsBatch.Len() > 0 { mergeResults := normalizeRecordsTx.SendBatch(c.ctx, mergeStatementsBatch) err = mergeResults.Close() @@ -466,14 +466,6 @@ func (c *PostgresConnector) NormalizeRecords(req *model.NormalizeRecordsRequest) log.WithFields(log.Fields{ "flowName": req.FlowJobName, }).Infof("normalized %d records", totalRowsAffected) - if totalRowsAffected > 0 { - totalRowsAtTarget, err := c.getApproxTableCounts(maps.Keys(unchangedToastColsMap)) - if err != nil { - return nil, err - } - metrics.LogNormalizeMetrics(c.ctx, req.FlowJobName, int64(totalRowsAffected), - time.Since(startTime), totalRowsAtTarget) - } // updating metadata with new normalizeBatchID err = c.updateNormalizeMetadata(req.FlowJobName, syncBatchID, normalizeRecordsTx) @@ -523,6 +515,16 @@ func (c *PostgresConnector) CreateRawTable(req *protos.CreateRawTableInput) (*pr if err != nil 
{ return nil, fmt.Errorf("error creating raw table: %w", err) } + _, err = createRawTableTx.Exec(c.ctx, fmt.Sprintf(createRawTableBatchIDIndexSQL, rawTableIdentifier, + internalSchema, rawTableIdentifier)) + if err != nil { + return nil, fmt.Errorf("error creating batch ID index on raw table: %w", err) + } + _, err = createRawTableTx.Exec(c.ctx, fmt.Sprintf(createRawTableDstTableIndexSQL, rawTableIdentifier, + internalSchema, rawTableIdentifier)) + if err != nil { + return nil, fmt.Errorf("error creating destion table index on raw table: %w", err) + } err = createRawTableTx.Commit(c.ctx) if err != nil { @@ -542,7 +544,7 @@ func (c *PostgresConnector) GetTableSchema( return nil, err } res[tableName] = tableSchema - c.recordHeartbeatWithRecover(fmt.Sprintf("fetched schema for table %s", tableName)) + utils.RecordHeartbeatWithRecover(c.ctx, fmt.Sprintf("fetched schema for table %s", tableName)) } return &protos.GetTableSchemaBatchOutput{ @@ -553,25 +555,26 @@ func (c *PostgresConnector) GetTableSchema( func (c *PostgresConnector) getTableSchemaForTable( tableName string, ) (*protos.TableSchema, error) { - schemaTable, err := parseSchemaTable(tableName) + schemaTable, err := utils.ParseSchemaTable(tableName) if err != nil { return nil, err } + isFullReplica, replErr := c.isTableFullReplica(schemaTable) + if replErr != nil { + return nil, fmt.Errorf("error getting replica identity for table %s: %w", schemaTable, replErr) + } + // Get the column names and types rows, err := c.pool.Query(c.ctx, - fmt.Sprintf(`SELECT * FROM %s LIMIT 0`, tableName)) + fmt.Sprintf(`SELECT * FROM %s LIMIT 0`, schemaTable.String()), + pgx.QueryExecModeSimpleProtocol) if err != nil { return nil, fmt.Errorf("error getting table schema for table %s: %w", schemaTable, err) } defer rows.Close() - isFullReplica, replErr := c.isTableFullReplica(schemaTable) - if replErr != nil { - return nil, fmt.Errorf("error getting replica identity for table %s: %w", schemaTable, replErr) - } - - pkey, err := 
c.getPrimaryKeyColumn(schemaTable) + pKeyCols, err := c.getPrimaryKeyColumns(schemaTable) if err != nil { if !isFullReplica { return nil, fmt.Errorf("error getting primary key column for table %s: %w", schemaTable, err) @@ -581,15 +584,19 @@ func (c *PostgresConnector) getTableSchemaForTable( res := &protos.TableSchema{ TableIdentifier: tableName, Columns: make(map[string]string), - PrimaryKeyColumn: pkey, + PrimaryKeyColumns: pKeyCols, IsReplicaIdentityFull: isFullReplica, } for _, fieldDescription := range rows.FieldDescriptions() { genericColType := postgresOIDToQValueKind(fieldDescription.DataTypeOID) if genericColType == qvalue.QValueKindInvalid { - // we use string for invalid types - genericColType = qvalue.QValueKindString + typeName, ok := c.customTypesMapping[fieldDescription.DataTypeOID] + if ok { + genericColType = customTypeToQKind(typeName) + } else { + genericColType = qvalue.QValueKindString + } } res.Columns[fieldDescription.Name] = string(genericColType) @@ -622,7 +629,7 @@ func (c *PostgresConnector) SetupNormalizedTables(req *protos.SetupNormalizedTab }() for tableIdentifier, tableSchema := range req.TableNameSchemaMapping { - normalizedTableNameComponents, err := parseSchemaTable(tableIdentifier) + normalizedTableNameComponents, err := utils.ParseSchemaTable(tableIdentifier) if err != nil { return nil, fmt.Errorf("error while parsing table schema and name: %w", err) } @@ -644,7 +651,7 @@ func (c *PostgresConnector) SetupNormalizedTables(req *protos.SetupNormalizedTab tableExistsMapping[tableIdentifier] = false log.Printf("created table %s", tableIdentifier) - c.recordHeartbeatWithRecover(fmt.Sprintf("created table %s", tableIdentifier)) + utils.RecordHeartbeatWithRecover(c.ctx, fmt.Sprintf("created table %s", tableIdentifier)) } err = createNormalizedTablesTx.Commit(c.ctx) @@ -665,16 +672,13 @@ func (c *PostgresConnector) InitializeTableSchema(req map[string]*protos.TableSc // ReplayTableSchemaDelta changes a destination table to match the 
schema at source // This could involve adding or dropping multiple columns. -func (c *PostgresConnector) ReplayTableSchemaDelta(flowJobName string, schemaDelta *protos.TableSchemaDelta) error { - if (schemaDelta == nil) || (len(schemaDelta.AddedColumns) == 0 && len(schemaDelta.DroppedColumns) == 0) { - return nil - } - +func (c *PostgresConnector) ReplayTableSchemaDeltas(flowJobName string, + schemaDeltas []*protos.TableSchemaDelta) error { // Postgres is cool and supports transactional DDL. So we use a transaction. tableSchemaModifyTx, err := c.pool.Begin(c.ctx) if err != nil { - return fmt.Errorf("error starting transaction for schema modification for table %s: %w", - schemaDelta.SrcTableName, err) + return fmt.Errorf("error starting transaction for schema modification: %w", + err) } defer func() { deferErr := tableSchemaModifyTx.Rollback(c.ctx) @@ -685,37 +689,32 @@ func (c *PostgresConnector) ReplayTableSchemaDelta(flowJobName string, schemaDel } }() - for _, droppedColumn := range schemaDelta.DroppedColumns { - _, err = tableSchemaModifyTx.Exec(c.ctx, fmt.Sprintf("ALTER TABLE %s DROP COLUMN %s", schemaDelta.DstTableName, - droppedColumn)) - if err != nil { - return fmt.Errorf("failed to drop column %s for table %s: %w", droppedColumn, - schemaDelta.SrcTableName, err) + for _, schemaDelta := range schemaDeltas { + if schemaDelta == nil || len(schemaDelta.AddedColumns) == 0 { + return nil } - log.WithFields(log.Fields{ - "flowName": flowJobName, - "tableName": schemaDelta.SrcTableName, - }).Infof("[schema delta replay] dropped column %s", droppedColumn) - } - for _, addedColumn := range schemaDelta.AddedColumns { - _, err = tableSchemaModifyTx.Exec(c.ctx, fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s", - schemaDelta.DstTableName, addedColumn.ColumnName, - qValueKindToPostgresType(addedColumn.ColumnType))) - if err != nil { - return fmt.Errorf("failed to add column %s for table %s: %w", addedColumn.ColumnName, - schemaDelta.SrcTableName, err) + + for _, 
addedColumn := range schemaDelta.AddedColumns { + _, err = tableSchemaModifyTx.Exec(c.ctx, fmt.Sprintf("ALTER TABLE %s ADD COLUMN \"%s\" %s", + schemaDelta.DstTableName, addedColumn.ColumnName, + qValueKindToPostgresType(addedColumn.ColumnType))) + if err != nil { + return fmt.Errorf("failed to add column %s for table %s: %w", addedColumn.ColumnName, + schemaDelta.DstTableName, err) + } + log.WithFields(log.Fields{ + "flowName": flowJobName, + "srcTableName": schemaDelta.SrcTableName, + "dstTableName": schemaDelta.DstTableName, + }).Infof("[schema delta replay] added column %s with data type %s", + addedColumn.ColumnName, addedColumn.ColumnType) } - log.WithFields(log.Fields{ - "flowName": flowJobName, - "tableName": schemaDelta.SrcTableName, - }).Infof("[schema delta replay] added column %s with data type %s", - addedColumn.ColumnName, addedColumn.ColumnType) } err = tableSchemaModifyTx.Commit(c.ctx) if err != nil { - return fmt.Errorf("failed to commit transaction for table schema modification for table %s: %w", - schemaDelta.SrcTableName, err) + return fmt.Errorf("failed to commit transaction for table schema modification: %w", + err) } return nil @@ -727,7 +726,7 @@ func (c *PostgresConnector) EnsurePullability(req *protos.EnsurePullabilityBatch tableIdentifierMapping := make(map[string]*protos.TableIdentifier) for _, tableName := range req.SourceTableIdentifiers { - schemaTable, err := parseSchemaTable(tableName) + schemaTable, err := utils.ParseSchemaTable(tableName) if err != nil { return nil, fmt.Errorf("error parsing schema and table: %w", err) } @@ -744,7 +743,7 @@ func (c *PostgresConnector) EnsurePullability(req *protos.EnsurePullabilityBatch RelId: relID}, }, } - c.recordHeartbeatWithRecover(fmt.Sprintf("ensured pullability table %s", tableName)) + utils.RecordHeartbeatWithRecover(c.ctx, fmt.Sprintf("ensured pullability table %s", tableName)) } return &protos.EnsurePullabilityBatchOutput{TableIdentifierMapping: tableIdentifierMapping}, nil @@ -856,27 
+855,18 @@ func (c *PostgresConnector) SyncFlowCleanup(jobName string) error { return nil } -// parseSchemaTable parses a table name into schema and table name. -func parseSchemaTable(tableName string) (*SchemaTable, error) { - parts := strings.Split(tableName, ".") - if len(parts) != 2 { - return nil, fmt.Errorf("invalid table name: %s", tableName) +func (c *PostgresConnector) SendWALHeartbeat() error { + command := ` + BEGIN; + DROP aggregate IF EXISTS PEERDB_EPHEMERAL_HEARTBEAT(float4); + CREATE AGGREGATE PEERDB_EPHEMERAL_HEARTBEAT(float4) (SFUNC = float4pl, STYPE = float4); + DROP aggregate PEERDB_EPHEMERAL_HEARTBEAT(float4); + END; + ` + _, err := c.pool.Exec(c.ctx, command) + if err != nil { + return fmt.Errorf("error bumping wal position: %w", err) } - return &SchemaTable{ - Schema: parts[0], - Table: parts[1], - }, nil -} - -// if the functions are being called outside the context of a Temporal workflow, -// activity.RecordHeartbeat panics, this is a bandaid for that. -func (c *PostgresConnector) recordHeartbeatWithRecover(details ...interface{}) { - defer func() { - if r := recover(); r != nil { - log.Warnln("ignoring panic from activity.RecordHeartbeat") - log.Warnln("this can happen when function is invoked outside of a Temporal workflow") - } - }() - activity.RecordHeartbeat(c.ctx, details...) 
+ return nil } diff --git a/flow/connectors/postgres/postgres_cdc_test.go b/flow/connectors/postgres/postgres_cdc_test.go deleted file mode 100644 index ddc9dc3d8..000000000 --- a/flow/connectors/postgres/postgres_cdc_test.go +++ /dev/null @@ -1,831 +0,0 @@ -package connpostgres - -import ( - "context" - "fmt" - "math/rand" - "testing" - "time" - - "github.com/PeerDB-io/peer-flow/generated/protos" - "github.com/PeerDB-io/peer-flow/model" - "github.com/PeerDB-io/peer-flow/model/qvalue" - "github.com/jackc/pgx/v5" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" -) - -type PostgresCDCTestSuite struct { - suite.Suite - connector *PostgresConnector -} - -func (suite *PostgresCDCTestSuite) failTestError(err error) { - if err != nil { - suite.FailNow(err.Error()) - } -} - -func (suite *PostgresCDCTestSuite) dropTable(tableName string) { - _, err := suite.connector.pool.Exec(context.Background(), fmt.Sprintf("DROP TABLE IF EXISTS %s", tableName)) - suite.failTestError(err) -} - -func (suite *PostgresCDCTestSuite) insertSimpleRecords(srcTableName string) { - _, err := suite.connector.pool.Exec(context.Background(), - fmt.Sprintf("INSERT INTO %s(id, name) VALUES (2, 'quick'), (4, 'brown'), (8, 'fox')", srcTableName)) - suite.failTestError(err) -} - -func (suite *PostgresCDCTestSuite) validateInsertedSimpleRecords(records []model.Record, srcTableName string, - dstTableName string) { - suite.Equal(3, len(records)) - model.NewRecordItemWithData([]string{"id", "name"}, - []*qvalue.QValue{ - {Kind: qvalue.QValueKindInt32, Value: int32(2)}, - {Kind: qvalue.QValueKindString, Value: "quick"}}) - matchData := []*model.RecordItems{ - model.NewRecordItemWithData([]string{"id", "name"}, - []*qvalue.QValue{ - {Kind: qvalue.QValueKindInt32, Value: int32(2)}, - {Kind: qvalue.QValueKindString, Value: "quick"}}), - model.NewRecordItemWithData([]string{"id", "name"}, - []*qvalue.QValue{ - {Kind: qvalue.QValueKindInt32, Value: int32(4)}, - {Kind: 
qvalue.QValueKindString, Value: "brown"}}), - model.NewRecordItemWithData([]string{"id", "name"}, - []*qvalue.QValue{ - {Kind: qvalue.QValueKindInt32, Value: int32(8)}, - {Kind: qvalue.QValueKindString, Value: "fox"}}), - } - for idx, record := range records { - suite.IsType(&model.InsertRecord{}, record) - insertRecord := record.(*model.InsertRecord) - suite.Equal(srcTableName, insertRecord.SourceTableName) - suite.Equal(dstTableName, insertRecord.DestinationTableName) - suite.Equal(matchData[idx], insertRecord.Items) - } -} - -func (suite *PostgresCDCTestSuite) mutateSimpleRecords(srcTableName string) { - mutateRecordsTx, err := suite.connector.pool.Begin(context.Background()) - suite.failTestError(err) - defer func() { - err := mutateRecordsTx.Rollback(context.Background()) - if err != pgx.ErrTxClosed { - suite.failTestError(err) - } - }() - - _, err = mutateRecordsTx.Exec(context.Background(), - fmt.Sprintf("UPDATE %s SET name = 'slow' WHERE id = 2", srcTableName)) - suite.failTestError(err) - _, err = mutateRecordsTx.Exec(context.Background(), fmt.Sprintf("DELETE FROM %s WHERE id = 8", srcTableName)) - suite.failTestError(err) - err = mutateRecordsTx.Commit(context.Background()) - suite.failTestError(err) -} - -func (suite *PostgresCDCTestSuite) validateSimpleMutatedRecords(records []model.Record, srcTableName string, - dstTableName string) { - suite.Equal(2, len(records)) - - suite.IsType(&model.UpdateRecord{}, records[0]) - updateRecord := records[0].(*model.UpdateRecord) - suite.Equal(srcTableName, updateRecord.SourceTableName) - suite.Equal(dstTableName, updateRecord.DestinationTableName) - suite.Equal(model.NewRecordItemWithData([]string{}, []*qvalue.QValue{}), updateRecord.OldItems) - - items := model.NewRecordItemWithData([]string{"id", "name"}, - []*qvalue.QValue{ - {Kind: qvalue.QValueKindInt32, Value: int32(2)}, - {Kind: qvalue.QValueKindString, Value: "slow"}}) - suite.Equal(items, updateRecord.NewItems) - - suite.IsType(&model.DeleteRecord{}, 
records[1]) - deleteRecord := records[1].(*model.DeleteRecord) - suite.Equal(srcTableName, deleteRecord.SourceTableName) - suite.Equal(dstTableName, deleteRecord.DestinationTableName) - items = model.NewRecordItemWithData([]string{"id", "name"}, - []*qvalue.QValue{ - {Kind: qvalue.QValueKindInt32, Value: int32(8)}, - {Kind: qvalue.QValueKindInvalid, Value: nil}}) - suite.Equal(items, deleteRecord.Items) -} - -func (suite *PostgresCDCTestSuite) randBytea(n int) []byte { - b := make([]byte, n) - //nolint:gosec - _, err := rand.Read(b) - suite.failTestError(err) - return b -} - -func (suite *PostgresCDCTestSuite) randString(n int) string { - const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" - - b := make([]byte, n) - for i := range b { - //nolint:gosec - b[i] = letterBytes[rand.Intn(len(letterBytes))] - } - return string(b) -} - -func (suite *PostgresCDCTestSuite) insertToastRecords(srcTableName string) { - insertRecordsTx, err := suite.connector.pool.Begin(context.Background()) - suite.failTestError(err) - defer func() { - err := insertRecordsTx.Rollback(context.Background()) - if err != pgx.ErrTxClosed { - suite.failTestError(err) - } - }() - - for i := 0; i < 4; i++ { - _, err := insertRecordsTx.Exec(context.Background(), - fmt.Sprintf("INSERT INTO %s(n_t, lz4_t, n_b, lz4_b) VALUES ($1, $2, $3, $4)", srcTableName), - suite.randString(32768), suite.randString(32768), suite.randBytea(32768), suite.randBytea(32768)) - suite.failTestError(err) - } - - err = insertRecordsTx.Commit(context.Background()) - suite.failTestError(err) -} - -func (suite *PostgresCDCTestSuite) validateInsertedToastRecords(records []model.Record, srcTableName string, - dstTableName string) { - suite.Equal(4, len(records)) - for idx, record := range records { - suite.IsType(&model.InsertRecord{}, record) - insertRecord := record.(*model.InsertRecord) - suite.Equal(srcTableName, insertRecord.SourceTableName) - suite.Equal(dstTableName, insertRecord.DestinationTableName) - 
suite.Equal(5, insertRecord.Items.Len()) - - idVal, err := insertRecord.Items.GetValueByColName("id") - suite.NoError(err, "Error fetching id") - - n_tVal, err := insertRecord.Items.GetValueByColName("n_t") - suite.NoError(err, "Error fetching n_t") - - lz4_tVal, err := insertRecord.Items.GetValueByColName("lz4_t") - suite.NoError(err, "Error fetching lz4_t") - - n_bVal, err := insertRecord.Items.GetValueByColName("n_b") - suite.NoError(err, "Error fetching n_b") - - lz4_bVal, err := insertRecord.Items.GetValueByColName("lz4_b") - suite.NoError(err, "Error fetching lz4_b") - - // Perform the actual value checks - suite.Equal(int32(idx+1), idVal.Value.(int32)) - suite.Equal(32768, len(n_tVal.Value.(string))) - suite.Equal(32768, len(lz4_tVal.Value.(string))) - suite.Equal(32768, len(n_bVal.Value.([]byte))) - suite.Equal(32768, len(lz4_bVal.Value.([]byte))) - } -} - -func (suite *PostgresCDCTestSuite) mutateToastRecords(srcTableName string) { - mutateRecordsTx, err := suite.connector.pool.Begin(context.Background()) - suite.failTestError(err) - defer func() { - err := mutateRecordsTx.Rollback(context.Background()) - if err != pgx.ErrTxClosed { - suite.failTestError(err) - } - }() - - _, err = mutateRecordsTx.Exec(context.Background(), fmt.Sprintf("UPDATE %s SET n_t = $1 WHERE id = 1", - srcTableName), - suite.randString(65536)) - suite.failTestError(err) - _, err = mutateRecordsTx.Exec(context.Background(), - fmt.Sprintf("UPDATE %s SET lz4_t = $1, n_b = $2, lz4_b = $3 WHERE id = 3", srcTableName), - suite.randString(65536), suite.randBytea(65536), suite.randBytea(65536)) - suite.failTestError(err) - _, err = mutateRecordsTx.Exec(context.Background(), - fmt.Sprintf("UPDATE %s SET n_t = $1, lz4_t = $2, n_b = $3, lz4_b = $4 WHERE id = 4", srcTableName), - suite.randString(65536), suite.randString(65536), suite.randBytea(65536), suite.randBytea(65536)) - suite.failTestError(err) - _, err = mutateRecordsTx.Exec(context.Background(), - fmt.Sprintf("DELETE FROM %s WHERE id 
= 3", srcTableName)) - suite.failTestError(err) - - err = mutateRecordsTx.Commit(context.Background()) - suite.failTestError(err) -} - -func (suite *PostgresCDCTestSuite) validateMutatedToastRecords(records []model.Record, srcTableName string, - dstTableName string) { - suite.Equal(4, len(records)) - - suite.IsType(&model.UpdateRecord{}, records[0]) - updateRecord := records[0].(*model.UpdateRecord) - suite.Equal(srcTableName, updateRecord.SourceTableName) - suite.Equal(dstTableName, updateRecord.DestinationTableName) - items := updateRecord.NewItems - suite.Equal(2, items.Len()) - v, err := items.GetValueByColName("id") - suite.NoError(err, "Error fetching id") - suite.Equal(int32(1), v.Value.(int32)) - v, err = items.GetValueByColName("n_t") - suite.NoError(err, "Error fetching n_t") - suite.Equal(qvalue.QValueKindString, v.Kind) - suite.Equal(65536, len(v.Value.(string))) - suite.Equal(3, len(updateRecord.UnchangedToastColumns)) - suite.True(updateRecord.UnchangedToastColumns["lz4_t"]) - suite.True(updateRecord.UnchangedToastColumns["n_b"]) - suite.True(updateRecord.UnchangedToastColumns["lz4_b"]) - suite.IsType(&model.UpdateRecord{}, records[1]) - updateRecord = records[1].(*model.UpdateRecord) - suite.Equal(srcTableName, updateRecord.SourceTableName) - suite.Equal(dstTableName, updateRecord.DestinationTableName) - - items = updateRecord.NewItems - suite.Equal(4, items.Len()) - v = items.GetColumnValue("id") - suite.Equal(qvalue.QValueKindInt32, v.Kind) - suite.Equal(int32(3), v.Value.(int32)) - v = items.GetColumnValue("lz4_t") - suite.Equal(qvalue.QValueKindString, v.Kind) - suite.Equal(65536, len(v.Value.(string))) - v = items.GetColumnValue("n_b") - suite.Equal(qvalue.QValueKindBytes, v.Kind) - suite.Equal(65536, len(v.Value.([]byte))) - v = items.GetColumnValue("lz4_b") - suite.Equal(qvalue.QValueKindBytes, v.Kind) - suite.Equal(65536, len(v.Value.([]byte))) - suite.Equal(1, len(updateRecord.UnchangedToastColumns)) - 
suite.True(updateRecord.UnchangedToastColumns["n_t"]) - // Test case for records[2] - suite.IsType(&model.UpdateRecord{}, records[2]) - updateRecord = records[2].(*model.UpdateRecord) - suite.Equal(srcTableName, updateRecord.SourceTableName) - suite.Equal(dstTableName, updateRecord.DestinationTableName) - - items = updateRecord.NewItems - suite.Equal(5, items.Len()) - v = items.GetColumnValue("id") - suite.Equal(int32(4), v.Value.(int32)) - suite.Equal(qvalue.QValueKindString, items.GetColumnValue("n_t").Kind) - suite.Equal(65536, len(items.GetColumnValue("n_t").Value.(string))) - suite.Equal(qvalue.QValueKindString, items.GetColumnValue("lz4_t").Kind) - suite.Equal(65536, len(items.GetColumnValue("lz4_t").Value.(string))) - suite.Equal(qvalue.QValueKindBytes, items.GetColumnValue("n_b").Kind) - suite.Equal(65536, len(items.GetColumnValue("n_b").Value.([]byte))) - suite.Equal(qvalue.QValueKindBytes, items.GetColumnValue("lz4_b").Kind) - suite.Equal(65536, len(items.GetColumnValue("lz4_b").Value.([]byte))) - suite.Equal(0, len(updateRecord.UnchangedToastColumns)) - - // Test case for records[3] - suite.IsType(&model.DeleteRecord{}, records[3]) - deleteRecord := records[3].(*model.DeleteRecord) - suite.Equal(srcTableName, deleteRecord.SourceTableName) - suite.Equal(dstTableName, deleteRecord.DestinationTableName) - items = deleteRecord.Items - suite.Equal(5, items.Len()) - suite.Equal(int32(3), items.GetColumnValue("id").Value.(int32)) - suite.Equal(qvalue.QValueKindInvalid, items.GetColumnValue("n_t").Kind) - suite.Nil(items.GetColumnValue("n_t").Value) - suite.Equal(qvalue.QValueKindInvalid, items.GetColumnValue("lz4_t").Kind) - suite.Nil(items.GetColumnValue("lz4_t").Value) - suite.Equal(qvalue.QValueKindInvalid, items.GetColumnValue("n_b").Kind) - suite.Nil(items.GetColumnValue("n_b").Value) - suite.Equal(qvalue.QValueKindInvalid, items.GetColumnValue("lz4_b").Kind) - suite.Nil(items.GetColumnValue("lz4_b").Value) -} - -func (suite *PostgresCDCTestSuite) 
SetupSuite() { - rand.Seed(time.Now().UnixNano()) - - var err error - suite.connector, err = NewPostgresConnector(context.Background(), &protos.PostgresConfig{ - Host: "localhost", - Port: 7132, - User: "postgres", - Password: "postgres", - Database: "postgres", - }) - suite.failTestError(err) - - setupTx, err := suite.connector.pool.Begin(context.Background()) - suite.failTestError(err) - defer func() { - err := setupTx.Rollback(context.Background()) - if err != pgx.ErrTxClosed { - suite.failTestError(err) - } - }() - _, err = setupTx.Exec(context.Background(), "DROP SCHEMA IF EXISTS pgpeer_test CASCADE") - suite.failTestError(err) - _, err = setupTx.Exec(context.Background(), "CREATE SCHEMA pgpeer_test") - suite.failTestError(err) - err = setupTx.Commit(context.Background()) - suite.failTestError(err) -} - -func (suite *PostgresCDCTestSuite) TearDownSuite() { - teardownTx, err := suite.connector.pool.Begin(context.Background()) - suite.failTestError(err) - defer func() { - err := teardownTx.Rollback(context.Background()) - if err != pgx.ErrTxClosed { - suite.failTestError(err) - } - }() - _, err = teardownTx.Exec(context.Background(), "DROP SCHEMA IF EXISTS pgpeer_test CASCADE") - suite.failTestError(err) - err = teardownTx.Commit(context.Background()) - suite.failTestError(err) - - suite.True(suite.connector.ConnectionActive()) - err = suite.connector.Close() - suite.failTestError(err) - suite.False(suite.connector.ConnectionActive()) -} - -func (suite *PostgresCDCTestSuite) TestParseSchemaTable() { - schemaTest1, err := parseSchemaTable("schema") - suite.Nil(schemaTest1) - suite.NotNil(err) - - schemaTest2, err := parseSchemaTable("schema.table") - suite.Equal(&SchemaTable{ - Schema: "schema", - Table: "table", - }, schemaTest2) - suite.Equal("\"schema\".\"table\"", schemaTest2.String()) - suite.Nil(err) - - schemaTest3, err := parseSchemaTable("database.schema.table") - suite.Nil(schemaTest3) - suite.NotNil(err) -} - -func (suite *PostgresCDCTestSuite) 
TestErrorForInvalidConfig() { - connector, err := NewPostgresConnector(context.Background(), &protos.PostgresConfig{ - Host: "fakehost", - Port: 0, - User: "fakeuser", - Password: "fakepassword", - Database: "fakedatabase", - }) - suite.Nil(connector) - suite.NotNil(err) -} - -// intended to test how activities react to a table that does not exist. -func (suite *PostgresCDCTestSuite) TestErrorForTableNotExist() { - nonExistentFlowName := "non_existent_table_testing" - nonExistentFlowSrcTableName := "pgpeer_test.non_existent_table" - nonExistentFlowDstTableName := "non_existent_table_dst" - - ensurePullabilityOutput, err := suite.connector.EnsurePullability(&protos.EnsurePullabilityBatchInput{ - FlowJobName: nonExistentFlowName, - SourceTableIdentifiers: []string{nonExistentFlowSrcTableName}, - PeerConnectionConfig: nil, // not used by the connector itself. - }) - suite.Nil(ensurePullabilityOutput) - suite.Errorf(err, "error getting relation ID for table %s: no rows in result set", nonExistentFlowSrcTableName) - - tableNameMapping := map[string]string{ - nonExistentFlowSrcTableName: nonExistentFlowDstTableName, - } - relationMessageMapping := make(model.RelationMessageMapping) - - getTblSchemaInput := &protos.GetTableSchemaBatchInput{ - TableIdentifiers: []string{nonExistentFlowSrcTableName}, - PeerConnectionConfig: nil, - } - - tableSchema, err := suite.connector.GetTableSchema(getTblSchemaInput) - suite.Errorf(err, "error getting relation ID for table %s: no rows in result set", nonExistentFlowSrcTableName) - suite.Nil(tableSchema) - tableNameSchemaMapping := make(map[string]*protos.TableSchema) - tableNameSchemaMapping[nonExistentFlowDstTableName] = &protos.TableSchema{ - TableIdentifier: nonExistentFlowSrcTableName, - Columns: map[string]string{ - "id": string(qvalue.QValueKindInt32), - "name": string(qvalue.QValueKindString), - }, - PrimaryKeyColumn: "id", - } - - err = suite.connector.PullFlowCleanup(nonExistentFlowName) - suite.Nil(err) - - // creating table 
and the replication slots for it, and dropping before pull records. - _, err = suite.connector.pool.Exec(context.Background(), - fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(id INT PRIMARY KEY, name TEXT)", nonExistentFlowSrcTableName)) - suite.failTestError(err) - ensurePullabilityOutput, err = suite.connector.EnsurePullability(&protos.EnsurePullabilityBatchInput{ - FlowJobName: nonExistentFlowName, - SourceTableIdentifiers: []string{nonExistentFlowSrcTableName}, - PeerConnectionConfig: nil, // not used by the connector itself. - }) - suite.failTestError(err) - tableRelID := ensurePullabilityOutput.TableIdentifierMapping[nonExistentFlowSrcTableName]. - GetPostgresTableIdentifier().RelId - relIDTableNameMapping := map[uint32]string{ - tableRelID: nonExistentFlowSrcTableName, - } - err = suite.connector.SetupReplication(nil, &protos.SetupReplicationInput{ - FlowJobName: nonExistentFlowName, - TableNameMapping: tableNameMapping, - PeerConnectionConfig: nil, // not used by the connector itself. 
- }) - suite.failTestError(err) - suite.dropTable(nonExistentFlowSrcTableName) - recordsWithSchemaDelta, err := suite.connector.PullRecords(&model.PullRecordsRequest{ - FlowJobName: nonExistentFlowName, - LastSyncState: nil, - IdleTimeout: 5 * time.Second, - MaxBatchSize: 100, - SrcTableIDNameMapping: relIDTableNameMapping, - TableNameMapping: tableNameMapping, - TableNameSchemaMapping: tableNameSchemaMapping, - RelationMessageMapping: relationMessageMapping, - }) - suite.Equal(0, len(recordsWithSchemaDelta.RecordBatch.Records)) - suite.Nil(recordsWithSchemaDelta.TableSchemaDelta) - suite.Nil(err) - - err = suite.connector.PullFlowCleanup(nonExistentFlowName) - suite.failTestError(err) -} - -func (suite *PostgresCDCTestSuite) TestSimpleHappyFlow() { - simpleHappyFlowName := "simple_happy_flow_testing_flow" - simpleHappyFlowSrcTableName := "pgpeer_test.simple_table" - simpleHappyFlowDstTableName := "simple_table_dst" - - _, err := suite.connector.pool.Exec(context.Background(), - fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(id INT PRIMARY KEY, name TEXT)", simpleHappyFlowSrcTableName)) - suite.failTestError(err) - - ensurePullabilityOutput, err := suite.connector.EnsurePullability(&protos.EnsurePullabilityBatchInput{ - FlowJobName: simpleHappyFlowName, - SourceTableIdentifiers: []string{simpleHappyFlowSrcTableName}, - PeerConnectionConfig: nil, // not used by the connector itself. - }) - suite.failTestError(err) - tableRelID := ensurePullabilityOutput.TableIdentifierMapping[simpleHappyFlowSrcTableName]. 
- GetPostgresTableIdentifier().RelId - - relIDTableNameMapping := map[uint32]string{ - tableRelID: simpleHappyFlowSrcTableName, - } - tableNameMapping := map[string]string{ - simpleHappyFlowSrcTableName: simpleHappyFlowDstTableName, - } - relationMessageMapping := make(model.RelationMessageMapping) - - err = suite.connector.SetupReplication(nil, &protos.SetupReplicationInput{ - FlowJobName: simpleHappyFlowName, - TableNameMapping: tableNameMapping, - PeerConnectionConfig: nil, // not used by the connector itself. - }) - suite.failTestError(err) - - tableNameSchemaMapping := make(map[string]*protos.TableSchema) - - getTblSchemaInput := &protos.GetTableSchemaBatchInput{ - TableIdentifiers: []string{simpleHappyFlowSrcTableName}, - PeerConnectionConfig: nil, - } - tableNameSchema, err := suite.connector.GetTableSchema(getTblSchemaInput) - suite.failTestError(err) - suite.Equal(&protos.GetTableSchemaBatchOutput{ - TableNameSchemaMapping: map[string]*protos.TableSchema{ - simpleHappyFlowSrcTableName: { - TableIdentifier: simpleHappyFlowSrcTableName, - Columns: map[string]string{ - "id": string(qvalue.QValueKindInt32), - "name": string(qvalue.QValueKindString), - }, - PrimaryKeyColumn: "id", - }, - }}, tableNameSchema) - tableNameSchemaMapping[simpleHappyFlowDstTableName] = - tableNameSchema.TableNameSchemaMapping[simpleHappyFlowSrcTableName] - - // pulling with no recordsWithSchemaDelta. 
- recordsWithSchemaDelta, err := suite.connector.PullRecords(&model.PullRecordsRequest{ - FlowJobName: simpleHappyFlowName, - LastSyncState: nil, - IdleTimeout: 5 * time.Second, - MaxBatchSize: 100, - SrcTableIDNameMapping: relIDTableNameMapping, - TableNameMapping: tableNameMapping, - TableNameSchemaMapping: tableNameSchemaMapping, - RelationMessageMapping: relationMessageMapping, - }) - suite.failTestError(err) - suite.Equal(0, len(recordsWithSchemaDelta.RecordBatch.Records)) - suite.Nil(recordsWithSchemaDelta.TableSchemaDelta) - suite.Equal(int64(0), recordsWithSchemaDelta.RecordBatch.FirstCheckPointID) - suite.Equal(int64(0), recordsWithSchemaDelta.RecordBatch.LastCheckPointID) - relationMessageMapping = recordsWithSchemaDelta.RelationMessageMapping - - // pulling after inserting records. - suite.insertSimpleRecords(simpleHappyFlowSrcTableName) - recordsWithSchemaDelta, err = suite.connector.PullRecords(&model.PullRecordsRequest{ - FlowJobName: simpleHappyFlowName, - LastSyncState: nil, - IdleTimeout: 5 * time.Second, - MaxBatchSize: 100, - SrcTableIDNameMapping: relIDTableNameMapping, - TableNameMapping: tableNameMapping, - TableNameSchemaMapping: tableNameSchemaMapping, - RelationMessageMapping: relationMessageMapping, - }) - suite.failTestError(err) - suite.Nil(recordsWithSchemaDelta.TableSchemaDelta) - suite.validateInsertedSimpleRecords(recordsWithSchemaDelta.RecordBatch.Records, - simpleHappyFlowSrcTableName, simpleHappyFlowDstTableName) - suite.Greater(recordsWithSchemaDelta.RecordBatch.FirstCheckPointID, int64(0)) - suite.GreaterOrEqual(recordsWithSchemaDelta.RecordBatch.LastCheckPointID, - recordsWithSchemaDelta.RecordBatch.FirstCheckPointID) - currentCheckPointID := recordsWithSchemaDelta.RecordBatch.LastCheckPointID - relationMessageMapping = recordsWithSchemaDelta.RelationMessageMapping - - // pulling after mutating records. 
- suite.mutateSimpleRecords(simpleHappyFlowSrcTableName) - recordsWithSchemaDelta, err = suite.connector.PullRecords(&model.PullRecordsRequest{ - FlowJobName: simpleHappyFlowName, - LastSyncState: &protos.LastSyncState{ - Checkpoint: recordsWithSchemaDelta.RecordBatch.LastCheckPointID, - LastSyncedAt: nil, - }, - IdleTimeout: 5 * time.Second, - MaxBatchSize: 100, - SrcTableIDNameMapping: relIDTableNameMapping, - TableNameMapping: tableNameMapping, - TableNameSchemaMapping: tableNameSchemaMapping, - RelationMessageMapping: relationMessageMapping, - }) - suite.failTestError(err) - suite.Nil(recordsWithSchemaDelta.TableSchemaDelta) - suite.validateSimpleMutatedRecords(recordsWithSchemaDelta.RecordBatch.Records, - simpleHappyFlowSrcTableName, simpleHappyFlowDstTableName) - suite.GreaterOrEqual(recordsWithSchemaDelta.RecordBatch.FirstCheckPointID, currentCheckPointID) - suite.GreaterOrEqual(recordsWithSchemaDelta.RecordBatch.LastCheckPointID, - recordsWithSchemaDelta.RecordBatch.FirstCheckPointID) - - err = suite.connector.PullFlowCleanup(simpleHappyFlowName) - suite.failTestError(err) - - suite.dropTable(simpleHappyFlowSrcTableName) -} - -func (suite *PostgresCDCTestSuite) TestAllTypesHappyFlow() { - allTypesHappyFlowName := "all_types_happy_flow_testing" - allTypesHappyFlowSrcTableName := "pgpeer_test.all_types_table" - allTypesHappyFlowDstTableName := "all_types_table_dst" - - _, err := suite.connector.pool.Exec(context.Background(), - fmt.Sprintf(`CREATE TABLE IF NOT EXISTS %s(id BIGINT PRIMARY KEY, - c1 BIGINT, c2 BIT, c3 VARBIT, c4 BOOLEAN, c6 BYTEA, c7 CHARACTER, c8 VARCHAR, - c9 CIDR, c11 DATE, c12 FLOAT, c13 DOUBLE PRECISION, c14 INET, c15 INTEGER, - c16 INTERVAL, c17 JSON, c18 JSONB, c21 MACADDR, c22 MONEY, c23 DECIMAL, c24 OID, c28 REAL, - c29 SMALLINT, c30 SMALLSERIAL, c31 SERIAL, c32 TEXT, c33 TIMESTAMP, c34 TIMESTAMPTZ, - c35 TIME, c36 TIMETZ, c37 TSQUERY, c38 TSVECTOR, c39 TXID_SNAPSHOT, c40 UUID, c41 XML)`, - allTypesHappyFlowSrcTableName)) - 
suite.failTestError(err) - - ensurePullabilityOutput, err := suite.connector.EnsurePullability(&protos.EnsurePullabilityBatchInput{ - FlowJobName: allTypesHappyFlowName, - SourceTableIdentifiers: []string{allTypesHappyFlowSrcTableName}, - PeerConnectionConfig: nil, // not used by the connector itself. - }) - suite.failTestError(err) - tableRelID := ensurePullabilityOutput.TableIdentifierMapping[allTypesHappyFlowSrcTableName]. - GetPostgresTableIdentifier().RelId - relationMessageMapping := make(model.RelationMessageMapping) - - relIDTableNameMapping := map[uint32]string{ - tableRelID: allTypesHappyFlowSrcTableName, - } - tableNameMapping := map[string]string{ - allTypesHappyFlowSrcTableName: allTypesHappyFlowDstTableName, - } - err = suite.connector.SetupReplication(nil, &protos.SetupReplicationInput{ - FlowJobName: allTypesHappyFlowName, - TableNameMapping: tableNameMapping, - PeerConnectionConfig: nil, // not used by the connector itself. - }) - suite.failTestError(err) - - tableNameSchemaMapping := make(map[string]*protos.TableSchema) - getTblSchemaInput := &protos.GetTableSchemaBatchInput{ - TableIdentifiers: []string{allTypesHappyFlowSrcTableName}, - PeerConnectionConfig: nil, - } - tableNameSchema, err := suite.connector.GetTableSchema(getTblSchemaInput) - suite.failTestError(err) - suite.Equal(&protos.GetTableSchemaBatchOutput{ - TableNameSchemaMapping: map[string]*protos.TableSchema{ - allTypesHappyFlowSrcTableName: { - TableIdentifier: allTypesHappyFlowSrcTableName, - Columns: map[string]string{ - "id": string(qvalue.QValueKindInt64), - "c1": string(qvalue.QValueKindInt64), - "c2": string(qvalue.QValueKindBit), - "c3": string(qvalue.QValueKindBit), - "c4": string(qvalue.QValueKindBoolean), - "c6": string(qvalue.QValueKindBytes), - "c7": string(qvalue.QValueKindString), - "c8": string(qvalue.QValueKindString), - "c9": string(qvalue.QValueKindString), - "c11": string(qvalue.QValueKindDate), - "c12": string(qvalue.QValueKindFloat64), - "c13": 
string(qvalue.QValueKindFloat64), - "c14": string(qvalue.QValueKindString), - "c15": string(qvalue.QValueKindInt32), - "c16": string(qvalue.QValueKindString), - "c17": string(qvalue.QValueKindJSON), - "c18": string(qvalue.QValueKindJSON), - "c21": string(qvalue.QValueKindString), - "c22": string(qvalue.QValueKindString), - "c23": string(qvalue.QValueKindNumeric), - "c24": string(qvalue.QValueKindString), - "c28": string(qvalue.QValueKindFloat32), - "c29": string(qvalue.QValueKindInt16), - "c30": string(qvalue.QValueKindInt16), - "c31": string(qvalue.QValueKindInt32), - "c32": string(qvalue.QValueKindString), - "c33": string(qvalue.QValueKindTimestamp), - "c34": string(qvalue.QValueKindTimestampTZ), - "c35": string(qvalue.QValueKindTime), - "c36": string(qvalue.QValueKindTimeTZ), - "c37": string(qvalue.QValueKindString), - "c38": string(qvalue.QValueKindString), - "c39": string(qvalue.QValueKindString), - "c40": string(qvalue.QValueKindUUID), - "c41": string(qvalue.QValueKindString), - }, - PrimaryKeyColumn: "id", - }, - }, - }, tableNameSchema) - tableNameSchemaMapping[allTypesHappyFlowDstTableName] = - tableNameSchema.TableNameSchemaMapping[allTypesHappyFlowSrcTableName] - - _, err = suite.connector.pool.Exec(context.Background(), - fmt.Sprintf(`INSERT INTO %s SELECT 2, 2, b'1', b'101', - true, $1, 's', 'test', '1.1.10.2'::cidr, - CURRENT_DATE, 1.23, 1.234, '192.168.1.5'::inet, 1, - '5 years 2 months 29 days 1 minute 2 seconds 200 milliseconds 20000 microseconds'::interval, - '{"sai":1}'::json, '{"sai":1}'::jsonb, '08:00:2b:01:02:03'::macaddr, - 1.2, 1.23, 4::oid, 1.23, 1, 1, 1, 'test', now(), now(), now()::time, now()::timetz, - 'fat & rat'::tsquery, 'a fat cat sat on a mat and ate a fat rat'::tsvector, - txid_current_snapshot(), '66073c38-b8df-4bdb-bbca-1c97596b8940'::uuid, xmlcomment('hello')`, - allTypesHappyFlowSrcTableName), - suite.randBytea(32)) - suite.failTestError(err) - records, err := suite.connector.PullRecords(&model.PullRecordsRequest{ - 
FlowJobName: allTypesHappyFlowName, - LastSyncState: nil, - IdleTimeout: 5 * time.Second, - MaxBatchSize: 100, - SrcTableIDNameMapping: relIDTableNameMapping, - TableNameMapping: tableNameMapping, - TableNameSchemaMapping: tableNameSchemaMapping, - RelationMessageMapping: relationMessageMapping, - }) - suite.failTestError(err) - require.Equal(suite.T(), 1, len(records.RecordBatch.Records)) - - items := records.RecordBatch.Records[0].GetItems() - numCols := items.Len() - if numCols != 35 { - jsonStr, err := items.ToJSON() - suite.failTestError(err) - fmt.Printf("record batch json: %s\n", jsonStr) - suite.FailNow("expected 35 columns, got %d", numCols) - } - - err = suite.connector.PullFlowCleanup(allTypesHappyFlowName) - suite.failTestError(err) - - suite.dropTable(allTypesHappyFlowSrcTableName) -} - -func (suite *PostgresCDCTestSuite) TestToastHappyFlow() { - toastHappyFlowName := "toast_happy_flow_testing" - toastHappyFlowSrcTableName := "pgpeer_test.toast_table" - toastHappyFlowDstTableName := "toast_table_dst" - - _, err := suite.connector.pool.Exec(context.Background(), - fmt.Sprintf(`CREATE TABLE %s(id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, - n_t TEXT, lz4_t TEXT COMPRESSION LZ4, n_b BYTEA, lz4_b BYTEA COMPRESSION LZ4)`, toastHappyFlowSrcTableName)) - suite.failTestError(err) - - ensurePullabilityOutput, err := suite.connector.EnsurePullability(&protos.EnsurePullabilityBatchInput{ - FlowJobName: toastHappyFlowName, - SourceTableIdentifiers: []string{toastHappyFlowSrcTableName}, - PeerConnectionConfig: nil, // not used by the connector itself. - }) - suite.failTestError(err) - tableRelID := ensurePullabilityOutput.TableIdentifierMapping[toastHappyFlowSrcTableName]. 
- GetPostgresTableIdentifier().RelId - - relIDTableNameMapping := map[uint32]string{ - tableRelID: toastHappyFlowSrcTableName, - } - tableNameMapping := map[string]string{ - toastHappyFlowSrcTableName: toastHappyFlowDstTableName, - } - relationMessageMapping := make(model.RelationMessageMapping) - - err = suite.connector.SetupReplication(nil, &protos.SetupReplicationInput{ - FlowJobName: toastHappyFlowName, - TableNameMapping: tableNameMapping, - PeerConnectionConfig: nil, // not used by the connector itself. - }) - suite.failTestError(err) - - tableNameSchemaMapping := make(map[string]*protos.TableSchema) - getTblSchemaInput := &protos.GetTableSchemaBatchInput{ - TableIdentifiers: []string{toastHappyFlowSrcTableName}, - PeerConnectionConfig: nil, - } - tableNameSchema, err := suite.connector.GetTableSchema(getTblSchemaInput) - suite.failTestError(err) - suite.Equal(&protos.GetTableSchemaBatchOutput{ - TableNameSchemaMapping: map[string]*protos.TableSchema{ - toastHappyFlowSrcTableName: { - TableIdentifier: toastHappyFlowSrcTableName, - Columns: map[string]string{ - "id": string(qvalue.QValueKindInt32), - "n_t": string(qvalue.QValueKindString), - "lz4_t": string(qvalue.QValueKindString), - "n_b": string(qvalue.QValueKindBytes), - "lz4_b": string(qvalue.QValueKindBytes), - }, - PrimaryKeyColumn: "id", - }, - }}, tableNameSchema) - tableNameSchemaMapping[toastHappyFlowDstTableName] = - tableNameSchema.TableNameSchemaMapping[toastHappyFlowSrcTableName] - - suite.insertToastRecords(toastHappyFlowSrcTableName) - recordsWithSchemaDelta, err := suite.connector.PullRecords(&model.PullRecordsRequest{ - FlowJobName: toastHappyFlowName, - LastSyncState: nil, - IdleTimeout: 10 * time.Second, - MaxBatchSize: 100, - SrcTableIDNameMapping: relIDTableNameMapping, - TableNameMapping: tableNameMapping, - TableNameSchemaMapping: tableNameSchemaMapping, - RelationMessageMapping: relationMessageMapping, - }) - suite.failTestError(err) - recordsWithSchemaDelta, err = 
suite.connector.PullRecords(&model.PullRecordsRequest{ - FlowJobName: toastHappyFlowName, - LastSyncState: nil, - IdleTimeout: 10 * time.Second, - MaxBatchSize: 100, - SrcTableIDNameMapping: relIDTableNameMapping, - TableNameMapping: tableNameMapping, - TableNameSchemaMapping: tableNameSchemaMapping, - RelationMessageMapping: relationMessageMapping, - }) - suite.failTestError(err) - suite.Nil(recordsWithSchemaDelta.TableSchemaDelta) - suite.validateInsertedToastRecords(recordsWithSchemaDelta.RecordBatch.Records, - toastHappyFlowSrcTableName, toastHappyFlowDstTableName) - suite.Greater(recordsWithSchemaDelta.RecordBatch.FirstCheckPointID, int64(0)) - suite.GreaterOrEqual(recordsWithSchemaDelta.RecordBatch.LastCheckPointID, - recordsWithSchemaDelta.RecordBatch.FirstCheckPointID) - relationMessageMapping = recordsWithSchemaDelta.RelationMessageMapping - - suite.mutateToastRecords(toastHappyFlowSrcTableName) - recordsWithSchemaDelta, err = suite.connector.PullRecords(&model.PullRecordsRequest{ - FlowJobName: toastHappyFlowName, - LastSyncState: &protos.LastSyncState{ - Checkpoint: recordsWithSchemaDelta.RecordBatch.LastCheckPointID, - LastSyncedAt: nil, - }, - IdleTimeout: 10 * time.Second, - MaxBatchSize: 100, - SrcTableIDNameMapping: relIDTableNameMapping, - TableNameMapping: tableNameMapping, - TableNameSchemaMapping: tableNameSchemaMapping, - RelationMessageMapping: relationMessageMapping, - }) - suite.failTestError(err) - suite.validateMutatedToastRecords(recordsWithSchemaDelta.RecordBatch.Records, toastHappyFlowSrcTableName, - toastHappyFlowDstTableName) - - err = suite.connector.PullFlowCleanup(toastHappyFlowName) - suite.failTestError(err) - - suite.dropTable(toastHappyFlowSrcTableName) -} - -func TestPostgresTestSuite(t *testing.T) { - suite.Run(t, new(PostgresCDCTestSuite)) -} diff --git a/flow/connectors/postgres/postgres_schema_delta_test.go b/flow/connectors/postgres/postgres_schema_delta_test.go new file mode 100644 index 000000000..eec14c915 --- 
/dev/null +++ b/flow/connectors/postgres/postgres_schema_delta_test.go @@ -0,0 +1,248 @@ +package connpostgres + +import ( + "context" + "fmt" + "testing" + + "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/PeerDB-io/peer-flow/model/qvalue" + "github.com/jackc/pgx/v5" + "github.com/stretchr/testify/suite" +) + +type PostgresSchemaDeltaTestSuite struct { + suite.Suite + connector *PostgresConnector +} + +const schemaDeltaTestSchemaName = "pgschema_delta_test" + +func (suite *PostgresSchemaDeltaTestSuite) failTestError(err error) { + if err != nil { + suite.FailNow(err.Error()) + } +} + +func (suite *PostgresSchemaDeltaTestSuite) SetupSuite() { + var err error + suite.connector, err = NewPostgresConnector(context.Background(), &protos.PostgresConfig{ + Host: "localhost", + Port: 7132, + User: "postgres", + Password: "postgres", + Database: "postgres", + }) + suite.failTestError(err) + + setupTx, err := suite.connector.pool.Begin(context.Background()) + suite.failTestError(err) + defer func() { + err := setupTx.Rollback(context.Background()) + if err != pgx.ErrTxClosed { + suite.failTestError(err) + } + }() + _, err = setupTx.Exec(context.Background(), fmt.Sprintf("DROP SCHEMA IF EXISTS %s CASCADE", + schemaDeltaTestSchemaName)) + suite.failTestError(err) + _, err = setupTx.Exec(context.Background(), fmt.Sprintf("CREATE SCHEMA %s", schemaDeltaTestSchemaName)) + suite.failTestError(err) + err = setupTx.Commit(context.Background()) + suite.failTestError(err) +} + +func (suite *PostgresSchemaDeltaTestSuite) TearDownSuite() { + teardownTx, err := suite.connector.pool.Begin(context.Background()) + suite.failTestError(err) + defer func() { + err := teardownTx.Rollback(context.Background()) + if err != pgx.ErrTxClosed { + suite.failTestError(err) + } + }() + _, err = teardownTx.Exec(context.Background(), fmt.Sprintf("DROP SCHEMA IF EXISTS %s CASCADE", + schemaDeltaTestSchemaName)) + suite.failTestError(err) + err = teardownTx.Commit(context.Background()) + 
suite.failTestError(err) + + suite.True(suite.connector.ConnectionActive()) + err = suite.connector.Close() + suite.failTestError(err) + suite.False(suite.connector.ConnectionActive()) +} + +func (suite *PostgresSchemaDeltaTestSuite) TestSimpleAddColumn() { + tableName := fmt.Sprintf("%s.simple_add_column", schemaDeltaTestSchemaName) + _, err := suite.connector.pool.Exec(context.Background(), + fmt.Sprintf("CREATE TABLE %s(id INT PRIMARY KEY)", tableName)) + suite.failTestError(err) + + err = suite.connector.ReplayTableSchemaDeltas("schema_delta_flow", []*protos.TableSchemaDelta{{ + SrcTableName: tableName, + DstTableName: tableName, + AddedColumns: []*protos.DeltaAddedColumn{{ + ColumnName: "hi", + ColumnType: string(qvalue.QValueKindInt64), + }}, + }}) + suite.failTestError(err) + + output, err := suite.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{tableName}, + }) + suite.failTestError(err) + suite.Equal(&protos.TableSchema{ + TableIdentifier: tableName, + Columns: map[string]string{ + "id": string(qvalue.QValueKindInt32), + "hi": string(qvalue.QValueKindInt64), + }, + PrimaryKeyColumns: []string{"id"}, + }, output.TableNameSchemaMapping[tableName]) +} + +func (suite *PostgresSchemaDeltaTestSuite) TestAddAllColumnTypes() { + tableName := fmt.Sprintf("%s.add_drop_all_column_types", schemaDeltaTestSchemaName) + _, err := suite.connector.pool.Exec(context.Background(), + fmt.Sprintf("CREATE TABLE %s(id INT PRIMARY KEY)", tableName)) + suite.failTestError(err) + + expectedTableSchema := &protos.TableSchema{ + TableIdentifier: tableName, + // goal is to test all types we're currently mapping to, not all QValue types + Columns: map[string]string{ + "id": string(qvalue.QValueKindInt32), + "c1": string(qvalue.QValueKindBit), + "c2": string(qvalue.QValueKindBoolean), + "c3": string(qvalue.QValueKindBytes), + "c4": string(qvalue.QValueKindDate), + "c5": string(qvalue.QValueKindFloat32), + "c6": string(qvalue.QValueKindFloat64), + 
"c7": string(qvalue.QValueKindInt16), + "c8": string(qvalue.QValueKindInt32), + "c9": string(qvalue.QValueKindInt64), + "c10": string(qvalue.QValueKindJSON), + "c11": string(qvalue.QValueKindNumeric), + "c12": string(qvalue.QValueKindString), + "c13": string(qvalue.QValueKindTime), + "c14": string(qvalue.QValueKindTimestamp), + "c15": string(qvalue.QValueKindTimestampTZ), + "c16": string(qvalue.QValueKindUUID), + }, + PrimaryKeyColumns: []string{"id"}, + } + addedColumns := make([]*protos.DeltaAddedColumn, 0) + for columnName, columnType := range expectedTableSchema.Columns { + if columnName != "id" { + addedColumns = append(addedColumns, &protos.DeltaAddedColumn{ + ColumnName: columnName, + ColumnType: columnType, + }) + } + } + + err = suite.connector.ReplayTableSchemaDeltas("schema_delta_flow", []*protos.TableSchemaDelta{{ + SrcTableName: tableName, + DstTableName: tableName, + AddedColumns: addedColumns, + }}) + suite.failTestError(err) + + output, err := suite.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{tableName}, + }) + suite.failTestError(err) + suite.Equal(expectedTableSchema, output.TableNameSchemaMapping[tableName]) +} + +func (suite *PostgresSchemaDeltaTestSuite) TestAddTrickyColumnNames() { + tableName := fmt.Sprintf("%s.add_drop_tricky_column_names", schemaDeltaTestSchemaName) + _, err := suite.connector.pool.Exec(context.Background(), + fmt.Sprintf("CREATE TABLE %s(id INT PRIMARY KEY)", tableName)) + suite.failTestError(err) + + expectedTableSchema := &protos.TableSchema{ + TableIdentifier: tableName, + Columns: map[string]string{ + "id": string(qvalue.QValueKindInt32), + "c1": string(qvalue.QValueKindString), + "C1": string(qvalue.QValueKindString), + "C 1": string(qvalue.QValueKindString), + "right": string(qvalue.QValueKindString), + "select": string(qvalue.QValueKindString), + "XMIN": string(qvalue.QValueKindString), + "Cariño": string(qvalue.QValueKindString), + "±ªþ³§": 
string(qvalue.QValueKindString), + "カラム": string(qvalue.QValueKindString), + }, + PrimaryKeyColumns: []string{"id"}, + } + addedColumns := make([]*protos.DeltaAddedColumn, 0) + for columnName, columnType := range expectedTableSchema.Columns { + if columnName != "id" { + addedColumns = append(addedColumns, &protos.DeltaAddedColumn{ + ColumnName: columnName, + ColumnType: columnType, + }) + } + } + + err = suite.connector.ReplayTableSchemaDeltas("schema_delta_flow", []*protos.TableSchemaDelta{{ + SrcTableName: tableName, + DstTableName: tableName, + AddedColumns: addedColumns, + }}) + suite.failTestError(err) + + output, err := suite.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{tableName}, + }) + suite.failTestError(err) + suite.Equal(expectedTableSchema, output.TableNameSchemaMapping[tableName]) +} + +func (suite *PostgresSchemaDeltaTestSuite) TestAddDropWhitespaceColumnNames() { + tableName := fmt.Sprintf("%s.add_drop_whitespace_column_names", schemaDeltaTestSchemaName) + _, err := suite.connector.pool.Exec(context.Background(), + fmt.Sprintf("CREATE TABLE %s(\" \" INT PRIMARY KEY)", tableName)) + suite.failTestError(err) + + expectedTableSchema := &protos.TableSchema{ + TableIdentifier: tableName, + Columns: map[string]string{ + " ": string(qvalue.QValueKindInt32), + " ": string(qvalue.QValueKindString), + " ": string(qvalue.QValueKindInt64), + " ": string(qvalue.QValueKindDate), + }, + PrimaryKeyColumns: []string{" "}, + } + addedColumns := make([]*protos.DeltaAddedColumn, 0) + for columnName, columnType := range expectedTableSchema.Columns { + if columnName != " " { + addedColumns = append(addedColumns, &protos.DeltaAddedColumn{ + ColumnName: columnName, + ColumnType: columnType, + }) + } + } + + err = suite.connector.ReplayTableSchemaDeltas("schema_delta_flow", []*protos.TableSchemaDelta{{ + SrcTableName: tableName, + DstTableName: tableName, + AddedColumns: addedColumns, + }}) + suite.failTestError(err) + + output, 
err := suite.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{tableName}, + }) + suite.failTestError(err) + suite.Equal(expectedTableSchema, output.TableNameSchemaMapping[tableName]) +} + +func TestPostgresSchemaDeltaTestSuite(t *testing.T) { + suite.Run(t, new(PostgresSchemaDeltaTestSuite)) +} diff --git a/flow/connectors/postgres/qrep.go b/flow/connectors/postgres/qrep.go index 3c0d3124b..2287b8115 100644 --- a/flow/connectors/postgres/qrep.go +++ b/flow/connectors/postgres/qrep.go @@ -6,15 +6,14 @@ import ( "text/template" "time" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" - utils "github.com/PeerDB-io/peer-flow/connectors/utils/partition" + "github.com/PeerDB-io/peer-flow/connectors/utils" + partition_utils "github.com/PeerDB-io/peer-flow/connectors/utils/partition" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" "github.com/google/uuid" "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgtype" log "github.com/sirupsen/logrus" - "google.golang.org/protobuf/types/known/timestamppb" ) const qRepMetadataTableName = "_peerdb_query_replication_metadata" @@ -55,7 +54,7 @@ func (c *PostgresConnector) GetQRepPartitions( return nil, fmt.Errorf("failed to set transaction snapshot: %w", err) } - // TODO re-enable locing of the watermark table. + // TODO re-enable locking of the watermark table. // // lock the table while we get the partitions. 
// lockQuery := fmt.Sprintf("LOCK %s IN EXCLUSIVE MODE", config.WatermarkTable) // if _, err = tx.Exec(c.ctx, lockQuery); err != nil { @@ -63,36 +62,7 @@ func (c *PostgresConnector) GetQRepPartitions( // log.Warnf("failed to lock table %s: %v", config.WatermarkTable, err) // } - if config.NumRowsPerPartition > 0 { - return c.getNumRowsPartitions(tx, config, last) - } - - minValue, maxValue, err := c.getMinMaxValues(tx, config, last) - if err != nil { - return nil, err - } - - var partitions []*protos.QRepPartition - switch v := minValue.(type) { - case int64: - maxValue := maxValue.(int64) + 1 - partitions, err = c.getIntPartitions(v, maxValue, config.BatchSizeInt) - case time.Time: - maxValue := maxValue.(time.Time).Add(time.Microsecond) - partitions, err = c.getTimePartitions(v, maxValue, config.BatchDurationSeconds) - // only hit when there is no data in the source table - case nil: - log.Warnf("no records to replicate for flow job %s, returning", config.FlowJobName) - return make([]*protos.QRepPartition, 0), nil - default: - return nil, fmt.Errorf("unsupported type: %T", v) - } - - if err != nil { - return nil, err - } - - return partitions, nil + return c.getNumRowsPartitions(tx, config, last) } func (c *PostgresConnector) setTransactionSnapshot(tx pgx.Tx) error { @@ -116,6 +86,48 @@ func (c *PostgresConnector) getNumRowsPartitions( quotedWatermarkColumn := fmt.Sprintf("\"%s\"", config.WatermarkColumn) if config.WatermarkColumn == "xmin" { quotedWatermarkColumn = fmt.Sprintf("%s::text::bigint", quotedWatermarkColumn) + + minVal, maxVal, err := c.getMinMaxValues(tx, config, last) + if err != nil { + return nil, fmt.Errorf("failed to get min max values for xmin: %w", err) + } + + // we know these are int64s so we can just cast them + minValInt := minVal.(int64) + maxValInt := maxVal.(int64) + + // we will only return 1 partition for xmin: + // if there is no last partition, we will return a partition with the min and max values + // if there is a last partition, 
we will return a partition with the last partition's end value + 1 and the max value + if last != nil && last.Range != nil { + minValInt += 1 + } + + if minValInt > maxValInt { + // log and return an empty partition + log.WithFields(log.Fields{ + "flowName": config.FlowJobName, + }).Infof("xmin min value is greater than max value, returning empty partition") + return make([]*protos.QRepPartition, 0), nil + } + + log.WithFields(log.Fields{ + "flowName": config.FlowJobName, + }).Infof("single xmin partition range: %v - %v", minValInt, maxValInt) + + partition := &protos.QRepPartition{ + PartitionId: uuid.New().String(), + Range: &protos.PartitionRange{ + Range: &protos.PartitionRange_IntRange{ + IntRange: &protos.IntPartitionRange{ + Start: minValInt, + End: maxValInt, + }, + }, + }, + } + + return []*protos.QRepPartition{partition}, nil } whereClause := "" @@ -123,8 +135,13 @@ func (c *PostgresConnector) getNumRowsPartitions( whereClause = fmt.Sprintf(`WHERE %s > $1`, quotedWatermarkColumn) } + parsedWatermarkTable, err := utils.ParseSchemaTable(config.WatermarkTable) + if err != nil { + return nil, fmt.Errorf("unable to parse watermark table: %w", err) + } + // Query to get the total number of rows in the table - countQuery := fmt.Sprintf("SELECT COUNT(*) FROM %s %s", config.WatermarkTable, whereClause) + countQuery := fmt.Sprintf(`SELECT COUNT(*) FROM %s %s`, parsedWatermarkTable.String(), whereClause) var row pgx.Row var minVal interface{} = nil if last != nil && last.Range != nil { @@ -133,8 +150,6 @@ func (c *PostgresConnector) getNumRowsPartitions( minVal = lastRange.IntRange.End case *protos.PartitionRange_TimestampRange: minVal = lastRange.TimestampRange.End.AsTime() - case *protos.PartitionRange_XminRange: - minVal = lastRange.XminRange.End } row = tx.QueryRow(c.ctx, countQuery, minVal) @@ -174,7 +189,7 @@ func (c *PostgresConnector) getNumRowsPartitions( `, numPartitions, quotedWatermarkColumn, - config.WatermarkTable, + parsedWatermarkTable.String(), ) 
log.Infof("[row_based_next] partitions query: %s", partitionsQuery) rows, err = tx.Query(c.ctx, partitionsQuery, minVal) @@ -189,7 +204,7 @@ func (c *PostgresConnector) getNumRowsPartitions( `, numPartitions, quotedWatermarkColumn, - config.WatermarkTable, + parsedWatermarkTable.String(), ) log.Infof("[row_based] partitions query: %s", partitionsQuery) rows, err = tx.Query(c.ctx, partitionsQuery) @@ -201,7 +216,7 @@ func (c *PostgresConnector) getNumRowsPartitions( return nil, fmt.Errorf("failed to query for partitions: %w", err) } - partitionHelper := utils.NewPartitionHelper() + partitionHelper := partition_utils.NewPartitionHelper() for rows.Next() { var bucket int64 var start, end interface{} @@ -233,8 +248,14 @@ func (c *PostgresConnector) getMinMaxValues( if config.WatermarkColumn == "xmin" { quotedWatermarkColumn = fmt.Sprintf("%s::text::bigint", quotedWatermarkColumn) } + + parsedWatermarkTable, err := utils.ParseSchemaTable(config.WatermarkTable) + if err != nil { + return nil, nil, fmt.Errorf("unable to parse watermark table: %w", err) + } + // Get the maximum value from the database - maxQuery := fmt.Sprintf("SELECT MAX(%[1]s) FROM %[2]s", quotedWatermarkColumn, config.WatermarkTable) + maxQuery := fmt.Sprintf("SELECT MAX(%[1]s) FROM %[2]s", quotedWatermarkColumn, parsedWatermarkTable.String()) row := tx.QueryRow(c.ctx, maxQuery) if err := row.Scan(&maxValue); err != nil { return nil, nil, fmt.Errorf("failed to query for max value: %w", err) @@ -253,10 +274,16 @@ func (c *PostgresConnector) getMinMaxValues( } case *protos.PartitionRange_TimestampRange: minValue = lastRange.TimestampRange.End.AsTime() + case *protos.PartitionRange_TidRange: + minValue = lastRange.TidRange.End + maxValue = &protos.TID{ + BlockNumber: maxValue.(pgtype.TID).BlockNumber, + OffsetNumber: uint32(maxValue.(pgtype.TID).OffsetNumber), + } } } else { // Otherwise get the minimum value from the database - minQuery := fmt.Sprintf("SELECT MIN(%[1]s) FROM %[2]s", quotedWatermarkColumn, 
config.WatermarkTable) + minQuery := fmt.Sprintf("SELECT MIN(%[1]s) FROM %[2]s", quotedWatermarkColumn, parsedWatermarkTable.String()) row := tx.QueryRow(c.ctx, minQuery) if err := row.Scan(&minValue); err != nil { log.WithFields(log.Fields{ @@ -272,10 +299,19 @@ func (c *PostgresConnector) getMinMaxValues( case int32: minValue = int64(v) maxValue = int64(maxValue.(int32)) + case pgtype.TID: + minValue = &protos.TID{ + BlockNumber: v.BlockNumber, + OffsetNumber: uint32(v.OffsetNumber), + } + maxValue = &protos.TID{ + BlockNumber: maxValue.(pgtype.TID).BlockNumber, + OffsetNumber: uint32(maxValue.(pgtype.TID).OffsetNumber), + } } } - err := tx.Commit(c.ctx) + err = tx.Commit(c.ctx) if err != nil { return nil, nil, fmt.Errorf("failed to commit transaction: %w", err) } @@ -283,6 +319,47 @@ func (c *PostgresConnector) getMinMaxValues( return minValue, maxValue, nil } +func (c *PostgresConnector) CheckForUpdatedMaxValue(config *protos.QRepConfig, + last *protos.QRepPartition) (bool, error) { + // for xmin lets always assume there are updates + if config.WatermarkColumn == "xmin" { + return true, nil + } + + tx, err := c.pool.Begin(c.ctx) + if err != nil { + return false, fmt.Errorf("unable to begin transaction for getting max value: %w", err) + } + defer func() { + deferErr := tx.Rollback(c.ctx) + if deferErr != pgx.ErrTxClosed && deferErr != nil { + log.WithFields(log.Fields{ + "flowName": config.FlowJobName, + }).Errorf("unexpected error rolling back transaction for getting max value: %v", err) + } + }() + + _, maxValue, err := c.getMinMaxValues(tx, config, last) + if err != nil { + return false, fmt.Errorf("error while getting min and max values: %w", err) + } + + switch x := last.Range.Range.(type) { + case *protos.PartitionRange_IntRange: + if maxValue.(int64) > x.IntRange.End { + return true, nil + } + case *protos.PartitionRange_TimestampRange: + if maxValue.(time.Time).After(x.TimestampRange.End.AsTime()) { + return true, nil + } + default: + return false, 
fmt.Errorf("unknown range type: %v", x) + } + + return false, nil +} + func (c *PostgresConnector) PullQRepRecords( config *protos.QRepConfig, partition *protos.QRepPartition) (*model.QRecordBatch, error) { @@ -290,8 +367,11 @@ func (c *PostgresConnector) PullQRepRecords( log.WithFields(log.Fields{ "partitionId": partition.PartitionId, }).Infof("pulling full table partition for flow job %s", config.FlowJobName) - executor := NewQRepQueryExecutorSnapshot(c.pool, c.ctx, c.config.TransactionSnapshot, + executor, err := NewQRepQueryExecutorSnapshot(c.pool, c.ctx, c.config.TransactionSnapshot, config.FlowJobName, partition.PartitionId) + if err != nil { + return nil, err + } query := config.Query return executor.ExecuteAndProcessQuery(query) } @@ -318,9 +398,6 @@ func (c *PostgresConnector) PullQRepRecords( OffsetNumber: uint16(x.TidRange.End.OffsetNumber), Valid: true, } - case *protos.PartitionRange_XminRange: - rangeStart = x.XminRange.Start - rangeEnd = x.XminRange.End default: return nil, fmt.Errorf("unknown range type: %v", x) } @@ -336,19 +413,18 @@ func (c *PostgresConnector) PullQRepRecords( return nil, err } - executor := NewQRepQueryExecutorSnapshot(c.pool, c.ctx, c.config.TransactionSnapshot, + executor, err := NewQRepQueryExecutorSnapshot(c.pool, c.ctx, c.config.TransactionSnapshot, config.FlowJobName, partition.PartitionId) - records, err := executor.ExecuteAndProcessQuery(query, - rangeStart, rangeEnd) if err != nil { return nil, err } - totalRecordsAtSource, err := c.getApproxTableCounts([]string{config.WatermarkTable}) + records, err := executor.ExecuteAndProcessQuery(query, + rangeStart, rangeEnd) if err != nil { return nil, err } - metrics.LogQRepPullMetrics(c.ctx, config.FlowJobName, int(records.NumRecords), totalRecordsAtSource) + return records, nil } @@ -362,10 +438,14 @@ func (c *PostgresConnector) PullQRepRecordStream( "flowName": config.FlowJobName, "partitionId": partition.PartitionId, }).Infof("pulling full table partition for flow job %s", 
config.FlowJobName) - executor := NewQRepQueryExecutorSnapshot(c.pool, c.ctx, c.config.TransactionSnapshot, + executor, err := NewQRepQueryExecutorSnapshot(c.pool, c.ctx, c.config.TransactionSnapshot, config.FlowJobName, partition.PartitionId) + if err != nil { + return 0, err + } + query := config.Query - _, err := executor.ExecuteAndProcessQueryStream(stream, query) + _, err = executor.ExecuteAndProcessQueryStream(stream, query) return 0, err } log.WithFields(log.Fields{ @@ -395,9 +475,6 @@ func (c *PostgresConnector) PullQRepRecordStream( OffsetNumber: uint16(x.TidRange.End.OffsetNumber), Valid: true, } - case *protos.PartitionRange_XminRange: - rangeStart = x.XminRange.Start - rangeEnd = x.XminRange.End default: return 0, fmt.Errorf("unknown range type: %v", x) } @@ -409,18 +486,17 @@ func (c *PostgresConnector) PullQRepRecordStream( return 0, err } - executor := NewQRepQueryExecutorSnapshot(c.pool, c.ctx, c.config.TransactionSnapshot, + executor, err := NewQRepQueryExecutorSnapshot(c.pool, c.ctx, c.config.TransactionSnapshot, config.FlowJobName, partition.PartitionId) - numRecords, err := executor.ExecuteAndProcessQueryStream(stream, query, rangeStart, rangeEnd) if err != nil { return 0, err } - totalRecordsAtSource, err := c.getApproxTableCounts([]string{config.WatermarkTable}) + numRecords, err := executor.ExecuteAndProcessQueryStream(stream, query, rangeStart, rangeEnd) if err != nil { return 0, err } - metrics.LogQRepPullMetrics(c.ctx, config.FlowJobName, numRecords, totalRecordsAtSource) + log.WithFields(log.Fields{ "partition": partition.PartitionId, }).Infof("pulled %d records for flow job %s", numRecords, config.FlowJobName) @@ -432,7 +508,7 @@ func (c *PostgresConnector) SyncQRepRecords( partition *protos.QRepPartition, stream *model.QRecordStream, ) (int, error) { - dstTable, err := parseSchemaTable(config.DestinationTableIdentifier) + dstTable, err := utils.ParseSchemaTable(config.DestinationTableIdentifier) if err != nil { return 0, 
fmt.Errorf("failed to parse destination table identifier: %w", err) } @@ -466,7 +542,8 @@ func (c *PostgresConnector) SyncQRepRecords( switch syncMode { case protos.QRepSyncMode_QREP_SYNC_MODE_MULTI_INSERT: stagingTableSync := &QRepStagingTableSync{connector: c} - return stagingTableSync.SyncQRepRecords(config.FlowJobName, dstTable, partition, stream) + return stagingTableSync.SyncQRepRecords( + config.FlowJobName, dstTable, partition, stream, config.WriteMode) case protos.QRepSyncMode_QREP_SYNC_MODE_STORAGE_AVRO: return 0, fmt.Errorf("[postgres] SyncQRepRecords not implemented for storage avro sync mode") default: @@ -549,81 +626,3 @@ func (c *PostgresConnector) isPartitionSynced(partitionID string) (bool, error) return count > 0, nil } - -func (c *PostgresConnector) getTimePartitions( - start time.Time, - end time.Time, - batchDurationSeconds uint32, -) ([]*protos.QRepPartition, error) { - if batchDurationSeconds == 0 { - return nil, fmt.Errorf("batch duration must be greater than 0") - } - - batchDuration := time.Duration(batchDurationSeconds) * time.Second - var partitions []*protos.QRepPartition - - for start.Before(end) { - partitionEnd := start.Add(batchDuration) - if partitionEnd.After(end) { - partitionEnd = end - } - - rangePartition := protos.PartitionRange{ - Range: &protos.PartitionRange_TimestampRange{ - TimestampRange: &protos.TimestampPartitionRange{ - Start: timestamppb.New(start), - End: timestamppb.New(partitionEnd), - }, - }, - } - - partitions = append(partitions, &protos.QRepPartition{ - PartitionId: uuid.New().String(), - Range: &rangePartition, - }) - - start = partitionEnd - } - - return partitions, nil -} - -func (c *PostgresConnector) getIntPartitions( - start int64, end int64, batchSizeInt uint32) ([]*protos.QRepPartition, error) { - var partitions []*protos.QRepPartition - batchSize := int64(batchSizeInt) - - if batchSize == 0 { - return nil, fmt.Errorf("batch size cannot be 0") - } - - for start <= end { - partitionEnd := start + 
batchSize - // safeguard against integer overflow - if partitionEnd > end || partitionEnd < start { - partitionEnd = end - } - - rangePartition := protos.PartitionRange{ - Range: &protos.PartitionRange_IntRange{ - IntRange: &protos.IntPartitionRange{ - Start: start, - End: partitionEnd, - }, - }, - } - - partitions = append(partitions, &protos.QRepPartition{ - PartitionId: uuid.New().String(), - Range: &rangePartition, - }) - - if partitionEnd == end { - break - } - - start = partitionEnd - } - - return partitions, nil -} diff --git a/flow/connectors/postgres/qrep_partition_test.go b/flow/connectors/postgres/qrep_partition_test.go index ac084817d..a4e90a854 100644 --- a/flow/connectors/postgres/qrep_partition_test.go +++ b/flow/connectors/postgres/qrep_partition_test.go @@ -22,25 +22,6 @@ type testCase struct { wantErr bool } -func newTestCase(schema string, name string, duration uint32, wantErr bool) *testCase { - schemaQualifiedTable := fmt.Sprintf("%s.test", schema) - query := fmt.Sprintf( - `SELECT * FROM %s WHERE "from" >= {{.start}} AND "from" < {{.end}}`, - schemaQualifiedTable) - return &testCase{ - name: name, - config: &protos.QRepConfig{ - FlowJobName: "test_flow_job", - BatchDurationSeconds: duration, - Query: query, - WatermarkTable: schemaQualifiedTable, - WatermarkColumn: "from", - }, - want: []*protos.QRepPartition{}, - wantErr: wantErr, - } -} - func newTestCaseForNumRows(schema string, name string, rows uint32, expectedNum int) *testCase { schemaQualifiedTable := fmt.Sprintf("%s.test", schema) query := fmt.Sprintf( @@ -155,47 +136,6 @@ func TestGetQRepPartitions(t *testing.T) { // Define the test cases testCases := []*testCase{ - newTestCase( - schemaName, - "ensure all days are in 1 partition", - secondsInADay*100, - false, - ).appendPartition( - time.Date(2010, time.January, 1, 10, 0, 0, 0, time.UTC), - time.Date(2010, time.January, 30, 10, 0, 0, 1000, time.UTC), - ), - newTestCase( - schemaName, - "ensure all days are in 30 partitions", - 
secondsInADay, - false, - ).appendPartitions( - time.Date(2010, time.January, 1, 10, 0, 0, 0, time.UTC), - time.Date(2010, time.January, 30, 10, 0, 0, 0, time.UTC), - 29, - ).appendPartition( - time.Date(2010, time.January, 30, 10, 0, 0, 0, time.UTC), - time.Date(2010, time.January, 30, 10, 0, 0, 1000, time.UTC), - ), - newTestCase( - schemaName, - "ensure all days are in 60 partitions", - secondsInADay/2, - false, - ).appendPartitions( - time.Date(2010, time.January, 1, 10, 0, 0, 0, time.UTC), - time.Date(2010, time.January, 30, 10, 0, 0, 0, time.UTC), - 58, - ).appendPartition( - time.Date(2010, time.January, 30, 10, 0, 0, 0, time.UTC), - time.Date(2010, time.January, 30, 10, 0, 0, 1000, time.UTC), - ), - newTestCase( - schemaName, - "test for error condition with batch size 0", - 0, - true, - ), newTestCaseForNumRows( schemaName, "ensure all rows are in 1 partition if num_rows_per_partition is size of table", diff --git a/flow/connectors/postgres/qrep_query_executor.go b/flow/connectors/postgres/qrep_query_executor.go index 5e2ef59d5..c7d75c450 100644 --- a/flow/connectors/postgres/qrep_query_executor.go +++ b/flow/connectors/postgres/qrep_query_executor.go @@ -7,6 +7,7 @@ import ( "github.com/PeerDB-io/peer-flow/connectors/utils" "github.com/PeerDB-io/peer-flow/model" + "github.com/PeerDB-io/peer-flow/model/qvalue" "github.com/PeerDB-io/peer-flow/shared" util "github.com/PeerDB-io/peer-flow/utils" "github.com/jackc/pgx/v5" @@ -17,12 +18,13 @@ import ( ) type QRepQueryExecutor struct { - pool *pgxpool.Pool - ctx context.Context - snapshot string - testEnv bool - flowJobName string - partitionID string + pool *pgxpool.Pool + ctx context.Context + snapshot string + testEnv bool + flowJobName string + partitionID string + customTypeMap map[uint32]string } func NewQRepQueryExecutor(pool *pgxpool.Pool, ctx context.Context, @@ -37,18 +39,23 @@ func NewQRepQueryExecutor(pool *pgxpool.Pool, ctx context.Context, } func NewQRepQueryExecutorSnapshot(pool *pgxpool.Pool, ctx 
context.Context, snapshot string, - flowJobName string, partitionID string) *QRepQueryExecutor { + flowJobName string, partitionID string) (*QRepQueryExecutor, error) { log.WithFields(log.Fields{ "flowName": flowJobName, "partitionID": partitionID, }).Info("Declared new qrep executor for snapshot") - return &QRepQueryExecutor{ - pool: pool, - ctx: ctx, - snapshot: snapshot, - flowJobName: flowJobName, - partitionID: partitionID, + CustomTypeMap, err := utils.GetCustomDataTypes(ctx, pool) + if err != nil { + return nil, fmt.Errorf("failed to get custom data types: %w", err) } + return &QRepQueryExecutor{ + pool: pool, + ctx: ctx, + snapshot: snapshot, + flowJobName: flowJobName, + partitionID: partitionID, + customTypeMap: CustomTypeMap, + }, nil } func (qe *QRepQueryExecutor) SetTestEnv(testEnv bool) { @@ -89,11 +96,22 @@ func (qe *QRepQueryExecutor) executeQueryInTx(tx pgx.Tx, cursorName string, fetc } // FieldDescriptionsToSchema converts a slice of pgconn.FieldDescription to a QRecordSchema. -func fieldDescriptionsToSchema(fds []pgconn.FieldDescription) *model.QRecordSchema { +func (qe *QRepQueryExecutor) fieldDescriptionsToSchema(fds []pgconn.FieldDescription) *model.QRecordSchema { qfields := make([]*model.QField, len(fds)) for i, fd := range fds { cname := fd.Name ctype := postgresOIDToQValueKind(fd.DataTypeOID) + if ctype == qvalue.QValueKindInvalid { + var err error + if err != nil { + typeName, ok := qe.customTypeMap[fd.DataTypeOID] + if ok { + ctype = customTypeToQKind(typeName) + } else { + ctype = qvalue.QValueKindString + } + } + } // there isn't a way to know if a column is nullable or not // TODO fix this. 
cnullable := true @@ -118,7 +136,7 @@ func (qe *QRepQueryExecutor) ProcessRows( }).Info("Processing rows") // Iterate over the rows for rows.Next() { - record, err := mapRowToQRecord(rows, fieldDescriptions) + record, err := mapRowToQRecord(rows, fieldDescriptions, qe.customTypeMap) if err != nil { return nil, fmt.Errorf("failed to map row to QRecord: %w", err) } @@ -133,7 +151,7 @@ func (qe *QRepQueryExecutor) ProcessRows( batch := &model.QRecordBatch{ NumRecords: uint32(len(records)), Records: records, - Schema: fieldDescriptionsToSchema(fieldDescriptions), + Schema: qe.fieldDescriptionsToSchema(fieldDescriptions), } log.WithFields(log.Fields{ @@ -155,7 +173,7 @@ func (qe *QRepQueryExecutor) processRowsStream( // Iterate over the rows for rows.Next() { - record, err := mapRowToQRecord(rows, fieldDescriptions) + record, err := mapRowToQRecord(rows, fieldDescriptions, qe.customTypeMap) if err != nil { stream.Records <- &model.QRecordOrError{ Err: fmt.Errorf("failed to map row to QRecord: %w", err), @@ -214,7 +232,7 @@ func (qe *QRepQueryExecutor) processFetchedRows( fieldDescriptions := rows.FieldDescriptions() if !stream.IsSchemaSet() { - schema := fieldDescriptionsToSchema(fieldDescriptions) + schema := qe.fieldDescriptionsToSchema(fieldDescriptions) _ = stream.SetSchema(schema) } @@ -395,7 +413,8 @@ func (qe *QRepQueryExecutor) ExecuteAndProcessQueryStream( return totalRecordsFetched, nil } -func mapRowToQRecord(row pgx.Rows, fds []pgconn.FieldDescription) (*model.QRecord, error) { +func mapRowToQRecord(row pgx.Rows, fds []pgconn.FieldDescription, + customTypeMap map[uint32]string) (*model.QRecord, error) { // make vals an empty array of QValue of size len(fds) record := model.NewQRecord(len(fds)) @@ -405,11 +424,31 @@ func mapRowToQRecord(row pgx.Rows, fds []pgconn.FieldDescription) (*model.QRecor } for i, fd := range fds { - tmp, err := parseFieldFromPostgresOID(fd.DataTypeOID, values[i]) - if err != nil { - return nil, fmt.Errorf("failed to parse field: %w", 
err) + // Check if it's a custom type first + typeName, ok := customTypeMap[fd.DataTypeOID] + if !ok { + tmp, err := parseFieldFromPostgresOID(fd.DataTypeOID, values[i]) + if err != nil { + return nil, fmt.Errorf("failed to parse field: %w", err) + } + record.Set(i, *tmp) + } else { + customQKind := customTypeToQKind(typeName) + if customQKind == qvalue.QValueKindGeography || customQKind == qvalue.QValueKindGeometry { + wkbString, ok := values[i].(string) + wkt, err := GeoValidate(wkbString) + if err != nil || !ok { + values[i] = nil + } else { + values[i] = wkt + } + } + customTypeVal := qvalue.QValue{ + Kind: customQKind, + Value: values[i], + } + record.Set(i, customTypeVal) } - record.Set(i, *tmp) } return record, nil diff --git a/flow/connectors/postgres/qrep_sync_method.go b/flow/connectors/postgres/qrep_sync_method.go index 1990dd95c..f1e818f39 100644 --- a/flow/connectors/postgres/qrep_sync_method.go +++ b/flow/connectors/postgres/qrep_sync_method.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" + "github.com/PeerDB-io/peer-flow/connectors/utils" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" util "github.com/PeerDB-io/peer-flow/utils" @@ -30,43 +30,15 @@ type QRepStagingTableSync struct { func (s *QRepStagingTableSync) SyncQRepRecords( flowJobName string, - dstTableName *SchemaTable, + dstTableName *utils.SchemaTable, partition *protos.QRepPartition, stream *model.QRecordStream, + writeMode *protos.QRepWriteMode, ) (int, error) { partitionID := partition.PartitionId - runID, err := util.RandomUInt64() - if err != nil { - return -1, fmt.Errorf("failed to generate random runID: %v", err) - } - startTime := time.Now() - pool := s.connector.pool - - // create a staging temporary table with the same schema as the destination table - stagingTable := fmt.Sprintf("_%d_staging", runID) - - // create the staging temporary table if not exists - tmpTableStmt := 
fmt.Sprintf( - `CREATE TEMP TABLE %s AS SELECT * FROM %s LIMIT 0;`, - stagingTable, - dstTableName.String(), - ) - _, err = pool.Exec(context.Background(), tmpTableStmt) - if err != nil { - log.WithFields(log.Fields{ - "flowName": flowJobName, - "partitionID": partitionID, - "destinationTable": dstTableName, - }).Errorf( - "failed to create staging temporary table %s, statement: '%s'. Error: %v", - stagingTable, - tmpTableStmt, - err, - ) - return 0, fmt.Errorf("failed to create staging temporary table %s: %w", stagingTable, err) - } + pool := s.connector.pool schema, err := stream.Schema() if err != nil { log.WithFields(log.Fields{ @@ -77,30 +49,13 @@ func (s *QRepStagingTableSync) SyncQRepRecords( return 0, fmt.Errorf("failed to get schema from stream: %w", err) } - // Step 2: Insert records into the staging table. - copySource := model.NewQRecordBatchCopyFromSource(stream) - - // Perform the COPY FROM operation - syncRecordsStartTime := time.Now() - syncedRows, err := pool.CopyFrom( - context.Background(), - pgx.Identifier{stagingTable}, - schema.GetColumnNames(), - copySource, - ) - - if err != nil { - return -1, fmt.Errorf("failed to copy records into staging temporary table: %v", err) - } - metrics.LogQRepSyncMetrics(s.connector.ctx, flowJobName, syncedRows, time.Since(syncRecordsStartTime)) - // Second transaction - to handle rest of the processing - tx2, err := pool.Begin(context.Background()) + tx, err := pool.Begin(context.Background()) if err != nil { return 0, fmt.Errorf("failed to begin transaction: %v", err) } defer func() { - if err := tx2.Rollback(context.Background()); err != nil { + if err := tx.Rollback(context.Background()); err != nil { if err != pgx.ErrTxClosed { log.WithFields(log.Fields{ "flowName": flowJobName, @@ -111,41 +66,113 @@ func (s *QRepStagingTableSync) SyncQRepRecords( } }() - colNames := schema.GetColumnNames() - // wrap the column names in double quotes to handle reserved keywords - for i, colName := range colNames { - 
colNames[i] = fmt.Sprintf("\"%s\"", colName) - } - colNamesStr := strings.Join(colNames, ", ") - log.WithFields(log.Fields{ - "flowName": flowJobName, - "partitionID": partitionID, - }).Infof("Obtained column names and quoted them in QRep sync") - insertFromStagingStmt := fmt.Sprintf( - "INSERT INTO %s (%s) SELECT %s FROM %s", - dstTableName.String(), - colNamesStr, - colNamesStr, - stagingTable, - ) + // Step 2: Insert records into the destination table. + copySource := model.NewQRecordBatchCopyFromSource(stream) - _, err = tx2.Exec(context.Background(), insertFromStagingStmt) - if err != nil { - log.WithFields(log.Fields{ - "flowName": flowJobName, - "partitionID": partitionID, - "destinationTable": dstTableName, - }).Errorf("failed to execute statement '%s': %v", insertFromStagingStmt, err) - return -1, fmt.Errorf("failed to execute statements in a transaction: %v", err) + var numRowsSynced int64 + + if writeMode == nil || + writeMode.WriteType == protos.QRepWriteType_QREP_WRITE_MODE_APPEND { + // Perform the COPY FROM operation + numRowsSynced, err = tx.CopyFrom( + context.Background(), + pgx.Identifier{dstTableName.Schema, dstTableName.Table}, + schema.GetColumnNames(), + copySource, + ) + if err != nil { + return -1, fmt.Errorf("failed to copy records into destination table: %v", err) + } + } else { + // Step 2.1: Create a temp staging table + stagingTableName := fmt.Sprintf("_peerdb_staging_%s", util.RandomString(8)) + stagingTableIdentifier := pgx.Identifier{dstTableName.Schema, stagingTableName} + dstTableIdentifier := pgx.Identifier{dstTableName.Schema, dstTableName.Table} + + createStagingTableStmt := fmt.Sprintf( + "CREATE UNLOGGED TABLE %s (LIKE %s);", + stagingTableIdentifier.Sanitize(), + dstTableIdentifier.Sanitize(), + ) + + log.Infof("Creating staging table %s - '%s'", stagingTableName, createStagingTableStmt) + _, err = tx.Exec(context.Background(), createStagingTableStmt) + + if err != nil { + return -1, fmt.Errorf("failed to create staging 
table: %v", err) + } + + // Step 2.2: Insert records into the staging table + numRowsSynced, err = tx.CopyFrom( + context.Background(), + stagingTableIdentifier, + schema.GetColumnNames(), + copySource, + ) + if err != nil { + return -1, fmt.Errorf("failed to copy records into staging table: %v", err) + } + + // construct the SET clause for the upsert operation + upsertMatchColsList := writeMode.UpsertKeyColumns + upsertMatchCols := make(map[string]bool) + for _, col := range upsertMatchColsList { + upsertMatchCols[col] = true + } + + setClause := "" + for _, col := range schema.GetColumnNames() { + _, ok := upsertMatchCols[col] + if !ok { + setClause += fmt.Sprintf("%s = EXCLUDED.%s,", col, col) + } + } + + setClause = strings.TrimSuffix(setClause, ",") + selectStr := strings.Join(schema.GetColumnNames(), ", ") + + // Step 2.3: Perform the upsert operation, ON CONFLICT UPDATE + upsertStmt := fmt.Sprintf( + "INSERT INTO %s (%s) SELECT %s FROM %s ON CONFLICT (%s) DO UPDATE SET %s;", + dstTableIdentifier.Sanitize(), + selectStr, + selectStr, + stagingTableIdentifier.Sanitize(), + strings.Join(writeMode.UpsertKeyColumns, ", "), + setClause, + ) + log.Infof("Performing upsert operation: %s", upsertStmt) + res, err := tx.Exec(context.Background(), upsertStmt) + if err != nil { + return -1, fmt.Errorf("failed to perform upsert operation: %v", err) + } + + numRowsSynced = res.RowsAffected() + + // Step 2.4: Drop the staging table + dropStagingTableStmt := fmt.Sprintf( + "DROP TABLE %s;", + stagingTableIdentifier.Sanitize(), + ) + log.Infof("Dropping staging table %s", stagingTableName) + _, err = tx.Exec(context.Background(), dropStagingTableStmt) + if err != nil { + return -1, fmt.Errorf("failed to drop staging table: %v", err) + } } + log.WithFields(log.Fields{ + "flowName": flowJobName, + "partitionID": partitionID, + "destinationTable": dstTableName, + }).Infof("pushed %d records to %s", numRowsSynced, dstTableName) + // marshal the partition to json using protojson 
pbytes, err := protojson.Marshal(partition) if err != nil { return -1, fmt.Errorf("failed to marshal partition to json: %v", err) } - normalizeRecordsStartTime := time.Now() insertMetadataStmt := fmt.Sprintf( "INSERT INTO %s VALUES ($1, $2, $3, $4, $5);", qRepMetadataTableName, @@ -155,7 +182,7 @@ func (s *QRepStagingTableSync) SyncQRepRecords( "partitionID": partitionID, "destinationTable": dstTableName, }).Infof("Executing transaction inside Qrep sync") - rows, err := tx2.Exec( + _, err = tx.Exec( context.Background(), insertMetadataStmt, flowJobName, @@ -167,14 +194,8 @@ func (s *QRepStagingTableSync) SyncQRepRecords( if err != nil { return -1, fmt.Errorf("failed to execute statements in a transaction: %v", err) } - totalRecordsAtTarget, err := s.connector.getApproxTableCounts([]string{dstTableName.String()}) - if err != nil { - return -1, fmt.Errorf("failed to get total records at target: %v", err) - } - metrics.LogQRepNormalizeMetrics(s.connector.ctx, flowJobName, rows.RowsAffected(), - time.Since(normalizeRecordsStartTime), totalRecordsAtTarget) - err = tx2.Commit(context.Background()) + err = tx.Commit(context.Background()) if err != nil { return -1, fmt.Errorf("failed to commit transaction: %v", err) } diff --git a/flow/connectors/postgres/qvalue_convert.go b/flow/connectors/postgres/qvalue_convert.go index b9c7dcc90..2d0f5b2c2 100644 --- a/flow/connectors/postgres/qvalue_convert.go +++ b/flow/connectors/postgres/qvalue_convert.go @@ -1,6 +1,7 @@ package connpostgres import ( + "encoding/hex" "encoding/json" "errors" "fmt" @@ -13,6 +14,9 @@ import ( "github.com/jackc/pgx/v5/pgtype" "github.com/lib/pq/oid" log "github.com/sirupsen/logrus" + + //nolint:all + geom "github.com/twpayne/go-geos" ) func postgresOIDToQValueKind(recvOID uint32) qvalue.QValueKind { @@ -55,6 +59,8 @@ func postgresOIDToQValueKind(recvOID uint32) qvalue.QValueKind { return qvalue.QValueKindArrayInt32 case pgtype.Int8ArrayOID: return qvalue.QValueKindArrayInt64 + case pgtype.PointOID: + 
return qvalue.QValueKindPoint case pgtype.Float4ArrayOID: return qvalue.QValueKindArrayFloat32 case pgtype.Float8ArrayOID: @@ -77,8 +83,10 @@ func postgresOIDToQValueKind(recvOID uint32) qvalue.QValueKind { return qvalue.QValueKindString } else if recvOID == uint32(oid.T_tsquery) { // TSQUERY return qvalue.QValueKindString + } else if recvOID == uint32(oid.T_point) { // POINT + return qvalue.QValueKindPoint } - // log.Warnf("failed to get type name for oid: %v", recvOID) + return qvalue.QValueKindInvalid } else { log.Warnf("unsupported field type: %v - type name - %s; returning as string", recvOID, typeName.Name) @@ -337,6 +345,11 @@ func parseFieldFromQValueKind(qvalueKind qvalue.QValueKind, value interface{}) ( return nil, fmt.Errorf("failed to parse hstore: %w", err) } val = &qvalue.QValue{Kind: qvalue.QValueKindHStore, Value: hstoreVal} + case qvalue.QValueKindPoint: + xCoord := value.(pgtype.Point).P.X + yCoord := value.(pgtype.Point).P.Y + val = &qvalue.QValue{Kind: qvalue.QValueKindPoint, + Value: fmt.Sprintf("POINT(%f %f)", xCoord, yCoord)} default: // log.Warnf("unhandled QValueKind => %v, parsing as string", qvalueKind) textVal, ok := value.(string) @@ -380,3 +393,42 @@ func numericToRat(numVal *pgtype.Numeric) (*big.Rat, error) { // handle invalid numeric return nil, errors.New("invalid numeric") } + +func customTypeToQKind(typeName string) qvalue.QValueKind { + var qValueKind qvalue.QValueKind + switch typeName { + case "geometry": + qValueKind = qvalue.QValueKindGeometry + case "geography": + qValueKind = qvalue.QValueKindGeography + default: + qValueKind = qvalue.QValueKindString + } + return qValueKind +} + +// returns the WKT representation of the geometry object if it is valid +func GeoValidate(hexWkb string) (string, error) { + // Decode the WKB hex string into binary + wkb, hexErr := hex.DecodeString(hexWkb) + if hexErr != nil { + log.Warnf("Ignoring invalid WKB: %s", hexWkb) + return "", hexErr + } + + // UnmarshalWKB performs geometry 
validation along with WKB parsing + geometryObject, geoErr := geom.NewGeomFromWKB(wkb) + if geoErr != nil { + log.Warnf("Ignoring invalid geometry WKB %s: %v", hexWkb, geoErr) + return "", geoErr + } + + invalidReason := geometryObject.IsValidReason() + if invalidReason != "Valid Geometry" { + log.Warnf("Ignoring invalid geometry shape %s: %s", hexWkb, invalidReason) + return "", errors.New(invalidReason) + } + + wkt := geometryObject.ToWKT() + return wkt, nil +} diff --git a/flow/connectors/s3/qrep.go b/flow/connectors/s3/qrep.go index e3b30bd4e..b34f9a2cf 100644 --- a/flow/connectors/s3/qrep.go +++ b/flow/connectors/s3/qrep.go @@ -61,9 +61,9 @@ func (c *S3Connector) writeToAvroFile( return 0, fmt.Errorf("failed to parse bucket path: %w", err) } - s3Key := fmt.Sprintf("%s/%s/%s.avro", s3o.Prefix, jobName, partitionID) + s3AvroFileKey := fmt.Sprintf("%s/%s/%s.avro", s3o.Prefix, jobName, partitionID) writer := avro.NewPeerDBOCFWriter(c.ctx, stream, avroSchema) - numRecords, err := writer.WriteRecordsToS3(s3o.Bucket, s3Key) + numRecords, err := writer.WriteRecordsToS3(s3o.Bucket, s3AvroFileKey, c.creds) if err != nil { return 0, fmt.Errorf("failed to write records to S3: %w", err) } diff --git a/flow/connectors/s3/s3.go b/flow/connectors/s3/s3.go index 17e9fb49a..fb6ed77d4 100644 --- a/flow/connectors/s3/s3.go +++ b/flow/connectors/s3/s3.go @@ -4,31 +4,82 @@ import ( "context" "fmt" + metadataStore "github.com/PeerDB-io/peer-flow/connectors/external_metadata" "github.com/PeerDB-io/peer-flow/connectors/utils" "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/PeerDB-io/peer-flow/model" "github.com/aws/aws-sdk-go/service/s3" log "github.com/sirupsen/logrus" ) type S3Connector struct { - ctx context.Context - url string - client s3.S3 + ctx context.Context + url string + pgMetadata *metadataStore.PostgresMetadataStore + client s3.S3 + creds utils.S3PeerCredentials } func NewS3Connector(ctx context.Context, - s3ProtoConfig *protos.S3Config) (*S3Connector, 
error) { - s3Client, err := utils.CreateS3Client() + config *protos.S3Config) (*S3Connector, error) { + keyID := "" + if config.AccessKeyId != nil { + keyID = *config.AccessKeyId + } + secretKey := "" + if config.SecretAccessKey != nil { + secretKey = *config.SecretAccessKey + } + roleArn := "" + if config.RoleArn != nil { + roleArn = *config.RoleArn + } + region := "" + if config.Region != nil { + region = *config.Region + } + endpoint := "" + if config.Endpoint != nil { + endpoint = *config.Endpoint + } + s3PeerCreds := utils.S3PeerCredentials{ + AccessKeyID: keyID, + SecretAccessKey: secretKey, + AwsRoleArn: roleArn, + Region: region, + Endpoint: endpoint, + } + s3Client, err := utils.CreateS3Client(s3PeerCreds) if err != nil { return nil, fmt.Errorf("failed to create S3 client: %w", err) } + metadataSchemaName := "peerdb_s3_metadata" // #nosec G101 + pgMetadata, err := metadataStore.NewPostgresMetadataStore(ctx, + config.GetMetadataDb(), metadataSchemaName) + if err != nil { + log.Errorf("failed to create postgres metadata store: %v", err) + return nil, err + } + return &S3Connector{ - ctx: ctx, - url: s3ProtoConfig.Url, - client: *s3Client, + ctx: ctx, + url: config.Url, + pgMetadata: pgMetadata, + client: *s3Client, + creds: s3PeerCreds, }, nil } +func (c *S3Connector) CreateRawTable(req *protos.CreateRawTableInput) (*protos.CreateRawTableOutput, error) { + log.Infof("CreateRawTable for S3 is a no-op") + return nil, nil +} + +func (c *S3Connector) InitializeTableSchema(req map[string]*protos.TableSchema) error { + log.Infof("InitializeTableSchema for S3 is a no-op") + return nil +} + func (c *S3Connector) Close() error { log.Debugf("Closing s3 connector is a noop") return nil @@ -38,3 +89,112 @@ func (c *S3Connector) ConnectionActive() bool { _, err := c.client.ListBuckets(nil) return err == nil } + +func (c *S3Connector) NeedsSetupMetadataTables() bool { + return c.pgMetadata.NeedsSetupMetadata() +} + +func (c *S3Connector) SetupMetadataTables() error { + 
err := c.pgMetadata.SetupMetadata() + if err != nil { + log.Errorf("failed to setup metadata tables: %v", err) + return err + } + + return nil +} + +func (c *S3Connector) GetLastSyncBatchID(jobName string) (int64, error) { + syncBatchID, err := c.pgMetadata.GetLastBatchID(jobName) + if err != nil { + return 0, err + } + + return syncBatchID, nil +} + +func (c *S3Connector) GetLastOffset(jobName string) (*protos.LastSyncState, error) { + res, err := c.pgMetadata.FetchLastOffset(jobName) + if err != nil { + return nil, err + } + + return res, nil +} + +// update offset for a job +func (c *S3Connector) updateLastOffset(jobName string, offset int64) error { + err := c.pgMetadata.UpdateLastOffset(jobName, offset) + if err != nil { + log.Errorf("failed to update last offset: %v", err) + return err + } + + return nil +} + +func (c *S3Connector) SyncRecords(req *model.SyncRecordsRequest) (*model.SyncResponse, error) { + syncBatchID, err := c.GetLastSyncBatchID(req.FlowJobName) + if err != nil { + return nil, fmt.Errorf("failed to get previous syncBatchID: %w", err) + } + syncBatchID = syncBatchID + 1 + + tableNameRowsMapping := make(map[string]uint32) + streamReq := model.NewRecordsToStreamRequest(req.Records.GetRecords(), tableNameRowsMapping, syncBatchID) + streamRes, err := utils.RecordsToRawTableStream(streamReq) + if err != nil { + return nil, fmt.Errorf("failed to convert records to raw table stream: %w", err) + } + recordStream := streamRes.Stream + qrepConfig := &protos.QRepConfig{ + FlowJobName: req.FlowJobName, + DestinationTableIdentifier: fmt.Sprintf("raw_table_%s", req.FlowJobName), + } + partition := &protos.QRepPartition{ + PartitionId: fmt.Sprint(syncBatchID), + } + numRecords, err := c.SyncQRepRecords(qrepConfig, partition, recordStream) + if err != nil { + return nil, err + } + log.Infof("Synced %d records", numRecords) + + lastCheckpoint, err := req.Records.GetLastCheckpoint() + if err != nil { + return nil, fmt.Errorf("failed to get last checkpoint: 
%w", err) + } + + err = c.updateLastOffset(req.FlowJobName, lastCheckpoint) + if err != nil { + log.Errorf("failed to update last offset for s3 cdc: %v", err) + return nil, err + } + err = c.pgMetadata.IncrementID(req.FlowJobName) + if err != nil { + log.Errorf("%v", err) + return nil, err + } + + return &model.SyncResponse{ + FirstSyncedCheckPointID: req.Records.GetFirstCheckpoint(), + LastSyncedCheckPointID: lastCheckpoint, + NumRecordsSynced: int64(numRecords), + TableNameRowsMapping: tableNameRowsMapping, + }, nil +} + +func (c *S3Connector) SetupNormalizedTables(req *protos.SetupNormalizedTableBatchInput) ( + *protos.SetupNormalizedTableBatchOutput, + error) { + log.Infof("SetupNormalizedTables for S3 is a no-op") + return nil, nil +} + +func (c *S3Connector) SyncFlowCleanup(jobName string) error { + err := c.pgMetadata.DropMetadata(jobName) + if err != nil { + return err + } + return nil +} diff --git a/flow/connectors/snowflake/client.go b/flow/connectors/snowflake/client.go index 14bba3d56..285b4882b 100644 --- a/flow/connectors/snowflake/client.go +++ b/flow/connectors/snowflake/client.go @@ -10,6 +10,7 @@ import ( peersql "github.com/PeerDB-io/peer-flow/connectors/sql" "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/PeerDB-io/peer-flow/model/qvalue" util "github.com/PeerDB-io/peer-flow/utils" ) @@ -55,7 +56,7 @@ func NewSnowflakeClient(ctx context.Context, config *protos.SnowflakeConfig) (*S } genericExecutor := *peersql.NewGenericSQLQueryExecutor( - ctx, database, snowflakeTypeToQValueKindMap, qValueKindToSnowflakeTypeMap) + ctx, database, snowflakeTypeToQValueKindMap, qvalue.QValueKindToSnowflakeTypeMap) return &SnowflakeClient{ GenericSQLQueryExecutor: genericExecutor, diff --git a/flow/connectors/snowflake/qrep.go b/flow/connectors/snowflake/qrep.go index 23fc9418c..094dbcaff 100644 --- a/flow/connectors/snowflake/qrep.go +++ b/flow/connectors/snowflake/qrep.go @@ -203,7 +203,7 @@ func (c *SnowflakeConnector) createStage(stageName 
string, config *protos.QRepCo } func (c *SnowflakeConnector) createExternalStage(stageName string, config *protos.QRepConfig) (string, error) { - awsCreds, err := utils.GetAWSSecrets() + awsCreds, err := utils.GetAWSSecrets(utils.S3PeerCredentials{}) if err != nil { log.WithFields(log.Fields{ "flowName": config.FlowJobName, @@ -253,7 +253,7 @@ func (c *SnowflakeConnector) ConsolidateQRepPartitions(config *protos.QRepConfig case protos.QRepSyncMode_QREP_SYNC_MODE_MULTI_INSERT: return fmt.Errorf("multi-insert sync mode not supported for snowflake") case protos.QRepSyncMode_QREP_SYNC_MODE_STORAGE_AVRO: - allCols, err := c.getColsFromTable(destTable) + colInfo, err := c.getColsFromTable(destTable) if err != nil { log.WithFields(log.Fields{ "flowName": config.FlowJobName, @@ -261,6 +261,7 @@ func (c *SnowflakeConnector) ConsolidateQRepPartitions(config *protos.QRepConfig return fmt.Errorf("failed to get columns from table %s: %w", destTable, err) } + allCols := colInfo.Columns err = CopyStageToDestination(c, config, destTable, stageName, allCols) if err != nil { log.WithFields(log.Fields{ @@ -283,7 +284,7 @@ func (c *SnowflakeConnector) CleanupQRepFlow(config *protos.QRepConfig) error { return c.dropStage(config.StagingPath, config.FlowJobName) } -func (c *SnowflakeConnector) getColsFromTable(tableName string) ([]string, error) { +func (c *SnowflakeConnector) getColsFromTable(tableName string) (*model.ColumnInformation, error) { // parse the table name to get the schema and table name components, err := parseTableName(tableName) if err != nil { @@ -296,7 +297,7 @@ func (c *SnowflakeConnector) getColsFromTable(tableName string) ([]string, error //nolint:gosec queryString := fmt.Sprintf(` - SELECT column_name + SELECT column_name, data_type FROM information_schema.columns WHERE UPPER(table_name) = '%s' AND UPPER(table_schema) = '%s' `, components.tableIdentifier, components.schemaIdentifier) @@ -307,16 +308,24 @@ func (c *SnowflakeConnector) getColsFromTable(tableName 
string) ([]string, error } defer rows.Close() - var cols []string + columnMap := map[string]string{} for rows.Next() { - var col string - if err := rows.Scan(&col); err != nil { + var colName string + var colType string + if err := rows.Scan(&colName, &colType); err != nil { return nil, fmt.Errorf("failed to scan row: %w", err) } - cols = append(cols, col) + columnMap[colName] = colType + } + var cols []string + for k := range columnMap { + cols = append(cols, k) } - return cols, nil + return &model.ColumnInformation{ + ColumnMap: columnMap, + Columns: cols, + }, nil } // dropStage drops the stage for the given job. @@ -342,7 +351,7 @@ func (c *SnowflakeConnector) dropStage(stagingPath string, job string) error { log.Infof("Deleting contents of bucket %s with prefix %s/%s", s3o.Bucket, s3o.Prefix, job) // deleting the contents of the bucket with prefix - s3svc, err := utils.CreateS3Client() + s3svc, err := utils.CreateS3Client(utils.S3PeerCredentials{}) if err != nil { log.WithFields(log.Fields{ "flowName": job, diff --git a/flow/connectors/snowflake/qrep_avro_sync.go b/flow/connectors/snowflake/qrep_avro_sync.go index 02f29b648..d7d8286cc 100644 --- a/flow/connectors/snowflake/qrep_avro_sync.go +++ b/flow/connectors/snowflake/qrep_avro_sync.go @@ -9,15 +9,20 @@ import ( "github.com/PeerDB-io/peer-flow/connectors/utils" avro "github.com/PeerDB-io/peer-flow/connectors/utils/avro" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" + "github.com/PeerDB-io/peer-flow/model/qvalue" util "github.com/PeerDB-io/peer-flow/utils" log "github.com/sirupsen/logrus" _ "github.com/snowflakedb/gosnowflake" "go.temporal.io/sdk/activity" ) +type CopyInfo struct { + transformationSQL string + columnsSQL string +} + type SnowflakeAvroSyncMethod struct { config *protos.QRepConfig connector *SnowflakeConnector @@ -54,7 +59,8 @@ func (s *SnowflakeAvroSyncMethod) SyncRecords( return 0, err } - 
numRecords, localFilePath, err := s.writeToAvroFile(stream, avroSchema, "17", flowJobName) + partitionID := util.RandomString(16) + numRecords, localFilePath, err := s.writeToAvroFile(stream, avroSchema, partitionID, flowJobName) if err != nil { return 0, err } @@ -73,11 +79,12 @@ func (s *SnowflakeAvroSyncMethod) SyncRecords( "flowName": flowJobName, }).Infof("Created stage %s", stage) - allCols, err := s.connector.getColsFromTable(s.config.DestinationTableIdentifier) + colInfo, err := s.connector.getColsFromTable(s.config.DestinationTableIdentifier) if err != nil { return 0, err } + allCols := colInfo.Columns err = s.putFileToStage(localFilePath, stage) if err != nil { return 0, err @@ -115,6 +122,17 @@ func (s *SnowflakeAvroSyncMethod) SyncQRepRecords( "partitionID": partition.PartitionId, }).Infof("sync function called and schema acquired") + err = s.addMissingColumns( + config.FlowJobName, + schema, + dstTableSchema, + dstTableName, + partition, + ) + if err != nil { + return 0, err + } + avroSchema, err := s.getAvroSchema(dstTableName, schema, config.FlowJobName) if err != nil { return 0, err @@ -141,7 +159,6 @@ func (s *SnowflakeAvroSyncMethod) SyncQRepRecords( stage := s.connector.getStageNameForJob(config.FlowJobName) - putFileStartTime := time.Now() err = s.putFileToStage(localFilePath, stage) if err != nil { return 0, err @@ -150,8 +167,6 @@ func (s *SnowflakeAvroSyncMethod) SyncQRepRecords( "flowName": config.FlowJobName, "partitionID": partition.PartitionId, }).Infof("Put file to stage in Avro sync for snowflake") - metrics.LogQRepSyncMetrics(s.connector.ctx, config.FlowJobName, int64(numRecords), - time.Since(putFileStartTime)) err = s.insertMetadata(partition, config.FlowJobName, startTime) if err != nil { @@ -163,6 +178,78 @@ func (s *SnowflakeAvroSyncMethod) SyncQRepRecords( return numRecords, nil } +func (s *SnowflakeAvroSyncMethod) addMissingColumns( + flowJobName string, + schema *model.QRecordSchema, + dstTableSchema []*sql.ColumnType, + 
dstTableName string, + partition *protos.QRepPartition, +) error { + // check if avro schema has additional columns compared to destination table + // if so, we need to add those columns to the destination table + colsToTypes := map[string]qvalue.QValueKind{} + for _, col := range schema.Fields { + hasColumn := false + // check ignoring case + for _, dstCol := range dstTableSchema { + if strings.EqualFold(col.Name, dstCol.Name()) { + hasColumn = true + break + } + } + + if !hasColumn { + log.WithFields(log.Fields{ + "flowName": flowJobName, + "partitionID": partition.PartitionId, + }).Infof("adding column %s to destination table %s", col.Name, dstTableName) + colsToTypes[col.Name] = col.Type + } + } + + if len(colsToTypes) > 0 { + tx, err := s.connector.database.Begin() + if err != nil { + return fmt.Errorf("failed to begin transaction: %w", err) + } + + for colName, colType := range colsToTypes { + sfColType, err := colType.ToDWHColumnType(qvalue.QDWHTypeSnowflake) + if err != nil { + return fmt.Errorf("failed to convert QValueKind to Snowflake column type: %w", err) + } + upperCasedColName := strings.ToUpper(colName) + alterTableCmd := fmt.Sprintf("ALTER TABLE %s ", dstTableName) + alterTableCmd += fmt.Sprintf("ADD COLUMN IF NOT EXISTS \"%s\" %s;", upperCasedColName, sfColType) + + log.WithFields(log.Fields{ + "flowName": flowJobName, + "partitionID": partition.PartitionId, + }).Infof("altering destination table %s with command `%s`", dstTableName, alterTableCmd) + + if _, err := tx.Exec(alterTableCmd); err != nil { + return fmt.Errorf("failed to alter destination table: %w", err) + } + } + + if err := tx.Commit(); err != nil { + return fmt.Errorf("failed to commit transaction: %w", err) + } + + log.WithFields(log.Fields{ + "flowName": flowJobName, + "partitionID": partition.PartitionId, + }).Infof("successfully added missing columns to destination table %s", dstTableName) + } else { + log.WithFields(log.Fields{ + "flowName": flowJobName, + "partitionID": 
partition.PartitionId, + }).Infof("no missing columns found in destination table %s", dstTableName) + } + + return nil +} + func (s *SnowflakeAvroSyncMethod) getAvroSchema( dstTableName string, schema *model.QRecordSchema, @@ -186,14 +273,14 @@ func (s *SnowflakeAvroSyncMethod) writeToAvroFile( flowJobName string, ) (int, string, error) { var numRecords int - ocfWriter := avro.NewPeerDBOCFWriter(s.connector.ctx, stream, avroSchema) if s.config.StagingPath == "" { + ocfWriter := avro.NewPeerDBOCFWriterWithCompression(s.connector.ctx, stream, avroSchema) tmpDir, err := os.MkdirTemp("", "peerdb-avro") if err != nil { return 0, "", fmt.Errorf("failed to create temp dir: %w", err) } - localFilePath := fmt.Sprintf("%s/%s.avro", tmpDir, partitionID) + localFilePath := fmt.Sprintf("%s/%s.avro.zst", tmpDir, partitionID) log.WithFields(log.Fields{ "flowName": flowJobName, "partitionID": partitionID, @@ -205,17 +292,18 @@ func (s *SnowflakeAvroSyncMethod) writeToAvroFile( return numRecords, localFilePath, nil } else if strings.HasPrefix(s.config.StagingPath, "s3://") { + ocfWriter := avro.NewPeerDBOCFWriter(s.connector.ctx, stream, avroSchema) s3o, err := utils.NewS3BucketAndPrefix(s.config.StagingPath) if err != nil { return 0, "", fmt.Errorf("failed to parse staging path: %w", err) } - s3Key := fmt.Sprintf("%s/%s/%s.avro", s3o.Prefix, s.config.FlowJobName, partitionID) + s3AvroFileKey := fmt.Sprintf("%s/%s/%s.avro", s3o.Prefix, s.config.FlowJobName, partitionID) log.WithFields(log.Fields{ "flowName": flowJobName, "partitionID": partitionID, }).Infof("OCF: Writing records to S3") - numRecords, err = ocfWriter.WriteRecordsToS3(s3o.Bucket, s3Key) + numRecords, err = ocfWriter.WriteRecordsToS3(s3o.Bucket, s3AvroFileKey, utils.S3PeerCredentials{}) if err != nil { return 0, "", fmt.Errorf("failed to write records to S3: %w", err) } @@ -251,6 +339,39 @@ func (s *SnowflakeAvroSyncMethod) putFileToStage(localFilePath string, stage str return nil } +func (sc *SnowflakeConnector) 
GetCopyTransformation(dstTableName string) (*CopyInfo, error) { + colInfo, colsErr := sc.getColsFromTable(dstTableName) + if colsErr != nil { + return nil, fmt.Errorf("failed to get columns from destination table: %w", colsErr) + } + + var transformations []string + var columnOrder []string + for colName, colType := range colInfo.ColumnMap { + if colName == "_PEERDB_IS_DELETED" { + continue + } + columnOrder = append(columnOrder, fmt.Sprintf("\"%s\"", colName)) + switch colType { + case "GEOGRAPHY": + transformations = append(transformations, + fmt.Sprintf("TO_GEOGRAPHY($1:\"%s\"::string, true) AS \"%s\"", strings.ToLower(colName), colName)) + case "GEOMETRY": + transformations = append(transformations, + fmt.Sprintf("TO_GEOMETRY($1:\"%s\"::string, true) AS \"%s\"", strings.ToLower(colName), colName)) + case "NUMBER": + transformations = append(transformations, + fmt.Sprintf("$1:\"%s\" AS \"%s\"", strings.ToLower(colName), colName)) + default: + transformations = append(transformations, + fmt.Sprintf("($1:\"%s\")::%s AS \"%s\"", strings.ToLower(colName), colType, colName)) + } + } + transformationSQL := strings.Join(transformations, ",") + columnsSQL := strings.Join(columnOrder, ",") + return &CopyInfo{transformationSQL, columnsSQL}, nil +} + func CopyStageToDestination( connector *SnowflakeConnector, config *protos.QRepConfig, @@ -263,7 +384,6 @@ func CopyStageToDestination( }).Infof("Copying stage to destination %s", dstTableName) copyOpts := []string{ "FILE_FORMAT = (TYPE = AVRO)", - "MATCH_BY_COLUMN_NAME='CASE_INSENSITIVE'", "PURGE = TRUE", "ON_ERROR = 'CONTINUE'", } @@ -278,9 +398,13 @@ func CopyStageToDestination( } } + copyTransformation, err := connector.GetCopyTransformation(dstTableName) + if err != nil { + return fmt.Errorf("failed to get copy transformation: %w", err) + } switch appendMode { case true: - err := writeHandler.HandleAppendMode(config.FlowJobName) + err := writeHandler.HandleAppendMode(config.FlowJobName, copyTransformation) if err != nil { 
return fmt.Errorf("failed to handle append mode: %w", err) } @@ -288,7 +412,7 @@ func CopyStageToDestination( case false: upsertKeyCols := config.WriteMode.UpsertKeyColumns err := writeHandler.HandleUpsertMode(allCols, upsertKeyCols, config.WatermarkColumn, - config.FlowJobName) + config.FlowJobName, copyTransformation) if err != nil { return fmt.Errorf("failed to handle upsert mode: %w", err) } @@ -348,9 +472,12 @@ func NewSnowflakeAvroWriteHandler( } } -func (s *SnowflakeAvroWriteHandler) HandleAppendMode(flowJobName string) error { +func (s *SnowflakeAvroWriteHandler) HandleAppendMode( + flowJobName string, + copyInfo *CopyInfo) error { //nolint:gosec - copyCmd := fmt.Sprintf("COPY INTO %s FROM @%s %s", s.dstTableName, s.stage, strings.Join(s.copyOpts, ",")) + copyCmd := fmt.Sprintf("COPY INTO %s(%s) FROM (SELECT %s FROM @%s) %s", + s.dstTableName, copyInfo.columnsSQL, copyInfo.transformationSQL, s.stage, strings.Join(s.copyOpts, ",")) log.Infof("running copy command: %s", copyCmd) _, err := s.connector.database.Exec(copyCmd) if err != nil { @@ -424,6 +551,7 @@ func (s *SnowflakeAvroWriteHandler) HandleUpsertMode( upsertKeyCols []string, watermarkCol string, flowJobName string, + copyInfo *CopyInfo, ) error { runID, err := util.RandomUInt64() if err != nil { @@ -443,8 +571,8 @@ func (s *SnowflakeAvroWriteHandler) HandleUpsertMode( }).Infof("created temp table %s", tempTableName) //nolint:gosec - copyCmd := fmt.Sprintf("COPY INTO %s FROM @%s %s", - tempTableName, s.stage, strings.Join(s.copyOpts, ",")) + copyCmd := fmt.Sprintf("COPY INTO %s(%s) FROM (SELECT %s FROM @%s) %s", + tempTableName, copyInfo.columnsSQL, copyInfo.transformationSQL, s.stage, strings.Join(s.copyOpts, ",")) _, err = s.connector.database.Exec(copyCmd) if err != nil { return fmt.Errorf("failed to run COPY INTO command: %w", err) @@ -467,8 +595,10 @@ func (s *SnowflakeAvroWriteHandler) HandleUpsertMode( if err != nil { return err } - metrics.LogQRepNormalizeMetrics(s.connector.ctx, flowJobName, 
rowCount, time.Since(startTime), - totalRowsAtTarget) + log.WithFields(log.Fields{ + "flowName": flowJobName, + }).Infof("merged %d rows into destination table %s, total rows at target: %d", + rowCount, s.dstTableName, totalRowsAtTarget) } else { log.WithFields(log.Fields{ "flowName": flowJobName, @@ -477,7 +607,7 @@ func (s *SnowflakeAvroWriteHandler) HandleUpsertMode( log.WithFields(log.Fields{ "flowName": flowJobName, - }).Infof("merged data from temp table %s into destination table %s", - tempTableName, s.dstTableName) + }).Infof("merged data from temp table %s into destination table %s, time taken %v", + tempTableName, s.dstTableName, time.Since(startTime)) return nil } diff --git a/flow/connectors/snowflake/qvalue_convert.go b/flow/connectors/snowflake/qvalue_convert.go index 70204e86a..421281834 100644 --- a/flow/connectors/snowflake/qvalue_convert.go +++ b/flow/connectors/snowflake/qvalue_convert.go @@ -6,40 +6,10 @@ import ( "github.com/PeerDB-io/peer-flow/model/qvalue" ) -var qValueKindToSnowflakeTypeMap = map[qvalue.QValueKind]string{ - qvalue.QValueKindBoolean: "BOOLEAN", - qvalue.QValueKindInt16: "INTEGER", - qvalue.QValueKindInt32: "INTEGER", - qvalue.QValueKindInt64: "INTEGER", - qvalue.QValueKindFloat32: "FLOAT", - qvalue.QValueKindFloat64: "FLOAT", - qvalue.QValueKindNumeric: "NUMBER(38, 9)", - qvalue.QValueKindString: "STRING", - qvalue.QValueKindJSON: "VARIANT", - qvalue.QValueKindTimestamp: "TIMESTAMP_NTZ", - qvalue.QValueKindTimestampTZ: "TIMESTAMP_TZ", - qvalue.QValueKindTime: "TIME", - qvalue.QValueKindDate: "DATE", - qvalue.QValueKindBit: "BINARY", - qvalue.QValueKindBytes: "BINARY", - qvalue.QValueKindStruct: "STRING", - qvalue.QValueKindUUID: "STRING", - qvalue.QValueKindTimeTZ: "STRING", - qvalue.QValueKindInvalid: "STRING", - qvalue.QValueKindHStore: "STRING", - - // array types will be mapped to STRING - qvalue.QValueKindArrayFloat32: "VARIANT", - qvalue.QValueKindArrayFloat64: "VARIANT", - qvalue.QValueKindArrayInt32: "VARIANT", - 
qvalue.QValueKindArrayInt64: "VARIANT", - qvalue.QValueKindArrayString: "VARIANT", -} - var snowflakeTypeToQValueKindMap = map[string]qvalue.QValueKind{ "INT": qvalue.QValueKindInt32, "BIGINT": qvalue.QValueKindInt64, - "FLOAT": qvalue.QValueKindFloat32, + "FLOAT": qvalue.QValueKindFloat64, "DOUBLE": qvalue.QValueKindFloat64, "REAL": qvalue.QValueKindFloat64, "VARCHAR": qvalue.QValueKindString, @@ -60,13 +30,17 @@ var snowflakeTypeToQValueKindMap = map[string]qvalue.QValueKind{ "DECIMAL": qvalue.QValueKindNumeric, "NUMERIC": qvalue.QValueKindNumeric, "VARIANT": qvalue.QValueKindJSON, + "GEOMETRY": qvalue.QValueKindGeometry, + "GEOGRAPHY": qvalue.QValueKindGeography, } -func qValueKindToSnowflakeType(colType qvalue.QValueKind) string { - if val, ok := qValueKindToSnowflakeTypeMap[colType]; ok { - return val +func qValueKindToSnowflakeType(colType qvalue.QValueKind) (string, error) { + val, err := colType.ToDWHColumnType(qvalue.QDWHTypeSnowflake) + if err != nil { + return "", err } - return "STRING" + + return val, err } func snowflakeTypeToQValueKind(name string) (qvalue.QValueKind, error) { diff --git a/flow/connectors/snowflake/snowflake.go b/flow/connectors/snowflake/snowflake.go index 431076433..3fe717dd4 100644 --- a/flow/connectors/snowflake/snowflake.go +++ b/flow/connectors/snowflake/snowflake.go @@ -11,7 +11,6 @@ import ( "time" "github.com/PeerDB-io/peer-flow/connectors/utils" - "github.com/PeerDB-io/peer-flow/connectors/utils/metrics" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/model" "github.com/PeerDB-io/peer-flow/model/qvalue" @@ -19,6 +18,7 @@ import ( "github.com/google/uuid" log "github.com/sirupsen/logrus" "github.com/snowflakedb/gosnowflake" + "go.temporal.io/sdk/activity" "golang.org/x/exp/maps" ) @@ -59,6 +59,8 @@ const ( getTableNametoUnchangedColsSQL = `SELECT _PEERDB_DESTINATION_TABLE_NAME, ARRAY_AGG(DISTINCT _PEERDB_UNCHANGED_TOAST_COLUMNS) FROM %s.%s WHERE _PEERDB_BATCH_ID > %d AND _PEERDB_BATCH_ID <= 
%d GROUP BY _PEERDB_DESTINATION_TABLE_NAME` + getTableSchemaSQL = `SELECT COLUMN_NAME, DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA=? AND TABLE_NAME=?` insertJobMetadataSQL = "INSERT INTO %s.%s VALUES (?,?,?,?)" @@ -74,6 +76,7 @@ const ( dropTableIfExistsSQL = "DROP TABLE IF EXISTS %s.%s" deleteJobMetadataSQL = "DELETE FROM %s.%s WHERE MIRROR_JOB_NAME=?" isDeletedColumnName = "_PEERDB_IS_DELETED" + checkSchemaExistsSQL = "SELECT TO_BOOLEAN(COUNT(1)) FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME=?" syncRecordsChunkSize = 1024 ) @@ -211,12 +214,78 @@ func (c *SnowflakeConnector) SetupMetadataTables() error { return nil } +// only used for testing atm. doesn't return info about pkey or ReplicaIdentity [which is PG specific anyway]. +func (c *SnowflakeConnector) GetTableSchema( + req *protos.GetTableSchemaBatchInput) (*protos.GetTableSchemaBatchOutput, error) { + res := make(map[string]*protos.TableSchema) + for _, tableName := range req.TableIdentifiers { + tableSchema, err := c.getTableSchemaForTable(strings.ToUpper(tableName)) + if err != nil { + return nil, err + } + res[tableName] = tableSchema + utils.RecordHeartbeatWithRecover(c.ctx, fmt.Sprintf("fetched schema for table %s", tableName)) + } + + return &protos.GetTableSchemaBatchOutput{ + TableNameSchemaMapping: res, + }, nil +} + +func (c *SnowflakeConnector) getTableSchemaForTable(tableName string) (*protos.TableSchema, error) { + tableNameComponents, err := parseTableName(tableName) + if err != nil { + return nil, fmt.Errorf("error while parsing table schema and name: %w", err) + } + rows, err := c.database.QueryContext(c.ctx, getTableSchemaSQL, tableNameComponents.schemaIdentifier, + tableNameComponents.tableIdentifier) + if err != nil { + return nil, fmt.Errorf("error querying Snowflake peer for schema of table %s: %w", tableName, err) + } + defer func() { + // not sure if the errors these two return are same or different? 
+ err = errors.Join(rows.Close(), rows.Err()) + if err != nil { + log.Errorf("error while closing rows for reading schema of table %s: %v", tableName, err) + } + }() + + res := &protos.TableSchema{ + TableIdentifier: tableName, + Columns: make(map[string]string), + } + + var columnName, columnType string + for rows.Next() { + err = rows.Scan(&columnName, &columnType) + if err != nil { + return nil, fmt.Errorf("error reading row for schema of table %s: %w", tableName, err) + } + genericColType, err := snowflakeTypeToQValueKind(columnType) + if err != nil { + // we use string for invalid types + genericColType = qvalue.QValueKindString + } + + res.Columns[columnName] = string(genericColType) + } + + return res, nil +} + func (c *SnowflakeConnector) GetLastOffset(jobName string) (*protos.LastSyncState, error) { rows, err := c.database.QueryContext(c.ctx, fmt.Sprintf(getLastOffsetSQL, peerDBInternalSchema, mirrorJobsTableIdentifier), jobName) if err != nil { return nil, fmt.Errorf("error querying Snowflake peer for last syncedID: %w", err) } + defer func() { + // not sure if the errors these two return are same or different? 
+ err = errors.Join(rows.Close(), rows.Err()) + if err != nil { + log.Errorf("error while closing rows for reading last offset of job %s: %v", jobName, err) + } + }() if !rows.Next() { log.Warnf("No row found for job %s, returning nil", jobName) @@ -231,7 +300,6 @@ func (c *SnowflakeConnector) GetLastOffset(jobName string) (*protos.LastSyncStat log.Warnf("Assuming zero offset means no sync has happened for job %s, returning nil", jobName) return nil, nil } - return &protos.LastSyncState{ Checkpoint: result, }, nil @@ -359,18 +427,14 @@ func (c *SnowflakeConnector) InitializeTableSchema(req map[string]*protos.TableS return nil } -// ReplayTableSchemaDelta changes a destination table to match the schema at source +// ReplayTableSchemaDeltas changes a destination table to match the schema at source // This could involve adding or dropping multiple columns. -func (c *SnowflakeConnector) ReplayTableSchemaDelta(flowJobName string, schemaDelta *protos.TableSchemaDelta) error { - if (schemaDelta == nil) || (len(schemaDelta.AddedColumns) == 0 && len(schemaDelta.DroppedColumns) == 0) { - return nil - } - - // Postgres is cool and supports transactional DDL. So we use a transaction. 
+func (c *SnowflakeConnector) ReplayTableSchemaDeltas(flowJobName string, + schemaDeltas []*protos.TableSchemaDelta) error { tableSchemaModifyTx, err := c.database.Begin() if err != nil { - return fmt.Errorf("error starting transaction for schema modification for table %s: %w", - schemaDelta.SrcTableName, err) + return fmt.Errorf("error starting transaction for schema modification: %w", + err) } defer func() { deferErr := tableSchemaModifyTx.Rollback() @@ -381,52 +445,44 @@ func (c *SnowflakeConnector) ReplayTableSchemaDelta(flowJobName string, schemaDe } }() - for _, droppedColumn := range schemaDelta.DroppedColumns { - _, err = tableSchemaModifyTx.Exec(fmt.Sprintf("ALTER TABLE %s DROP COLUMN %s", schemaDelta.DstTableName, - droppedColumn)) - if err != nil { - return fmt.Errorf("failed to drop column %s for table %s: %w", droppedColumn, - schemaDelta.SrcTableName, err) + for _, schemaDelta := range schemaDeltas { + if schemaDelta == nil || len(schemaDelta.AddedColumns) == 0 { + return nil } - log.WithFields(log.Fields{ - "flowName": flowJobName, - "tableName": schemaDelta.SrcTableName, - }).Infof("[schema delta replay] dropped column %s", droppedColumn) - } - for _, addedColumn := range schemaDelta.AddedColumns { - _, err = tableSchemaModifyTx.Exec(fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s", schemaDelta.DstTableName, - addedColumn.ColumnName, qValueKindToSnowflakeType(qvalue.QValueKind(addedColumn.ColumnType)))) - if err != nil { - return fmt.Errorf("failed to add column %s for table %s: %w", addedColumn.ColumnName, - schemaDelta.SrcTableName, err) + + for _, addedColumn := range schemaDelta.AddedColumns { + sfColtype, err := qValueKindToSnowflakeType(qvalue.QValueKind(addedColumn.ColumnType)) + if err != nil { + return fmt.Errorf("failed to convert column type %s to snowflake type: %w", + addedColumn.ColumnType, err) + } + _, err = tableSchemaModifyTx.Exec(fmt.Sprintf("ALTER TABLE %s ADD COLUMN IF NOT EXISTS \"%s\" %s", + schemaDelta.DstTableName, 
strings.ToUpper(addedColumn.ColumnName), sfColtype)) + if err != nil { + return fmt.Errorf("failed to add column %s for table %s: %w", addedColumn.ColumnName, + schemaDelta.DstTableName, err) + } + log.WithFields(log.Fields{ + "flowName": flowJobName, + "srcTableName": schemaDelta.SrcTableName, + "dstTableName": schemaDelta.DstTableName, + }).Infof("[schema delta replay] added column %s with data type %s", addedColumn.ColumnName, + addedColumn.ColumnType) } - log.WithFields(log.Fields{ - "flowName": flowJobName, - "tableName": schemaDelta.SrcTableName, - }).Infof("[schema delta replay] added column %s with data type %s", addedColumn.ColumnName, - addedColumn.ColumnType) } err = tableSchemaModifyTx.Commit() if err != nil { - return fmt.Errorf("failed to commit transaction for table schema modification for table %s: %w", - schemaDelta.SrcTableName, err) + return fmt.Errorf("failed to commit transaction for table schema modification: %w", + err) } return nil } func (c *SnowflakeConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.SyncResponse, error) { - if len(req.Records.Records) == 0 { - return &model.SyncResponse{ - FirstSyncedCheckPointID: 0, - LastSyncedCheckPointID: 0, - NumRecordsSynced: 0, - }, nil - } - rawTableIdentifier := getRawTableIdentifier(req.FlowJobName) - log.Printf("pushing %d records to Snowflake table %s", len(req.Records.Records), rawTableIdentifier) + log.Infof("pushing records to Snowflake table %s", rawTableIdentifier) syncBatchID, err := c.GetLastSyncBatchID(req.FlowJobName) if err != nil { @@ -434,6 +490,15 @@ func (c *SnowflakeConnector) SyncRecords(req *model.SyncRecordsRequest) (*model. 
} syncBatchID = syncBatchID + 1 + var res *model.SyncResponse + if req.SyncMode == protos.QRepSyncMode_QREP_SYNC_MODE_STORAGE_AVRO { + log.Infof("sync mode is for flow %s is AVRO", req.FlowJobName) + res, err = c.syncRecordsViaAvro(req, rawTableIdentifier, syncBatchID) + if err != nil { + return nil, err + } + } + // transaction for SyncRecords syncRecordsTx, err := c.database.BeginTx(c.ctx, nil) if err != nil { @@ -450,13 +515,8 @@ func (c *SnowflakeConnector) SyncRecords(req *model.SyncRecordsRequest) (*model. } }() - var res *model.SyncResponse - if req.SyncMode == protos.QRepSyncMode_QREP_SYNC_MODE_STORAGE_AVRO { - res, err = c.syncRecordsViaAvro(req, rawTableIdentifier, syncBatchID) - if err != nil { - return nil, err - } - } else if req.SyncMode == protos.QRepSyncMode_QREP_SYNC_MODE_MULTI_INSERT { + if req.SyncMode == protos.QRepSyncMode_QREP_SYNC_MODE_MULTI_INSERT { + log.Infof("sync mode is for flow %s is MULTI_INSERT", req.FlowJobName) res, err = c.syncRecordsViaSQL(req, rawTableIdentifier, syncBatchID, syncRecordsTx) if err != nil { return nil, err @@ -479,15 +539,13 @@ func (c *SnowflakeConnector) SyncRecords(req *model.SyncRecordsRequest) (*model. func (c *SnowflakeConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, rawTableIdentifier string, syncBatchID int64, syncRecordsTx *sql.Tx) (*model.SyncResponse, error) { - records := make([]snowflakeRawRecord, 0) tableNameRowsMapping := make(map[string]uint32) first := true var firstCP int64 = 0 - lastCP := req.Records.LastCheckPointID - for _, record := range req.Records.Records { + for record := range req.Records.GetRecords() { switch typedRecord := record.(type) { case *model.InsertRecord: // json.Marshal converts bytes in Hex automatically to BASE64 string. 
@@ -505,7 +563,7 @@ func (c *SnowflakeConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, ra recordType: 0, matchData: "", batchID: syncBatchID, - unchangedToastColumns: utils.KeysToString(typedRecord.UnchangedToastColumns), + unchangedToastColumns: "", }) tableNameRowsMapping[typedRecord.DestinationTableName] += 1 case *model.UpdateRecord: @@ -545,7 +603,7 @@ func (c *SnowflakeConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, ra recordType: 2, matchData: itemsJSON, batchID: syncBatchID, - unchangedToastColumns: utils.KeysToString(typedRecord.UnchangedToastColumns), + unchangedToastColumns: "", }) tableNameRowsMapping[typedRecord.DestinationTableName] += 1 default: @@ -560,10 +618,8 @@ func (c *SnowflakeConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, ra // inserting records into raw table. numRecords := len(records) - startTime := time.Now() for begin := 0; begin < numRecords; begin += syncRecordsChunkSize { end := begin + syncRecordsChunkSize - if end > numRecords { end = numRecords } @@ -572,194 +628,31 @@ func (c *SnowflakeConnector) syncRecordsViaSQL(req *model.SyncRecordsRequest, ra return nil, err } } - metrics.LogSyncMetrics(c.ctx, req.FlowJobName, int64(numRecords), time.Since(startTime)) + + lastCheckpoint, err := req.Records.GetLastCheckpoint() + if err != nil { + return nil, err + } return &model.SyncResponse{ FirstSyncedCheckPointID: firstCP, - LastSyncedCheckPointID: lastCP, + LastSyncedCheckPointID: lastCheckpoint, NumRecordsSynced: int64(len(records)), CurrentSyncBatchID: syncBatchID, TableNameRowsMapping: tableNameRowsMapping, }, nil } -func (c *SnowflakeConnector) syncRecordsViaAvro(req *model.SyncRecordsRequest, rawTableIdentifier string, - syncBatchID int64) (*model.SyncResponse, error) { - recordStream := model.NewQRecordStream(len(req.Records.Records)) - - err := recordStream.SetSchema(&model.QRecordSchema{ - Fields: []*model.QField{ - { - Name: "_peerdb_uid", - Type: qvalue.QValueKindString, - Nullable: false, - 
}, - { - Name: "_peerdb_timestamp", - Type: qvalue.QValueKindInt64, - Nullable: false, - }, - { - Name: "_peerdb_destination_table_name", - Type: qvalue.QValueKindString, - Nullable: false, - }, - { - Name: "_peerdb_data", - Type: qvalue.QValueKindString, - Nullable: false, - }, - { - Name: "_peerdb_record_type", - Type: qvalue.QValueKindInt64, - Nullable: true, - }, - { - Name: "_peerdb_match_data", - Type: qvalue.QValueKindString, - Nullable: true, - }, - { - Name: "_peerdb_batch_id", - Type: qvalue.QValueKindInt64, - Nullable: true, - }, - { - Name: "_peerdb_unchanged_toast_columns", - Type: qvalue.QValueKindString, - Nullable: true, - }, - }, - }) - if err != nil { - return nil, err - } - - first := true - var firstCP int64 = 0 - lastCP := req.Records.LastCheckPointID +func (c *SnowflakeConnector) syncRecordsViaAvro( + req *model.SyncRecordsRequest, + rawTableIdentifier string, + syncBatchID int64, +) (*model.SyncResponse, error) { tableNameRowsMapping := make(map[string]uint32) - - for _, record := range req.Records.Records { - var entries [8]qvalue.QValue - switch typedRecord := record.(type) { - case *model.InsertRecord: - // json.Marshal converts bytes in Hex automatically to BASE64 string. 
- itemsJSON, err := typedRecord.Items.ToJSON() - if err != nil { - return nil, fmt.Errorf("failed to serialize insert record items to JSON: %w", err) - } - - // add insert record to the raw table - entries[2] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: typedRecord.DestinationTableName, - } - entries[3] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: itemsJSON, - } - entries[4] = qvalue.QValue{ - Kind: qvalue.QValueKindInt64, - Value: 0, - } - entries[5] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: "", - } - entries[7] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: utils.KeysToString(typedRecord.UnchangedToastColumns), - } - tableNameRowsMapping[typedRecord.DestinationTableName] += 1 - case *model.UpdateRecord: - newItemsJSON, err := typedRecord.NewItems.ToJSON() - if err != nil { - return nil, fmt.Errorf("failed to serialize update record new items to JSON: %w", err) - } - oldItemsJSON, err := typedRecord.OldItems.ToJSON() - if err != nil { - return nil, fmt.Errorf("failed to serialize update record old items to JSON: %w", err) - } - - // add update record to the raw table - entries[2] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: typedRecord.DestinationTableName, - } - entries[3] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: newItemsJSON, - } - entries[4] = qvalue.QValue{ - Kind: qvalue.QValueKindInt64, - Value: 1, - } - entries[5] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: oldItemsJSON, - } - entries[7] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: utils.KeysToString(typedRecord.UnchangedToastColumns), - } - tableNameRowsMapping[typedRecord.DestinationTableName] += 1 - case *model.DeleteRecord: - itemsJSON, err := typedRecord.Items.ToJSON() - if err != nil { - return nil, fmt.Errorf("failed to serialize delete record items to JSON: %w", err) - } - - // append delete record to the raw table - entries[2] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - 
Value: typedRecord.DestinationTableName, - } - entries[3] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: itemsJSON, - } - entries[4] = qvalue.QValue{ - Kind: qvalue.QValueKindInt64, - Value: 2, - } - entries[5] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: itemsJSON, - } - entries[7] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: utils.KeysToString(typedRecord.UnchangedToastColumns), - } - tableNameRowsMapping[typedRecord.DestinationTableName] += 1 - default: - return nil, fmt.Errorf("record type %T not supported in Snowflake flow connector", typedRecord) - } - - if first { - firstCP = record.GetCheckPointID() - first = false - } - - entries[0] = qvalue.QValue{ - Kind: qvalue.QValueKindString, - Value: uuid.New().String(), - } - entries[1] = qvalue.QValue{ - Kind: qvalue.QValueKindInt64, - Value: time.Now().UnixNano(), - } - entries[6] = qvalue.QValue{ - Kind: qvalue.QValueKindInt64, - Value: syncBatchID, - } - - recordStream.Records <- &model.QRecordOrError{ - Record: &model.QRecord{ - NumEntries: 8, - Entries: entries[:], - }, - } + streamReq := model.NewRecordsToStreamRequest(req.Records.GetRecords(), tableNameRowsMapping, syncBatchID) + streamRes, err := utils.RecordsToRawTableStream(streamReq) + if err != nil { + return nil, fmt.Errorf("failed to convert records to raw table stream: %w", err) } qrepConfig := &protos.QRepConfig{ @@ -774,18 +667,20 @@ func (c *SnowflakeConnector) syncRecordsViaAvro(req *model.SyncRecordsRequest, r return nil, err } - startTime := time.Now() - close(recordStream.Records) - numRecords, err := avroSyncer.SyncRecords(destinationTableSchema, recordStream, req.FlowJobName) + numRecords, err := avroSyncer.SyncRecords(destinationTableSchema, streamRes.Stream, req.FlowJobName) + if err != nil { + return nil, err + } + + lastCheckpoint, err := req.Records.GetLastCheckpoint() if err != nil { return nil, err } - metrics.LogSyncMetrics(c.ctx, req.FlowJobName, int64(numRecords), time.Since(startTime)) 
return &model.SyncResponse{ - FirstSyncedCheckPointID: firstCP, - LastSyncedCheckPointID: lastCP, - NumRecordsSynced: int64(len(req.Records.Records)), + FirstSyncedCheckPointID: req.Records.GetFirstCheckpoint(), + LastSyncedCheckPointID: lastCheckpoint, + NumRecordsSynced: int64(numRecords), CurrentSyncBatchID: syncBatchID, TableNameRowsMapping: tableNameRowsMapping, }, nil @@ -804,7 +699,7 @@ func (c *SnowflakeConnector) NormalizeRecords(req *model.NormalizeRecordsRequest // normalize has caught up with sync, chill until more records are loaded. if syncBatchID == normalizeBatchID { return &model.NormalizeResponse{ - Done: true, + Done: false, StartBatchID: normalizeBatchID, EndBatchID: syncBatchID, }, nil @@ -817,7 +712,7 @@ func (c *SnowflakeConnector) NormalizeRecords(req *model.NormalizeRecordsRequest // sync hasn't created job metadata yet, chill. if !jobMetadataExists { return &model.NormalizeResponse{ - Done: true, + Done: false, }, nil } destinationTableNames, err := c.getDistinctTableNamesInBatch(req.FlowJobName, syncBatchID, normalizeBatchID) @@ -846,7 +741,6 @@ func (c *SnowflakeConnector) NormalizeRecords(req *model.NormalizeRecordsRequest }() var totalRowsAffected int64 = 0 - startTime := time.Now() // execute merge statements per table that uses CTEs to merge data into the normalized table for _, destinationTableName := range destinationTableNames { rowsAffected, err := c.generateAndExecuteMergeStatement( @@ -861,14 +755,7 @@ func (c *SnowflakeConnector) NormalizeRecords(req *model.NormalizeRecordsRequest } totalRowsAffected += rowsAffected } - if totalRowsAffected > 0 { - totalRowsAtTarget, err := c.getTableCounts(destinationTableNames) - if err != nil { - return nil, err - } - metrics.LogNormalizeMetrics(c.ctx, req.FlowJobName, totalRowsAffected, time.Since(startTime), - totalRowsAtTarget) - } + // updating metadata with new normalizeBatchID err = c.updateNormalizeMetadata(req.FlowJobName, syncBatchID, normalizeRecordsTx) if err != nil { @@ -898,7 
+785,8 @@ func (c *SnowflakeConnector) CreateRawTable(req *protos.CreateRawTableInput) (*p if err != nil { return nil, err } - // there is no easy way to check if a table has the same schema in Snowflake, so just executing the CREATE TABLE IF NOT EXISTS blindly. + // there is no easy way to check if a table has the same schema in Snowflake, + // so just executing the CREATE TABLE IF NOT EXISTS blindly. _, err = createRawTableTx.ExecContext(c.ctx, fmt.Sprintf(createRawTableSQL, peerDBInternalSchema, rawTableIdentifier)) if err != nil { @@ -936,16 +824,26 @@ func (c *SnowflakeConnector) SyncFlowCleanup(jobName string) error { } }() - _, err = syncFlowCleanupTx.ExecContext(c.ctx, fmt.Sprintf(dropTableIfExistsSQL, peerDBInternalSchema, - getRawTableIdentifier(jobName))) + row := syncFlowCleanupTx.QueryRowContext(c.ctx, checkSchemaExistsSQL, peerDBInternalSchema) + var schemaExists bool + err = row.Scan(&schemaExists) if err != nil { - return fmt.Errorf("unable to drop raw table: %w", err) + return fmt.Errorf("unable to check if internal schema exists: %w", err) } - _, err = syncFlowCleanupTx.ExecContext(c.ctx, - fmt.Sprintf(deleteJobMetadataSQL, peerDBInternalSchema, mirrorJobsTableIdentifier), jobName) - if err != nil { - return fmt.Errorf("unable to delete job metadata: %w", err) + + if schemaExists { + _, err = syncFlowCleanupTx.ExecContext(c.ctx, fmt.Sprintf(dropTableIfExistsSQL, peerDBInternalSchema, + getRawTableIdentifier(jobName))) + if err != nil { + return fmt.Errorf("unable to drop raw table: %w", err) + } + _, err = syncFlowCleanupTx.ExecContext(c.ctx, + fmt.Sprintf(deleteJobMetadataSQL, peerDBInternalSchema, mirrorJobsTableIdentifier), jobName) + if err != nil { + return fmt.Errorf("unable to delete job metadata: %w", err) + } } + err = syncFlowCleanupTx.Commit() if err != nil { return fmt.Errorf("unable to commit transaction for sync flow cleanup: %w", err) @@ -980,16 +878,14 @@ func generateCreateTableSQLForNormalizedTable( sourceTableSchema 
*protos.TableSchema, ) string { createTableSQLArray := make([]string, 0, len(sourceTableSchema.Columns)) - primaryColUpper := strings.ToUpper(sourceTableSchema.PrimaryKeyColumn) for columnName, genericColumnType := range sourceTableSchema.Columns { columnNameUpper := strings.ToUpper(columnName) - if primaryColUpper == columnNameUpper { - createTableSQLArray = append(createTableSQLArray, fmt.Sprintf(`"%s" %s PRIMARY KEY,`, - columnNameUpper, qValueKindToSnowflakeType(qvalue.QValueKind(genericColumnType)))) - } else { - createTableSQLArray = append(createTableSQLArray, fmt.Sprintf(`"%s" %s,`, columnNameUpper, - qValueKindToSnowflakeType(qvalue.QValueKind(genericColumnType)))) + sfColType, err := qValueKindToSnowflakeType(qvalue.QValueKind(genericColumnType)) + if err != nil { + log.Warnf("failed to convert column type %s to snowflake type: %v", genericColumnType, err) + continue } + createTableSQLArray = append(createTableSQLArray, fmt.Sprintf(`"%s" %s,`, columnNameUpper, sfColType)) } // add a _peerdb_is_deleted column to the normalized table @@ -997,6 +893,15 @@ func generateCreateTableSQLForNormalizedTable( createTableSQLArray = append(createTableSQLArray, fmt.Sprintf(`"%s" BOOLEAN DEFAULT FALSE,`, isDeletedColumnName)) + // add composite primary key to the table + primaryKeyColsUpperQuoted := make([]string, 0) + for _, primaryKeyCol := range sourceTableSchema.PrimaryKeyColumns { + primaryKeyColsUpperQuoted = append(primaryKeyColsUpperQuoted, + fmt.Sprintf(`"%s"`, strings.ToUpper(primaryKeyCol))) + } + createTableSQLArray = append(createTableSQLArray, fmt.Sprintf("PRIMARY KEY(%s),", + strings.TrimSuffix(strings.Join(primaryKeyColsUpperQuoted, ","), ","))) + return fmt.Sprintf(createNormalizedTableSQL, sourceTableIdentifier, strings.TrimSuffix(strings.Join(createTableSQLArray, ""), ",")) } @@ -1045,19 +950,32 @@ func (c *SnowflakeConnector) generateAndExecuteMergeStatement( flattenedCastsSQLArray := make([]string, 0, len(normalizedTableSchema.Columns)) for 
columnName, genericColumnType := range normalizedTableSchema.Columns { - sfType := qValueKindToSnowflakeType(qvalue.QValueKind(genericColumnType)) + sfType, err := qValueKindToSnowflakeType(qvalue.QValueKind(genericColumnType)) + if err != nil { + return 0, fmt.Errorf("failed to convert column type %s to snowflake type: %w", + genericColumnType, err) + } + targetColumnName := fmt.Sprintf(`"%s"`, strings.ToUpper(columnName)) switch qvalue.QValueKind(genericColumnType) { case qvalue.QValueKindBytes, qvalue.QValueKindBit: - flattenedCastsSQLArray = append(flattenedCastsSQLArray, fmt.Sprintf("BASE64_DECODE_BINARY(%s:%s) "+ + flattenedCastsSQLArray = append(flattenedCastsSQLArray, fmt.Sprintf("BASE64_DECODE_BINARY(%s:\"%s\") "+ "AS %s,", toVariantColumnName, columnName, targetColumnName)) + case qvalue.QValueKindGeography: + flattenedCastsSQLArray = append(flattenedCastsSQLArray, + fmt.Sprintf("TO_GEOGRAPHY(CAST(%s:\"%s\" AS STRING),true) AS %s,", + toVariantColumnName, columnName, targetColumnName)) + case qvalue.QValueKindGeometry: + flattenedCastsSQLArray = append(flattenedCastsSQLArray, + fmt.Sprintf("TO_GEOMETRY(CAST(%s:\"%s\" AS STRING),true) AS %s,", + toVariantColumnName, columnName, targetColumnName)) // TODO: https://github.com/PeerDB-io/peerdb/issues/189 - handle time types and interval types // case model.ColumnTypeTime: // flattenedCastsSQLArray = append(flattenedCastsSQLArray, fmt.Sprintf("TIME_FROM_PARTS(0,0,0,%s:%s:"+ // "Microseconds*1000) "+ // "AS %s,", toVariantColumnName, columnName, columnName)) default: - flattenedCastsSQLArray = append(flattenedCastsSQLArray, fmt.Sprintf("CAST(%s:%s AS %s) AS %s,", + flattenedCastsSQLArray = append(flattenedCastsSQLArray, fmt.Sprintf("CAST(%s:\"%s\" AS %s) AS %s,", toVariantColumnName, columnName, sfType, targetColumnName)) } } @@ -1079,9 +997,13 @@ func (c *SnowflakeConnector) generateAndExecuteMergeStatement( updateStatementsforToastCols := c.generateUpdateStatement(columnNames, unchangedToastColumns) 
updateStringToastCols := strings.Join(updateStatementsforToastCols, " ") - // TARGET. = SOURCE. - pkeyColStr := fmt.Sprintf("TARGET.%s = SOURCE.%s", - normalizedTableSchema.PrimaryKeyColumn, normalizedTableSchema.PrimaryKeyColumn) + pkeySelectSQLArray := make([]string, 0, len(normalizedTableSchema.PrimaryKeyColumns)) + for _, pkeyColName := range normalizedTableSchema.PrimaryKeyColumns { + pkeySelectSQLArray = append(pkeySelectSQLArray, fmt.Sprintf("TARGET.%s = SOURCE.%s", + pkeyColName, pkeyColName)) + } + // TARGET. = SOURCE. AND TARGET. = SOURCE. ... + pkeySelectSQL := strings.Join(pkeySelectSQLArray, " AND ") deletePart := "DELETE" if softDelete { @@ -1090,8 +1012,8 @@ func (c *SnowflakeConnector) generateAndExecuteMergeStatement( mergeStatement := fmt.Sprintf(mergeStatementSQL, destinationTableIdentifier, toVariantColumnName, rawTableIdentifier, normalizeBatchID, syncBatchID, flattenedCastsSQL, - normalizedTableSchema.PrimaryKeyColumn, pkeyColStr, insertColumnsSQL, insertValuesSQL, - updateStringToastCols, deletePart) + fmt.Sprintf("(%s)", strings.Join(normalizedTableSchema.PrimaryKeyColumns, ",")), + pkeySelectSQL, insertColumnsSQL, insertValuesSQL, updateStringToastCols, deletePart) result, err := normalizeRecordsTx.ExecContext(c.ctx, mergeStatement, destinationTableIdentifier) if err != nil { @@ -1157,7 +1079,8 @@ func (c *SnowflakeConnector) updateSyncMetadata(flowJobName string, lastCP int64 return nil } -func (c *SnowflakeConnector) updateNormalizeMetadata(flowJobName string, normalizeBatchID int64, normalizeRecordsTx *sql.Tx) error { +func (c *SnowflakeConnector) updateNormalizeMetadata(flowJobName string, + normalizeBatchID int64, normalizeRecordsTx *sql.Tx) error { jobMetadataExists, err := c.jobMetadataExists(flowJobName) if err != nil { return fmt.Errorf("failed to get sync status for flow job: %w", err) @@ -1225,3 +1148,48 @@ func (c *SnowflakeConnector) generateUpdateStatement(allCols []string, unchanged } return updateStmts } + +func (c 
*SnowflakeConnector) RenameTables(req *protos.RenameTablesInput) (*protos.RenameTablesOutput, error) { + renameTablesTx, err := c.database.BeginTx(c.ctx, nil) + if err != nil { + return nil, fmt.Errorf("unable to begin transaction for rename tables: %w", err) + } + + for _, renameRequest := range req.RenameTableOptions { + src := renameRequest.CurrentName + dst := renameRequest.NewName + + log.WithFields(log.Fields{ + "flowName": req.FlowJobName, + }).Infof("renaming table '%s' to '%s'...", src, dst) + + activity.RecordHeartbeat(c.ctx, fmt.Sprintf("renaming table '%s' to '%s'...", src, dst)) + + // drop the dst table if exists + _, err = renameTablesTx.ExecContext(c.ctx, fmt.Sprintf("DROP TABLE IF EXISTS %s", dst)) + if err != nil { + return nil, fmt.Errorf("unable to drop table %s: %w", dst, err) + } + + // rename the src table to dst + _, err = renameTablesTx.ExecContext(c.ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", src, dst)) + if err != nil { + return nil, fmt.Errorf("unable to rename table %s to %s: %w", src, dst, err) + } + + log.WithFields(log.Fields{ + "flowName": req.FlowJobName, + }).Infof("successfully renamed table '%s' to '%s'", src, dst) + + activity.RecordHeartbeat(c.ctx, fmt.Sprintf("successfully renamed table '%s' to '%s'", src, dst)) + } + + err = renameTablesTx.Commit() + if err != nil { + return nil, fmt.Errorf("unable to commit transaction for rename tables: %w", err) + } + + return &protos.RenameTablesOutput{ + FlowJobName: req.FlowJobName, + }, nil +} diff --git a/flow/connectors/sql/query_executor.go b/flow/connectors/sql/query_executor.go index b3e5803a3..29291b40f 100644 --- a/flow/connectors/sql/query_executor.go +++ b/flow/connectors/sql/query_executor.go @@ -10,6 +10,7 @@ import ( "github.com/PeerDB-io/peer-flow/model" "github.com/PeerDB-io/peer-flow/model/qvalue" + "github.com/google/uuid" "github.com/jmoiron/sqlx" log "github.com/sirupsen/logrus" "go.temporal.io/sdk/activity" @@ -123,6 +124,16 @@ func (g *GenericSQLQueryExecutor) 
CountRows(schemaName string, tableName string) return count, err } +func (g *GenericSQLQueryExecutor) CountNonNullRows( + schemaName string, + tableName string, + columnName string) (int64, error) { + var count int64 + err := g.db.QueryRowx("SELECT COUNT(CASE WHEN " + columnName + + " IS NOT NULL THEN 1 END) AS non_null_count FROM " + schemaName + "." + tableName).Scan(&count) + return count, err +} + func (g *GenericSQLQueryExecutor) columnTypeToQField(ct *sql.ColumnType) (*model.QField, error) { qvKind, ok := g.dbtypeToQValueKind[ct.DatabaseTypeName()] if !ok { @@ -198,6 +209,8 @@ func (g *GenericSQLQueryExecutor) processRows(rows *sqlx.Rows) (*model.QRecordBa case qvalue.QValueKindNumeric: var s sql.NullString values[i] = &s + case qvalue.QValueKindUUID: + values[i] = new([]byte) default: values[i] = new(interface{}) } @@ -373,6 +386,20 @@ func toQValue(kind qvalue.QValueKind, val interface{}) (qvalue.QValue, error) { return qvalue.QValue{Kind: kind, Value: *v}, nil } + case qvalue.QValueKindUUID: + if v, ok := val.(*[]byte); ok && v != nil { + // convert byte array to string + uuidVal, err := uuid.FromBytes(*v) + if err != nil { + return qvalue.QValue{}, fmt.Errorf("failed to parse uuid: %v", *v) + } + return qvalue.QValue{Kind: qvalue.QValueKindString, Value: uuidVal.String()}, nil + } + + if v, ok := val.(*[16]byte); ok && v != nil { + return qvalue.QValue{Kind: qvalue.QValueKindString, Value: *v}, nil + } + case qvalue.QValueKindJSON: vraw := val.(*interface{}) vstring := (*vraw).(string) diff --git a/flow/connectors/utils/avro/avro_writer.go b/flow/connectors/utils/avro/avro_writer.go index 6fd0d22bd..0b4cf09d7 100644 --- a/flow/connectors/utils/avro/avro_writer.go +++ b/flow/connectors/utils/avro/avro_writer.go @@ -13,6 +13,7 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/s3/s3manager" + "github.com/klauspost/compress/zstd" "github.com/linkedin/goavro/v2" log "github.com/sirupsen/logrus" uber_atomic "go.uber.org/atomic" @@ 
-22,6 +23,8 @@ type PeerDBOCFWriter struct { ctx context.Context stream *model.QRecordStream avroSchema *model.QRecordAvroSchemaDefinition + compress bool + writer io.WriteCloser } func NewPeerDBOCFWriter( @@ -33,17 +36,50 @@ func NewPeerDBOCFWriter( ctx: ctx, stream: stream, avroSchema: avroSchema, + compress: false, } } +func NewPeerDBOCFWriterWithCompression( + ctx context.Context, + stream *model.QRecordStream, + avroSchema *model.QRecordAvroSchemaDefinition, +) *PeerDBOCFWriter { + return &PeerDBOCFWriter{ + ctx: ctx, + stream: stream, + avroSchema: avroSchema, + compress: true, + } +} + +func (p *PeerDBOCFWriter) initWriteCloser(w io.Writer) error { + var err error + if p.compress { + p.writer, err = zstd.NewWriter(w) + if err != nil { + return fmt.Errorf("error while initializing zstd encoding writer: %w", err) + } + } else { + p.writer = &nopWriteCloser{w} + } + return nil +} + func (p *PeerDBOCFWriter) createOCFWriter(w io.Writer) (*goavro.OCFWriter, error) { + err := p.initWriteCloser(w) + if err != nil { + return nil, fmt.Errorf("failed to create compressed writer: %w", err) + } + ocfWriter, err := goavro.NewOCFWriter(goavro.OCFConfig{ - W: w, + W: p.writer, Schema: p.avroSchema.Schema, }) if err != nil { return nil, fmt.Errorf("failed to create OCF writer: %w", err) } + return ocfWriter, nil } @@ -107,6 +143,9 @@ func (p *PeerDBOCFWriter) WriteOCF(w io.Writer) (int, error) { if err != nil { return 0, fmt.Errorf("failed to create OCF writer: %w", err) } + // we have to keep a reference to the underlying writer as goavro doesn't provide any access to it + defer p.writer.Close() + numRows, err := p.writeRecordsToOCFWriter(ocfWriter) if err != nil { return 0, fmt.Errorf("failed to write records to OCF writer: %w", err) @@ -114,7 +153,7 @@ func (p *PeerDBOCFWriter) WriteOCF(w io.Writer) (int, error) { return numRows, nil } -func (p *PeerDBOCFWriter) WriteRecordsToS3(bucketName, key string) (int, error) { +func (p *PeerDBOCFWriter) 
WriteRecordsToS3(bucketName, key string, s3Creds utils.S3PeerCredentials) (int, error) { r, w := io.Pipe() numRowsWritten := make(chan int, 1) go func() { @@ -126,7 +165,7 @@ func (p *PeerDBOCFWriter) WriteRecordsToS3(bucketName, key string) (int, error) numRowsWritten <- numRows }() - s3svc, err := utils.CreateS3Client() + s3svc, err := utils.CreateS3Client(s3Creds) if err != nil { log.Errorf("failed to create S3 client: %v", err) return 0, fmt.Errorf("failed to create S3 client: %w", err) @@ -160,3 +199,14 @@ func (p *PeerDBOCFWriter) WriteRecordsToAvroFile(filePath string) (int, error) { defer file.Close() return p.WriteOCF(file) } + +type nopWriteCloser struct { + io.Writer +} + +func (n *nopWriteCloser) Close() error { + if closer, ok := n.Writer.(io.Closer); ok { + return closer.Close() + } + return nil +} diff --git a/flow/connectors/utils/aws.go b/flow/connectors/utils/aws.go index e936ea619..5ba14d161 100644 --- a/flow/connectors/utils/aws.go +++ b/flow/connectors/utils/aws.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" ) @@ -15,17 +16,45 @@ type AWSSecrets struct { SecretAccessKey string AwsRoleArn string Region string + Endpoint string } -func GetAWSSecrets() (*AWSSecrets, error) { - awsRegion := os.Getenv("AWS_REGION") +type S3PeerCredentials struct { + AccessKeyID string + SecretAccessKey string + AwsRoleArn string + Region string + Endpoint string +} + +func GetAWSSecrets(creds S3PeerCredentials) (*AWSSecrets, error) { + awsRegion := creds.Region + if awsRegion == "" { + awsRegion = os.Getenv("AWS_REGION") + } if awsRegion == "" { return nil, fmt.Errorf("AWS_REGION must be set") } - awsKey := os.Getenv("AWS_ACCESS_KEY_ID") - awsSecret := os.Getenv("AWS_SECRET_ACCESS_KEY") - awsRoleArn := os.Getenv("AWS_ROLE_ARN") + awsEndpoint := creds.Endpoint + if awsEndpoint == "" { + awsEndpoint = 
os.Getenv("AWS_ENDPOINT") + } + + awsKey := creds.AccessKeyID + if awsKey == "" { + awsKey = os.Getenv("AWS_ACCESS_KEY_ID") + } + + awsSecret := creds.SecretAccessKey + if awsSecret == "" { + awsSecret = os.Getenv("AWS_SECRET_ACCESS_KEY") + } + + awsRoleArn := creds.AwsRoleArn + if awsRoleArn == "" { + awsRoleArn = os.Getenv("AWS_ROLE_ARN") + } // one of (awsKey and awsSecret) or awsRoleArn must be set if awsKey == "" && awsSecret == "" && awsRoleArn == "" { @@ -37,6 +66,7 @@ func GetAWSSecrets() (*AWSSecrets, error) { SecretAccessKey: awsSecret, AwsRoleArn: awsRoleArn, Region: awsRegion, + Endpoint: awsEndpoint, }, nil } @@ -66,15 +96,22 @@ func NewS3BucketAndPrefix(s3Path string) (*S3BucketAndPrefix, error) { }, nil } -func CreateS3Client() (*s3.S3, error) { - awsSecrets, err := GetAWSSecrets() +func CreateS3Client(s3Creds S3PeerCredentials) (*s3.S3, error) { + awsSecrets, err := GetAWSSecrets(s3Creds) if err != nil { return nil, fmt.Errorf("failed to get AWS secrets: %w", err) } - sess := session.Must(session.NewSession(&aws.Config{ - Region: aws.String(awsSecrets.Region), - })) + config := &aws.Config{ + Region: aws.String(awsSecrets.Region), + Endpoint: aws.String(awsSecrets.Endpoint), + } + + if s3Creds.AccessKeyID != "" && s3Creds.SecretAccessKey != "" { + config.Credentials = credentials.NewStaticCredentials(s3Creds.AccessKeyID, s3Creds.SecretAccessKey, "") + } + + sess := session.Must(session.NewSession(config)) s3svc := s3.New(sess) return s3svc, nil diff --git a/flow/connectors/utils/catalog/env.go b/flow/connectors/utils/catalog/env.go index 3e29e82a6..e1bd33d25 100644 --- a/flow/connectors/utils/catalog/env.go +++ b/flow/connectors/utils/catalog/env.go @@ -22,6 +22,11 @@ func GetCatalogConnectionPoolFromEnv() (*pgxpool.Pool, error) { return nil, fmt.Errorf("unable to establish connection with catalog: %w", err) } + err = catalogConn.Ping(context.Background()) + if err != nil { + return nil, fmt.Errorf("unable to establish connection with catalog: %w", 
err) + } + return catalogConn, nil } diff --git a/flow/connectors/utils/env.go b/flow/connectors/utils/env.go new file mode 100644 index 000000000..2911e3d8e --- /dev/null +++ b/flow/connectors/utils/env.go @@ -0,0 +1,47 @@ +package utils + +import ( + "os" + "strconv" +) + +// GetEnv returns the value of the environment variable with the given name +// and a boolean indicating whether the environment variable exists. +func GetEnv(name string) (string, bool) { + val, exists := os.LookupEnv(name) + return val, exists +} + +// GetEnvBool returns the value of the environment variable with the given name +// or defaultValue if the environment variable is not set or is not a valid +// boolean value. +func GetEnvBool(name string, defaultValue bool) bool { + val, ok := GetEnv(name) + if !ok { + return defaultValue + } + + b, err := strconv.ParseBool(val) + if err != nil { + return defaultValue + } + + return b +} + +// GetEnvInt returns the value of the environment variable with the given name +// or defaultValue if the environment variable is not set or is not a valid +// integer value. 
+func GetEnvInt(name string, defaultValue int) int { + val, ok := GetEnv(name) + if !ok { + return defaultValue + } + + i, err := strconv.Atoi(val) + if err != nil { + return defaultValue + } + + return i +} diff --git a/flow/connectors/utils/heartbeat.go b/flow/connectors/utils/heartbeat.go index 78f550387..b16735b6b 100644 --- a/flow/connectors/utils/heartbeat.go +++ b/flow/connectors/utils/heartbeat.go @@ -5,6 +5,7 @@ import ( "fmt" "time" + log "github.com/sirupsen/logrus" "go.temporal.io/sdk/activity" ) @@ -18,7 +19,7 @@ func HeartbeatRoutine( go func() { for { msg := fmt.Sprintf("heartbeat #%d: %s", counter, message()) - activity.RecordHeartbeat(ctx, msg) + RecordHeartbeatWithRecover(ctx, msg) counter += 1 to := time.After(interval) select { @@ -30,3 +31,15 @@ func HeartbeatRoutine( }() return shutdown } + +// if the functions are being called outside the context of a Temporal workflow, +// activity.RecordHeartbeat panics, this is a bandaid for that. +func RecordHeartbeatWithRecover(ctx context.Context, details ...interface{}) { + defer func() { + if r := recover(); r != nil { + log.Warnln("ignoring panic from activity.RecordHeartbeat") + log.Warnln("this can happen when function is invoked outside of a Temporal workflow") + } + }() + activity.RecordHeartbeat(ctx, details...) 
+} diff --git a/flow/connectors/utils/map.go b/flow/connectors/utils/map.go index 829da94bf..769ef225a 100644 --- a/flow/connectors/utils/map.go +++ b/flow/connectors/utils/map.go @@ -2,7 +2,11 @@ package utils import "strings" -func KeysToString(m map[string]bool) string { +func KeysToString(m map[string]struct{}) string { + if m == nil { + return "" + } + var keys []string for k := range m { keys = append(keys, k) diff --git a/flow/connectors/utils/metrics/metrics.go b/flow/connectors/utils/metrics/metrics.go deleted file mode 100644 index 19a364dd1..000000000 --- a/flow/connectors/utils/metrics/metrics.go +++ /dev/null @@ -1,120 +0,0 @@ -package metrics - -import ( - "context" - "fmt" - "time" - - "github.com/PeerDB-io/peer-flow/model" - "github.com/PeerDB-io/peer-flow/shared" - "go.temporal.io/sdk/activity" -) - -func LogPullMetrics( - ctx context.Context, - flowJobName string, - recordBatch *model.RecordBatch, - totalRecordsAtSource int64, -) { - if ctx.Value(shared.EnableMetricsKey) != true { - return - } - - metricsHandler := activity.GetMetricsHandler(ctx) - insertRecordsPulledGauge := metricsHandler.Gauge(fmt.Sprintf("cdcflow.%s.insert_records_pulled", flowJobName)) - updateRecordsPulledGauge := metricsHandler.Gauge(fmt.Sprintf("cdcflow.%s.update_records_pulled", flowJobName)) - deleteRecordsPulledGauge := metricsHandler.Gauge(fmt.Sprintf("cdcflow.%s.delete_records_pulled", flowJobName)) - totalRecordsPulledGauge := metricsHandler.Gauge(fmt.Sprintf("cdcflow.%s.total_records_pulled", flowJobName)) - totalRecordsAtSourceGauge := metricsHandler.Gauge(fmt.Sprintf("cdcflow.%s.records_at_source", flowJobName)) - - insertRecords, updateRecords, deleteRecords := 0, 0, 0 - for _, record := range recordBatch.Records { - switch record.(type) { - case *model.InsertRecord: - insertRecords++ - case *model.UpdateRecord: - updateRecords++ - case *model.DeleteRecord: - deleteRecords++ - } - } - - insertRecordsPulledGauge.Update(float64(insertRecords)) - 
updateRecordsPulledGauge.Update(float64(updateRecords)) - deleteRecordsPulledGauge.Update(float64(deleteRecords)) - totalRecordsPulledGauge.Update(float64(len(recordBatch.Records))) - totalRecordsAtSourceGauge.Update(float64(totalRecordsAtSource)) -} - -func LogSyncMetrics(ctx context.Context, flowJobName string, recordsCount int64, duration time.Duration) { - if ctx.Value(shared.EnableMetricsKey) != true { - return - } - - metricsHandler := activity.GetMetricsHandler(ctx) - recordsSyncedPerSecondGauge := - metricsHandler.Gauge(fmt.Sprintf("cdcflow.%s.records_synced_per_second", flowJobName)) - recordsSyncedPerSecondGauge.Update(float64(recordsCount) / duration.Seconds()) -} - -func LogNormalizeMetrics( - ctx context.Context, - flowJobName string, - recordsCount int64, - duration time.Duration, - totalRecordsAtTarget int64, -) { - if ctx.Value(shared.EnableMetricsKey) != true { - return - } - - metricsHandler := activity.GetMetricsHandler(ctx) - recordsNormalizedPerSecondGauge := - metricsHandler.Gauge(fmt.Sprintf("cdcflow.%s.records_normalized_per_second", flowJobName)) - totalRecordsAtTargetGauge := - metricsHandler.Gauge(fmt.Sprintf("cdcflow.%s.records_at_target", flowJobName)) - - recordsNormalizedPerSecondGauge.Update(float64(recordsCount) / duration.Seconds()) - totalRecordsAtTargetGauge.Update(float64(totalRecordsAtTarget)) -} - -func LogQRepPullMetrics(ctx context.Context, flowJobName string, - numRecords int, totalRecordsAtSource int64) { - if ctx.Value(shared.EnableMetricsKey) != true { - return - } - - metricsHandler := activity.GetMetricsHandler(ctx) - totalRecordsPulledGauge := metricsHandler.Gauge(fmt.Sprintf("qrepflow.%s.total_records_pulled", flowJobName)) - totalRecordsAtSourceGauge := metricsHandler.Gauge(fmt.Sprintf("qrepflow.%s.records_at_source", flowJobName)) - - totalRecordsPulledGauge.Update(float64(numRecords)) - totalRecordsAtSourceGauge.Update(float64(totalRecordsAtSource)) -} - -func LogQRepSyncMetrics(ctx context.Context, flowJobName 
string, recordsCount int64, duration time.Duration) { - if ctx.Value(shared.EnableMetricsKey) != true { - return - } - - metricsHandler := activity.GetMetricsHandler(ctx) - recordsSyncedPerSecondGauge := - metricsHandler.Gauge(fmt.Sprintf("qrepflow.%s.records_synced_per_second", flowJobName)) - recordsSyncedPerSecondGauge.Update(float64(recordsCount) / duration.Seconds()) -} - -func LogQRepNormalizeMetrics(ctx context.Context, flowJobName string, - normalizedRecordsCount int64, duration time.Duration, totalRecordsAtTarget int64) { - if ctx.Value(shared.EnableMetricsKey) != true { - return - } - - metricsHandler := activity.GetMetricsHandler(ctx) - recordsSyncedPerSecondGauge := - metricsHandler.Gauge(fmt.Sprintf("qrepflow.%s.records_normalized_per_second", flowJobName)) - totalRecordsAtTargetGauge := - metricsHandler.Gauge(fmt.Sprintf("qrepflow.%s.records_at_target", flowJobName)) - - recordsSyncedPerSecondGauge.Update(float64(normalizedRecordsCount) / duration.Seconds()) - totalRecordsAtTargetGauge.Update(float64(totalRecordsAtTarget)) -} diff --git a/flow/connectors/utils/monitoring/monitoring.go b/flow/connectors/utils/monitoring/monitoring.go index f297e7cee..ed63d30f7 100644 --- a/flow/connectors/utils/monitoring/monitoring.go +++ b/flow/connectors/utils/monitoring/monitoring.go @@ -11,6 +11,7 @@ import ( "github.com/jackc/pgx/v5/pgtype" "github.com/jackc/pgx/v5/pgxpool" log "github.com/sirupsen/logrus" + "google.golang.org/protobuf/proto" ) type CatalogMirrorMonitor struct { @@ -25,8 +26,8 @@ type CDCBatchInfo struct { StartTime time.Time } -func NewCatalogMirrorMonitor(catalogConn *pgxpool.Pool) CatalogMirrorMonitor { - return CatalogMirrorMonitor{ +func NewCatalogMirrorMonitor(catalogConn *pgxpool.Pool) *CatalogMirrorMonitor { + return &CatalogMirrorMonitor{ catalogConn: catalogConn, } } @@ -66,7 +67,7 @@ func (c *CatalogMirrorMonitor) UpdateLatestLSNAtSourceForCDCFlow(ctx context.Con "UPDATE peerdb_stats.cdc_flows SET latest_lsn_at_source=$1 WHERE 
flow_name=$2", uint64(latestLSNAtSource), flowJobName) if err != nil { - return fmt.Errorf("error while updating flow in cdc_flows: %w", err) + return fmt.Errorf("[source] error while updating flow in cdc_flows: %w", err) } return nil } @@ -81,7 +82,7 @@ func (c *CatalogMirrorMonitor) UpdateLatestLSNAtTargetForCDCFlow(ctx context.Con "UPDATE peerdb_stats.cdc_flows SET latest_lsn_at_target=$1 WHERE flow_name=$2", uint64(latestLSNAtTarget), flowJobName) if err != nil { - return fmt.Errorf("error while updating flow in cdc_flows: %w", err) + return fmt.Errorf("[target] error while updating flow in cdc_flows: %w", err) } return nil } @@ -103,8 +104,32 @@ func (c *CatalogMirrorMonitor) AddCDCBatchForFlow(ctx context.Context, flowJobNa return nil } -func (c *CatalogMirrorMonitor) UpdateEndTimeForCDCBatch(ctx context.Context, flowJobName string, - batchID int64) error { +// update num records and end-lsn for a cdc batch +func (c *CatalogMirrorMonitor) UpdateNumRowsAndEndLSNForCDCBatch( + ctx context.Context, + flowJobName string, + batchID int64, + numRows uint32, + batchEndLSN pglogrepl.LSN, +) error { + if c == nil || c.catalogConn == nil { + return nil + } + + _, err := c.catalogConn.Exec(ctx, + "UPDATE peerdb_stats.cdc_batches SET rows_in_batch=$1,batch_end_lsn=$2 WHERE flow_name=$3 AND batch_id=$4", + numRows, uint64(batchEndLSN), flowJobName, batchID) + if err != nil { + return fmt.Errorf("error while updating batch in cdc_batch: %w", err) + } + return nil +} + +func (c *CatalogMirrorMonitor) UpdateEndTimeForCDCBatch( + ctx context.Context, + flowJobName string, + batchID int64, +) error { if c == nil || c.catalogConn == nil { return nil } @@ -152,19 +177,57 @@ func (c *CatalogMirrorMonitor) AddCDCBatchTablesForFlow(ctx context.Context, flo return nil } -func (c *CatalogMirrorMonitor) InitializeQRepRun(ctx context.Context, flowJobName string, runUUID string, - startTime time.Time) error { +func (c *CatalogMirrorMonitor) InitializeQRepRun( + ctx context.Context, + 
config *protos.QRepConfig, + runUUID string, + partitions []*protos.QRepPartition, +) error { if c == nil || c.catalogConn == nil { return nil } + flowJobName := config.GetFlowJobName() _, err := c.catalogConn.Exec(ctx, - "INSERT INTO peerdb_stats.qrep_runs(flow_name,run_uuid,start_time) VALUES($1,$2,$3) ON CONFLICT DO NOTHING", - flowJobName, runUUID, startTime) + "INSERT INTO peerdb_stats.qrep_runs(flow_name,run_uuid) VALUES($1,$2) ON CONFLICT DO NOTHING", + flowJobName, runUUID) if err != nil { return fmt.Errorf("error while inserting qrep run in qrep_runs: %w", err) } + cfgBytes, err := proto.Marshal(config) + if err != nil { + return fmt.Errorf("unable to marshal flow config: %w", err) + } + + _, err = c.catalogConn.Exec(ctx, + "UPDATE peerdb_stats.qrep_runs SET config_proto = $1 WHERE flow_name = $2", + cfgBytes, flowJobName) + if err != nil { + return fmt.Errorf("unable to update flow config in catalog: %w", err) + } + + for _, partition := range partitions { + if err := c.addPartitionToQRepRun(ctx, flowJobName, runUUID, partition); err != nil { + return fmt.Errorf("unable to add partition to qrep run: %w", err) + } + } + + return nil +} + +func (c *CatalogMirrorMonitor) UpdateStartTimeForQRepRun(ctx context.Context, runUUID string) error { + if c == nil || c.catalogConn == nil { + return nil + } + + _, err := c.catalogConn.Exec(ctx, + "UPDATE peerdb_stats.qrep_runs SET start_time=$1 WHERE run_uuid=$2", + time.Now(), runUUID) + if err != nil { + return fmt.Errorf("error while updating num_rows_to_sync for run_uuid %s in qrep_runs: %w", runUUID, err) + } + return nil } @@ -183,12 +246,17 @@ func (c *CatalogMirrorMonitor) UpdateEndTimeForQRepRun(ctx context.Context, runU return nil } -func (c *CatalogMirrorMonitor) AddPartitionToQRepRun(ctx context.Context, flowJobName string, +func (c *CatalogMirrorMonitor) addPartitionToQRepRun(ctx context.Context, flowJobName string, runUUID string, partition *protos.QRepPartition) error { if c == nil || c.catalogConn == 
nil { return nil } + if partition.Range == nil && partition.FullTablePartition { + log.Infof("partition %s is a full table partition. Metrics logging is skipped.", partition.PartitionId) + return nil + } + var rangeStart, rangeEnd string switch x := partition.Range.Range.(type) { case *protos.PartitionRange_IntRange: @@ -223,10 +291,10 @@ func (c *CatalogMirrorMonitor) AddPartitionToQRepRun(ctx context.Context, flowJo _, err := c.catalogConn.Exec(ctx, `INSERT INTO peerdb_stats.qrep_partitions - (flow_name,run_uuid,partition_uuid,partition_start,partition_end,start_time,restart_count) - VALUES($1,$2,$3,$4,$5,$6,$7) ON CONFLICT(run_uuid,partition_uuid) DO UPDATE SET + (flow_name,run_uuid,partition_uuid,partition_start,partition_end,restart_count) + VALUES($1,$2,$3,$4,$5,$6) ON CONFLICT(run_uuid,partition_uuid) DO UPDATE SET restart_count=qrep_partitions.restart_count+1`, - flowJobName, runUUID, partition.PartitionId, rangeStart, rangeEnd, time.Now(), 0) + flowJobName, runUUID, partition.PartitionId, rangeStart, rangeEnd, 0) if err != nil { return fmt.Errorf("error while inserting qrep partition in qrep_partitions: %w", err) } @@ -234,6 +302,23 @@ func (c *CatalogMirrorMonitor) AddPartitionToQRepRun(ctx context.Context, flowJo return nil } +func (c *CatalogMirrorMonitor) UpdateStartTimeForPartition( + ctx context.Context, + runUUID string, + partition *protos.QRepPartition, +) error { + if c == nil || c.catalogConn == nil { + return nil + } + + _, err := c.catalogConn.Exec(ctx, `UPDATE peerdb_stats.qrep_partitions SET start_time=$1 + WHERE run_uuid=$2 AND partition_uuid=$3`, time.Now(), runUUID, partition.PartitionId) + if err != nil { + return fmt.Errorf("error while updating qrep partition in qrep_partitions: %w", err) + } + return nil +} + func (c *CatalogMirrorMonitor) UpdatePullEndTimeAndRowsForPartition(ctx context.Context, runUUID string, partition *protos.QRepPartition, rowsInPartition int64) error { if c == nil || c.catalogConn == nil { diff --git 
a/flow/connectors/utils/partition/partition.go b/flow/connectors/utils/partition/partition.go index b25c39f6b..7fcc40a69 100644 --- a/flow/connectors/utils/partition/partition.go +++ b/flow/connectors/utils/partition/partition.go @@ -1,4 +1,4 @@ -package utils +package partition_utils import ( "fmt" @@ -133,20 +133,6 @@ func createTIDPartition(start pgtype.TID, end pgtype.TID) *protos.QRepPartition } } -func createXMINPartition(start uint32, end uint32) *protos.QRepPartition { - return &protos.QRepPartition{ - PartitionId: uuid.New().String(), - Range: &protos.PartitionRange{ - Range: &protos.PartitionRange_XminRange{ - XminRange: &protos.XMINPartitionRange{ - Start: start, - End: end, - }, - }, - }, - } -} - type PartitionHelper struct { prevStart interface{} prevEnd interface{} @@ -196,10 +182,6 @@ func (p *PartitionHelper) AddPartition(start interface{}, end interface{}) error p.partitions = append(p.partitions, createTIDPartition(v, end.(pgtype.TID))) p.prevStart = v p.prevEnd = end - case pgtype.Uint32: - p.partitions = append(p.partitions, createXMINPartition(v.Uint32, end.(uint32))) - p.prevStart = v - p.prevEnd = end default: return fmt.Errorf("unsupported type: %T", v) } diff --git a/flow/connectors/utils/postgres.go b/flow/connectors/utils/postgres.go index 883cf3679..cd6d9983a 100644 --- a/flow/connectors/utils/postgres.go +++ b/flow/connectors/utils/postgres.go @@ -1,10 +1,13 @@ package utils import ( + "context" "fmt" "net/url" + "strings" "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/jackc/pgx/v5/pgxpool" ) func GetPGConnectionString(pgConfig *protos.PostgresConfig) string { @@ -20,3 +23,51 @@ func GetPGConnectionString(pgConfig *protos.PostgresConfig) string { ) return connString } + +func GetCustomDataTypes(ctx context.Context, pool *pgxpool.Pool) (map[uint32]string, error) { + rows, err := pool.Query(ctx, ` + SELECT t.oid, t.typname as type + FROM pg_type t + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace + WHERE 
(t.typrelid = 0 OR (SELECT c.relkind = 'c' FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) + AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el WHERE el.oid = t.typelem AND el.typarray = t.oid) + AND n.nspname NOT IN ('pg_catalog', 'information_schema'); + `) + if err != nil { + return nil, fmt.Errorf("failed to get custom types: %w", err) + } + + customTypeMap := map[uint32]string{} + for rows.Next() { + var typeID uint32 + var typeName string + if err := rows.Scan(&typeID, &typeName); err != nil { + return nil, fmt.Errorf("failed to scan row: %w", err) + } + customTypeMap[typeID] = typeName + } + return customTypeMap, nil +} + +// SchemaTable is a table in a schema. +type SchemaTable struct { + Schema string + Table string +} + +func (t *SchemaTable) String() string { + return fmt.Sprintf(`"%s"."%s"`, t.Schema, t.Table) +} + +// ParseSchemaTable parses a table name into schema and table name. +func ParseSchemaTable(tableName string) (*SchemaTable, error) { + parts := strings.Split(tableName, ".") + if len(parts) != 2 { + return nil, fmt.Errorf("invalid table name: %s", tableName) + } + + return &SchemaTable{ + Schema: parts[0], + Table: parts[1], + }, nil +} diff --git a/flow/connectors/utils/stream.go b/flow/connectors/utils/stream.go new file mode 100644 index 000000000..9359c1565 --- /dev/null +++ b/flow/connectors/utils/stream.go @@ -0,0 +1,200 @@ +package utils + +import ( + "fmt" + "time" + + "github.com/PeerDB-io/peer-flow/model" + "github.com/PeerDB-io/peer-flow/model/qvalue" + "github.com/google/uuid" +) + +func RecordsToRawTableStream(req *model.RecordsToStreamRequest) (*model.RecordsToStreamResponse, error) { + recordStream := model.NewQRecordStream(1 << 16) + err := recordStream.SetSchema(&model.QRecordSchema{ + Fields: []*model.QField{ + { + Name: "_peerdb_uid", + Type: qvalue.QValueKindString, + Nullable: false, + }, + { + Name: "_peerdb_timestamp", + Type: qvalue.QValueKindInt64, + Nullable: false, + }, + { + Name: 
"_peerdb_destination_table_name", + Type: qvalue.QValueKindString, + Nullable: false, + }, + { + Name: "_peerdb_data", + Type: qvalue.QValueKindString, + Nullable: false, + }, + { + Name: "_peerdb_record_type", + Type: qvalue.QValueKindInt64, + Nullable: true, + }, + { + Name: "_peerdb_match_data", + Type: qvalue.QValueKindString, + Nullable: true, + }, + { + Name: "_peerdb_batch_id", + Type: qvalue.QValueKindInt64, + Nullable: true, + }, + { + Name: "_peerdb_unchanged_toast_columns", + Type: qvalue.QValueKindString, + Nullable: true, + }, + }, + }) + if err != nil { + return nil, err + } + + go func() { + for record := range req.GetRecords() { + qRecordOrError := recordToQRecordOrError(req.TableMapping, req.BatchID, record) + recordStream.Records <- qRecordOrError + } + + close(recordStream.Records) + }() + + return &model.RecordsToStreamResponse{ + Stream: recordStream, + }, nil +} + +func recordToQRecordOrError(tableMapping map[string]uint32, batchID int64, record model.Record) *model.QRecordOrError { + var entries [8]qvalue.QValue + switch typedRecord := record.(type) { + case *model.InsertRecord: + // json.Marshal converts bytes in Hex automatically to BASE64 string. 
+ itemsJSON, err := typedRecord.Items.ToJSON() + if err != nil { + return &model.QRecordOrError{ + Err: fmt.Errorf("failed to serialize insert record items to JSON: %w", err), + } + } + + // add insert record to the raw table + entries[2] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: typedRecord.DestinationTableName, + } + entries[3] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: itemsJSON, + } + entries[4] = qvalue.QValue{ + Kind: qvalue.QValueKindInt64, + Value: 0, + } + entries[5] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: "", + } + entries[7] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: "", + } + tableMapping[typedRecord.DestinationTableName] += 1 + case *model.UpdateRecord: + newItemsJSON, err := typedRecord.NewItems.ToJSON() + if err != nil { + return &model.QRecordOrError{ + Err: fmt.Errorf("failed to serialize update record new items to JSON: %w", err), + } + } + oldItemsJSON, err := typedRecord.OldItems.ToJSON() + if err != nil { + return &model.QRecordOrError{ + Err: fmt.Errorf("failed to serialize update record old items to JSON: %w", err), + } + } + + entries[2] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: typedRecord.DestinationTableName, + } + entries[3] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: newItemsJSON, + } + entries[4] = qvalue.QValue{ + Kind: qvalue.QValueKindInt64, + Value: 1, + } + entries[5] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: oldItemsJSON, + } + entries[7] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: KeysToString(typedRecord.UnchangedToastColumns), + } + tableMapping[typedRecord.DestinationTableName] += 1 + case *model.DeleteRecord: + itemsJSON, err := typedRecord.Items.ToJSON() + if err != nil { + return &model.QRecordOrError{ + Err: fmt.Errorf("failed to serialize delete record items to JSON: %w", err), + } + } + + // append delete record to the raw table + entries[2] = qvalue.QValue{ + Kind: qvalue.QValueKindString, 
+ Value: typedRecord.DestinationTableName, + } + entries[3] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: itemsJSON, + } + entries[4] = qvalue.QValue{ + Kind: qvalue.QValueKindInt64, + Value: 2, + } + entries[5] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: itemsJSON, + } + entries[7] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: "", + } + tableMapping[typedRecord.DestinationTableName] += 1 + default: + return &model.QRecordOrError{ + Err: fmt.Errorf("unknown record type: %T", typedRecord), + } + } + + entries[0] = qvalue.QValue{ + Kind: qvalue.QValueKindString, + Value: uuid.New().String(), + } + entries[1] = qvalue.QValue{ + Kind: qvalue.QValueKindInt64, + Value: time.Now().UnixNano(), + } + entries[6] = qvalue.QValue{ + Kind: qvalue.QValueKindInt64, + Value: batchID, + } + + return &model.QRecordOrError{ + Record: &model.QRecord{ + NumEntries: 8, + Entries: entries[:], + }, + } +} diff --git a/flow/e2e/bigquery/peer_flow_bq_test.go b/flow/e2e/bigquery/peer_flow_bq_test.go index 649f59ede..400ecc3d0 100644 --- a/flow/e2e/bigquery/peer_flow_bq_test.go +++ b/flow/e2e/bigquery/peer_flow_bq_test.go @@ -924,6 +924,7 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Multi_Table_BQ() { env.AssertExpectations(s.T()) } +// TODO: not checking schema exactly, add later func (s *PeerFlowE2ETestSuiteBQ) Test_Simple_Schema_Changes_BQ() { env := s.NewTestWorkflowEnvironment() e2e.RegisterWorkflowsAndActivities(env) @@ -966,7 +967,6 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Simple_Schema_Changes_BQ() { // verify we got our first row. e2e.NormalizeFlowCountQuery(env, connectionGen, 2) - s.compareTableSchemasBQ("test_simple_schema_changes") s.compareTableContentsBQ("test_simple_schema_changes", "id,c1") // alter source table, add column c2 and insert another row. @@ -981,7 +981,6 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Simple_Schema_Changes_BQ() { // verify we got our two rows, if schema did not match up it will error. 
e2e.NormalizeFlowCountQuery(env, connectionGen, 4) - s.compareTableSchemasBQ("test_simple_schema_changes") s.compareTableContentsBQ("test_simple_schema_changes", "id,c1,c2") // alter source table, add column c3, drop column c2 and insert another row. @@ -996,7 +995,6 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Simple_Schema_Changes_BQ() { // verify we got our two rows, if schema did not match up it will error. e2e.NormalizeFlowCountQuery(env, connectionGen, 6) - s.compareTableSchemasBQ("test_simple_schema_changes") s.compareTableContentsBQ("test_simple_schema_changes", "id,c1,c3") // alter source table, drop column c3 and insert another row. @@ -1011,7 +1009,6 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Simple_Schema_Changes_BQ() { // verify we got our two rows, if schema did not match up it will error. e2e.NormalizeFlowCountQuery(env, connectionGen, 8) - s.compareTableSchemasBQ("test_simple_schema_changes") s.compareTableContentsBQ("test_simple_schema_changes", "id,c1") }() @@ -1027,3 +1024,232 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Simple_Schema_Changes_BQ() { env.AssertExpectations(s.T()) } + +func (s *PeerFlowE2ETestSuiteBQ) Test_Composite_PKey_BQ() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_simple_cpkey") + dstTableName := "test_simple_cpkey" + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + PRIMARY KEY(id,t) + ); + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.bqHelper.Peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + 
TotalSyncFlows: 4, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. + go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t) VALUES ($1,$2) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + // verify we got our 10 rows + e2e.NormalizeFlowCountQuery(env, connectionGen, 2) + s.compareTableContentsBQ(dstTableName, "id,c1,c2,t") + + _, err := s.pool.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + s.compareTableContentsBQ(dstTableName, "id,c1,c2,t") + + env.AssertExpectations(s.T()) +} + +func (s *PeerFlowE2ETestSuiteBQ) Test_Composite_PKey_Toast_1_BQ() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_cpkey_toast1") + dstTableName := "test_cpkey_toast1" + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + t2 TEXT, + PRIMARY KEY(id,t) + );CREATE OR REPLACE FUNCTION random_string( int ) RETURNS TEXT as $$ + SELECT 
string_agg(substring('0123456789bcdfghjkmnpqrstvwxyz', + round(random() * 30)::integer, 1), '') FROM generate_series(1, $1); + $$ language sql; + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_toast1_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.bqHelper.Peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 2, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. + go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + rowsTx, err := s.pool.Begin(context.Background()) + s.NoError(err) + + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = rowsTx.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t,t2) VALUES ($1,$2,random_string(9000)) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + _, err = rowsTx.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = rowsTx.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + + err = rowsTx.Commit(context.Background()) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + // verify our updates and delete happened + s.compareTableContentsBQ(dstTableName, "id,c1,c2,t,t2") + + 
env.AssertExpectations(s.T()) +} + +func (s *PeerFlowE2ETestSuiteBQ) Test_Composite_PKey_Toast_2_BQ() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_cpkey_toast2") + dstTableName := "test_cpkey_toast2" + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + t2 TEXT, + PRIMARY KEY(id,t) + );CREATE OR REPLACE FUNCTION random_string( int ) RETURNS TEXT as $$ + SELECT string_agg(substring('0123456789bcdfghjkmnpqrstvwxyz', + round(random() * 30)::integer, 1), '') FROM generate_series(1, $1); + $$ language sql; + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_toast2_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.bqHelper.Peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 4, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. 
+ go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t,t2) VALUES ($1,$2,random_string(9000)) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + e2e.NormalizeFlowCountQuery(env, connectionGen, 2) + _, err = s.pool.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + // verify our updates and delete happened + s.compareTableContentsBQ(dstTableName, "id,c1,c2,t,t2") + + env.AssertExpectations(s.T()) +} diff --git a/flow/e2e/bigquery/qrep_flow_bq_test.go b/flow/e2e/bigquery/qrep_flow_bq_test.go index fb0fc3b87..5e6374cc1 100644 --- a/flow/e2e/bigquery/qrep_flow_bq_test.go +++ b/flow/e2e/bigquery/qrep_flow_bq_test.go @@ -3,8 +3,6 @@ package e2e_bigquery import ( "context" "fmt" - "sort" - "strings" connpostgres "github.com/PeerDB-io/peer-flow/connectors/postgres" "github.com/PeerDB-io/peer-flow/e2e" @@ -29,32 +27,6 @@ func (s *PeerFlowE2ETestSuiteBQ) setupBQDestinationTable(dstTable string) { fmt.Printf("created table on bigquery: %s.%s. 
%v\n", s.bqHelper.Config.DatasetId, dstTable, err) } -func (s *PeerFlowE2ETestSuiteBQ) compareTableSchemasBQ(tableName string) { - // read rows from source table - pgQueryExecutor := connpostgres.NewQRepQueryExecutor(s.pool, context.Background(), "testflow", "testpart") - pgQueryExecutor.SetTestEnv(true) - - pgRows, err := pgQueryExecutor.ExecuteAndProcessQuery( - fmt.Sprintf("SELECT * FROM e2e_test_%s.%s ORDER BY id", bigquerySuffix, tableName), - ) - s.NoError(err) - sort.Slice(pgRows.Schema.Fields, func(i int, j int) bool { - return strings.Compare(pgRows.Schema.Fields[i].Name, pgRows.Schema.Fields[j].Name) == -1 - }) - - // read rows from destination table - qualifiedTableName := fmt.Sprintf("`%s.%s`", s.bqHelper.Config.DatasetId, tableName) - bqRows, err := s.bqHelper.ExecuteAndProcessQuery( - fmt.Sprintf("SELECT * FROM %s ORDER BY id", qualifiedTableName), - ) - s.NoError(err) - sort.Slice(bqRows.Schema.Fields, func(i int, j int) bool { - return strings.Compare(bqRows.Schema.Fields[i].Name, bqRows.Schema.Fields[j].Name) == -1 - }) - - s.True(pgRows.Schema.EqualNames(bqRows.Schema), "schemas from source and destination tables are not equal") -} - func (s *PeerFlowE2ETestSuiteBQ) compareTableContentsBQ(tableName string, colsString string) { // read rows from source table pgQueryExecutor := connpostgres.NewQRepQueryExecutor(s.pool, context.Background(), "testflow", "testpart") diff --git a/flow/e2e/congen.go b/flow/e2e/congen.go index 5f5816ee5..c5228258b 100644 --- a/flow/e2e/congen.go +++ b/flow/e2e/congen.go @@ -34,6 +34,12 @@ func cleanPostgres(pool *pgxpool.Pool, suffix string) error { return fmt.Errorf("failed to drop e2e_test schema: %w", err) } + // drop the S3 metadata database if it exists + _, err = pool.Exec(context.Background(), "DROP SCHEMA IF EXISTS peerdb_s3_metadata CASCADE") + if err != nil { + return fmt.Errorf("failed to drop metadata schema: %w", err) + } + // drop all open slots with the given suffix _, err = pool.Exec( 
context.Background(), @@ -145,9 +151,17 @@ func GenerateSnowflakePeer(snowflakeConfig *protos.SnowflakeConfig) (*protos.Pee } func (c *FlowConnectionGenerationConfig) GenerateFlowConnectionConfigs() (*protos.FlowConnectionConfigs, error) { + tblMappings := []*protos.TableMapping{} + for k, v := range c.TableNameMapping { + tblMappings = append(tblMappings, &protos.TableMapping{ + SourceTableIdentifier: k, + DestinationTableIdentifier: v, + }) + } + ret := &protos.FlowConnectionConfigs{} ret.FlowJobName = c.FlowJobName - ret.TableNameMapping = c.TableNameMapping + ret.TableMappings = tblMappings ret.Source = GeneratePostgresPeer(c.PostgresPort) ret.Destination = c.Destination ret.CdcSyncMode = c.CDCSyncMode @@ -185,6 +199,7 @@ func (c *QRepFlowConnectionGenerationConfig) GenerateQRepConfig( ret.WriteMode = &protos.QRepWriteMode{ WriteType: protos.QRepWriteType_QREP_WRITE_MODE_APPEND, } + ret.NumRowsPerPartition = 1000 return ret, nil } diff --git a/flow/e2e/eventhub/eventhub_helper.go b/flow/e2e/eventhub/eventhub_helper.go deleted file mode 100644 index e488da428..000000000 --- a/flow/e2e/eventhub/eventhub_helper.go +++ /dev/null @@ -1,129 +0,0 @@ -package e2e_eventhub - -import ( - "context" - "sync" - - "github.com/Azure/azure-amqp-common-go/v4/aad" - "github.com/Azure/azure-amqp-common-go/v4/auth" - eventhub "github.com/Azure/azure-event-hubs-go/v3" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/PeerDB-io/peer-flow/generated/protos" - log "github.com/sirupsen/logrus" -) - -type EventHubTestHelper struct { - creds *azidentity.DefaultAzureCredential - ehConfig *protos.EventHubConfig - tokenProvider auth.TokenProvider -} - -func NewEventHubTestHelper(pgConf *protos.PostgresConfig) (*EventHubTestHelper, error) { - defaultAzureCreds, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Errorf("failed to get default azure credentials: %v", err) - return nil, err - } - log.Info("got default azure credentials") - - 
jwtTokenProvider, err := aad.NewJWTProvider(aad.JWTProviderWithEnvironmentVars()) - if err != nil { - log.Errorf("failed to get jwt token provider: %v", err) - return nil, err - } - log.Info("got jwt token provider") - - ehConfig := &protos.EventHubConfig{ - Namespace: "peerdb-dev", - ResourceGroup: "peerdb-resource", - Location: "eastus", - MetadataDb: pgConf, - } - - return &EventHubTestHelper{ - creds: defaultAzureCreds, - tokenProvider: jwtTokenProvider, - ehConfig: ehConfig, - }, nil -} - -func (h *EventHubTestHelper) GetPeer() *protos.Peer { - return &protos.Peer{ - Name: "test_eh_peer", - Type: protos.DBType_EVENTHUB, - Config: &protos.Peer_EventhubConfig{ - EventhubConfig: h.ehConfig, - }, - } -} - -// consume all messages from the eventhub with the given name. -// returns as a list of strings. -func (h *EventHubTestHelper) ConsumeAllMessages( - ctx context.Context, - name string, - expectedNum int, -) ([]string, error) { - hub, err := eventhub.NewHub(h.ehConfig.Namespace, name, h.tokenProvider) - if err != nil { - log.Errorf("failed to create eventhub hub [%s]: %v", name, err) - return nil, err - } - - var messages []string - - // create a WaitGroup to wait for all messages to be consumed - wg := sync.WaitGroup{} - wg.Add(expectedNum) - - handler := func(c context.Context, event *eventhub.Event) error { - messages = append(messages, string(event.Data)) - log.Infof("received message: %s", string(event.Data)) - wg.Done() - return nil - } - - // listen to each partition of the Event Hub - runtimeInfo, err := hub.GetRuntimeInformation(ctx) - if err != nil { - log.Errorf("failed to get runtime info for eventhub [%s]: %v", name, err) - return nil, err - } - - var listenerHandles []*eventhub.ListenerHandle - - for _, partitionID := range runtimeInfo.PartitionIDs { - // Start receiving messages - // - // Receive blocks while attempting to connect to hub, then runs until listenerHandle.Close() is called - // <- listenerHandle.Done() signals listener has stopped - 
// listenerHandle.Err() provides the last error the receiver encountered - listenerHandle, err := hub.Receive(ctx, partitionID, handler) - if err != nil { - log.Errorf("failed to receive messages from eventhub [%s]: %v", name, err) - return nil, err - } - - listenerHandles = append(listenerHandles, listenerHandle) - } - - // wait for all messages to be consumed - wg.Wait() - - // close all the listeners - for _, listenerHandle := range listenerHandles { - listenerHandle.Close(ctx) - } - - err = hub.Close(ctx) - if err != nil { - log.Errorf("failed to close eventhub [%s]: %v", name, err) - return nil, err - } - - return messages, nil -} - -func (h *EventHubTestHelper) CleanUp() error { - return nil -} diff --git a/flow/e2e/eventhub/peer_flow_eh_test.go b/flow/e2e/eventhub/peer_flow_eh_test.go deleted file mode 100644 index 546297550..000000000 --- a/flow/e2e/eventhub/peer_flow_eh_test.go +++ /dev/null @@ -1,163 +0,0 @@ -package e2e_eventhub - -import ( - "context" - "fmt" - "os" - "testing" - "time" - - "github.com/PeerDB-io/peer-flow/e2e" - util "github.com/PeerDB-io/peer-flow/utils" - peerflow "github.com/PeerDB-io/peer-flow/workflows" - "github.com/jackc/pgx/v5/pgxpool" - "github.com/joho/godotenv" - log "github.com/sirupsen/logrus" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - "go.temporal.io/sdk/testsuite" -) - -const eventhubSuffix = "eventhub" - -type PeerFlowE2ETestSuiteEH struct { - suite.Suite - testsuite.WorkflowTestSuite - - pool *pgxpool.Pool - ehHelper *EventHubTestHelper -} - -func TestPeerFlowE2ETestSuiteEH(t *testing.T) { - suite.Run(t, new(PeerFlowE2ETestSuiteEH)) -} - -func (s *PeerFlowE2ETestSuiteEH) setupEventHub() error { - enableEHT := os.Getenv("ENABLE_EVENT_HUB_TESTS") - if enableEHT == "" { - return nil - } - - pgConf := e2e.GetTestPostgresConf() - helper, err := NewEventHubTestHelper(pgConf) - if err != nil { - return err - } - - s.ehHelper = helper - return nil -} - -func (s *PeerFlowE2ETestSuiteEH) 
SetupSuite() { - err := godotenv.Load() - if err != nil { - // it's okay if the .env file is not present - // we will use the default values - log.Infof("Unable to load .env file, using default values from env") - } - - log.SetReportCaller(true) - - pool, err := e2e.SetupPostgres(eventhubSuffix) - if err != nil { - s.Fail("failed to setup postgres", err) - } - s.pool = pool - - err = s.setupEventHub() - if err != nil { - s.Fail("failed to setup eventhub", err) - } -} - -// Implement TearDownAllSuite interface to tear down the test suite -func (s *PeerFlowE2ETestSuiteEH) TearDownSuite() { - err := e2e.TearDownPostgres(s.pool, eventhubSuffix) - if err != nil { - s.Fail("failed to drop Postgres schema", err) - } - - if s.ehHelper != nil { - err = s.ehHelper.CleanUp() - if err != nil { - s.Fail("failed to clean up eventhub", err) - } - } -} - -func (s *PeerFlowE2ETestSuiteEH) Test_Complete_Simple_Flow_EH() { - if s.ehHelper == nil { - s.T().Skip("Skipping EventHub test") - } - - env := s.NewTestWorkflowEnvironment() - e2e.RegisterWorkflowsAndActivities(env) - - ru, err := util.RandomUInt64() - s.NoError(err) - - jobName := fmt.Sprintf("test_complete_single_col_flow_eh_%d", ru) - schemaQualifiedName := fmt.Sprintf("e2e_test.%s", jobName) - _, err = s.pool.Exec(context.Background(), ` - CREATE TABLE `+schemaQualifiedName+` ( - id SERIAL PRIMARY KEY, - key TEXT NOT NULL, - value TEXT NOT NULL - ); - `) - s.NoError(err) - - connectionGen := e2e.FlowConnectionGenerationConfig{ - FlowJobName: jobName, - TableNameMapping: map[string]string{schemaQualifiedName: jobName}, - PostgresPort: e2e.PostgresPort, - Destination: s.ehHelper.GetPeer(), - } - - flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() - s.NoError(err) - - peerFlowInput := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, - } - - // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup - // and then insert 10 rows into the source table - go func() { - 
e2e.SetupCDCFlowStatusQuery(env, connectionGen) - // insert 10 rows into the source table - for i := 0; i < 10; i++ { - testKey := fmt.Sprintf("test_key_%d", i) - testValue := fmt.Sprintf("test_value_%d", i) - _, err = s.pool.Exec(context.Background(), ` - INSERT INTO `+schemaQualifiedName+` (key, value) VALUES ($1, $2) - `, testKey, testValue) - s.NoError(err) - } - fmt.Println("Inserted 10 rows into the source table") - }() - - env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &peerFlowInput, nil) - - // Verify workflow completes without error - s.True(env.IsWorkflowCompleted()) - err = env.GetWorkflowError() - - // allow only continue as new error - s.Error(err) - s.Contains(err.Error(), "continue as new") - - // Verify that the destination table has 10 rows - // make context with timeout - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - msgs, err := s.ehHelper.ConsumeAllMessages(ctx, jobName, 10) - - require.NoError(s.T(), err) - - require.Equal(s.T(), 10, len(msgs)) - - env.AssertExpectations(s.T()) -} diff --git a/flow/e2e/postgres/peer_flow_pg_test.go b/flow/e2e/postgres/peer_flow_pg_test.go index 6198f605e..45666f036 100644 --- a/flow/e2e/postgres/peer_flow_pg_test.go +++ b/flow/e2e/postgres/peer_flow_pg_test.go @@ -5,6 +5,8 @@ import ( "fmt" "github.com/PeerDB-io/peer-flow/e2e" + "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/PeerDB-io/peer-flow/model/qvalue" peerflow "github.com/PeerDB-io/peer-flow/workflows" ) @@ -121,6 +123,19 @@ func (s *PeerFlowE2ETestSuitePG) Test_Simple_Schema_Changes_PG() { // verify we got our first row. 
e2e.NormalizeFlowCountQuery(env, connectionGen, 2) + expectedTableSchema := &protos.TableSchema{ + TableIdentifier: dstTableName, + Columns: map[string]string{ + "id": string(qvalue.QValueKindInt64), + "c1": string(qvalue.QValueKindInt64), + }, + PrimaryKeyColumns: []string{"id"}, + } + output, err := s.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{dstTableName}, + }) + s.NoError(err) + s.Equal(expectedTableSchema, output.TableNameSchemaMapping[dstTableName]) err = s.comparePGTables(srcTableName, dstTableName, "id,c1") s.NoError(err) @@ -136,7 +151,21 @@ func (s *PeerFlowE2ETestSuitePG) Test_Simple_Schema_Changes_PG() { // verify we got our two rows, if schema did not match up it will error. e2e.NormalizeFlowCountQuery(env, connectionGen, 4) - err = s.comparePGTables(srcTableName, dstTableName, "id,c1") + expectedTableSchema = &protos.TableSchema{ + TableIdentifier: dstTableName, + Columns: map[string]string{ + "id": string(qvalue.QValueKindInt64), + "c1": string(qvalue.QValueKindInt64), + "c2": string(qvalue.QValueKindInt64), + }, + PrimaryKeyColumns: []string{"id"}, + } + output, err = s.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{dstTableName}, + }) + s.NoError(err) + s.Equal(expectedTableSchema, output.TableNameSchemaMapping[dstTableName]) + err = s.comparePGTables(srcTableName, dstTableName, "id,c1,c2") s.NoError(err) // alter source table, add column c3, drop column c2 and insert another row. @@ -151,7 +180,22 @@ func (s *PeerFlowE2ETestSuitePG) Test_Simple_Schema_Changes_PG() { // verify we got our two rows, if schema did not match up it will error. 
e2e.NormalizeFlowCountQuery(env, connectionGen, 6) - err = s.comparePGTables(srcTableName, dstTableName, "id,c1") + expectedTableSchema = &protos.TableSchema{ + TableIdentifier: dstTableName, + Columns: map[string]string{ + "id": string(qvalue.QValueKindInt64), + "c1": string(qvalue.QValueKindInt64), + "c2": string(qvalue.QValueKindInt64), + "c3": string(qvalue.QValueKindInt64), + }, + PrimaryKeyColumns: []string{"id"}, + } + output, err = s.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{dstTableName}, + }) + s.NoError(err) + s.Equal(expectedTableSchema, output.TableNameSchemaMapping[dstTableName]) + err = s.comparePGTables(srcTableName, dstTableName, "id,c1,c3") s.NoError(err) // alter source table, drop column c3 and insert another row. @@ -166,6 +210,21 @@ func (s *PeerFlowE2ETestSuitePG) Test_Simple_Schema_Changes_PG() { // verify we got our two rows, if schema did not match up it will error. e2e.NormalizeFlowCountQuery(env, connectionGen, 8) + expectedTableSchema = &protos.TableSchema{ + TableIdentifier: dstTableName, + Columns: map[string]string{ + "id": string(qvalue.QValueKindInt64), + "c1": string(qvalue.QValueKindInt64), + "c2": string(qvalue.QValueKindInt64), + "c3": string(qvalue.QValueKindInt64), + }, + PrimaryKeyColumns: []string{"id"}, + } + output, err = s.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{dstTableName}, + }) + s.NoError(err) + s.Equal(expectedTableSchema, output.TableNameSchemaMapping[dstTableName]) err = s.comparePGTables(srcTableName, dstTableName, "id,c1") s.NoError(err) }() @@ -182,3 +241,236 @@ func (s *PeerFlowE2ETestSuitePG) Test_Simple_Schema_Changes_PG() { env.AssertExpectations(s.T()) } + +func (s *PeerFlowE2ETestSuitePG) Test_Composite_PKey_PG() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_simple_cpkey") + dstTableName := 
s.attachSchemaSuffix("test_simple_cpkey_dst") + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + PRIMARY KEY(id,t) + ); + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 4, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. + go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t) VALUES ($1,$2) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + // verify we got our 10 rows + e2e.NormalizeFlowCountQuery(env, connectionGen, 2) + err = s.comparePGTables(srcTableName, dstTableName, "id,c1,c2,t") + s.NoError(err) + + _, err := s.pool.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + err 
= s.comparePGTables(srcTableName, dstTableName, "id,c1,c2,t") + s.NoError(err) + + env.AssertExpectations(s.T()) +} + +func (s *PeerFlowE2ETestSuitePG) Test_Composite_PKey_Toast_1_PG() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_cpkey_toast1") + dstTableName := s.attachSchemaSuffix("test_cpkey_toast1_dst") + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + t2 TEXT, + PRIMARY KEY(id,t) + );CREATE OR REPLACE FUNCTION random_string( int ) RETURNS TEXT as $$ + SELECT string_agg(substring('0123456789bcdfghjkmnpqrstvwxyz', + round(random() * 30)::integer, 1), '') FROM generate_series(1, $1); + $$ language sql; + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_toast1_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 2, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. 
+ go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + rowsTx, err := s.pool.Begin(context.Background()) + s.NoError(err) + + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = rowsTx.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t,t2) VALUES ($1,$2,random_string(9000)) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + _, err = rowsTx.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = rowsTx.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + + err = rowsTx.Commit(context.Background()) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + // verify our updates and delete happened + err = s.comparePGTables(srcTableName, dstTableName, "id,c1,c2,t,t2") + s.NoError(err) + + env.AssertExpectations(s.T()) +} + +func (s *PeerFlowE2ETestSuitePG) Test_Composite_PKey_Toast_2_PG() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_cpkey_toast2") + dstTableName := s.attachSchemaSuffix("test_cpkey_toast2_dst") + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + t2 TEXT, + PRIMARY KEY(id,t) + );CREATE OR REPLACE FUNCTION random_string( int ) RETURNS TEXT as $$ + SELECT string_agg(substring('0123456789bcdfghjkmnpqrstvwxyz', + round(random() * 30)::integer, 1), '') FROM 
generate_series(1, $1); + $$ language sql; + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_toast2_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 4, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. + go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t,t2) VALUES ($1,$2,random_string(9000)) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + e2e.NormalizeFlowCountQuery(env, connectionGen, 2) + _, err = s.pool.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + // verify our updates and delete happened + err = s.comparePGTables(srcTableName, dstTableName, "id,c1,c2,t,t2") + s.NoError(err) + + env.AssertExpectations(s.T()) +} diff --git a/flow/e2e/postgres/qrep_flow_pg_test.go b/flow/e2e/postgres/qrep_flow_pg_test.go index 0aa56ea92..df1653b99 100644 --- 
a/flow/e2e/postgres/qrep_flow_pg_test.go +++ b/flow/e2e/postgres/qrep_flow_pg_test.go @@ -6,6 +6,7 @@ import ( "strings" "testing" + connpostgres "github.com/PeerDB-io/peer-flow/connectors/postgres" "github.com/PeerDB-io/peer-flow/e2e" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/jackc/pgx/v5/pgxpool" @@ -21,8 +22,9 @@ type PeerFlowE2ETestSuitePG struct { suite.Suite testsuite.WorkflowTestSuite - pool *pgxpool.Pool - peer *protos.Peer + pool *pgxpool.Pool + peer *protos.Peer + connector *connpostgres.PostgresConnector } func TestPeerFlowE2ETestSuitePG(t *testing.T) { @@ -46,6 +48,16 @@ func (s *PeerFlowE2ETestSuitePG) SetupSuite() { } s.pool = pool s.peer = generatePGPeer(e2e.GetTestPostgresConf()) + + s.connector, err = connpostgres.NewPostgresConnector(context.Background(), + &protos.PostgresConfig{ + Host: "localhost", + Port: 7132, + User: "postgres", + Password: "postgres", + Database: "postgres", + }) + s.NoError(err) } // Implement TearDownAllSuite interface to tear down the test suite @@ -130,11 +142,15 @@ func (s *PeerFlowE2ETestSuitePG) Test_Complete_QRep_Flow_Multi_Insert_PG() { numRows := 10 + //nolint:gosec srcTable := "test_qrep_flow_avro_pg_1" s.setupSourceTable(srcTable, numRows) + //nolint:gosec dstTable := "test_qrep_flow_avro_pg_2" - e2e.CreateSourceTableQRep(s.pool, postgresSuffix, dstTable) // the name is misleading, but this is the destination table + // the name is misleading, but this is the destination table + err := e2e.CreateSourceTableQRep(s.pool, postgresSuffix, dstTable) + s.NoError(err) srcSchemaQualified := fmt.Sprintf("%s_%s.%s", "e2e_test", postgresSuffix, srcTable) dstSchemaQualified := fmt.Sprintf("%s_%s.%s", "e2e_test", postgresSuffix, dstTable) diff --git a/flow/e2e/s3/cdc_s3_test.go b/flow/e2e/s3/cdc_s3_test.go new file mode 100644 index 000000000..aaca2a125 --- /dev/null +++ b/flow/e2e/s3/cdc_s3_test.go @@ -0,0 +1,157 @@ +package e2e_s3 + +import ( + "context" + "fmt" + "time" + + 
"github.com/PeerDB-io/peer-flow/e2e" + peerflow "github.com/PeerDB-io/peer-flow/workflows" + "github.com/stretchr/testify/require" +) + +func (s *PeerFlowE2ETestSuiteS3) attachSchemaSuffix(tableName string) string { + return fmt.Sprintf("e2e_test_%s.%s", s3Suffix, tableName) +} + +func (s *PeerFlowE2ETestSuiteS3) attachSuffix(input string) string { + return fmt.Sprintf("%s_%s", input, s3Suffix) +} + +func (s *PeerFlowE2ETestSuiteS3) Test_Complete_Simple_Flow_S3() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_simple_flow_s3") + dstTableName := fmt.Sprintf("%s.%s", "peerdb_test_s3", "test_simple_flow_s3") + flowJobName := s.attachSuffix("test_simple_flow") + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE %s ( + id SERIAL PRIMARY KEY, + key TEXT NOT NULL, + value TEXT NOT NULL + ); + `, srcTableName)) + s.NoError(err) + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: flowJobName, + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.s3Helper.GetPeer(), + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 5, + MaxBatchSize: 5, + } + + go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + s.NoError(err) + //insert 20 rows + for i := 1; i <= 20; i++ { + testKey := fmt.Sprintf("test_key_%d", i) + testValue := fmt.Sprintf("test_value_%d", i) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s (key, value) VALUES ($1, $2) + `, srcTableName), testKey, testValue) + s.NoError(err) + } + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + 
s.Error(err) + s.Contains(err.Error(), "continue as new") + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + fmt.Println("JobName: ", flowJobName) + files, err := s.s3Helper.ListAllFiles(ctx, flowJobName) + fmt.Println("Files in Test_Complete_Simple_Flow_S3: ", len(files)) + require.NoError(s.T(), err) + + require.Equal(s.T(), 4, len(files)) + + env.AssertExpectations(s.T()) +} + +func (s *PeerFlowE2ETestSuiteS3) Test_Complete_Simple_Flow_GCS_Interop() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + setupErr := s.setupS3("gcs") + if setupErr != nil { + s.Fail("failed to setup S3", setupErr) + } + + srcTableName := s.attachSchemaSuffix("test_simple_flow_gcs_interop") + dstTableName := fmt.Sprintf("%s.%s", "peerdb_test_gcs_interop", "test_simple_flow_gcs_interop") + flowJobName := s.attachSuffix("test_simple_flow") + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE %s ( + id SERIAL PRIMARY KEY, + key TEXT NOT NULL, + value TEXT NOT NULL + ); + `, srcTableName)) + s.NoError(err) + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: flowJobName, + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.s3Helper.GetPeer(), + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 5, + MaxBatchSize: 5, + } + + go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + s.NoError(err) + //insert 20 rows + for i := 1; i <= 20; i++ { + testKey := fmt.Sprintf("test_key_%d", i) + testValue := fmt.Sprintf("test_value_%d", i) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s (key, value) VALUES ($1, $2) + `, srcTableName), testKey, testValue) + s.NoError(err) + } + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + 
// Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + fmt.Println("JobName: ", flowJobName) + files, err := s.s3Helper.ListAllFiles(ctx, flowJobName) + fmt.Println("Files in Test_Complete_Simple_Flow_GCS: ", len(files)) + require.NoError(s.T(), err) + + require.Equal(s.T(), 4, len(files)) + + env.AssertExpectations(s.T()) +} diff --git a/flow/e2e/s3/qrep_flow_s3_test.go b/flow/e2e/s3/qrep_flow_s3_test.go index e47d23592..2fca18a70 100644 --- a/flow/e2e/s3/qrep_flow_s3_test.go +++ b/flow/e2e/s3/qrep_flow_s3_test.go @@ -37,8 +37,12 @@ func (s *PeerFlowE2ETestSuiteS3) setupSourceTable(tableName string, rowCount int s.NoError(err) } -func (s *PeerFlowE2ETestSuiteS3) setupS3() error { - helper, err := NewS3TestHelper() +func (s *PeerFlowE2ETestSuiteS3) setupS3(mode string) error { + switchToGCS := false + if mode == "gcs" { + switchToGCS = true + } + helper, err := NewS3TestHelper(switchToGCS) if err != nil { return err } @@ -63,7 +67,7 @@ func (s *PeerFlowE2ETestSuiteS3) SetupSuite() { } s.pool = pool - err = s.setupS3() + err = s.setupS3("s3") if err != nil { s.Fail("failed to setup S3", err) } diff --git a/flow/e2e/s3/s3_helper.go b/flow/e2e/s3/s3_helper.go index 9dbe958ee..7ea629ad2 100644 --- a/flow/e2e/s3/s3_helper.go +++ b/flow/e2e/s3/s3_helper.go @@ -2,9 +2,13 @@ package e2e_s3 import ( "context" + "encoding/json" "fmt" + "os" + "time" "github.com/PeerDB-io/peer-flow/connectors/utils" + "github.com/PeerDB-io/peer-flow/e2e" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/s3" @@ -12,27 +16,61 @@ import ( ) const ( - peerName string = "test_s3_peer" - bucketName string = "peerdb-test-bucket" - prefixName string = "test-s3" + peerName string = 
"test_s3_peer" ) type S3TestHelper struct { - client *s3.S3 - s3Config *protos.S3Config + client *s3.S3 + s3Config *protos.S3Config + bucketName string + prefix string } -func NewS3TestHelper() (*S3TestHelper, error) { - client, err := utils.CreateS3Client() +func NewS3TestHelper(switchToGCS bool) (*S3TestHelper, error) { + credsPath := os.Getenv("TEST_S3_CREDS") + bucketName := "peerdb-test-bucket" + if switchToGCS { + credsPath = os.Getenv("TEST_GCS_CREDS") + bucketName = "peerdb_staging" + } + + content, err := e2e.ReadFileToBytes(credsPath) + if err != nil { + return nil, fmt.Errorf("failed to read file: %w", err) + } + + var config utils.S3PeerCredentials + err = json.Unmarshal(content, &config) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal json: %w", err) + } + endpoint := "" + if switchToGCS { + endpoint = "https://storage.googleapis.com" + } + client, err := utils.CreateS3Client(config) if err != nil { return nil, err } - log.Infof("S3 client obtained") + prefix := fmt.Sprintf("peerdb_test/%d", time.Now().UnixNano()) return &S3TestHelper{ client, &protos.S3Config{ - Url: fmt.Sprintf("s3://%s/%s", bucketName, prefixName), + Url: fmt.Sprintf("s3://%s/%s", bucketName, prefix), + AccessKeyId: &config.AccessKeyID, + SecretAccessKey: &config.SecretAccessKey, + Region: &config.Region, + Endpoint: &endpoint, + MetadataDb: &protos.PostgresConfig{ + Host: "localhost", + Port: 7132, + Password: "postgres", + User: "postgres", + Database: "postgres", + }, }, + bucketName, + prefix, }, nil } @@ -52,8 +90,9 @@ func (h *S3TestHelper) ListAllFiles( ctx context.Context, jobName string, ) ([]*s3.Object, error) { - Bucket := bucketName - Prefix := fmt.Sprintf("%s/%s/", prefixName, jobName) + + Bucket := h.bucketName + Prefix := fmt.Sprintf("%s/%s/", h.prefix, jobName) files, err := h.client.ListObjects(&s3.ListObjectsInput{ Bucket: &Bucket, Prefix: &Prefix, @@ -68,8 +107,8 @@ func (h *S3TestHelper) ListAllFiles( // Delete all generated objects during the 
test func (h *S3TestHelper) CleanUp() error { - Bucket := bucketName - Prefix := prefixName + Bucket := h.bucketName + Prefix := h.prefix files, err := h.client.ListObjects(&s3.ListObjectsInput{ Bucket: &Bucket, Prefix: &Prefix, diff --git a/flow/e2e/snowflake/peer_flow_sf_test.go b/flow/e2e/snowflake/peer_flow_sf_test.go index 796402d03..9f92496eb 100644 --- a/flow/e2e/snowflake/peer_flow_sf_test.go +++ b/flow/e2e/snowflake/peer_flow_sf_test.go @@ -3,10 +3,13 @@ package e2e_snowflake import ( "context" "fmt" + "strings" "testing" + connsnowflake "github.com/PeerDB-io/peer-flow/connectors/snowflake" "github.com/PeerDB-io/peer-flow/e2e" "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/PeerDB-io/peer-flow/model/qvalue" peerflow "github.com/PeerDB-io/peer-flow/workflows" "github.com/jackc/pgx/v5/pgxpool" "github.com/joho/godotenv" @@ -21,8 +24,9 @@ type PeerFlowE2ETestSuiteSF struct { suite.Suite testsuite.WorkflowTestSuite - pool *pgxpool.Pool - sfHelper *SnowflakeTestHelper + pool *pgxpool.Pool + sfHelper *SnowflakeTestHelper + connector *connsnowflake.SnowflakeConnector } func TestPeerFlowE2ETestSuiteSF(t *testing.T) { @@ -69,6 +73,10 @@ func (s *PeerFlowE2ETestSuiteSF) SetupSuite() { if err != nil { s.Fail("failed to setup snowflake", err) } + + s.connector, err = connsnowflake.NewSnowflakeConnector(context.Background(), + s.sfHelper.Config) + s.NoError(err) } // Implement TearDownAllSuite interface to tear down the test suite @@ -84,6 +92,9 @@ func (s *PeerFlowE2ETestSuiteSF) TearDownSuite() { s.Fail("failed to clean up Snowflake", err) } } + + err = s.connector.Close() + s.NoError(err) } func (s *PeerFlowE2ETestSuiteSF) Test_Complete_Simple_Flow_SF() { @@ -220,6 +231,93 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_Simple_Flow_SF_Avro_CDC() { env.AssertExpectations(s.T()) } +func (s *PeerFlowE2ETestSuiteSF) Test_Invalid_Geo_SF_Avro_CDC() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := 
s.attachSchemaSuffix("test_invalid_geo_sf_avro_cdc") + dstTableName := fmt.Sprintf("%s.%s", s.sfHelper.testSchemaName, "test_invalid_geo_sf_avro_cdc") + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE %s ( + id SERIAL PRIMARY KEY, + line GEOMETRY(LINESTRING) NOT NULL, + poly GEOGRAPHY(POLYGON) NOT NULL + ); + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_invalid_geo_sf_avro_cdc"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.sfHelper.Peer, + CDCSyncMode: protos.QRepSyncMode_QREP_SYNC_MODE_STORAGE_AVRO, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 2, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert 10 rows into the source table + go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + // insert 10 rows into the source table + for i := 0; i < 4; i++ { + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s (line,poly) VALUES ($1,$2) + `, srcTableName), "010200000001000000000000000000F03F0000000000000040", + "0103000020e6100000010000000c0000001a8361d35dc64140afdb8d2b1bc3c9bf1b8ed4685fc641405ba64c"+ + "579dc2c9bf6a6ad95a5fc64140cd82767449c2c9bf9570fbf85ec641408a07944db9c2c9bf729a18a55ec6414021b8b748c7c2c9bfba46de4c"+ + "5fc64140f2567052abc2c9bf2df9c5925fc641409394e16573c2c9bf2df9c5925fc6414049eceda9afc1c9bfdd1cc1a05fc64140fe43faedebc0"+ + "c9bf4694f6065fc64140fe43faedebc0c9bfffe7305f5ec641406693d6f2ddc0c9bf1a8361d35dc64140afdb8d2b1bc3c9bf", + ) + s.NoError(err) + } + fmt.Println("Inserted 4 invalid geography rows into the source table") + for i := 4; i < 10; i++ { + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s (line,poly) VALUES ($1,$2) + `, srcTableName), 
"010200000002000000000000000000F03F000000000000004000000000000008400000000000001040", + "010300000001000000050000000000000000000000000000000000000000000000"+ + "00000000000000000000f03f000000000000f03f000000000000f03f0000000000"+ + "00f03f000000000000000000000000000000000000000000000000") + s.NoError(err) + } + fmt.Println("Inserted 6 valid geography rows and 10 total rows into source") + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + // We inserted 4 invalid shapes in each. + // They should have filtered out as null on destination + lineCount, err := s.sfHelper.CountNonNullRows("test_invalid_geo_sf_avro_cdc", "line") + s.NoError(err) + s.Equal(6, lineCount) + + polyCount, err := s.sfHelper.CountNonNullRows("test_invalid_geo_sf_avro_cdc", "poly") + s.NoError(err) + s.Equal(6, polyCount) + + // TODO: verify that the data is correctly synced to the destination table + // on the bigquery side + + env.AssertExpectations(s.T()) +} + func (s *PeerFlowE2ETestSuiteSF) Test_Toast_SF() { env := s.NewTestWorkflowEnvironment() e2e.RegisterWorkflowsAndActivities(env) @@ -585,7 +683,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Types_SF() { c14 INET,c15 INTEGER,c16 INTERVAL,c17 JSON,c18 JSONB,c21 MACADDR,c22 MONEY, c23 NUMERIC,c24 OID,c28 REAL,c29 SMALLINT,c30 SMALLSERIAL,c31 SERIAL,c32 TEXT, c33 TIMESTAMP,c34 TIMESTAMPTZ,c35 TIME, c36 TIMETZ,c37 TSQUERY,c38 TSVECTOR, - c39 TXID_SNAPSHOT,c40 UUID,c41 XML); + c39 TXID_SNAPSHOT,c40 UUID,c41 XML, c42 GEOMETRY(POINT), c43 GEOGRAPHY(POINT), + c44 GEOGRAPHY(POLYGON), c45 GEOGRAPHY(LINESTRING), c46 GEOMETRY(LINESTRING), c47 GEOMETRY(POLYGON)); CREATE OR REPLACE FUNCTION random_bytea(bytea_length integer) RETURNS bytea AS $body$ SELECT 
decode(string_agg(lpad(to_hex(width_bucket(random(), 0, 1, 256)-1),2,'0') ,''), 'hex') @@ -626,7 +725,10 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Types_SF() { 1.2,1.23,4::oid,1.23,1,1,1,'test',now(),now(),now()::time,now()::timetz, 'fat & rat'::tsquery,'a fat cat sat on a mat and ate a fat rat'::tsvector, txid_current_snapshot(), - '66073c38-b8df-4bdb-bbca-1c97596b8940'::uuid,xmlcomment('hello'); + '66073c38-b8df-4bdb-bbca-1c97596b8940'::uuid,xmlcomment('hello'), + 'POINT(1 2)','POINT(40.7128 -74.0060)','POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))', + 'LINESTRING(-74.0060 40.7128, -73.9352 40.7306, -73.9123 40.7831)','LINESTRING(0 0, 1 1, 2 2)', + 'POLYGON((-74.0060 40.7128, -73.9352 40.7306, -73.9123 40.7831, -74.0060 40.7128))'; `, srcTableName)) s.NoError(err) fmt.Println("Executed an insert with all types") @@ -645,7 +747,7 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Types_SF() { noNulls, err := s.sfHelper.CheckNull("test_types_sf", []string{"c41", "c1", "c2", "c3", "c4", "c6", "c39", "c40", "id", "c9", "c11", "c12", "c13", "c14", "c15", "c16", "c17", "c18", "c21", "c22", "c23", "c24", "c28", "c29", "c30", "c31", "c33", "c34", "c35", "c36", - "c37", "c38", "c7", "c8", "c32"}) + "c37", "c38", "c7", "c8", "c32", "c42", "c43", "c44", "c45", "c46"}) if err != nil { fmt.Println("error %w", err) } @@ -668,7 +770,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Types_SF_Avro_CDC() { c14 INET,c15 INTEGER,c16 INTERVAL,c17 JSON,c18 JSONB,c21 MACADDR,c22 MONEY, c23 NUMERIC,c24 OID,c28 REAL,c29 SMALLINT,c30 SMALLSERIAL,c31 SERIAL,c32 TEXT, c33 TIMESTAMP,c34 TIMESTAMPTZ,c35 TIME, c36 TIMETZ,c37 TSQUERY,c38 TSVECTOR, - c39 TXID_SNAPSHOT,c40 UUID,c41 XML); + c39 TXID_SNAPSHOT,c40 UUID,c41 XML, c42 GEOMETRY(POINT), c43 GEOGRAPHY(POINT), + c44 GEOGRAPHY(POLYGON), c45 GEOGRAPHY(LINESTRING), c46 GEOMETRY(LINESTRING), c47 GEOMETRY(POLYGON)); CREATE OR REPLACE FUNCTION random_bytea(bytea_length integer) RETURNS bytea AS $body$ SELECT decode(string_agg(lpad(to_hex(width_bucket(random(), 0, 1, 
256)-1),2,'0') ,''), 'hex') @@ -710,7 +813,10 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Types_SF_Avro_CDC() { 1.2,1.23,4::oid,1.23,1,1,1,'test',now(),now(),now()::time,now()::timetz, 'fat & rat'::tsquery,'a fat cat sat on a mat and ate a fat rat'::tsvector, txid_current_snapshot(), - '66073c38-b8df-4bdb-bbca-1c97596b8940'::uuid,xmlcomment('hello'); + '66073c38-b8df-4bdb-bbca-1c97596b8940'::uuid,xmlcomment('hello'), + 'POINT(1 2)','POINT(40.7128 -74.0060)','POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))', + 'LINESTRING(-74.0060 40.7128, -73.9352 40.7306, -73.9123 40.7831)','LINESTRING(0 0, 1 1, 2 2)', + 'POLYGON((-74.0060 40.7128, -73.9352 40.7306, -73.9123 40.7831, -74.0060 40.7128))'; `, srcTableName)) s.NoError(err) fmt.Println("Executed an insert with all types") @@ -729,7 +835,7 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Types_SF_Avro_CDC() { noNulls, err := s.sfHelper.CheckNull("test_types_sf_avro_cdc", []string{"c41", "c1", "c2", "c3", "c4", "c6", "c39", "c40", "id", "c9", "c11", "c12", "c13", "c14", "c15", "c16", "c17", "c18", "c21", "c22", "c23", "c24", "c28", "c29", "c30", "c31", "c33", "c34", "c35", "c36", - "c37", "c38", "c7", "c8", "c32"}) + "c37", "c38", "c7", "c8", "c32", "c42", "c43", "c44", "c45", "c46"}) if err != nil { fmt.Println("error %w", err) } @@ -841,7 +947,19 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Simple_Schema_Changes_SF() { // verify we got our first row. 
e2e.NormalizeFlowCountQuery(env, connectionGen, 2) - s.compareTableSchemasSF("test_simple_schema_changes") + expectedTableSchema := &protos.TableSchema{ + TableIdentifier: strings.ToUpper(dstTableName), + Columns: map[string]string{ + "ID": string(qvalue.QValueKindNumeric), + "C1": string(qvalue.QValueKindNumeric), + "_PEERDB_IS_DELETED": string(qvalue.QValueKindBoolean), + }, + } + output, err := s.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{dstTableName}, + }) + s.NoError(err) + s.Equal(expectedTableSchema, output.TableNameSchemaMapping[dstTableName]) s.compareTableContentsSF("test_simple_schema_changes", "id,c1", false) // alter source table, add column c2 and insert another row. @@ -856,7 +974,20 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Simple_Schema_Changes_SF() { // verify we got our two rows, if schema did not match up it will error. e2e.NormalizeFlowCountQuery(env, connectionGen, 4) - s.compareTableSchemasSF("test_simple_schema_changes") + expectedTableSchema = &protos.TableSchema{ + TableIdentifier: strings.ToUpper(dstTableName), + Columns: map[string]string{ + "ID": string(qvalue.QValueKindNumeric), + "C1": string(qvalue.QValueKindNumeric), + "C2": string(qvalue.QValueKindNumeric), + "_PEERDB_IS_DELETED": string(qvalue.QValueKindBoolean), + }, + } + output, err = s.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{dstTableName}, + }) + s.NoError(err) + s.Equal(expectedTableSchema, output.TableNameSchemaMapping[dstTableName]) s.compareTableContentsSF("test_simple_schema_changes", "id,c1,c2", false) // alter source table, add column c3, drop column c2 and insert another row. @@ -871,7 +1002,21 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Simple_Schema_Changes_SF() { // verify we got our two rows, if schema did not match up it will error. 
e2e.NormalizeFlowCountQuery(env, connectionGen, 6) - s.compareTableSchemasSF("test_simple_schema_changes") + expectedTableSchema = &protos.TableSchema{ + TableIdentifier: strings.ToUpper(dstTableName), + Columns: map[string]string{ + "ID": string(qvalue.QValueKindNumeric), + "C1": string(qvalue.QValueKindNumeric), + "C2": string(qvalue.QValueKindNumeric), + "C3": string(qvalue.QValueKindNumeric), + "_PEERDB_IS_DELETED": string(qvalue.QValueKindBoolean), + }, + } + output, err = s.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{dstTableName}, + }) + s.NoError(err) + s.Equal(expectedTableSchema, output.TableNameSchemaMapping[dstTableName]) s.compareTableContentsSF("test_simple_schema_changes", "id,c1,c3", false) // alter source table, drop column c3 and insert another row. @@ -886,7 +1031,21 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Simple_Schema_Changes_SF() { // verify we got our two rows, if schema did not match up it will error. e2e.NormalizeFlowCountQuery(env, connectionGen, 8) - s.compareTableSchemasSF("test_simple_schema_changes") + expectedTableSchema = &protos.TableSchema{ + TableIdentifier: strings.ToUpper(dstTableName), + Columns: map[string]string{ + "ID": string(qvalue.QValueKindNumeric), + "C1": string(qvalue.QValueKindNumeric), + "C2": string(qvalue.QValueKindNumeric), + "C3": string(qvalue.QValueKindNumeric), + "_PEERDB_IS_DELETED": string(qvalue.QValueKindBoolean), + }, + } + output, err = s.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{dstTableName}, + }) + s.NoError(err) + s.Equal(expectedTableSchema, output.TableNameSchemaMapping[dstTableName]) s.compareTableContentsSF("test_simple_schema_changes", "id,c1", false) }() @@ -902,3 +1061,233 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Simple_Schema_Changes_SF() { env.AssertExpectations(s.T()) } + +func (s *PeerFlowE2ETestSuiteSF) Test_Composite_PKey_SF() { + env := s.NewTestWorkflowEnvironment() + 
e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_simple_cpkey") + dstTableName := fmt.Sprintf("%s.%s", s.sfHelper.testSchemaName, "test_simple_cpkey") + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + PRIMARY KEY(id,t) + ); + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.sfHelper.Peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 5, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. 
+ go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t) VALUES ($1,$2) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + // verify we got our 10 rows + e2e.NormalizeFlowCountQuery(env, connectionGen, 2) + s.compareTableContentsSF("test_simple_cpkey", "id,c1,c2,t", false) + + _, err := s.pool.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + // verify our updates and delete happened + s.compareTableContentsSF("test_simple_cpkey", "id,c1,c2,t", false) + + env.AssertExpectations(s.T()) +} + +func (s *PeerFlowE2ETestSuiteSF) Test_Composite_PKey_Toast_1_SF() { + env := s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_cpkey_toast1") + dstTableName := fmt.Sprintf("%s.%s", s.sfHelper.testSchemaName, "test_cpkey_toast1") + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + t2 TEXT, + PRIMARY KEY(id,t) + );CREATE OR REPLACE FUNCTION random_string( int ) RETURNS TEXT as $$ + SELECT string_agg(substring('0123456789bcdfghjkmnpqrstvwxyz', + round(random() * 30)::integer, 1), '') FROM 
generate_series(1, $1); + $$ language sql; + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_toast1_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.sfHelper.Peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 2, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. + go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + rowsTx, err := s.pool.Begin(context.Background()) + s.NoError(err) + + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = rowsTx.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t,t2) VALUES ($1,$2,random_string(9000)) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + _, err = rowsTx.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = rowsTx.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + + err = rowsTx.Commit(context.Background()) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + // verify our updates and delete happened + s.compareTableContentsSF("test_cpkey_toast1", "id,c1,c2,t,t2", false) + + env.AssertExpectations(s.T()) +} + +func (s *PeerFlowE2ETestSuiteSF) Test_Composite_PKey_Toast_2_SF() { + env 
:= s.NewTestWorkflowEnvironment() + e2e.RegisterWorkflowsAndActivities(env) + + srcTableName := s.attachSchemaSuffix("test_cpkey_toast2") + dstTableName := fmt.Sprintf("%s.%s", s.sfHelper.testSchemaName, "test_cpkey_toast2") + + _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` + CREATE TABLE IF NOT EXISTS %s ( + id INT GENERATED ALWAYS AS IDENTITY, + c1 INT GENERATED BY DEFAULT AS IDENTITY, + c2 INT, + t TEXT, + t2 TEXT, + PRIMARY KEY(id,t) + );CREATE OR REPLACE FUNCTION random_string( int ) RETURNS TEXT as $$ + SELECT string_agg(substring('0123456789bcdfghjkmnpqrstvwxyz', + round(random() * 30)::integer, 1), '') FROM generate_series(1, $1); + $$ language sql; + `, srcTableName)) + s.NoError(err) + + connectionGen := e2e.FlowConnectionGenerationConfig{ + FlowJobName: s.attachSuffix("test_cpkey_toast2_flow"), + TableNameMapping: map[string]string{srcTableName: dstTableName}, + PostgresPort: e2e.PostgresPort, + Destination: s.sfHelper.Peer, + } + + flowConnConfig, err := connectionGen.GenerateFlowConnectionConfigs() + s.NoError(err) + + limits := peerflow.CDCFlowLimits{ + TotalSyncFlows: 4, + MaxBatchSize: 100, + } + + // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup + // and then insert, update and delete rows in the table. 
+ go func() { + e2e.SetupCDCFlowStatusQuery(env, connectionGen) + + // insert 10 rows into the source table + for i := 0; i < 10; i++ { + testValue := fmt.Sprintf("test_value_%d", i) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` + INSERT INTO %s(c2,t,t2) VALUES ($1,$2,random_string(9000)) + `, srcTableName), i, testValue) + s.NoError(err) + } + fmt.Println("Inserted 10 rows into the source table") + + e2e.NormalizeFlowCountQuery(env, connectionGen, 2) + _, err = s.pool.Exec(context.Background(), + fmt.Sprintf(`UPDATE %s SET c1=c1+1 WHERE MOD(c2,2)=$1`, srcTableName), 1) + s.NoError(err) + _, err = s.pool.Exec(context.Background(), fmt.Sprintf(`DELETE FROM %s WHERE MOD(c2,2)=$1`, srcTableName), 0) + s.NoError(err) + }() + + env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) + + // Verify workflow completes without error + s.True(env.IsWorkflowCompleted()) + err = env.GetWorkflowError() + + // allow only continue as new error + s.Error(err) + s.Contains(err.Error(), "continue as new") + + // verify our updates and delete happened + s.compareTableContentsSF("test_cpkey_toast2", "id,c1,c2,t,t2", false) + + env.AssertExpectations(s.T()) +} diff --git a/flow/e2e/snowflake/qrep_flow_sf_test.go b/flow/e2e/snowflake/qrep_flow_sf_test.go index 01d2532e5..cdcfaeca9 100644 --- a/flow/e2e/snowflake/qrep_flow_sf_test.go +++ b/flow/e2e/snowflake/qrep_flow_sf_test.go @@ -3,8 +3,6 @@ package e2e_snowflake import ( "context" "fmt" - "sort" - "strings" connpostgres "github.com/PeerDB-io/peer-flow/connectors/postgres" "github.com/PeerDB-io/peer-flow/e2e" @@ -32,33 +30,6 @@ func (s *PeerFlowE2ETestSuiteSF) setupSFDestinationTable(dstTable string) { fmt.Printf("created table on snowflake: %s.%s. 
%v\n", s.sfHelper.testSchemaName, dstTable, err) } -func (s *PeerFlowE2ETestSuiteSF) compareTableSchemasSF(tableName string) { - // read rows from source table - pgQueryExecutor := connpostgres.NewQRepQueryExecutor(s.pool, context.Background(), "testflow", "testpart") - pgQueryExecutor.SetTestEnv(true) - pgRows, err := pgQueryExecutor.ExecuteAndProcessQuery( - fmt.Sprintf("SELECT * FROM e2e_test_%s.%s LIMIT 0", snowflakeSuffix, tableName), - ) - require.NoError(s.T(), err) - sort.Slice(pgRows.Schema.Fields, func(i int, j int) bool { - return strings.Compare(pgRows.Schema.Fields[i].Name, pgRows.Schema.Fields[j].Name) == -1 - }) - - // read rows from destination table - qualifiedTableName := fmt.Sprintf("%s.%s", s.sfHelper.testSchemaName, tableName) - // excluding soft-delete column during schema conversion - sfSelQuery := fmt.Sprintf(`SELECT * EXCLUDE _PEERDB_IS_DELETED FROM %s LIMIT 0`, qualifiedTableName) - fmt.Printf("running query on snowflake: %s\n", sfSelQuery) - - sfRows, err := s.sfHelper.ExecuteAndProcessQuery(sfSelQuery) - require.NoError(s.T(), err) - sort.Slice(sfRows.Schema.Fields, func(i int, j int) bool { - return strings.Compare(sfRows.Schema.Fields[i].Name, sfRows.Schema.Fields[j].Name) == -1 - }) - - s.True(pgRows.Schema.EqualNames(sfRows.Schema), "schemas from source and destination tables are not equal") -} - func (s *PeerFlowE2ETestSuiteSF) compareTableContentsSF(tableName string, selector string, caseSensitive bool) { // read rows from source table pgQueryExecutor := connpostgres.NewQRepQueryExecutor(s.pool, context.Background(), "testflow", "testpart") @@ -137,7 +108,7 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_QRep_Flow_Avro_SF_Upsert_Simple() dstSchemaQualified := fmt.Sprintf("%s.%s", s.sfHelper.testSchemaName, tblName) - query := fmt.Sprintf("SELECT * FROM e2e_test_%s.%s WHERE updated_at >= {{.start}} AND updated_at < {{.end}}", + query := fmt.Sprintf("SELECT * FROM e2e_test_%s.%s WHERE updated_at BETWEEN {{.start}} AND {{.end}}", 
snowflakeSuffix, tblName) qrepConfig, err := e2e.CreateQRepWorkflowConfig( @@ -182,7 +153,7 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_QRep_Flow_Avro_SF_S3() { dstSchemaQualified := fmt.Sprintf("%s.%s", s.sfHelper.testSchemaName, tblName) - query := fmt.Sprintf("SELECT * FROM e2e_test_%s.%s WHERE updated_at >= {{.start}} AND updated_at < {{.end}}", + query := fmt.Sprintf("SELECT * FROM e2e_test_%s.%s WHERE updated_at BETWEEN {{.start}} AND {{.end}}", snowflakeSuffix, tblName) qrepConfig, err := e2e.CreateQRepWorkflowConfig( @@ -202,7 +173,6 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_QRep_Flow_Avro_SF_S3() { // Verify workflow completes without error s.True(env.IsWorkflowCompleted()) - // assert that error contains "invalid connection configs" err = env.GetWorkflowError() s.NoError(err) @@ -247,7 +217,6 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_QRep_Flow_Avro_SF_Upsert_XMIN() { // Verify workflow completes without error s.True(env.IsWorkflowCompleted()) - // assert that error contains "invalid connection configs" err = env.GetWorkflowError() s.NoError(err) @@ -269,7 +238,7 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_QRep_Flow_Avro_SF_S3_Integration( dstSchemaQualified := fmt.Sprintf("%s.%s", s.sfHelper.testSchemaName, tblName) - query := fmt.Sprintf("SELECT * FROM e2e_test_%s.%s WHERE updated_at >= {{.start}} AND updated_at < {{.end}}", + query := fmt.Sprintf("SELECT * FROM e2e_test_%s.%s WHERE updated_at BETWEEN {{.start}} AND {{.end}}", snowflakeSuffix, tblName) sfPeer := s.sfHelper.Peer @@ -292,7 +261,6 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_QRep_Flow_Avro_SF_S3_Integration( // Verify workflow completes without error s.True(env.IsWorkflowCompleted()) - // assert that error contains "invalid connection configs" err = env.GetWorkflowError() s.NoError(err) diff --git a/flow/e2e/snowflake/snowflake_helper.go b/flow/e2e/snowflake/snowflake_helper.go index 2a39daf93..8a96b1ad9 100644 --- a/flow/e2e/snowflake/snowflake_helper.go 
+++ b/flow/e2e/snowflake/snowflake_helper.go @@ -118,6 +118,16 @@ func (s *SnowflakeTestHelper) CountRows(tableName string) (int, error) { return int(res), nil } +// CountRows(tableName) returns the non-null number of rows in the given table. +func (s *SnowflakeTestHelper) CountNonNullRows(tableName string, columnName string) (int, error) { + res, err := s.testClient.CountNonNullRows(s.testSchemaName, tableName, columnName) + if err != nil { + return 0, err + } + + return int(res), nil +} + func (s *SnowflakeTestHelper) CheckNull(tableName string, colNames []string) (bool, error) { return s.testClient.CheckNull(s.testSchemaName, tableName, colNames) } diff --git a/flow/e2e/snowflake/snowflake_schema_delta_test.go b/flow/e2e/snowflake/snowflake_schema_delta_test.go new file mode 100644 index 000000000..d17b60f79 --- /dev/null +++ b/flow/e2e/snowflake/snowflake_schema_delta_test.go @@ -0,0 +1,204 @@ +package e2e_snowflake + +import ( + "context" + "fmt" + "testing" + + connsnowflake "github.com/PeerDB-io/peer-flow/connectors/snowflake" + "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/PeerDB-io/peer-flow/model/qvalue" + "github.com/stretchr/testify/suite" +) + +const schemaDeltaTestSchemaName = "PUBLIC" + +type SnowflakeSchemaDeltaTestSuite struct { + suite.Suite + connector *connsnowflake.SnowflakeConnector + sfTestHelper *SnowflakeTestHelper +} + +func (suite *SnowflakeSchemaDeltaTestSuite) failTestError(err error) { + if err != nil { + suite.FailNow(err.Error()) + } +} + +func (suite *SnowflakeSchemaDeltaTestSuite) SetupSuite() { + var err error + + suite.sfTestHelper, err = NewSnowflakeTestHelper() + suite.failTestError(err) + + suite.connector, err = connsnowflake.NewSnowflakeConnector(context.Background(), + suite.sfTestHelper.Config) + suite.failTestError(err) +} + +func (suite *SnowflakeSchemaDeltaTestSuite) TearDownSuite() { + err := suite.sfTestHelper.Cleanup() + suite.failTestError(err) + err = suite.connector.Close() + 
suite.failTestError(err) +} + +func (suite *SnowflakeSchemaDeltaTestSuite) TestSimpleAddColumn() { + tableName := fmt.Sprintf("%s.SIMPLE_ADD_COLUMN", schemaDeltaTestSchemaName) + err := suite.sfTestHelper.RunCommand(fmt.Sprintf("CREATE TABLE %s(ID TEXT PRIMARY KEY)", tableName)) + suite.failTestError(err) + + err = suite.connector.ReplayTableSchemaDeltas("schema_delta_flow", []*protos.TableSchemaDelta{{ + SrcTableName: tableName, + DstTableName: tableName, + AddedColumns: []*protos.DeltaAddedColumn{{ + ColumnName: "HI", + ColumnType: string(qvalue.QValueKindJSON), + }}, + }}) + suite.failTestError(err) + + output, err := suite.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{tableName}, + }) + suite.failTestError(err) + suite.Equal(&protos.TableSchema{ + TableIdentifier: tableName, + Columns: map[string]string{ + "ID": string(qvalue.QValueKindString), + "HI": string(qvalue.QValueKindJSON), + }, + }, output.TableNameSchemaMapping[tableName]) +} + +func (suite *SnowflakeSchemaDeltaTestSuite) TestAddAllColumnTypes() { + tableName := fmt.Sprintf("%s.ADD_DROP_ALL_COLUMN_TYPES", schemaDeltaTestSchemaName) + err := suite.sfTestHelper.RunCommand(fmt.Sprintf("CREATE TABLE %s(ID TEXT PRIMARY KEY)", tableName)) + suite.failTestError(err) + + expectedTableSchema := &protos.TableSchema{ + TableIdentifier: tableName, + // goal is to test all types we're currently mapping to, not all QValue types + Columns: map[string]string{ + "ID": string(qvalue.QValueKindString), + "C1": string(qvalue.QValueKindBoolean), + "C2": string(qvalue.QValueKindBytes), + "C3": string(qvalue.QValueKindDate), + "C4": string(qvalue.QValueKindFloat64), + "C5": string(qvalue.QValueKindJSON), + "C6": string(qvalue.QValueKindNumeric), + "C7": string(qvalue.QValueKindString), + "C8": string(qvalue.QValueKindTime), + "C9": string(qvalue.QValueKindTimestamp), + "C10": string(qvalue.QValueKindTimestampTZ), + }, + } + addedColumns := make([]*protos.DeltaAddedColumn, 0) + for 
columnName, columnType := range expectedTableSchema.Columns { + if columnName != "ID" { + addedColumns = append(addedColumns, &protos.DeltaAddedColumn{ + ColumnName: columnName, + ColumnType: columnType, + }) + } + } + + err = suite.connector.ReplayTableSchemaDeltas("schema_delta_flow", []*protos.TableSchemaDelta{{ + SrcTableName: tableName, + DstTableName: tableName, + AddedColumns: addedColumns, + }}) + suite.failTestError(err) + + output, err := suite.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{tableName}, + }) + suite.failTestError(err) + suite.Equal(expectedTableSchema, output.TableNameSchemaMapping[tableName]) +} + +func (suite *SnowflakeSchemaDeltaTestSuite) TestAddTrickyColumnNames() { + tableName := fmt.Sprintf("%s.ADD_DROP_TRICKY_COLUMN_NAMES", schemaDeltaTestSchemaName) + err := suite.sfTestHelper.RunCommand(fmt.Sprintf("CREATE TABLE %s(id TEXT PRIMARY KEY)", tableName)) + suite.failTestError(err) + + expectedTableSchema := &protos.TableSchema{ + TableIdentifier: tableName, + // strings.ToUpper also does Unicode uppercasing :) + Columns: map[string]string{ + "ID": string(qvalue.QValueKindString), + "C1": string(qvalue.QValueKindString), + "C 1": string(qvalue.QValueKindString), + "RIGHT": string(qvalue.QValueKindString), + "SELECT": string(qvalue.QValueKindString), + "XMIN": string(qvalue.QValueKindString), + "CARIÑO": string(qvalue.QValueKindString), + "±ªÞ³§": string(qvalue.QValueKindString), + "カラム": string(qvalue.QValueKindString), + }, + } + addedColumns := make([]*protos.DeltaAddedColumn, 0) + for columnName, columnType := range expectedTableSchema.Columns { + if columnName != "ID" { + addedColumns = append(addedColumns, &protos.DeltaAddedColumn{ + ColumnName: columnName, + ColumnType: columnType, + }) + } + } + + err = suite.connector.ReplayTableSchemaDeltas("schema_delta_flow", []*protos.TableSchemaDelta{{ + SrcTableName: tableName, + DstTableName: tableName, + AddedColumns: addedColumns, + }}) + 
suite.failTestError(err) + + output, err := suite.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{tableName}, + }) + suite.failTestError(err) + suite.Equal(expectedTableSchema, output.TableNameSchemaMapping[tableName]) +} + +func (suite *SnowflakeSchemaDeltaTestSuite) TestAddWhitespaceColumnNames() { + tableName := fmt.Sprintf("%s.ADD_DROP_WHITESPACE_COLUMN_NAMES", schemaDeltaTestSchemaName) + err := suite.sfTestHelper.RunCommand(fmt.Sprintf("CREATE TABLE %s(\" \" TEXT PRIMARY KEY)", tableName)) + suite.failTestError(err) + + expectedTableSchema := &protos.TableSchema{ + TableIdentifier: tableName, + Columns: map[string]string{ + " ": string(qvalue.QValueKindString), + " ": string(qvalue.QValueKindString), + " ": string(qvalue.QValueKindTime), + " ": string(qvalue.QValueKindDate), + }, + } + addedColumns := make([]*protos.DeltaAddedColumn, 0) + for columnName, columnType := range expectedTableSchema.Columns { + if columnName != " " { + addedColumns = append(addedColumns, &protos.DeltaAddedColumn{ + ColumnName: columnName, + ColumnType: columnType, + }) + } + } + + err = suite.connector.ReplayTableSchemaDeltas("schema_delta_flow", []*protos.TableSchemaDelta{{ + SrcTableName: tableName, + DstTableName: tableName, + AddedColumns: addedColumns, + }}) + suite.failTestError(err) + + output, err := suite.connector.GetTableSchema(&protos.GetTableSchemaBatchInput{ + TableIdentifiers: []string{tableName}, + }) + suite.failTestError(err) + suite.Equal(expectedTableSchema, output.TableNameSchemaMapping[tableName]) +} + +func TestSnowflakeSchemaDeltaTestSuite(t *testing.T) { + suite.Run(t, new(SnowflakeSchemaDeltaTestSuite)) +} diff --git a/flow/e2e/test_utils.go b/flow/e2e/test_utils.go index f3b3b00fa..4ea032bf3 100644 --- a/flow/e2e/test_utils.go +++ b/flow/e2e/test_utils.go @@ -152,7 +152,14 @@ func CreateSourceTableQRep(pool *pgxpool.Pool, suffix string, tableName string) "f7 jsonb", "f8 smallint", } - + if 
strings.Contains(tableName, "sf") { + tblFields = append(tblFields, "geometry_point geometry(point)", + "geography_point geography(point)", + "geometry_linestring geometry(linestring)", + "geography_linestring geography(linestring)", + "geometry_polygon geometry(polygon)", + "geography_polygon geography(polygon)") + } tblFieldStr := strings.Join(tblFields, ",") _, err := pool.Exec(context.Background(), fmt.Sprintf(` @@ -187,6 +194,13 @@ func PopulateSourceTable(pool *pgxpool.Pool, suffix string, tableName string, ro for i := 0; i < rowCount-1; i++ { id := uuid.New().String() ids = append(ids, id) + geoValues := "" + if strings.Contains(tableName, "sf") { + geoValues = `,'POINT(1 2)','POINT(40.7128 -74.0060)', + 'LINESTRING(0 0, 1 1, 2 2)', + 'LINESTRING(-74.0060 40.7128, -73.9352 40.7306, -73.9123 40.7831)', + 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))','POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))'` + } row := fmt.Sprintf(` ( '%s', '%s', CURRENT_TIMESTAMP, 3.86487206688919, CURRENT_TIMESTAMP, @@ -198,13 +212,19 @@ func PopulateSourceTable(pool *pgxpool.Pool, suffix string, tableName string, ro CURRENT_TIMESTAMP, 1, ARRAY['text1', 'text2'], ARRAY[123, 456], ARRAY[789, 012], ARRAY['varchar1', 'varchar2'], '{"key": 8.5}', '[{"key1": "value1", "key2": "value2", "key3": "value3"}]', - '{"key": "value"}', 15 + '{"key": "value"}', 15 %s )`, id, uuid.New().String(), uuid.New().String(), - uuid.New().String(), uuid.New().String(), uuid.New().String(), uuid.New().String()) + uuid.New().String(), uuid.New().String(), uuid.New().String(), uuid.New().String(), geoValues) rows = append(rows, row) } + geoColumns := "" + if strings.Contains(tableName, "sf") { + geoColumns = ",geometry_point, geography_point," + + "geometry_linestring, geography_linestring," + + "geometry_polygon, geography_polygon" + } _, err := pool.Exec(context.Background(), fmt.Sprintf(` INSERT INTO e2e_test_%s.%s ( id, card_id, "from", price, created_at, @@ -213,9 +233,10 @@ func PopulateSourceTable(pool *pgxpool.Pool, suffix 
string, tableName string, ro deal_id, ethereum_transaction_id, ignore_price, card_eth_value, paid_eth_price, card_bought_notified, address, account_id, asset_id, status, transaction_id, settled_at, reference_id, - settle_at, settlement_delay_reason, f1, f2, f3, f4, f5, f6, f7, f8 + settle_at, settlement_delay_reason, f1, f2, f3, f4, f5, f6, f7, f8 + %s ) VALUES %s; - `, suffix, tableName, strings.Join(rows, ","))) + `, suffix, tableName, geoColumns, strings.Join(rows, ","))) if err != nil { return err } diff --git a/flow/generated/protos/flow.pb.go b/flow/generated/protos/flow.pb.go index dd6de4cca..0dffeecc7 100644 --- a/flow/generated/protos/flow.pb.go +++ b/flow/generated/protos/flow.pb.go @@ -299,6 +299,69 @@ func (x *RelationMessage) GetColumns() []*RelationMessageColumn { return nil } +type TableMapping struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + SourceTableIdentifier string `protobuf:"bytes,1,opt,name=source_table_identifier,json=sourceTableIdentifier,proto3" json:"source_table_identifier,omitempty"` + DestinationTableIdentifier string `protobuf:"bytes,2,opt,name=destination_table_identifier,json=destinationTableIdentifier,proto3" json:"destination_table_identifier,omitempty"` + PartitionKey string `protobuf:"bytes,3,opt,name=partition_key,json=partitionKey,proto3" json:"partition_key,omitempty"` +} + +func (x *TableMapping) Reset() { + *x = TableMapping{} + if protoimpl.UnsafeEnabled { + mi := &file_flow_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TableMapping) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TableMapping) ProtoMessage() {} + +func (x *TableMapping) ProtoReflect() protoreflect.Message { + mi := &file_flow_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + 
ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TableMapping.ProtoReflect.Descriptor instead. +func (*TableMapping) Descriptor() ([]byte, []int) { + return file_flow_proto_rawDescGZIP(), []int{3} +} + +func (x *TableMapping) GetSourceTableIdentifier() string { + if x != nil { + return x.SourceTableIdentifier + } + return "" +} + +func (x *TableMapping) GetDestinationTableIdentifier() string { + if x != nil { + return x.DestinationTableIdentifier + } + return "" +} + +func (x *TableMapping) GetPartitionKey() string { + if x != nil { + return x.PartitionKey + } + return "" +} + type FlowConnectionConfigs struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -308,7 +371,7 @@ type FlowConnectionConfigs struct { Destination *Peer `protobuf:"bytes,2,opt,name=destination,proto3" json:"destination,omitempty"` FlowJobName string `protobuf:"bytes,3,opt,name=flow_job_name,json=flowJobName,proto3" json:"flow_job_name,omitempty"` TableSchema *TableSchema `protobuf:"bytes,4,opt,name=table_schema,json=tableSchema,proto3" json:"table_schema,omitempty"` - TableNameMapping map[string]string `protobuf:"bytes,5,rep,name=table_name_mapping,json=tableNameMapping,proto3" json:"table_name_mapping,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + TableMappings []*TableMapping `protobuf:"bytes,5,rep,name=table_mappings,json=tableMappings,proto3" json:"table_mappings,omitempty"` SrcTableIdNameMapping map[uint32]string `protobuf:"bytes,6,rep,name=src_table_id_name_mapping,json=srcTableIdNameMapping,proto3" json:"src_table_id_name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` TableNameSchemaMapping map[string]*TableSchema `protobuf:"bytes,7,rep,name=table_name_schema_mapping,json=tableNameSchemaMapping,proto3" json:"table_name_schema_mapping,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" 
protobuf_val:"bytes,2,opt,name=value,proto3"` // This is an optional peer that will be used to hold metadata in cases where @@ -331,12 +394,14 @@ type FlowConnectionConfigs struct { // the below two are for eventhub only PushBatchSize int64 `protobuf:"varint,21,opt,name=push_batch_size,json=pushBatchSize,proto3" json:"push_batch_size,omitempty"` PushParallelism int64 `protobuf:"varint,22,opt,name=push_parallelism,json=pushParallelism,proto3" json:"push_parallelism,omitempty"` + // if true, then the flow will be resynced + Resync bool `protobuf:"varint,23,opt,name=resync,proto3" json:"resync,omitempty"` } func (x *FlowConnectionConfigs) Reset() { *x = FlowConnectionConfigs{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[3] + mi := &file_flow_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -349,7 +414,7 @@ func (x *FlowConnectionConfigs) String() string { func (*FlowConnectionConfigs) ProtoMessage() {} func (x *FlowConnectionConfigs) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[3] + mi := &file_flow_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -362,7 +427,7 @@ func (x *FlowConnectionConfigs) ProtoReflect() protoreflect.Message { // Deprecated: Use FlowConnectionConfigs.ProtoReflect.Descriptor instead. 
func (*FlowConnectionConfigs) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{3} + return file_flow_proto_rawDescGZIP(), []int{4} } func (x *FlowConnectionConfigs) GetSource() *Peer { @@ -393,9 +458,9 @@ func (x *FlowConnectionConfigs) GetTableSchema() *TableSchema { return nil } -func (x *FlowConnectionConfigs) GetTableNameMapping() map[string]string { +func (x *FlowConnectionConfigs) GetTableMappings() []*TableMapping { if x != nil { - return x.TableNameMapping + return x.TableMappings } return nil } @@ -519,6 +584,178 @@ func (x *FlowConnectionConfigs) GetPushParallelism() int64 { return 0 } +func (x *FlowConnectionConfigs) GetResync() bool { + if x != nil { + return x.Resync + } + return false +} + +type RenameTableOption struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + CurrentName string `protobuf:"bytes,1,opt,name=current_name,json=currentName,proto3" json:"current_name,omitempty"` + NewName string `protobuf:"bytes,2,opt,name=new_name,json=newName,proto3" json:"new_name,omitempty"` +} + +func (x *RenameTableOption) Reset() { + *x = RenameTableOption{} + if protoimpl.UnsafeEnabled { + mi := &file_flow_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RenameTableOption) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RenameTableOption) ProtoMessage() {} + +func (x *RenameTableOption) ProtoReflect() protoreflect.Message { + mi := &file_flow_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RenameTableOption.ProtoReflect.Descriptor instead. 
+func (*RenameTableOption) Descriptor() ([]byte, []int) { + return file_flow_proto_rawDescGZIP(), []int{5} +} + +func (x *RenameTableOption) GetCurrentName() string { + if x != nil { + return x.CurrentName + } + return "" +} + +func (x *RenameTableOption) GetNewName() string { + if x != nil { + return x.NewName + } + return "" +} + +type RenameTablesInput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + FlowJobName string `protobuf:"bytes,1,opt,name=flow_job_name,json=flowJobName,proto3" json:"flow_job_name,omitempty"` + Peer *Peer `protobuf:"bytes,2,opt,name=peer,proto3" json:"peer,omitempty"` + RenameTableOptions []*RenameTableOption `protobuf:"bytes,3,rep,name=rename_table_options,json=renameTableOptions,proto3" json:"rename_table_options,omitempty"` +} + +func (x *RenameTablesInput) Reset() { + *x = RenameTablesInput{} + if protoimpl.UnsafeEnabled { + mi := &file_flow_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RenameTablesInput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RenameTablesInput) ProtoMessage() {} + +func (x *RenameTablesInput) ProtoReflect() protoreflect.Message { + mi := &file_flow_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RenameTablesInput.ProtoReflect.Descriptor instead. 
+func (*RenameTablesInput) Descriptor() ([]byte, []int) { + return file_flow_proto_rawDescGZIP(), []int{6} +} + +func (x *RenameTablesInput) GetFlowJobName() string { + if x != nil { + return x.FlowJobName + } + return "" +} + +func (x *RenameTablesInput) GetPeer() *Peer { + if x != nil { + return x.Peer + } + return nil +} + +func (x *RenameTablesInput) GetRenameTableOptions() []*RenameTableOption { + if x != nil { + return x.RenameTableOptions + } + return nil +} + +type RenameTablesOutput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + FlowJobName string `protobuf:"bytes,1,opt,name=flow_job_name,json=flowJobName,proto3" json:"flow_job_name,omitempty"` +} + +func (x *RenameTablesOutput) Reset() { + *x = RenameTablesOutput{} + if protoimpl.UnsafeEnabled { + mi := &file_flow_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RenameTablesOutput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RenameTablesOutput) ProtoMessage() {} + +func (x *RenameTablesOutput) ProtoReflect() protoreflect.Message { + mi := &file_flow_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RenameTablesOutput.ProtoReflect.Descriptor instead. 
+func (*RenameTablesOutput) Descriptor() ([]byte, []int) { + return file_flow_proto_rawDescGZIP(), []int{7} +} + +func (x *RenameTablesOutput) GetFlowJobName() string { + if x != nil { + return x.FlowJobName + } + return "" +} + type SyncFlowOptions struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -531,7 +768,7 @@ type SyncFlowOptions struct { func (x *SyncFlowOptions) Reset() { *x = SyncFlowOptions{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[4] + mi := &file_flow_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -544,7 +781,7 @@ func (x *SyncFlowOptions) String() string { func (*SyncFlowOptions) ProtoMessage() {} func (x *SyncFlowOptions) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[4] + mi := &file_flow_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -557,7 +794,7 @@ func (x *SyncFlowOptions) ProtoReflect() protoreflect.Message { // Deprecated: Use SyncFlowOptions.ProtoReflect.Descriptor instead. 
func (*SyncFlowOptions) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{4} + return file_flow_proto_rawDescGZIP(), []int{8} } func (x *SyncFlowOptions) GetBatchSize() int32 { @@ -585,7 +822,7 @@ type NormalizeFlowOptions struct { func (x *NormalizeFlowOptions) Reset() { *x = NormalizeFlowOptions{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[5] + mi := &file_flow_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -598,7 +835,7 @@ func (x *NormalizeFlowOptions) String() string { func (*NormalizeFlowOptions) ProtoMessage() {} func (x *NormalizeFlowOptions) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[5] + mi := &file_flow_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -611,7 +848,7 @@ func (x *NormalizeFlowOptions) ProtoReflect() protoreflect.Message { // Deprecated: Use NormalizeFlowOptions.ProtoReflect.Descriptor instead. 
func (*NormalizeFlowOptions) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{5} + return file_flow_proto_rawDescGZIP(), []int{9} } func (x *NormalizeFlowOptions) GetBatchSize() int32 { @@ -633,7 +870,7 @@ type LastSyncState struct { func (x *LastSyncState) Reset() { *x = LastSyncState{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[6] + mi := &file_flow_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -646,7 +883,7 @@ func (x *LastSyncState) String() string { func (*LastSyncState) ProtoMessage() {} func (x *LastSyncState) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[6] + mi := &file_flow_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -659,7 +896,7 @@ func (x *LastSyncState) ProtoReflect() protoreflect.Message { // Deprecated: Use LastSyncState.ProtoReflect.Descriptor instead. 
func (*LastSyncState) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{6} + return file_flow_proto_rawDescGZIP(), []int{10} } func (x *LastSyncState) GetCheckpoint() int64 { @@ -690,7 +927,7 @@ type StartFlowInput struct { func (x *StartFlowInput) Reset() { *x = StartFlowInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[7] + mi := &file_flow_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -703,7 +940,7 @@ func (x *StartFlowInput) String() string { func (*StartFlowInput) ProtoMessage() {} func (x *StartFlowInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[7] + mi := &file_flow_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -716,7 +953,7 @@ func (x *StartFlowInput) ProtoReflect() protoreflect.Message { // Deprecated: Use StartFlowInput.ProtoReflect.Descriptor instead. 
func (*StartFlowInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{7} + return file_flow_proto_rawDescGZIP(), []int{11} } func (x *StartFlowInput) GetLastSyncState() *LastSyncState { @@ -758,7 +995,7 @@ type StartNormalizeInput struct { func (x *StartNormalizeInput) Reset() { *x = StartNormalizeInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[8] + mi := &file_flow_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -771,7 +1008,7 @@ func (x *StartNormalizeInput) String() string { func (*StartNormalizeInput) ProtoMessage() {} func (x *StartNormalizeInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[8] + mi := &file_flow_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -784,7 +1021,7 @@ func (x *StartNormalizeInput) ProtoReflect() protoreflect.Message { // Deprecated: Use StartNormalizeInput.ProtoReflect.Descriptor instead. 
func (*StartNormalizeInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{8} + return file_flow_proto_rawDescGZIP(), []int{12} } func (x *StartNormalizeInput) GetFlowConnectionConfigs() *FlowConnectionConfigs { @@ -806,7 +1043,7 @@ type GetLastSyncedIDInput struct { func (x *GetLastSyncedIDInput) Reset() { *x = GetLastSyncedIDInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[9] + mi := &file_flow_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -819,7 +1056,7 @@ func (x *GetLastSyncedIDInput) String() string { func (*GetLastSyncedIDInput) ProtoMessage() {} func (x *GetLastSyncedIDInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[9] + mi := &file_flow_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -832,7 +1069,7 @@ func (x *GetLastSyncedIDInput) ProtoReflect() protoreflect.Message { // Deprecated: Use GetLastSyncedIDInput.ProtoReflect.Descriptor instead. 
func (*GetLastSyncedIDInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{9} + return file_flow_proto_rawDescGZIP(), []int{13} } func (x *GetLastSyncedIDInput) GetPeerConnectionConfig() *Peer { @@ -862,7 +1099,7 @@ type EnsurePullabilityInput struct { func (x *EnsurePullabilityInput) Reset() { *x = EnsurePullabilityInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[10] + mi := &file_flow_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -875,7 +1112,7 @@ func (x *EnsurePullabilityInput) String() string { func (*EnsurePullabilityInput) ProtoMessage() {} func (x *EnsurePullabilityInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[10] + mi := &file_flow_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -888,7 +1125,7 @@ func (x *EnsurePullabilityInput) ProtoReflect() protoreflect.Message { // Deprecated: Use EnsurePullabilityInput.ProtoReflect.Descriptor instead. 
func (*EnsurePullabilityInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{10} + return file_flow_proto_rawDescGZIP(), []int{14} } func (x *EnsurePullabilityInput) GetPeerConnectionConfig() *Peer { @@ -925,7 +1162,7 @@ type EnsurePullabilityBatchInput struct { func (x *EnsurePullabilityBatchInput) Reset() { *x = EnsurePullabilityBatchInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[11] + mi := &file_flow_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -938,7 +1175,7 @@ func (x *EnsurePullabilityBatchInput) String() string { func (*EnsurePullabilityBatchInput) ProtoMessage() {} func (x *EnsurePullabilityBatchInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[11] + mi := &file_flow_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -951,7 +1188,7 @@ func (x *EnsurePullabilityBatchInput) ProtoReflect() protoreflect.Message { // Deprecated: Use EnsurePullabilityBatchInput.ProtoReflect.Descriptor instead. 
func (*EnsurePullabilityBatchInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{11} + return file_flow_proto_rawDescGZIP(), []int{15} } func (x *EnsurePullabilityBatchInput) GetPeerConnectionConfig() *Peer { @@ -986,7 +1223,7 @@ type PostgresTableIdentifier struct { func (x *PostgresTableIdentifier) Reset() { *x = PostgresTableIdentifier{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[12] + mi := &file_flow_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -999,7 +1236,7 @@ func (x *PostgresTableIdentifier) String() string { func (*PostgresTableIdentifier) ProtoMessage() {} func (x *PostgresTableIdentifier) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[12] + mi := &file_flow_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1012,7 +1249,7 @@ func (x *PostgresTableIdentifier) ProtoReflect() protoreflect.Message { // Deprecated: Use PostgresTableIdentifier.ProtoReflect.Descriptor instead. 
func (*PostgresTableIdentifier) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{12} + return file_flow_proto_rawDescGZIP(), []int{16} } func (x *PostgresTableIdentifier) GetRelId() uint32 { @@ -1036,7 +1273,7 @@ type TableIdentifier struct { func (x *TableIdentifier) Reset() { *x = TableIdentifier{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[13] + mi := &file_flow_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1049,7 +1286,7 @@ func (x *TableIdentifier) String() string { func (*TableIdentifier) ProtoMessage() {} func (x *TableIdentifier) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[13] + mi := &file_flow_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1062,7 +1299,7 @@ func (x *TableIdentifier) ProtoReflect() protoreflect.Message { // Deprecated: Use TableIdentifier.ProtoReflect.Descriptor instead. 
func (*TableIdentifier) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{13} + return file_flow_proto_rawDescGZIP(), []int{17} } func (m *TableIdentifier) GetTableIdentifier() isTableIdentifier_TableIdentifier { @@ -1100,7 +1337,7 @@ type EnsurePullabilityOutput struct { func (x *EnsurePullabilityOutput) Reset() { *x = EnsurePullabilityOutput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[14] + mi := &file_flow_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1113,7 +1350,7 @@ func (x *EnsurePullabilityOutput) String() string { func (*EnsurePullabilityOutput) ProtoMessage() {} func (x *EnsurePullabilityOutput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[14] + mi := &file_flow_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1126,7 +1363,7 @@ func (x *EnsurePullabilityOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use EnsurePullabilityOutput.ProtoReflect.Descriptor instead. 
func (*EnsurePullabilityOutput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{14} + return file_flow_proto_rawDescGZIP(), []int{18} } func (x *EnsurePullabilityOutput) GetTableIdentifier() *TableIdentifier { @@ -1147,7 +1384,7 @@ type EnsurePullabilityBatchOutput struct { func (x *EnsurePullabilityBatchOutput) Reset() { *x = EnsurePullabilityBatchOutput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[15] + mi := &file_flow_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1160,7 +1397,7 @@ func (x *EnsurePullabilityBatchOutput) String() string { func (*EnsurePullabilityBatchOutput) ProtoMessage() {} func (x *EnsurePullabilityBatchOutput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[15] + mi := &file_flow_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1173,7 +1410,7 @@ func (x *EnsurePullabilityBatchOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use EnsurePullabilityBatchOutput.ProtoReflect.Descriptor instead. 
func (*EnsurePullabilityBatchOutput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{15} + return file_flow_proto_rawDescGZIP(), []int{19} } func (x *EnsurePullabilityBatchOutput) GetTableIdentifierMapping() map[string]*TableIdentifier { @@ -1201,7 +1438,7 @@ type SetupReplicationInput struct { func (x *SetupReplicationInput) Reset() { *x = SetupReplicationInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[16] + mi := &file_flow_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1214,7 +1451,7 @@ func (x *SetupReplicationInput) String() string { func (*SetupReplicationInput) ProtoMessage() {} func (x *SetupReplicationInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[16] + mi := &file_flow_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1227,7 +1464,7 @@ func (x *SetupReplicationInput) ProtoReflect() protoreflect.Message { // Deprecated: Use SetupReplicationInput.ProtoReflect.Descriptor instead. 
func (*SetupReplicationInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{16} + return file_flow_proto_rawDescGZIP(), []int{20} } func (x *SetupReplicationInput) GetPeerConnectionConfig() *Peer { @@ -1291,7 +1528,7 @@ type SetupReplicationOutput struct { func (x *SetupReplicationOutput) Reset() { *x = SetupReplicationOutput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[17] + mi := &file_flow_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1304,7 +1541,7 @@ func (x *SetupReplicationOutput) String() string { func (*SetupReplicationOutput) ProtoMessage() {} func (x *SetupReplicationOutput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[17] + mi := &file_flow_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1317,7 +1554,7 @@ func (x *SetupReplicationOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use SetupReplicationOutput.ProtoReflect.Descriptor instead. 
func (*SetupReplicationOutput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{17} + return file_flow_proto_rawDescGZIP(), []int{21} } func (x *SetupReplicationOutput) GetSlotName() string { @@ -1348,7 +1585,7 @@ type CreateRawTableInput struct { func (x *CreateRawTableInput) Reset() { *x = CreateRawTableInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[18] + mi := &file_flow_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1361,7 +1598,7 @@ func (x *CreateRawTableInput) String() string { func (*CreateRawTableInput) ProtoMessage() {} func (x *CreateRawTableInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[18] + mi := &file_flow_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1374,7 +1611,7 @@ func (x *CreateRawTableInput) ProtoReflect() protoreflect.Message { // Deprecated: Use CreateRawTableInput.ProtoReflect.Descriptor instead. 
func (*CreateRawTableInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{18} + return file_flow_proto_rawDescGZIP(), []int{22} } func (x *CreateRawTableInput) GetPeerConnectionConfig() *Peer { @@ -1416,7 +1653,7 @@ type CreateRawTableOutput struct { func (x *CreateRawTableOutput) Reset() { *x = CreateRawTableOutput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[19] + mi := &file_flow_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1429,7 +1666,7 @@ func (x *CreateRawTableOutput) String() string { func (*CreateRawTableOutput) ProtoMessage() {} func (x *CreateRawTableOutput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[19] + mi := &file_flow_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1442,7 +1679,7 @@ func (x *CreateRawTableOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use CreateRawTableOutput.ProtoReflect.Descriptor instead. func (*CreateRawTableOutput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{19} + return file_flow_proto_rawDescGZIP(), []int{23} } func (x *CreateRawTableOutput) GetTableIdentifier() string { @@ -1461,14 +1698,14 @@ type TableSchema struct { // list of column names and types, types can be one of the following: // "string", "int", "float", "bool", "timestamp". 
Columns map[string]string `protobuf:"bytes,2,rep,name=columns,proto3" json:"columns,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - PrimaryKeyColumn string `protobuf:"bytes,3,opt,name=primary_key_column,json=primaryKeyColumn,proto3" json:"primary_key_column,omitempty"` + PrimaryKeyColumns []string `protobuf:"bytes,3,rep,name=primary_key_columns,json=primaryKeyColumns,proto3" json:"primary_key_columns,omitempty"` IsReplicaIdentityFull bool `protobuf:"varint,4,opt,name=is_replica_identity_full,json=isReplicaIdentityFull,proto3" json:"is_replica_identity_full,omitempty"` } func (x *TableSchema) Reset() { *x = TableSchema{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[20] + mi := &file_flow_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1481,7 +1718,7 @@ func (x *TableSchema) String() string { func (*TableSchema) ProtoMessage() {} func (x *TableSchema) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[20] + mi := &file_flow_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1494,7 +1731,7 @@ func (x *TableSchema) ProtoReflect() protoreflect.Message { // Deprecated: Use TableSchema.ProtoReflect.Descriptor instead. 
func (*TableSchema) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{20} + return file_flow_proto_rawDescGZIP(), []int{24} } func (x *TableSchema) GetTableIdentifier() string { @@ -1511,11 +1748,11 @@ func (x *TableSchema) GetColumns() map[string]string { return nil } -func (x *TableSchema) GetPrimaryKeyColumn() string { +func (x *TableSchema) GetPrimaryKeyColumns() []string { if x != nil { - return x.PrimaryKeyColumn + return x.PrimaryKeyColumns } - return "" + return nil } func (x *TableSchema) GetIsReplicaIdentityFull() bool { @@ -1537,7 +1774,7 @@ type GetTableSchemaBatchInput struct { func (x *GetTableSchemaBatchInput) Reset() { *x = GetTableSchemaBatchInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[21] + mi := &file_flow_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1550,7 +1787,7 @@ func (x *GetTableSchemaBatchInput) String() string { func (*GetTableSchemaBatchInput) ProtoMessage() {} func (x *GetTableSchemaBatchInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[21] + mi := &file_flow_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1563,7 +1800,7 @@ func (x *GetTableSchemaBatchInput) ProtoReflect() protoreflect.Message { // Deprecated: Use GetTableSchemaBatchInput.ProtoReflect.Descriptor instead. 
func (*GetTableSchemaBatchInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{21} + return file_flow_proto_rawDescGZIP(), []int{25} } func (x *GetTableSchemaBatchInput) GetPeerConnectionConfig() *Peer { @@ -1591,7 +1828,7 @@ type GetTableSchemaBatchOutput struct { func (x *GetTableSchemaBatchOutput) Reset() { *x = GetTableSchemaBatchOutput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[22] + mi := &file_flow_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1604,7 +1841,7 @@ func (x *GetTableSchemaBatchOutput) String() string { func (*GetTableSchemaBatchOutput) ProtoMessage() {} func (x *GetTableSchemaBatchOutput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[22] + mi := &file_flow_proto_msgTypes[26] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1617,7 +1854,7 @@ func (x *GetTableSchemaBatchOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use GetTableSchemaBatchOutput.ProtoReflect.Descriptor instead. 
func (*GetTableSchemaBatchOutput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{22} + return file_flow_proto_rawDescGZIP(), []int{26} } func (x *GetTableSchemaBatchOutput) GetTableNameSchemaMapping() map[string]*TableSchema { @@ -1640,7 +1877,7 @@ type SetupNormalizedTableInput struct { func (x *SetupNormalizedTableInput) Reset() { *x = SetupNormalizedTableInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[23] + mi := &file_flow_proto_msgTypes[27] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1653,7 +1890,7 @@ func (x *SetupNormalizedTableInput) String() string { func (*SetupNormalizedTableInput) ProtoMessage() {} func (x *SetupNormalizedTableInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[23] + mi := &file_flow_proto_msgTypes[27] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1666,7 +1903,7 @@ func (x *SetupNormalizedTableInput) ProtoReflect() protoreflect.Message { // Deprecated: Use SetupNormalizedTableInput.ProtoReflect.Descriptor instead. 
func (*SetupNormalizedTableInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{23} + return file_flow_proto_rawDescGZIP(), []int{27} } func (x *SetupNormalizedTableInput) GetPeerConnectionConfig() *Peer { @@ -1702,7 +1939,7 @@ type SetupNormalizedTableBatchInput struct { func (x *SetupNormalizedTableBatchInput) Reset() { *x = SetupNormalizedTableBatchInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[24] + mi := &file_flow_proto_msgTypes[28] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1715,7 +1952,7 @@ func (x *SetupNormalizedTableBatchInput) String() string { func (*SetupNormalizedTableBatchInput) ProtoMessage() {} func (x *SetupNormalizedTableBatchInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[24] + mi := &file_flow_proto_msgTypes[28] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1728,7 +1965,7 @@ func (x *SetupNormalizedTableBatchInput) ProtoReflect() protoreflect.Message { // Deprecated: Use SetupNormalizedTableBatchInput.ProtoReflect.Descriptor instead. 
func (*SetupNormalizedTableBatchInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{24} + return file_flow_proto_rawDescGZIP(), []int{28} } func (x *SetupNormalizedTableBatchInput) GetPeerConnectionConfig() *Peer { @@ -1757,7 +1994,7 @@ type SetupNormalizedTableOutput struct { func (x *SetupNormalizedTableOutput) Reset() { *x = SetupNormalizedTableOutput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[25] + mi := &file_flow_proto_msgTypes[29] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1770,7 +2007,7 @@ func (x *SetupNormalizedTableOutput) String() string { func (*SetupNormalizedTableOutput) ProtoMessage() {} func (x *SetupNormalizedTableOutput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[25] + mi := &file_flow_proto_msgTypes[29] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1783,7 +2020,7 @@ func (x *SetupNormalizedTableOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use SetupNormalizedTableOutput.ProtoReflect.Descriptor instead. 
func (*SetupNormalizedTableOutput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{25} + return file_flow_proto_rawDescGZIP(), []int{29} } func (x *SetupNormalizedTableOutput) GetTableIdentifier() string { @@ -1811,7 +2048,7 @@ type SetupNormalizedTableBatchOutput struct { func (x *SetupNormalizedTableBatchOutput) Reset() { *x = SetupNormalizedTableBatchOutput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[26] + mi := &file_flow_proto_msgTypes[30] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1824,7 +2061,7 @@ func (x *SetupNormalizedTableBatchOutput) String() string { func (*SetupNormalizedTableBatchOutput) ProtoMessage() {} func (x *SetupNormalizedTableBatchOutput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[26] + mi := &file_flow_proto_msgTypes[30] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1837,7 +2074,7 @@ func (x *SetupNormalizedTableBatchOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use SetupNormalizedTableBatchOutput.ProtoReflect.Descriptor instead. 
func (*SetupNormalizedTableBatchOutput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{26} + return file_flow_proto_rawDescGZIP(), []int{30} } func (x *SetupNormalizedTableBatchOutput) GetTableExistsMapping() map[string]bool { @@ -1860,7 +2097,7 @@ type IntPartitionRange struct { func (x *IntPartitionRange) Reset() { *x = IntPartitionRange{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[27] + mi := &file_flow_proto_msgTypes[31] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1873,7 +2110,7 @@ func (x *IntPartitionRange) String() string { func (*IntPartitionRange) ProtoMessage() {} func (x *IntPartitionRange) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[27] + mi := &file_flow_proto_msgTypes[31] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1886,7 +2123,7 @@ func (x *IntPartitionRange) ProtoReflect() protoreflect.Message { // Deprecated: Use IntPartitionRange.ProtoReflect.Descriptor instead. 
func (*IntPartitionRange) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{27} + return file_flow_proto_rawDescGZIP(), []int{31} } func (x *IntPartitionRange) GetStart() int64 { @@ -1915,7 +2152,7 @@ type TimestampPartitionRange struct { func (x *TimestampPartitionRange) Reset() { *x = TimestampPartitionRange{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[28] + mi := &file_flow_proto_msgTypes[32] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1928,7 +2165,7 @@ func (x *TimestampPartitionRange) String() string { func (*TimestampPartitionRange) ProtoMessage() {} func (x *TimestampPartitionRange) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[28] + mi := &file_flow_proto_msgTypes[32] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1941,7 +2178,7 @@ func (x *TimestampPartitionRange) ProtoReflect() protoreflect.Message { // Deprecated: Use TimestampPartitionRange.ProtoReflect.Descriptor instead. 
func (*TimestampPartitionRange) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{28} + return file_flow_proto_rawDescGZIP(), []int{32} } func (x *TimestampPartitionRange) GetStart() *timestamppb.Timestamp { @@ -1970,7 +2207,7 @@ type TID struct { func (x *TID) Reset() { *x = TID{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[29] + mi := &file_flow_proto_msgTypes[33] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1983,7 +2220,7 @@ func (x *TID) String() string { func (*TID) ProtoMessage() {} func (x *TID) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[29] + mi := &file_flow_proto_msgTypes[33] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1996,7 +2233,7 @@ func (x *TID) ProtoReflect() protoreflect.Message { // Deprecated: Use TID.ProtoReflect.Descriptor instead. func (*TID) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{29} + return file_flow_proto_rawDescGZIP(), []int{33} } func (x *TID) GetBlockNumber() uint32 { @@ -2025,7 +2262,7 @@ type TIDPartitionRange struct { func (x *TIDPartitionRange) Reset() { *x = TIDPartitionRange{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[30] + mi := &file_flow_proto_msgTypes[34] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2038,7 +2275,7 @@ func (x *TIDPartitionRange) String() string { func (*TIDPartitionRange) ProtoMessage() {} func (x *TIDPartitionRange) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[30] + mi := &file_flow_proto_msgTypes[34] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2051,7 +2288,7 @@ func (x *TIDPartitionRange) ProtoReflect() protoreflect.Message { // Deprecated: Use TIDPartitionRange.ProtoReflect.Descriptor instead. 
func (*TIDPartitionRange) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{30} + return file_flow_proto_rawDescGZIP(), []int{34} } func (x *TIDPartitionRange) GetStart() *TID { @@ -2068,61 +2305,6 @@ func (x *TIDPartitionRange) GetEnd() *TID { return nil } -type XMINPartitionRange struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Start uint32 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` - End uint32 `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` -} - -func (x *XMINPartitionRange) Reset() { - *x = XMINPartitionRange{} - if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[31] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *XMINPartitionRange) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*XMINPartitionRange) ProtoMessage() {} - -func (x *XMINPartitionRange) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[31] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use XMINPartitionRange.ProtoReflect.Descriptor instead. 
-func (*XMINPartitionRange) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{31} -} - -func (x *XMINPartitionRange) GetStart() uint32 { - if x != nil { - return x.Start - } - return 0 -} - -func (x *XMINPartitionRange) GetEnd() uint32 { - if x != nil { - return x.End - } - return 0 -} - type PartitionRange struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2135,14 +2317,13 @@ type PartitionRange struct { // *PartitionRange_IntRange // *PartitionRange_TimestampRange // *PartitionRange_TidRange - // *PartitionRange_XminRange Range isPartitionRange_Range `protobuf_oneof:"range"` } func (x *PartitionRange) Reset() { *x = PartitionRange{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[32] + mi := &file_flow_proto_msgTypes[35] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2155,7 +2336,7 @@ func (x *PartitionRange) String() string { func (*PartitionRange) ProtoMessage() {} func (x *PartitionRange) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[32] + mi := &file_flow_proto_msgTypes[35] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2168,7 +2349,7 @@ func (x *PartitionRange) ProtoReflect() protoreflect.Message { // Deprecated: Use PartitionRange.ProtoReflect.Descriptor instead. 
func (*PartitionRange) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{32} + return file_flow_proto_rawDescGZIP(), []int{35} } func (m *PartitionRange) GetRange() isPartitionRange_Range { @@ -2199,13 +2380,6 @@ func (x *PartitionRange) GetTidRange() *TIDPartitionRange { return nil } -func (x *PartitionRange) GetXminRange() *XMINPartitionRange { - if x, ok := x.GetRange().(*PartitionRange_XminRange); ok { - return x.XminRange - } - return nil -} - type isPartitionRange_Range interface { isPartitionRange_Range() } @@ -2222,18 +2396,12 @@ type PartitionRange_TidRange struct { TidRange *TIDPartitionRange `protobuf:"bytes,3,opt,name=tid_range,json=tidRange,proto3,oneof"` } -type PartitionRange_XminRange struct { - XminRange *XMINPartitionRange `protobuf:"bytes,4,opt,name=xmin_range,json=xminRange,proto3,oneof"` -} - func (*PartitionRange_IntRange) isPartitionRange_Range() {} func (*PartitionRange_TimestampRange) isPartitionRange_Range() {} func (*PartitionRange_TidRange) isPartitionRange_Range() {} -func (*PartitionRange_XminRange) isPartitionRange_Range() {} - type QRepWriteMode struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2246,7 +2414,7 @@ type QRepWriteMode struct { func (x *QRepWriteMode) Reset() { *x = QRepWriteMode{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[33] + mi := &file_flow_proto_msgTypes[36] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2259,7 +2427,7 @@ func (x *QRepWriteMode) String() string { func (*QRepWriteMode) ProtoMessage() {} func (x *QRepWriteMode) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[33] + mi := &file_flow_proto_msgTypes[36] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2272,7 +2440,7 @@ func (x *QRepWriteMode) ProtoReflect() protoreflect.Message { // Deprecated: Use QRepWriteMode.ProtoReflect.Descriptor 
instead. func (*QRepWriteMode) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{33} + return file_flow_proto_rawDescGZIP(), []int{36} } func (x *QRepWriteMode) GetWriteType() QRepWriteType { @@ -2303,9 +2471,11 @@ type QRepConfig struct { WatermarkColumn string `protobuf:"bytes,7,opt,name=watermark_column,json=watermarkColumn,proto3" json:"watermark_column,omitempty"` InitialCopyOnly bool `protobuf:"varint,8,opt,name=initial_copy_only,json=initialCopyOnly,proto3" json:"initial_copy_only,omitempty"` SyncMode QRepSyncMode `protobuf:"varint,9,opt,name=sync_mode,json=syncMode,proto3,enum=peerdb_flow.QRepSyncMode" json:"sync_mode,omitempty"` - BatchSizeInt uint32 `protobuf:"varint,10,opt,name=batch_size_int,json=batchSizeInt,proto3" json:"batch_size_int,omitempty"` - BatchDurationSeconds uint32 `protobuf:"varint,11,opt,name=batch_duration_seconds,json=batchDurationSeconds,proto3" json:"batch_duration_seconds,omitempty"` - MaxParallelWorkers uint32 `protobuf:"varint,12,opt,name=max_parallel_workers,json=maxParallelWorkers,proto3" json:"max_parallel_workers,omitempty"` + // DEPRECATED: eliminate when breaking changes are allowed. + BatchSizeInt uint32 `protobuf:"varint,10,opt,name=batch_size_int,json=batchSizeInt,proto3" json:"batch_size_int,omitempty"` + // DEPRECATED: eliminate when breaking changes are allowed. 
+ BatchDurationSeconds uint32 `protobuf:"varint,11,opt,name=batch_duration_seconds,json=batchDurationSeconds,proto3" json:"batch_duration_seconds,omitempty"` + MaxParallelWorkers uint32 `protobuf:"varint,12,opt,name=max_parallel_workers,json=maxParallelWorkers,proto3" json:"max_parallel_workers,omitempty"` // time to wait between getting partitions to process WaitBetweenBatchesSeconds uint32 `protobuf:"varint,13,opt,name=wait_between_batches_seconds,json=waitBetweenBatchesSeconds,proto3" json:"wait_between_batches_seconds,omitempty"` WriteMode *QRepWriteMode `protobuf:"bytes,14,opt,name=write_mode,json=writeMode,proto3" json:"write_mode,omitempty"` @@ -2320,12 +2490,14 @@ type QRepConfig struct { // and instead uses the number of rows per partition to determine // how many rows to process per batch. NumRowsPerPartition uint32 `protobuf:"varint,16,opt,name=num_rows_per_partition,json=numRowsPerPartition,proto3" json:"num_rows_per_partition,omitempty"` + // Creates the watermark table on the destination as-is, can be used for some queries. 
+ SetupWatermarkTableOnDestination bool `protobuf:"varint,17,opt,name=setup_watermark_table_on_destination,json=setupWatermarkTableOnDestination,proto3" json:"setup_watermark_table_on_destination,omitempty"` } func (x *QRepConfig) Reset() { *x = QRepConfig{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[34] + mi := &file_flow_proto_msgTypes[37] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2338,7 +2510,7 @@ func (x *QRepConfig) String() string { func (*QRepConfig) ProtoMessage() {} func (x *QRepConfig) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[34] + mi := &file_flow_proto_msgTypes[37] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2351,7 +2523,7 @@ func (x *QRepConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use QRepConfig.ProtoReflect.Descriptor instead. func (*QRepConfig) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{34} + return file_flow_proto_rawDescGZIP(), []int{37} } func (x *QRepConfig) GetFlowJobName() string { @@ -2466,6 +2638,13 @@ func (x *QRepConfig) GetNumRowsPerPartition() uint32 { return 0 } +func (x *QRepConfig) GetSetupWatermarkTableOnDestination() bool { + if x != nil { + return x.SetupWatermarkTableOnDestination + } + return false +} + type QRepPartition struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2479,7 +2658,7 @@ type QRepPartition struct { func (x *QRepPartition) Reset() { *x = QRepPartition{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[35] + mi := &file_flow_proto_msgTypes[38] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2492,7 +2671,7 @@ func (x *QRepPartition) String() string { func (*QRepPartition) ProtoMessage() {} func (x *QRepPartition) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[35] + mi := 
&file_flow_proto_msgTypes[38] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2505,7 +2684,7 @@ func (x *QRepPartition) ProtoReflect() protoreflect.Message { // Deprecated: Use QRepPartition.ProtoReflect.Descriptor instead. func (*QRepPartition) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{35} + return file_flow_proto_rawDescGZIP(), []int{38} } func (x *QRepPartition) GetPartitionId() string { @@ -2541,7 +2720,7 @@ type QRepPartitionBatch struct { func (x *QRepPartitionBatch) Reset() { *x = QRepPartitionBatch{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[36] + mi := &file_flow_proto_msgTypes[39] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2554,7 +2733,7 @@ func (x *QRepPartitionBatch) String() string { func (*QRepPartitionBatch) ProtoMessage() {} func (x *QRepPartitionBatch) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[36] + mi := &file_flow_proto_msgTypes[39] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2567,7 +2746,7 @@ func (x *QRepPartitionBatch) ProtoReflect() protoreflect.Message { // Deprecated: Use QRepPartitionBatch.ProtoReflect.Descriptor instead. 
func (*QRepPartitionBatch) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{36} + return file_flow_proto_rawDescGZIP(), []int{39} } func (x *QRepPartitionBatch) GetBatchId() int32 { @@ -2595,7 +2774,7 @@ type QRepParitionResult struct { func (x *QRepParitionResult) Reset() { *x = QRepParitionResult{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[37] + mi := &file_flow_proto_msgTypes[40] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2608,7 +2787,7 @@ func (x *QRepParitionResult) String() string { func (*QRepParitionResult) ProtoMessage() {} func (x *QRepParitionResult) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[37] + mi := &file_flow_proto_msgTypes[40] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2621,7 +2800,7 @@ func (x *QRepParitionResult) ProtoReflect() protoreflect.Message { // Deprecated: Use QRepParitionResult.ProtoReflect.Descriptor instead. 
func (*QRepParitionResult) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{37} + return file_flow_proto_rawDescGZIP(), []int{40} } func (x *QRepParitionResult) GetPartitions() []*QRepPartition { @@ -2642,7 +2821,7 @@ type DropFlowInput struct { func (x *DropFlowInput) Reset() { *x = DropFlowInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[38] + mi := &file_flow_proto_msgTypes[41] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2655,7 +2834,7 @@ func (x *DropFlowInput) String() string { func (*DropFlowInput) ProtoMessage() {} func (x *DropFlowInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[38] + mi := &file_flow_proto_msgTypes[41] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2668,7 +2847,7 @@ func (x *DropFlowInput) ProtoReflect() protoreflect.Message { // Deprecated: Use DropFlowInput.ProtoReflect.Descriptor instead. 
func (*DropFlowInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{38} + return file_flow_proto_rawDescGZIP(), []int{41} } func (x *DropFlowInput) GetFlowName() string { @@ -2690,7 +2869,7 @@ type DeltaAddedColumn struct { func (x *DeltaAddedColumn) Reset() { *x = DeltaAddedColumn{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[39] + mi := &file_flow_proto_msgTypes[42] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2703,7 +2882,7 @@ func (x *DeltaAddedColumn) String() string { func (*DeltaAddedColumn) ProtoMessage() {} func (x *DeltaAddedColumn) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[39] + mi := &file_flow_proto_msgTypes[42] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2716,7 +2895,7 @@ func (x *DeltaAddedColumn) ProtoReflect() protoreflect.Message { // Deprecated: Use DeltaAddedColumn.ProtoReflect.Descriptor instead. 
func (*DeltaAddedColumn) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{39} + return file_flow_proto_rawDescGZIP(), []int{42} } func (x *DeltaAddedColumn) GetColumnName() string { @@ -2738,16 +2917,15 @@ type TableSchemaDelta struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - SrcTableName string `protobuf:"bytes,1,opt,name=src_table_name,json=srcTableName,proto3" json:"src_table_name,omitempty"` - DstTableName string `protobuf:"bytes,2,opt,name=dst_table_name,json=dstTableName,proto3" json:"dst_table_name,omitempty"` - AddedColumns []*DeltaAddedColumn `protobuf:"bytes,3,rep,name=added_columns,json=addedColumns,proto3" json:"added_columns,omitempty"` - DroppedColumns []string `protobuf:"bytes,4,rep,name=dropped_columns,json=droppedColumns,proto3" json:"dropped_columns,omitempty"` + SrcTableName string `protobuf:"bytes,1,opt,name=src_table_name,json=srcTableName,proto3" json:"src_table_name,omitempty"` + DstTableName string `protobuf:"bytes,2,opt,name=dst_table_name,json=dstTableName,proto3" json:"dst_table_name,omitempty"` + AddedColumns []*DeltaAddedColumn `protobuf:"bytes,3,rep,name=added_columns,json=addedColumns,proto3" json:"added_columns,omitempty"` } func (x *TableSchemaDelta) Reset() { *x = TableSchemaDelta{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[40] + mi := &file_flow_proto_msgTypes[43] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2760,7 +2938,7 @@ func (x *TableSchemaDelta) String() string { func (*TableSchemaDelta) ProtoMessage() {} func (x *TableSchemaDelta) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[40] + mi := &file_flow_proto_msgTypes[43] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2773,7 +2951,7 @@ func (x *TableSchemaDelta) ProtoReflect() protoreflect.Message { // Deprecated: Use 
TableSchemaDelta.ProtoReflect.Descriptor instead. func (*TableSchemaDelta) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{40} + return file_flow_proto_rawDescGZIP(), []int{43} } func (x *TableSchemaDelta) GetSrcTableName() string { @@ -2797,26 +2975,19 @@ func (x *TableSchemaDelta) GetAddedColumns() []*DeltaAddedColumn { return nil } -func (x *TableSchemaDelta) GetDroppedColumns() []string { - if x != nil { - return x.DroppedColumns - } - return nil -} - type ReplayTableSchemaDeltaInput struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields FlowConnectionConfigs *FlowConnectionConfigs `protobuf:"bytes,1,opt,name=flow_connection_configs,json=flowConnectionConfigs,proto3" json:"flow_connection_configs,omitempty"` - TableSchemaDelta *TableSchemaDelta `protobuf:"bytes,2,opt,name=table_schema_delta,json=tableSchemaDelta,proto3" json:"table_schema_delta,omitempty"` + TableSchemaDeltas []*TableSchemaDelta `protobuf:"bytes,2,rep,name=table_schema_deltas,json=tableSchemaDeltas,proto3" json:"table_schema_deltas,omitempty"` } func (x *ReplayTableSchemaDeltaInput) Reset() { *x = ReplayTableSchemaDeltaInput{} if protoimpl.UnsafeEnabled { - mi := &file_flow_proto_msgTypes[41] + mi := &file_flow_proto_msgTypes[44] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2829,7 +3000,7 @@ func (x *ReplayTableSchemaDeltaInput) String() string { func (*ReplayTableSchemaDeltaInput) ProtoMessage() {} func (x *ReplayTableSchemaDeltaInput) ProtoReflect() protoreflect.Message { - mi := &file_flow_proto_msgTypes[41] + mi := &file_flow_proto_msgTypes[44] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2842,7 +3013,7 @@ func (x *ReplayTableSchemaDeltaInput) ProtoReflect() protoreflect.Message { // Deprecated: Use ReplayTableSchemaDeltaInput.ProtoReflect.Descriptor instead. 
func (*ReplayTableSchemaDeltaInput) Descriptor() ([]byte, []int) { - return file_flow_proto_rawDescGZIP(), []int{41} + return file_flow_proto_rawDescGZIP(), []int{44} } func (x *ReplayTableSchemaDeltaInput) GetFlowConnectionConfigs() *FlowConnectionConfigs { @@ -2852,9 +3023,9 @@ func (x *ReplayTableSchemaDeltaInput) GetFlowConnectionConfigs() *FlowConnection return nil } -func (x *ReplayTableSchemaDeltaInput) GetTableSchemaDelta() *TableSchemaDelta { +func (x *ReplayTableSchemaDeltaInput) GetTableSchemaDeltas() []*TableSchemaDelta { if x != nil { - return x.TableSchemaDelta + return x.TableSchemaDeltas } return nil } @@ -2889,7 +3060,18 @@ var file_flow_proto_rawDesc = []byte{ 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x07, 0x63, 0x6f, - 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0xb9, 0x0c, 0x0a, 0x15, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, + 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0xad, 0x01, 0x0a, 0x0c, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4d, + 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x36, 0x0a, 0x17, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, + 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x40, + 0x0a, 0x1c, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x61, + 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, + 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x61, 0x72, 
0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6b, 0x65, + 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x4b, 0x65, 0x79, 0x22, 0xe6, 0x0b, 0x0a, 0x15, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x2a, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, @@ -2903,302 +3085,317 @@ var file_flow_proto_rawDesc = []byte{ 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0b, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x12, 0x66, 0x0a, 0x12, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, - 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, - 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, - 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x73, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, - 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, - 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x77, 0x0a, 0x19, 0x73, 0x72, - 0x63, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, - 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3d, 0x2e, - 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, - 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x73, 0x2e, 0x53, 0x72, 0x63, 
0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x4e, 0x61, 0x6d, 0x65, - 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x15, 0x73, 0x72, - 0x63, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, - 0x69, 0x6e, 0x67, 0x12, 0x79, 0x0a, 0x19, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, - 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, - 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, - 0x4e, 0x61, 0x6d, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, - 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x16, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, - 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x37, - 0x0a, 0x0d, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x18, - 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, - 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x0c, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x50, 0x65, 0x65, 0x72, 0x12, 0x24, 0x0a, 0x0e, 0x6d, 0x61, 0x78, 0x5f, 0x62, - 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0d, 0x52, - 0x0c, 0x6d, 0x61, 0x78, 0x42, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x26, 0x0a, - 0x0f, 0x64, 0x6f, 0x5f, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x63, 0x6f, 0x70, 0x79, - 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x64, 0x6f, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, - 0x6c, 0x43, 0x6f, 0x70, 0x79, 0x12, 0x29, 0x0a, 0x10, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0b, 
0x20, 0x01, 0x28, 0x09, 0x52, - 0x0f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, - 0x12, 0x44, 0x0a, 0x1f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x6e, 0x75, 0x6d, - 0x5f, 0x72, 0x6f, 0x77, 0x73, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x1b, 0x73, 0x6e, 0x61, 0x70, 0x73, - 0x68, 0x6f, 0x74, 0x4e, 0x75, 0x6d, 0x52, 0x6f, 0x77, 0x73, 0x50, 0x65, 0x72, 0x50, 0x61, 0x72, - 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x41, 0x0a, 0x1d, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, - 0x6f, 0x74, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x5f, - 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x1a, 0x73, - 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x4d, 0x61, 0x78, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, - 0x65, 0x6c, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x12, 0x44, 0x0a, 0x1f, 0x73, 0x6e, 0x61, - 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, - 0x5f, 0x69, 0x6e, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x18, 0x0e, 0x20, 0x01, - 0x28, 0x0d, 0x52, 0x1b, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x4e, 0x75, 0x6d, 0x54, - 0x61, 0x62, 0x6c, 0x65, 0x73, 0x49, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x12, - 0x47, 0x0a, 0x12, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, - 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x70, 0x65, - 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x51, 0x52, 0x65, 0x70, 0x53, 0x79, - 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x10, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, - 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x3d, 0x0a, 0x0d, 0x63, 0x64, 0x63, 0x5f, - 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x19, 
0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x51, 0x52, - 0x65, 0x70, 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0b, 0x63, 0x64, 0x63, 0x53, - 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x73, 0x6e, 0x61, 0x70, 0x73, - 0x68, 0x6f, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, - 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, - 0x53, 0x74, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x74, 0x68, 0x12, 0x28, 0x0a, 0x10, 0x63, - 0x64, 0x63, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, - 0x12, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x64, 0x63, 0x53, 0x74, 0x61, 0x67, 0x69, 0x6e, - 0x67, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x6f, 0x66, 0x74, 0x5f, 0x64, 0x65, - 0x6c, 0x65, 0x74, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x73, 0x6f, 0x66, 0x74, - 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x14, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x53, 0x6c, 0x6f, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x70, 0x75, - 0x73, 0x68, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x15, 0x20, - 0x01, 0x28, 0x03, 0x52, 0x0d, 0x70, 0x75, 0x73, 0x68, 0x42, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, - 0x7a, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x70, 0x75, 0x73, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6c, - 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x18, 0x16, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x70, 0x75, - 0x73, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x1a, 0x43, 0x0a, - 0x15, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, - 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 
0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, - 0x38, 0x01, 0x1a, 0x48, 0x0a, 0x1a, 0x53, 0x72, 0x63, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, - 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x63, 0x0a, 0x1b, - 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x4d, - 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2e, 0x0a, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, - 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0x8d, 0x02, 0x0a, 0x0f, 0x53, 0x79, 0x6e, 0x63, 0x46, 0x6c, 0x6f, 0x77, 0x4f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, - 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, - 0x53, 0x69, 0x7a, 0x65, 0x12, 0x72, 0x0a, 0x18, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, - 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x46, 0x6c, 0x6f, 0x77, 
0x4f, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x16, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x1a, 0x67, 0x0a, 0x1b, 0x52, 0x65, 0x6c, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, - 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, - 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0x35, 0x0a, 0x14, 0x4e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x46, 0x6c, - 0x6f, 0x77, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, - 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, - 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x71, 0x0a, 0x0d, 0x4c, 0x61, 0x73, 0x74, - 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x68, 0x65, - 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x63, - 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x40, 0x0a, 0x0e, 0x6c, 0x61, 0x73, - 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0c, 0x6c, - 0x61, 0x73, 0x74, 
0x53, 0x79, 0x6e, 0x63, 0x65, 0x64, 0x41, 0x74, 0x22, 0xd6, 0x03, 0x0a, 0x0e, - 0x53, 0x74, 0x61, 0x72, 0x74, 0x46, 0x6c, 0x6f, 0x77, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x42, - 0x0a, 0x0f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, - 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4c, 0x61, 0x73, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x52, 0x0d, 0x6c, 0x61, 0x73, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x12, 0x5a, 0x0a, 0x17, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x15, 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x48, - 0x0a, 0x11, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, - 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x46, 0x6c, 0x6f, 0x77, - 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x0f, 0x73, 0x79, 0x6e, 0x63, 0x46, 0x6c, 0x6f, - 0x77, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x71, 0x0a, 0x18, 0x72, 0x65, 0x6c, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x70, - 0x70, 0x69, 0x6e, 0x67, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x70, 0x65, 0x65, - 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x46, 0x6c, - 0x6f, 0x77, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x52, 
0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x52, 0x16, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x1a, 0x67, 0x0a, 0x1b, 0x52, - 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x4d, 0x61, - 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, - 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x22, 0x71, 0x0a, 0x13, 0x53, 0x74, 0x61, 0x72, 0x74, 0x4e, 0x6f, 0x72, - 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x5a, 0x0a, 0x17, 0x66, - 0x6c, 0x6f, 0x77, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, - 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x43, - 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, - 0x52, 0x15, 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x22, 0x84, 0x01, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x4c, - 0x61, 0x73, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x65, 0x64, 0x49, 0x44, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x12, 0x48, 0x0a, 0x16, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 
0x0b, - 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, - 0x50, 0x65, 0x65, 0x72, 0x52, 0x14, 0x70, 0x65, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, - 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xbe, - 0x01, 0x0a, 0x16, 0x45, 0x6e, 0x73, 0x75, 0x72, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x48, 0x0a, 0x16, 0x70, 0x65, 0x65, - 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, - 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x14, 0x70, - 0x65, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, - 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x36, 0x0a, 0x17, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, - 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x22, - 0xc5, 0x01, 0x0a, 0x1b, 0x45, 0x6e, 0x73, 0x75, 0x72, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x61, 0x62, - 0x69, 0x6c, 0x69, 0x74, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, - 0x48, 0x0a, 0x16, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 
0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x6d, 0x61, 0x12, 0x40, 0x0a, 0x0e, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x70, + 0x69, 0x6e, 0x67, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4d, 0x61, + 0x70, 0x70, 0x69, 0x6e, 0x67, 0x52, 0x0d, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4d, 0x61, 0x70, 0x70, + 0x69, 0x6e, 0x67, 0x73, 0x12, 0x77, 0x0a, 0x19, 0x73, 0x72, 0x63, 0x5f, 0x74, 0x61, 0x62, 0x6c, + 0x65, 0x5f, 0x69, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, + 0x67, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, + 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x2e, 0x53, 0x72, 0x63, 0x54, + 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, + 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x15, 0x73, 0x72, 0x63, 0x54, 0x61, 0x62, 0x6c, 0x65, + 0x49, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x79, 0x0a, + 0x19, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x3e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, + 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x73, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x16, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x37, 0x0a, 
0x0d, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, - 0x65, 0x65, 0x72, 0x52, 0x14, 0x70, 0x65, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, - 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, - 0x18, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, - 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x16, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x73, 0x22, 0x30, 0x0a, 0x17, 0x50, 0x6f, 0x73, 0x74, 0x67, - 0x72, 0x65, 0x73, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, - 0x65, 0x72, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x65, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0d, 0x52, 0x05, 0x72, 0x65, 0x6c, 0x49, 0x64, 0x22, 0x89, 0x01, 0x0a, 0x0f, 0x54, 0x61, - 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x62, 0x0a, - 0x19, 0x70, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, - 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x24, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x50, - 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x48, 0x00, 0x52, 0x17, 0x70, 0x6f, 0x73, 0x74, 0x67, 0x72, - 0x65, 0x73, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, - 0x72, 
0x42, 0x12, 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, - 0x69, 0x66, 0x69, 0x65, 0x72, 0x22, 0x62, 0x0a, 0x17, 0x45, 0x6e, 0x73, 0x75, 0x72, 0x65, 0x50, - 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x12, 0x47, 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, - 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, - 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, - 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x49, - 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x22, 0x88, 0x02, 0x0a, 0x1c, 0x45, 0x6e, - 0x73, 0x75, 0x72, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x42, - 0x61, 0x74, 0x63, 0x68, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x7f, 0x0a, 0x18, 0x74, 0x61, - 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x5f, 0x6d, - 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x45, 0x2e, 0x70, - 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x45, 0x6e, 0x73, 0x75, 0x72, - 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x42, 0x61, 0x74, 0x63, - 0x68, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, - 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x52, 0x16, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, - 0x66, 0x69, 0x65, 0x72, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x1a, 0x67, 0x0a, 0x1b, 0x54, - 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x4d, 0x61, - 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 
0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, - 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, - 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x22, 0x9a, 0x04, 0x0a, 0x15, 0x53, 0x65, 0x74, 0x75, 0x70, 0x52, 0x65, - 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x48, + 0x65, 0x65, 0x72, 0x52, 0x0c, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x50, 0x65, 0x65, + 0x72, 0x12, 0x24, 0x0a, 0x0e, 0x6d, 0x61, 0x78, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, + 0x69, 0x7a, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x6d, 0x61, 0x78, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x64, 0x6f, 0x5f, 0x69, 0x6e, + 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x63, 0x6f, 0x70, 0x79, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0d, 0x64, 0x6f, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x43, 0x6f, 0x70, 0x79, 0x12, + 0x29, 0x0a, 0x10, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x70, 0x75, 0x62, 0x6c, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x44, 0x0a, 0x1f, 0x73, 0x6e, + 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x72, 0x6f, 0x77, 0x73, 0x5f, + 0x70, 0x65, 0x72, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0c, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x1b, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x4e, 0x75, 0x6d, + 0x52, 0x6f, 0x77, 0x73, 0x50, 0x65, 0x72, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x41, 0x0a, 0x1d, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x6d, 0x61, 0x78, + 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x5f, 0x77, 0x6f, 0x72, 
0x6b, 0x65, 0x72, + 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x1a, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, + 0x74, 0x4d, 0x61, 0x78, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x57, 0x6f, 0x72, 0x6b, + 0x65, 0x72, 0x73, 0x12, 0x44, 0x0a, 0x1f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, + 0x6e, 0x75, 0x6d, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x5f, 0x69, 0x6e, 0x5f, 0x70, 0x61, + 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x1b, 0x73, 0x6e, + 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x4e, 0x75, 0x6d, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x49, + 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x12, 0x47, 0x0a, 0x12, 0x73, 0x6e, 0x61, + 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, + 0x0f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, + 0x6c, 0x6f, 0x77, 0x2e, 0x51, 0x52, 0x65, 0x70, 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, + 0x52, 0x10, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, + 0x64, 0x65, 0x12, 0x3d, 0x0a, 0x0d, 0x63, 0x64, 0x63, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x6d, + 0x6f, 0x64, 0x65, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x51, 0x52, 0x65, 0x70, 0x53, 0x79, 0x6e, 0x63, + 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0b, 0x63, 0x64, 0x63, 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, + 0x65, 0x12, 0x32, 0x0a, 0x15, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x73, 0x74, + 0x61, 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x13, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x53, 0x74, 0x61, 0x67, 0x69, 0x6e, + 0x67, 0x50, 0x61, 0x74, 0x68, 0x12, 0x28, 0x0a, 0x10, 0x63, 0x64, 0x63, 0x5f, 0x73, 0x74, 0x61, + 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x12, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0e, 0x63, 0x64, 
0x63, 0x53, 0x74, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x74, 0x68, 0x12, + 0x1f, 0x0a, 0x0b, 0x73, 0x6f, 0x66, 0x74, 0x5f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x13, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x73, 0x6f, 0x66, 0x74, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, + 0x12, 0x32, 0x0a, 0x15, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x13, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x6c, 0x6f, 0x74, + 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x70, 0x75, 0x73, 0x68, 0x5f, 0x62, 0x61, 0x74, + 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x15, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x70, + 0x75, 0x73, 0x68, 0x42, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x29, 0x0a, 0x10, + 0x70, 0x75, 0x73, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, + 0x18, 0x16, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x70, 0x75, 0x73, 0x68, 0x50, 0x61, 0x72, 0x61, + 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x79, 0x6e, + 0x63, 0x18, 0x17, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x72, 0x65, 0x73, 0x79, 0x6e, 0x63, 0x1a, + 0x48, 0x0a, 0x1a, 0x53, 0x72, 0x63, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x4e, 0x61, 0x6d, + 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x63, 0x0a, 0x1b, 0x54, 0x61, 0x62, + 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x4d, 0x61, 0x70, 0x70, + 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 
0x79, 0x12, 0x2e, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x51, + 0x0a, 0x11, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6e, 0x65, 0x77, 0x5f, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6e, 0x65, 0x77, 0x4e, 0x61, 0x6d, + 0x65, 0x22, 0xb1, 0x01, 0x0a, 0x11, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x54, 0x61, 0x62, 0x6c, + 0x65, 0x73, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, + 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x04, 0x70, + 0x65, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x04, 0x70, + 0x65, 0x65, 0x72, 0x12, 0x50, 0x0a, 0x14, 0x72, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x74, 0x61, + 0x62, 0x6c, 0x65, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, + 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x12, 0x72, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x38, 0x0a, 0x12, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, 
0x54, + 0x61, 0x62, 0x6c, 0x65, 0x73, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x22, 0x0a, 0x0d, 0x66, + 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x22, + 0x8d, 0x02, 0x0a, 0x0f, 0x53, 0x79, 0x6e, 0x63, 0x46, 0x6c, 0x6f, 0x77, 0x4f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, + 0x7a, 0x65, 0x12, 0x72, 0x0a, 0x18, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, + 0x6f, 0x77, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x46, 0x6c, 0x6f, 0x77, 0x4f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x16, + 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x4d, + 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x1a, 0x67, 0x0a, 0x1b, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, + 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, + 0x35, 0x0a, 0x14, 0x4e, 0x6f, 
0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x46, 0x6c, 0x6f, 0x77, + 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, + 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, 0x61, 0x74, + 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x71, 0x0a, 0x0d, 0x4c, 0x61, 0x73, 0x74, 0x53, 0x79, + 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x68, 0x65, 0x63, 0x6b, + 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x63, 0x68, 0x65, + 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x40, 0x0a, 0x0e, 0x6c, 0x61, 0x73, 0x74, 0x5f, + 0x73, 0x79, 0x6e, 0x63, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0c, 0x6c, 0x61, 0x73, + 0x74, 0x53, 0x79, 0x6e, 0x63, 0x65, 0x64, 0x41, 0x74, 0x22, 0xd6, 0x03, 0x0a, 0x0e, 0x53, 0x74, + 0x61, 0x72, 0x74, 0x46, 0x6c, 0x6f, 0x77, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x42, 0x0a, 0x0f, + 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, + 0x6c, 0x6f, 0x77, 0x2e, 0x4c, 0x61, 0x73, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x52, 0x0d, 0x6c, 0x61, 0x73, 0x74, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x65, + 0x12, 0x5a, 0x0a, 0x17, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, + 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x15, 0x66, 0x6c, 0x6f, 0x77, 
0x43, 0x6f, 0x6e, 0x6e, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x48, 0x0a, 0x11, + 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, + 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x46, 0x6c, 0x6f, 0x77, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x0f, 0x73, 0x79, 0x6e, 0x63, 0x46, 0x6c, 0x6f, 0x77, 0x4f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x71, 0x0a, 0x18, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, + 0x6e, 0x67, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, + 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x46, 0x6c, 0x6f, 0x77, + 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x16, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x1a, 0x67, 0x0a, 0x1b, 0x52, 0x65, 0x6c, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x70, 0x70, + 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x71, 0x0a, 0x13, 0x53, 0x74, 0x61, 0x72, 0x74, 0x4e, 0x6f, 0x72, 0x6d, 0x61, + 0x6c, 
0x69, 0x7a, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x5a, 0x0a, 0x17, 0x66, 0x6c, 0x6f, + 0x77, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x15, + 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x73, 0x22, 0x84, 0x01, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x73, + 0x74, 0x53, 0x79, 0x6e, 0x63, 0x65, 0x64, 0x49, 0x44, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x48, 0x0a, 0x16, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x14, 0x70, 0x65, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x66, 0x0a, 0x12, - 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, - 0x6e, 0x67, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, - 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x53, 0x65, 0x74, 0x75, 0x70, 0x52, 0x65, 0x70, 0x6c, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x54, 0x61, 0x62, - 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x52, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 
0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, - 0x70, 0x69, 0x6e, 0x67, 0x12, 0x3d, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, - 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, - 0x65, 0x72, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, - 0x65, 0x65, 0x72, 0x12, 0x26, 0x0a, 0x0f, 0x64, 0x6f, 0x5f, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, - 0x6c, 0x5f, 0x63, 0x6f, 0x70, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x64, 0x6f, - 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x43, 0x6f, 0x70, 0x79, 0x12, 0x3a, 0x0a, 0x19, 0x65, - 0x78, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, - 0x65, 0x78, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x43, 0x0a, 0x1e, 0x65, 0x78, 0x69, 0x73, 0x74, - 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x1b, 0x65, 0x78, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x6c, 0x6f, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x43, 0x0a, 0x15, + 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xbe, 0x01, 0x0a, + 0x16, 0x45, 0x6e, 0x73, 0x75, 0x72, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x79, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x48, 0x0a, 0x16, 0x70, 0x65, 0x65, 0x72, 0x5f, + 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 
0x62, + 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x14, 0x70, 0x65, 0x65, + 0x72, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, + 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x36, 0x0a, 0x17, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, + 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x61, + 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x22, 0xc5, 0x01, + 0x0a, 0x1b, 0x45, 0x6e, 0x73, 0x75, 0x72, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x69, 0x6c, + 0x69, 0x74, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x48, 0x0a, + 0x16, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, + 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, + 0x72, 0x52, 0x14, 0x70, 0x65, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, + 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x18, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, + 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x16, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, + 0x66, 0x69, 0x65, 0x72, 0x73, 
0x22, 0x30, 0x0a, 0x17, 0x50, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, + 0x73, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, + 0x12, 0x15, 0x0a, 0x06, 0x72, 0x65, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x05, 0x72, 0x65, 0x6c, 0x49, 0x64, 0x22, 0x89, 0x01, 0x0a, 0x0f, 0x54, 0x61, 0x62, 0x6c, + 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x62, 0x0a, 0x19, 0x70, + 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, + 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, + 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x50, 0x6f, 0x73, + 0x74, 0x67, 0x72, 0x65, 0x73, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, + 0x66, 0x69, 0x65, 0x72, 0x48, 0x00, 0x52, 0x17, 0x70, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, + 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x42, + 0x12, 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, + 0x69, 0x65, 0x72, 0x22, 0x62, 0x0a, 0x17, 0x45, 0x6e, 0x73, 0x75, 0x72, 0x65, 0x50, 0x75, 0x6c, + 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x47, + 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, + 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, + 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, + 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x22, 0x88, 0x02, 0x0a, 0x1c, 0x45, 0x6e, 0x73, 0x75, + 0x72, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x42, 0x61, 0x74, + 0x63, 0x68, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x7f, 0x0a, 
0x18, 0x74, 0x61, 0x62, 0x6c, + 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x5f, 0x6d, 0x61, 0x70, + 0x70, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x45, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x45, 0x6e, 0x73, 0x75, 0x72, 0x65, 0x50, + 0x75, 0x6c, 0x6c, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x4f, + 0x75, 0x74, 0x70, 0x75, 0x74, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, + 0x69, 0x66, 0x69, 0x65, 0x72, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x16, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x72, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x1a, 0x67, 0x0a, 0x1b, 0x54, 0x61, 0x62, + 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x4d, 0x61, 0x70, 0x70, + 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x32, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, + 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x9a, 0x04, 0x0a, 0x15, 0x53, 0x65, 0x74, 0x75, 0x70, 0x52, 0x65, 0x70, 0x6c, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x48, 0x0a, 0x16, + 0x70, 0x65, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, + 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, + 0x52, 0x14, 0x70, 0x65, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, + 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, + 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x66, 0x0a, 0x12, 0x74, 0x61, + 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, + 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x53, 0x65, 0x74, 0x75, 0x70, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, + 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, + 0x6e, 0x67, 0x12, 0x3d, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, + 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, + 0x52, 0x0f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x65, 0x65, + 0x72, 0x12, 0x26, 0x0a, 0x0f, 0x64, 0x6f, 0x5f, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, + 0x63, 0x6f, 0x70, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x64, 0x6f, 0x49, 0x6e, + 0x69, 0x74, 0x69, 0x61, 0x6c, 0x43, 0x6f, 0x70, 0x79, 0x12, 0x3a, 0x0a, 0x19, 0x65, 0x78, 0x69, + 0x73, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x65, 0x78, + 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x43, 0x0a, 0x1e, 0x65, 0x78, 0x69, 0x73, 0x74, 0x69, 0x6e, + 0x67, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x69, 
0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x6c, + 0x6f, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b, 0x65, + 0x78, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x53, 0x6c, 0x6f, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x43, 0x0a, 0x15, 0x54, 0x61, + 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, + 0x5a, 0x0a, 0x16, 0x53, 0x65, 0x74, 0x75, 0x70, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x6c, 0x6f, + 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x6c, + 0x6f, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, + 0x6f, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, + 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xed, 0x02, 0x0a, 0x13, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x61, 0x77, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x12, 0x48, 0x0a, 0x16, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, + 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x14, 0x70, 0x65, 0x65, 0x72, 0x43, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, + 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 
0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, + 0x65, 0x12, 0x64, 0x0a, 0x12, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, + 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, + 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x43, 0x72, 0x65, 0x61, + 0x74, 0x65, 0x52, 0x61, 0x77, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0x5a, 0x0a, 0x16, 0x53, 0x65, 0x74, 0x75, 0x70, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x73, - 0x6c, 0x6f, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x73, 0x6c, 0x6f, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x73, 0x6e, 0x61, 0x70, - 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0c, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xed, 0x02, - 0x0a, 0x13, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x61, 0x77, 0x54, 0x61, 0x62, 0x6c, 0x65, - 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x48, 0x0a, 0x16, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x63, 0x6f, - 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, - 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x14, 0x70, 0x65, 0x65, 0x72, 0x43, - 0x6f, 0x6e, 0x6e, 
0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, - 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, - 0x61, 0x6d, 0x65, 0x12, 0x64, 0x0a, 0x12, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, - 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x36, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x43, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x52, 0x61, 0x77, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, - 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, - 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x3d, 0x0a, 0x0d, 0x63, 0x64, 0x63, - 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x19, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x51, - 0x52, 0x65, 0x70, 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0b, 0x63, 0x64, 0x63, - 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x1a, 0x43, 0x0a, 0x15, 0x54, 0x61, 0x62, 0x6c, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x41, 0x0a, - 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x61, 0x77, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, - 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, - 0x22, 0x9c, 0x02, 0x0a, 0x0b, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x12, 0x29, 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, - 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x61, 0x62, 0x6c, - 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x3f, 0x0a, 0x07, 0x63, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x70, - 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x2c, 0x0a, 0x12, - 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x63, 0x6f, 0x6c, 0x75, - 0x6d, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, - 0x79, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, 0x37, 0x0a, 0x18, 0x69, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, + 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x3d, 0x0a, 0x0d, 0x63, 0x64, 0x63, 0x5f, 0x73, + 0x79, 0x6e, 0x63, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, + 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x51, 0x52, 0x65, + 0x70, 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0b, 0x63, 0x64, 0x63, 0x53, 0x79, + 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x1a, 0x43, 0x0a, 0x15, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, + 0x61, 0x6d, 0x65, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 
0x65, + 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x41, 0x0a, 0x14, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x61, 0x77, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x75, 0x74, + 0x70, 0x75, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, + 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, + 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x22, 0x9e, + 0x02, 0x0a, 0x0b, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x29, + 0x0a, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x49, + 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x3f, 0x0a, 0x07, 0x63, 0x6f, 0x6c, + 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x70, 0x72, + 0x69, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x11, 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, + 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x37, 0x0a, 0x18, 0x69, 0x73, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x5f, 0x66, 0x75, 0x6c, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x15, 0x69, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x46, @@ -3310,29 +3507,21 @@ var 
file_flow_proto_rawDesc = []byte{ 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x49, 0x44, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x22, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x49, 0x44, 0x52, - 0x03, 0x65, 0x6e, 0x64, 0x22, 0x3c, 0x0a, 0x12, 0x58, 0x4d, 0x49, 0x4e, 0x50, 0x61, 0x72, 0x74, - 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, - 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x65, - 0x6e, 0x64, 0x22, 0xaa, 0x02, 0x0a, 0x0e, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, - 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x3d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x72, 0x61, 0x6e, - 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, - 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x49, 0x6e, 0x74, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x52, - 0x61, 0x6e, 0x67, 0x65, 0x12, 0x4f, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, - 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x61, - 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x3d, 0x0a, 0x09, 0x74, 0x69, 0x64, 0x5f, 0x72, 0x61, 0x6e, - 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, - 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x49, 0x44, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, - 0x69, 
0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x08, 0x74, 0x69, 0x64, 0x52, - 0x61, 0x6e, 0x67, 0x65, 0x12, 0x40, 0x0a, 0x0a, 0x78, 0x6d, 0x69, 0x6e, 0x5f, 0x72, 0x61, 0x6e, - 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, - 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x58, 0x4d, 0x49, 0x4e, 0x50, 0x61, 0x72, 0x74, 0x69, - 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x09, 0x78, 0x6d, 0x69, - 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, + 0x03, 0x65, 0x6e, 0x64, 0x22, 0xe8, 0x01, 0x0a, 0x0e, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x3d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x72, + 0x61, 0x6e, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x49, 0x6e, 0x74, 0x50, 0x61, 0x72, 0x74, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, + 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x4f, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x24, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x3d, 0x0a, 0x09, 0x74, 0x69, 0x64, 0x5f, 0x72, + 0x61, 0x6e, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x49, 0x44, 0x50, 0x61, 0x72, 0x74, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x08, 0x74, 0x69, + 0x64, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x42, 
0x07, 0x0a, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x78, 0x0a, 0x0d, 0x51, 0x52, 0x65, 0x70, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, @@ -3340,7 +3529,7 @@ var file_flow_proto_rawDesc = []byte{ 0x52, 0x09, 0x77, 0x72, 0x69, 0x74, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x2c, 0x0a, 0x12, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x10, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x4b, - 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0x96, 0x06, 0x0a, 0x0a, 0x51, 0x52, + 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0xe6, 0x06, 0x0a, 0x0a, 0x51, 0x52, 0x65, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, 0x0b, @@ -3390,6 +3579,11 @@ var file_flow_proto_rawDesc = []byte{ 0x16, 0x6e, 0x75, 0x6d, 0x5f, 0x72, 0x6f, 0x77, 0x73, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x13, 0x6e, 0x75, 0x6d, 0x52, 0x6f, 0x77, 0x73, 0x50, 0x65, 0x72, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x12, 0x4e, 0x0a, 0x24, 0x73, 0x65, 0x74, 0x75, 0x70, 0x5f, 0x77, 0x61, 0x74, 0x65, + 0x72, 0x6d, 0x61, 0x72, 0x6b, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6f, 0x6e, 0x5f, 0x64, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x11, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x20, 0x73, 0x65, 0x74, 0x75, 0x70, 0x57, 0x61, 0x74, 0x65, 0x72, 0x6d, 0x61, 0x72, 0x6b, + 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x6e, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x97, 0x01, 0x0a, 0x0d, 0x51, 0x52, 0x65, 0x70, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x61, 0x72, 0x74, @@ -3420,7 +3614,7 @@ var file_flow_proto_rawDesc = []byte{ 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x22, - 0xcb, 0x01, 0x0a, 0x10, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x44, + 0xa2, 0x01, 0x0a, 0x10, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x73, 0x72, 0x63, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, 0x72, 0x63, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x64, 0x73, @@ -3430,41 +3624,39 @@ var file_flow_proto_rawDesc = []byte{ 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x41, 0x64, 0x64, 0x65, 0x64, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x0c, 0x61, 0x64, 0x64, 0x65, 0x64, 0x43, 0x6f, 0x6c, - 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x27, 0x0a, 0x0f, 0x64, 0x72, 0x6f, 0x70, 0x70, 0x65, 0x64, 0x5f, - 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0e, 0x64, - 0x72, 0x6f, 0x70, 0x70, 0x65, 0x64, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0xc6, 0x01, - 0x0a, 0x1b, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x5a, 0x0a, - 0x17, 0x66, 0x6c, 
0x6f, 0x77, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, - 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, - 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x73, 0x52, 0x15, 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x4b, 0x0a, 0x12, 0x74, 0x61, 0x62, - 0x6c, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x44, - 0x65, 0x6c, 0x74, 0x61, 0x52, 0x10, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x2a, 0x50, 0x0a, 0x0c, 0x51, 0x52, 0x65, 0x70, 0x53, 0x79, - 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x1f, 0x0a, 0x1b, 0x51, 0x52, 0x45, 0x50, 0x5f, 0x53, - 0x59, 0x4e, 0x43, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x4d, 0x55, 0x4c, 0x54, 0x49, 0x5f, 0x49, - 0x4e, 0x53, 0x45, 0x52, 0x54, 0x10, 0x00, 0x12, 0x1f, 0x0a, 0x1b, 0x51, 0x52, 0x45, 0x50, 0x5f, - 0x53, 0x59, 0x4e, 0x43, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x53, 0x54, 0x4f, 0x52, 0x41, 0x47, - 0x45, 0x5f, 0x41, 0x56, 0x52, 0x4f, 0x10, 0x01, 0x2a, 0x66, 0x0a, 0x0d, 0x51, 0x52, 0x65, 0x70, - 0x57, 0x72, 0x69, 0x74, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x16, 0x51, 0x52, 0x45, - 0x50, 0x5f, 0x57, 0x52, 0x49, 0x54, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x41, 0x50, 0x50, - 0x45, 0x4e, 0x44, 0x10, 0x00, 0x12, 0x1a, 0x0a, 0x16, 0x51, 0x52, 0x45, 0x50, 0x5f, 0x57, 0x52, - 0x49, 0x54, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x55, 0x50, 0x53, 0x45, 0x52, 0x54, 0x10, - 0x01, 0x12, 0x1d, 0x0a, 0x19, 0x51, 0x52, 0x45, 0x50, 
0x5f, 0x57, 0x52, 0x49, 0x54, 0x45, 0x5f, - 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x4f, 0x56, 0x45, 0x52, 0x57, 0x52, 0x49, 0x54, 0x45, 0x10, 0x02, - 0x42, 0x76, 0x0a, 0x0f, 0x63, 0x6f, 0x6d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, - 0x6c, 0x6f, 0x77, 0x42, 0x09, 0x46, 0x6c, 0x6f, 0x77, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, - 0x5a, 0x10, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x73, 0xa2, 0x02, 0x03, 0x50, 0x58, 0x58, 0xaa, 0x02, 0x0a, 0x50, 0x65, 0x65, 0x72, 0x64, - 0x62, 0x46, 0x6c, 0x6f, 0x77, 0xca, 0x02, 0x0a, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x46, 0x6c, - 0x6f, 0x77, 0xe2, 0x02, 0x16, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x46, 0x6c, 0x6f, 0x77, 0x5c, - 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0a, 0x50, 0x65, - 0x65, 0x72, 0x64, 0x62, 0x46, 0x6c, 0x6f, 0x77, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x75, 0x6d, 0x6e, 0x73, 0x22, 0xc8, 0x01, 0x0a, 0x1b, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x54, + 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x49, + 0x6e, 0x70, 0x75, 0x74, 0x12, 0x5a, 0x0a, 0x17, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x63, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, + 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x15, 0x66, 0x6c, 0x6f, 0x77, 0x43, + 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, + 0x12, 0x4d, 0x0a, 0x13, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x5f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, + 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x54, 0x61, 0x62, 
0x6c, + 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x52, 0x11, 0x74, 0x61, + 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x73, 0x2a, + 0x50, 0x0a, 0x0c, 0x51, 0x52, 0x65, 0x70, 0x53, 0x79, 0x6e, 0x63, 0x4d, 0x6f, 0x64, 0x65, 0x12, + 0x1f, 0x0a, 0x1b, 0x51, 0x52, 0x45, 0x50, 0x5f, 0x53, 0x59, 0x4e, 0x43, 0x5f, 0x4d, 0x4f, 0x44, + 0x45, 0x5f, 0x4d, 0x55, 0x4c, 0x54, 0x49, 0x5f, 0x49, 0x4e, 0x53, 0x45, 0x52, 0x54, 0x10, 0x00, + 0x12, 0x1f, 0x0a, 0x1b, 0x51, 0x52, 0x45, 0x50, 0x5f, 0x53, 0x59, 0x4e, 0x43, 0x5f, 0x4d, 0x4f, + 0x44, 0x45, 0x5f, 0x53, 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, 0x5f, 0x41, 0x56, 0x52, 0x4f, 0x10, + 0x01, 0x2a, 0x66, 0x0a, 0x0d, 0x51, 0x52, 0x65, 0x70, 0x57, 0x72, 0x69, 0x74, 0x65, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x16, 0x51, 0x52, 0x45, 0x50, 0x5f, 0x57, 0x52, 0x49, 0x54, 0x45, + 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x41, 0x50, 0x50, 0x45, 0x4e, 0x44, 0x10, 0x00, 0x12, 0x1a, + 0x0a, 0x16, 0x51, 0x52, 0x45, 0x50, 0x5f, 0x57, 0x52, 0x49, 0x54, 0x45, 0x5f, 0x4d, 0x4f, 0x44, + 0x45, 0x5f, 0x55, 0x50, 0x53, 0x45, 0x52, 0x54, 0x10, 0x01, 0x12, 0x1d, 0x0a, 0x19, 0x51, 0x52, + 0x45, 0x50, 0x5f, 0x57, 0x52, 0x49, 0x54, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x4f, 0x56, + 0x45, 0x52, 0x57, 0x52, 0x49, 0x54, 0x45, 0x10, 0x02, 0x42, 0x76, 0x0a, 0x0f, 0x63, 0x6f, 0x6d, + 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x42, 0x09, 0x46, 0x6c, + 0x6f, 0x77, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x10, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0xa2, 0x02, 0x03, 0x50, 0x58, + 0x58, 0xaa, 0x02, 0x0a, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x46, 0x6c, 0x6f, 0x77, 0xca, 0x02, + 0x0a, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x46, 0x6c, 0x6f, 0x77, 0xe2, 0x02, 0x16, 0x50, 0x65, + 0x65, 0x72, 0x64, 0x62, 0x46, 0x6c, 0x6f, 0x77, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0xea, 
0x02, 0x0a, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x46, 0x6c, 0x6f, + 0x77, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -3480,135 +3672,138 @@ func file_flow_proto_rawDescGZIP() []byte { } var file_flow_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_flow_proto_msgTypes = make([]protoimpl.MessageInfo, 54) +var file_flow_proto_msgTypes = make([]protoimpl.MessageInfo, 56) var file_flow_proto_goTypes = []interface{}{ (QRepSyncMode)(0), // 0: peerdb_flow.QRepSyncMode (QRepWriteType)(0), // 1: peerdb_flow.QRepWriteType (*TableNameMapping)(nil), // 2: peerdb_flow.TableNameMapping (*RelationMessageColumn)(nil), // 3: peerdb_flow.RelationMessageColumn (*RelationMessage)(nil), // 4: peerdb_flow.RelationMessage - (*FlowConnectionConfigs)(nil), // 5: peerdb_flow.FlowConnectionConfigs - (*SyncFlowOptions)(nil), // 6: peerdb_flow.SyncFlowOptions - (*NormalizeFlowOptions)(nil), // 7: peerdb_flow.NormalizeFlowOptions - (*LastSyncState)(nil), // 8: peerdb_flow.LastSyncState - (*StartFlowInput)(nil), // 9: peerdb_flow.StartFlowInput - (*StartNormalizeInput)(nil), // 10: peerdb_flow.StartNormalizeInput - (*GetLastSyncedIDInput)(nil), // 11: peerdb_flow.GetLastSyncedIDInput - (*EnsurePullabilityInput)(nil), // 12: peerdb_flow.EnsurePullabilityInput - (*EnsurePullabilityBatchInput)(nil), // 13: peerdb_flow.EnsurePullabilityBatchInput - (*PostgresTableIdentifier)(nil), // 14: peerdb_flow.PostgresTableIdentifier - (*TableIdentifier)(nil), // 15: peerdb_flow.TableIdentifier - (*EnsurePullabilityOutput)(nil), // 16: peerdb_flow.EnsurePullabilityOutput - (*EnsurePullabilityBatchOutput)(nil), // 17: peerdb_flow.EnsurePullabilityBatchOutput - (*SetupReplicationInput)(nil), // 18: peerdb_flow.SetupReplicationInput - (*SetupReplicationOutput)(nil), // 19: peerdb_flow.SetupReplicationOutput - (*CreateRawTableInput)(nil), // 20: peerdb_flow.CreateRawTableInput - (*CreateRawTableOutput)(nil), // 21: peerdb_flow.CreateRawTableOutput - (*TableSchema)(nil), // 22: 
peerdb_flow.TableSchema - (*GetTableSchemaBatchInput)(nil), // 23: peerdb_flow.GetTableSchemaBatchInput - (*GetTableSchemaBatchOutput)(nil), // 24: peerdb_flow.GetTableSchemaBatchOutput - (*SetupNormalizedTableInput)(nil), // 25: peerdb_flow.SetupNormalizedTableInput - (*SetupNormalizedTableBatchInput)(nil), // 26: peerdb_flow.SetupNormalizedTableBatchInput - (*SetupNormalizedTableOutput)(nil), // 27: peerdb_flow.SetupNormalizedTableOutput - (*SetupNormalizedTableBatchOutput)(nil), // 28: peerdb_flow.SetupNormalizedTableBatchOutput - (*IntPartitionRange)(nil), // 29: peerdb_flow.IntPartitionRange - (*TimestampPartitionRange)(nil), // 30: peerdb_flow.TimestampPartitionRange - (*TID)(nil), // 31: peerdb_flow.TID - (*TIDPartitionRange)(nil), // 32: peerdb_flow.TIDPartitionRange - (*XMINPartitionRange)(nil), // 33: peerdb_flow.XMINPartitionRange - (*PartitionRange)(nil), // 34: peerdb_flow.PartitionRange - (*QRepWriteMode)(nil), // 35: peerdb_flow.QRepWriteMode - (*QRepConfig)(nil), // 36: peerdb_flow.QRepConfig - (*QRepPartition)(nil), // 37: peerdb_flow.QRepPartition - (*QRepPartitionBatch)(nil), // 38: peerdb_flow.QRepPartitionBatch - (*QRepParitionResult)(nil), // 39: peerdb_flow.QRepParitionResult - (*DropFlowInput)(nil), // 40: peerdb_flow.DropFlowInput - (*DeltaAddedColumn)(nil), // 41: peerdb_flow.DeltaAddedColumn - (*TableSchemaDelta)(nil), // 42: peerdb_flow.TableSchemaDelta - (*ReplayTableSchemaDeltaInput)(nil), // 43: peerdb_flow.ReplayTableSchemaDeltaInput - nil, // 44: peerdb_flow.FlowConnectionConfigs.TableNameMappingEntry - nil, // 45: peerdb_flow.FlowConnectionConfigs.SrcTableIdNameMappingEntry - nil, // 46: peerdb_flow.FlowConnectionConfigs.TableNameSchemaMappingEntry - nil, // 47: peerdb_flow.SyncFlowOptions.RelationMessageMappingEntry - nil, // 48: peerdb_flow.StartFlowInput.RelationMessageMappingEntry - nil, // 49: peerdb_flow.EnsurePullabilityBatchOutput.TableIdentifierMappingEntry - nil, // 50: 
peerdb_flow.SetupReplicationInput.TableNameMappingEntry - nil, // 51: peerdb_flow.CreateRawTableInput.TableNameMappingEntry - nil, // 52: peerdb_flow.TableSchema.ColumnsEntry - nil, // 53: peerdb_flow.GetTableSchemaBatchOutput.TableNameSchemaMappingEntry - nil, // 54: peerdb_flow.SetupNormalizedTableBatchInput.TableNameSchemaMappingEntry - nil, // 55: peerdb_flow.SetupNormalizedTableBatchOutput.TableExistsMappingEntry - (*Peer)(nil), // 56: peerdb_peers.Peer - (*timestamppb.Timestamp)(nil), // 57: google.protobuf.Timestamp + (*TableMapping)(nil), // 5: peerdb_flow.TableMapping + (*FlowConnectionConfigs)(nil), // 6: peerdb_flow.FlowConnectionConfigs + (*RenameTableOption)(nil), // 7: peerdb_flow.RenameTableOption + (*RenameTablesInput)(nil), // 8: peerdb_flow.RenameTablesInput + (*RenameTablesOutput)(nil), // 9: peerdb_flow.RenameTablesOutput + (*SyncFlowOptions)(nil), // 10: peerdb_flow.SyncFlowOptions + (*NormalizeFlowOptions)(nil), // 11: peerdb_flow.NormalizeFlowOptions + (*LastSyncState)(nil), // 12: peerdb_flow.LastSyncState + (*StartFlowInput)(nil), // 13: peerdb_flow.StartFlowInput + (*StartNormalizeInput)(nil), // 14: peerdb_flow.StartNormalizeInput + (*GetLastSyncedIDInput)(nil), // 15: peerdb_flow.GetLastSyncedIDInput + (*EnsurePullabilityInput)(nil), // 16: peerdb_flow.EnsurePullabilityInput + (*EnsurePullabilityBatchInput)(nil), // 17: peerdb_flow.EnsurePullabilityBatchInput + (*PostgresTableIdentifier)(nil), // 18: peerdb_flow.PostgresTableIdentifier + (*TableIdentifier)(nil), // 19: peerdb_flow.TableIdentifier + (*EnsurePullabilityOutput)(nil), // 20: peerdb_flow.EnsurePullabilityOutput + (*EnsurePullabilityBatchOutput)(nil), // 21: peerdb_flow.EnsurePullabilityBatchOutput + (*SetupReplicationInput)(nil), // 22: peerdb_flow.SetupReplicationInput + (*SetupReplicationOutput)(nil), // 23: peerdb_flow.SetupReplicationOutput + (*CreateRawTableInput)(nil), // 24: peerdb_flow.CreateRawTableInput + (*CreateRawTableOutput)(nil), // 25: 
peerdb_flow.CreateRawTableOutput + (*TableSchema)(nil), // 26: peerdb_flow.TableSchema + (*GetTableSchemaBatchInput)(nil), // 27: peerdb_flow.GetTableSchemaBatchInput + (*GetTableSchemaBatchOutput)(nil), // 28: peerdb_flow.GetTableSchemaBatchOutput + (*SetupNormalizedTableInput)(nil), // 29: peerdb_flow.SetupNormalizedTableInput + (*SetupNormalizedTableBatchInput)(nil), // 30: peerdb_flow.SetupNormalizedTableBatchInput + (*SetupNormalizedTableOutput)(nil), // 31: peerdb_flow.SetupNormalizedTableOutput + (*SetupNormalizedTableBatchOutput)(nil), // 32: peerdb_flow.SetupNormalizedTableBatchOutput + (*IntPartitionRange)(nil), // 33: peerdb_flow.IntPartitionRange + (*TimestampPartitionRange)(nil), // 34: peerdb_flow.TimestampPartitionRange + (*TID)(nil), // 35: peerdb_flow.TID + (*TIDPartitionRange)(nil), // 36: peerdb_flow.TIDPartitionRange + (*PartitionRange)(nil), // 37: peerdb_flow.PartitionRange + (*QRepWriteMode)(nil), // 38: peerdb_flow.QRepWriteMode + (*QRepConfig)(nil), // 39: peerdb_flow.QRepConfig + (*QRepPartition)(nil), // 40: peerdb_flow.QRepPartition + (*QRepPartitionBatch)(nil), // 41: peerdb_flow.QRepPartitionBatch + (*QRepParitionResult)(nil), // 42: peerdb_flow.QRepParitionResult + (*DropFlowInput)(nil), // 43: peerdb_flow.DropFlowInput + (*DeltaAddedColumn)(nil), // 44: peerdb_flow.DeltaAddedColumn + (*TableSchemaDelta)(nil), // 45: peerdb_flow.TableSchemaDelta + (*ReplayTableSchemaDeltaInput)(nil), // 46: peerdb_flow.ReplayTableSchemaDeltaInput + nil, // 47: peerdb_flow.FlowConnectionConfigs.SrcTableIdNameMappingEntry + nil, // 48: peerdb_flow.FlowConnectionConfigs.TableNameSchemaMappingEntry + nil, // 49: peerdb_flow.SyncFlowOptions.RelationMessageMappingEntry + nil, // 50: peerdb_flow.StartFlowInput.RelationMessageMappingEntry + nil, // 51: peerdb_flow.EnsurePullabilityBatchOutput.TableIdentifierMappingEntry + nil, // 52: peerdb_flow.SetupReplicationInput.TableNameMappingEntry + nil, // 53: peerdb_flow.CreateRawTableInput.TableNameMappingEntry + 
nil, // 54: peerdb_flow.TableSchema.ColumnsEntry + nil, // 55: peerdb_flow.GetTableSchemaBatchOutput.TableNameSchemaMappingEntry + nil, // 56: peerdb_flow.SetupNormalizedTableBatchInput.TableNameSchemaMappingEntry + nil, // 57: peerdb_flow.SetupNormalizedTableBatchOutput.TableExistsMappingEntry + (*Peer)(nil), // 58: peerdb_peers.Peer + (*timestamppb.Timestamp)(nil), // 59: google.protobuf.Timestamp } var file_flow_proto_depIdxs = []int32{ 3, // 0: peerdb_flow.RelationMessage.columns:type_name -> peerdb_flow.RelationMessageColumn - 56, // 1: peerdb_flow.FlowConnectionConfigs.source:type_name -> peerdb_peers.Peer - 56, // 2: peerdb_flow.FlowConnectionConfigs.destination:type_name -> peerdb_peers.Peer - 22, // 3: peerdb_flow.FlowConnectionConfigs.table_schema:type_name -> peerdb_flow.TableSchema - 44, // 4: peerdb_flow.FlowConnectionConfigs.table_name_mapping:type_name -> peerdb_flow.FlowConnectionConfigs.TableNameMappingEntry - 45, // 5: peerdb_flow.FlowConnectionConfigs.src_table_id_name_mapping:type_name -> peerdb_flow.FlowConnectionConfigs.SrcTableIdNameMappingEntry - 46, // 6: peerdb_flow.FlowConnectionConfigs.table_name_schema_mapping:type_name -> peerdb_flow.FlowConnectionConfigs.TableNameSchemaMappingEntry - 56, // 7: peerdb_flow.FlowConnectionConfigs.metadata_peer:type_name -> peerdb_peers.Peer + 58, // 1: peerdb_flow.FlowConnectionConfigs.source:type_name -> peerdb_peers.Peer + 58, // 2: peerdb_flow.FlowConnectionConfigs.destination:type_name -> peerdb_peers.Peer + 26, // 3: peerdb_flow.FlowConnectionConfigs.table_schema:type_name -> peerdb_flow.TableSchema + 5, // 4: peerdb_flow.FlowConnectionConfigs.table_mappings:type_name -> peerdb_flow.TableMapping + 47, // 5: peerdb_flow.FlowConnectionConfigs.src_table_id_name_mapping:type_name -> peerdb_flow.FlowConnectionConfigs.SrcTableIdNameMappingEntry + 48, // 6: peerdb_flow.FlowConnectionConfigs.table_name_schema_mapping:type_name -> peerdb_flow.FlowConnectionConfigs.TableNameSchemaMappingEntry + 58, // 7: 
peerdb_flow.FlowConnectionConfigs.metadata_peer:type_name -> peerdb_peers.Peer 0, // 8: peerdb_flow.FlowConnectionConfigs.snapshot_sync_mode:type_name -> peerdb_flow.QRepSyncMode 0, // 9: peerdb_flow.FlowConnectionConfigs.cdc_sync_mode:type_name -> peerdb_flow.QRepSyncMode - 47, // 10: peerdb_flow.SyncFlowOptions.relation_message_mapping:type_name -> peerdb_flow.SyncFlowOptions.RelationMessageMappingEntry - 57, // 11: peerdb_flow.LastSyncState.last_synced_at:type_name -> google.protobuf.Timestamp - 8, // 12: peerdb_flow.StartFlowInput.last_sync_state:type_name -> peerdb_flow.LastSyncState - 5, // 13: peerdb_flow.StartFlowInput.flow_connection_configs:type_name -> peerdb_flow.FlowConnectionConfigs - 6, // 14: peerdb_flow.StartFlowInput.sync_flow_options:type_name -> peerdb_flow.SyncFlowOptions - 48, // 15: peerdb_flow.StartFlowInput.relation_message_mapping:type_name -> peerdb_flow.StartFlowInput.RelationMessageMappingEntry - 5, // 16: peerdb_flow.StartNormalizeInput.flow_connection_configs:type_name -> peerdb_flow.FlowConnectionConfigs - 56, // 17: peerdb_flow.GetLastSyncedIDInput.peer_connection_config:type_name -> peerdb_peers.Peer - 56, // 18: peerdb_flow.EnsurePullabilityInput.peer_connection_config:type_name -> peerdb_peers.Peer - 56, // 19: peerdb_flow.EnsurePullabilityBatchInput.peer_connection_config:type_name -> peerdb_peers.Peer - 14, // 20: peerdb_flow.TableIdentifier.postgres_table_identifier:type_name -> peerdb_flow.PostgresTableIdentifier - 15, // 21: peerdb_flow.EnsurePullabilityOutput.table_identifier:type_name -> peerdb_flow.TableIdentifier - 49, // 22: peerdb_flow.EnsurePullabilityBatchOutput.table_identifier_mapping:type_name -> peerdb_flow.EnsurePullabilityBatchOutput.TableIdentifierMappingEntry - 56, // 23: peerdb_flow.SetupReplicationInput.peer_connection_config:type_name -> peerdb_peers.Peer - 50, // 24: peerdb_flow.SetupReplicationInput.table_name_mapping:type_name -> peerdb_flow.SetupReplicationInput.TableNameMappingEntry - 56, // 25: 
peerdb_flow.SetupReplicationInput.destination_peer:type_name -> peerdb_peers.Peer - 56, // 26: peerdb_flow.CreateRawTableInput.peer_connection_config:type_name -> peerdb_peers.Peer - 51, // 27: peerdb_flow.CreateRawTableInput.table_name_mapping:type_name -> peerdb_flow.CreateRawTableInput.TableNameMappingEntry - 0, // 28: peerdb_flow.CreateRawTableInput.cdc_sync_mode:type_name -> peerdb_flow.QRepSyncMode - 52, // 29: peerdb_flow.TableSchema.columns:type_name -> peerdb_flow.TableSchema.ColumnsEntry - 56, // 30: peerdb_flow.GetTableSchemaBatchInput.peer_connection_config:type_name -> peerdb_peers.Peer - 53, // 31: peerdb_flow.GetTableSchemaBatchOutput.table_name_schema_mapping:type_name -> peerdb_flow.GetTableSchemaBatchOutput.TableNameSchemaMappingEntry - 56, // 32: peerdb_flow.SetupNormalizedTableInput.peer_connection_config:type_name -> peerdb_peers.Peer - 22, // 33: peerdb_flow.SetupNormalizedTableInput.source_table_schema:type_name -> peerdb_flow.TableSchema - 56, // 34: peerdb_flow.SetupNormalizedTableBatchInput.peer_connection_config:type_name -> peerdb_peers.Peer - 54, // 35: peerdb_flow.SetupNormalizedTableBatchInput.table_name_schema_mapping:type_name -> peerdb_flow.SetupNormalizedTableBatchInput.TableNameSchemaMappingEntry - 55, // 36: peerdb_flow.SetupNormalizedTableBatchOutput.table_exists_mapping:type_name -> peerdb_flow.SetupNormalizedTableBatchOutput.TableExistsMappingEntry - 57, // 37: peerdb_flow.TimestampPartitionRange.start:type_name -> google.protobuf.Timestamp - 57, // 38: peerdb_flow.TimestampPartitionRange.end:type_name -> google.protobuf.Timestamp - 31, // 39: peerdb_flow.TIDPartitionRange.start:type_name -> peerdb_flow.TID - 31, // 40: peerdb_flow.TIDPartitionRange.end:type_name -> peerdb_flow.TID - 29, // 41: peerdb_flow.PartitionRange.int_range:type_name -> peerdb_flow.IntPartitionRange - 30, // 42: peerdb_flow.PartitionRange.timestamp_range:type_name -> peerdb_flow.TimestampPartitionRange - 32, // 43: 
peerdb_flow.PartitionRange.tid_range:type_name -> peerdb_flow.TIDPartitionRange - 33, // 44: peerdb_flow.PartitionRange.xmin_range:type_name -> peerdb_flow.XMINPartitionRange - 1, // 45: peerdb_flow.QRepWriteMode.write_type:type_name -> peerdb_flow.QRepWriteType - 56, // 46: peerdb_flow.QRepConfig.source_peer:type_name -> peerdb_peers.Peer - 56, // 47: peerdb_flow.QRepConfig.destination_peer:type_name -> peerdb_peers.Peer - 0, // 48: peerdb_flow.QRepConfig.sync_mode:type_name -> peerdb_flow.QRepSyncMode - 35, // 49: peerdb_flow.QRepConfig.write_mode:type_name -> peerdb_flow.QRepWriteMode - 34, // 50: peerdb_flow.QRepPartition.range:type_name -> peerdb_flow.PartitionRange - 37, // 51: peerdb_flow.QRepPartitionBatch.partitions:type_name -> peerdb_flow.QRepPartition - 37, // 52: peerdb_flow.QRepParitionResult.partitions:type_name -> peerdb_flow.QRepPartition - 41, // 53: peerdb_flow.TableSchemaDelta.added_columns:type_name -> peerdb_flow.DeltaAddedColumn - 5, // 54: peerdb_flow.ReplayTableSchemaDeltaInput.flow_connection_configs:type_name -> peerdb_flow.FlowConnectionConfigs - 42, // 55: peerdb_flow.ReplayTableSchemaDeltaInput.table_schema_delta:type_name -> peerdb_flow.TableSchemaDelta - 22, // 56: peerdb_flow.FlowConnectionConfigs.TableNameSchemaMappingEntry.value:type_name -> peerdb_flow.TableSchema - 4, // 57: peerdb_flow.SyncFlowOptions.RelationMessageMappingEntry.value:type_name -> peerdb_flow.RelationMessage - 4, // 58: peerdb_flow.StartFlowInput.RelationMessageMappingEntry.value:type_name -> peerdb_flow.RelationMessage - 15, // 59: peerdb_flow.EnsurePullabilityBatchOutput.TableIdentifierMappingEntry.value:type_name -> peerdb_flow.TableIdentifier - 22, // 60: peerdb_flow.GetTableSchemaBatchOutput.TableNameSchemaMappingEntry.value:type_name -> peerdb_flow.TableSchema - 22, // 61: peerdb_flow.SetupNormalizedTableBatchInput.TableNameSchemaMappingEntry.value:type_name -> peerdb_flow.TableSchema - 62, // [62:62] is the sub-list for method output_type - 62, // 
[62:62] is the sub-list for method input_type - 62, // [62:62] is the sub-list for extension type_name - 62, // [62:62] is the sub-list for extension extendee - 0, // [0:62] is the sub-list for field type_name + 58, // 10: peerdb_flow.RenameTablesInput.peer:type_name -> peerdb_peers.Peer + 7, // 11: peerdb_flow.RenameTablesInput.rename_table_options:type_name -> peerdb_flow.RenameTableOption + 49, // 12: peerdb_flow.SyncFlowOptions.relation_message_mapping:type_name -> peerdb_flow.SyncFlowOptions.RelationMessageMappingEntry + 59, // 13: peerdb_flow.LastSyncState.last_synced_at:type_name -> google.protobuf.Timestamp + 12, // 14: peerdb_flow.StartFlowInput.last_sync_state:type_name -> peerdb_flow.LastSyncState + 6, // 15: peerdb_flow.StartFlowInput.flow_connection_configs:type_name -> peerdb_flow.FlowConnectionConfigs + 10, // 16: peerdb_flow.StartFlowInput.sync_flow_options:type_name -> peerdb_flow.SyncFlowOptions + 50, // 17: peerdb_flow.StartFlowInput.relation_message_mapping:type_name -> peerdb_flow.StartFlowInput.RelationMessageMappingEntry + 6, // 18: peerdb_flow.StartNormalizeInput.flow_connection_configs:type_name -> peerdb_flow.FlowConnectionConfigs + 58, // 19: peerdb_flow.GetLastSyncedIDInput.peer_connection_config:type_name -> peerdb_peers.Peer + 58, // 20: peerdb_flow.EnsurePullabilityInput.peer_connection_config:type_name -> peerdb_peers.Peer + 58, // 21: peerdb_flow.EnsurePullabilityBatchInput.peer_connection_config:type_name -> peerdb_peers.Peer + 18, // 22: peerdb_flow.TableIdentifier.postgres_table_identifier:type_name -> peerdb_flow.PostgresTableIdentifier + 19, // 23: peerdb_flow.EnsurePullabilityOutput.table_identifier:type_name -> peerdb_flow.TableIdentifier + 51, // 24: peerdb_flow.EnsurePullabilityBatchOutput.table_identifier_mapping:type_name -> peerdb_flow.EnsurePullabilityBatchOutput.TableIdentifierMappingEntry + 58, // 25: peerdb_flow.SetupReplicationInput.peer_connection_config:type_name -> peerdb_peers.Peer + 52, // 26: 
peerdb_flow.SetupReplicationInput.table_name_mapping:type_name -> peerdb_flow.SetupReplicationInput.TableNameMappingEntry + 58, // 27: peerdb_flow.SetupReplicationInput.destination_peer:type_name -> peerdb_peers.Peer + 58, // 28: peerdb_flow.CreateRawTableInput.peer_connection_config:type_name -> peerdb_peers.Peer + 53, // 29: peerdb_flow.CreateRawTableInput.table_name_mapping:type_name -> peerdb_flow.CreateRawTableInput.TableNameMappingEntry + 0, // 30: peerdb_flow.CreateRawTableInput.cdc_sync_mode:type_name -> peerdb_flow.QRepSyncMode + 54, // 31: peerdb_flow.TableSchema.columns:type_name -> peerdb_flow.TableSchema.ColumnsEntry + 58, // 32: peerdb_flow.GetTableSchemaBatchInput.peer_connection_config:type_name -> peerdb_peers.Peer + 55, // 33: peerdb_flow.GetTableSchemaBatchOutput.table_name_schema_mapping:type_name -> peerdb_flow.GetTableSchemaBatchOutput.TableNameSchemaMappingEntry + 58, // 34: peerdb_flow.SetupNormalizedTableInput.peer_connection_config:type_name -> peerdb_peers.Peer + 26, // 35: peerdb_flow.SetupNormalizedTableInput.source_table_schema:type_name -> peerdb_flow.TableSchema + 58, // 36: peerdb_flow.SetupNormalizedTableBatchInput.peer_connection_config:type_name -> peerdb_peers.Peer + 56, // 37: peerdb_flow.SetupNormalizedTableBatchInput.table_name_schema_mapping:type_name -> peerdb_flow.SetupNormalizedTableBatchInput.TableNameSchemaMappingEntry + 57, // 38: peerdb_flow.SetupNormalizedTableBatchOutput.table_exists_mapping:type_name -> peerdb_flow.SetupNormalizedTableBatchOutput.TableExistsMappingEntry + 59, // 39: peerdb_flow.TimestampPartitionRange.start:type_name -> google.protobuf.Timestamp + 59, // 40: peerdb_flow.TimestampPartitionRange.end:type_name -> google.protobuf.Timestamp + 35, // 41: peerdb_flow.TIDPartitionRange.start:type_name -> peerdb_flow.TID + 35, // 42: peerdb_flow.TIDPartitionRange.end:type_name -> peerdb_flow.TID + 33, // 43: peerdb_flow.PartitionRange.int_range:type_name -> peerdb_flow.IntPartitionRange + 34, // 44: 
peerdb_flow.PartitionRange.timestamp_range:type_name -> peerdb_flow.TimestampPartitionRange + 36, // 45: peerdb_flow.PartitionRange.tid_range:type_name -> peerdb_flow.TIDPartitionRange + 1, // 46: peerdb_flow.QRepWriteMode.write_type:type_name -> peerdb_flow.QRepWriteType + 58, // 47: peerdb_flow.QRepConfig.source_peer:type_name -> peerdb_peers.Peer + 58, // 48: peerdb_flow.QRepConfig.destination_peer:type_name -> peerdb_peers.Peer + 0, // 49: peerdb_flow.QRepConfig.sync_mode:type_name -> peerdb_flow.QRepSyncMode + 38, // 50: peerdb_flow.QRepConfig.write_mode:type_name -> peerdb_flow.QRepWriteMode + 37, // 51: peerdb_flow.QRepPartition.range:type_name -> peerdb_flow.PartitionRange + 40, // 52: peerdb_flow.QRepPartitionBatch.partitions:type_name -> peerdb_flow.QRepPartition + 40, // 53: peerdb_flow.QRepParitionResult.partitions:type_name -> peerdb_flow.QRepPartition + 44, // 54: peerdb_flow.TableSchemaDelta.added_columns:type_name -> peerdb_flow.DeltaAddedColumn + 6, // 55: peerdb_flow.ReplayTableSchemaDeltaInput.flow_connection_configs:type_name -> peerdb_flow.FlowConnectionConfigs + 45, // 56: peerdb_flow.ReplayTableSchemaDeltaInput.table_schema_deltas:type_name -> peerdb_flow.TableSchemaDelta + 26, // 57: peerdb_flow.FlowConnectionConfigs.TableNameSchemaMappingEntry.value:type_name -> peerdb_flow.TableSchema + 4, // 58: peerdb_flow.SyncFlowOptions.RelationMessageMappingEntry.value:type_name -> peerdb_flow.RelationMessage + 4, // 59: peerdb_flow.StartFlowInput.RelationMessageMappingEntry.value:type_name -> peerdb_flow.RelationMessage + 19, // 60: peerdb_flow.EnsurePullabilityBatchOutput.TableIdentifierMappingEntry.value:type_name -> peerdb_flow.TableIdentifier + 26, // 61: peerdb_flow.GetTableSchemaBatchOutput.TableNameSchemaMappingEntry.value:type_name -> peerdb_flow.TableSchema + 26, // 62: peerdb_flow.SetupNormalizedTableBatchInput.TableNameSchemaMappingEntry.value:type_name -> peerdb_flow.TableSchema + 63, // [63:63] is the sub-list for method output_type + 
63, // [63:63] is the sub-list for method input_type + 63, // [63:63] is the sub-list for extension type_name + 63, // [63:63] is the sub-list for extension extendee + 0, // [0:63] is the sub-list for field type_name } func init() { file_flow_proto_init() } @@ -3655,7 +3850,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FlowConnectionConfigs); i { + switch v := v.(*TableMapping); i { case 0: return &v.state case 1: @@ -3667,7 +3862,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SyncFlowOptions); i { + switch v := v.(*FlowConnectionConfigs); i { case 0: return &v.state case 1: @@ -3679,7 +3874,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NormalizeFlowOptions); i { + switch v := v.(*RenameTableOption); i { case 0: return &v.state case 1: @@ -3691,7 +3886,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LastSyncState); i { + switch v := v.(*RenameTablesInput); i { case 0: return &v.state case 1: @@ -3703,7 +3898,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StartFlowInput); i { + switch v := v.(*RenameTablesOutput); i { case 0: return &v.state case 1: @@ -3715,7 +3910,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StartNormalizeInput); i { + switch v := v.(*SyncFlowOptions); i { case 0: return &v.state case 1: @@ -3727,7 +3922,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetLastSyncedIDInput); i { + switch v := v.(*NormalizeFlowOptions); i { case 
0: return &v.state case 1: @@ -3739,7 +3934,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EnsurePullabilityInput); i { + switch v := v.(*LastSyncState); i { case 0: return &v.state case 1: @@ -3751,7 +3946,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EnsurePullabilityBatchInput); i { + switch v := v.(*StartFlowInput); i { case 0: return &v.state case 1: @@ -3763,7 +3958,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PostgresTableIdentifier); i { + switch v := v.(*StartNormalizeInput); i { case 0: return &v.state case 1: @@ -3775,7 +3970,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TableIdentifier); i { + switch v := v.(*GetLastSyncedIDInput); i { case 0: return &v.state case 1: @@ -3787,7 +3982,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EnsurePullabilityOutput); i { + switch v := v.(*EnsurePullabilityInput); i { case 0: return &v.state case 1: @@ -3799,7 +3994,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EnsurePullabilityBatchOutput); i { + switch v := v.(*EnsurePullabilityBatchInput); i { case 0: return &v.state case 1: @@ -3811,7 +4006,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SetupReplicationInput); i { + switch v := v.(*PostgresTableIdentifier); i { case 0: return &v.state case 1: @@ -3823,7 +4018,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - 
switch v := v.(*SetupReplicationOutput); i { + switch v := v.(*TableIdentifier); i { case 0: return &v.state case 1: @@ -3835,7 +4030,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateRawTableInput); i { + switch v := v.(*EnsurePullabilityOutput); i { case 0: return &v.state case 1: @@ -3847,7 +4042,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateRawTableOutput); i { + switch v := v.(*EnsurePullabilityBatchOutput); i { case 0: return &v.state case 1: @@ -3859,7 +4054,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TableSchema); i { + switch v := v.(*SetupReplicationInput); i { case 0: return &v.state case 1: @@ -3871,7 +4066,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetTableSchemaBatchInput); i { + switch v := v.(*SetupReplicationOutput); i { case 0: return &v.state case 1: @@ -3883,7 +4078,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetTableSchemaBatchOutput); i { + switch v := v.(*CreateRawTableInput); i { case 0: return &v.state case 1: @@ -3895,7 +4090,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SetupNormalizedTableInput); i { + switch v := v.(*CreateRawTableOutput); i { case 0: return &v.state case 1: @@ -3907,7 +4102,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SetupNormalizedTableBatchInput); i { + switch v := v.(*TableSchema); i { case 0: return &v.state case 1: @@ -3919,7 +4114,7 @@ func file_flow_proto_init() { 
} } file_flow_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SetupNormalizedTableOutput); i { + switch v := v.(*GetTableSchemaBatchInput); i { case 0: return &v.state case 1: @@ -3931,7 +4126,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SetupNormalizedTableBatchOutput); i { + switch v := v.(*GetTableSchemaBatchOutput); i { case 0: return &v.state case 1: @@ -3943,7 +4138,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*IntPartitionRange); i { + switch v := v.(*SetupNormalizedTableInput); i { case 0: return &v.state case 1: @@ -3955,7 +4150,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TimestampPartitionRange); i { + switch v := v.(*SetupNormalizedTableBatchInput); i { case 0: return &v.state case 1: @@ -3967,7 +4162,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TID); i { + switch v := v.(*SetupNormalizedTableOutput); i { case 0: return &v.state case 1: @@ -3979,7 +4174,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TIDPartitionRange); i { + switch v := v.(*SetupNormalizedTableBatchOutput); i { case 0: return &v.state case 1: @@ -3991,7 +4186,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*XMINPartitionRange); i { + switch v := v.(*IntPartitionRange); i { case 0: return &v.state case 1: @@ -4003,7 +4198,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PartitionRange); i { + switch v := 
v.(*TimestampPartitionRange); i { case 0: return &v.state case 1: @@ -4015,7 +4210,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QRepWriteMode); i { + switch v := v.(*TID); i { case 0: return &v.state case 1: @@ -4027,7 +4222,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QRepConfig); i { + switch v := v.(*TIDPartitionRange); i { case 0: return &v.state case 1: @@ -4039,7 +4234,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QRepPartition); i { + switch v := v.(*PartitionRange); i { case 0: return &v.state case 1: @@ -4051,7 +4246,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QRepPartitionBatch); i { + switch v := v.(*QRepWriteMode); i { case 0: return &v.state case 1: @@ -4063,7 +4258,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QRepParitionResult); i { + switch v := v.(*QRepConfig); i { case 0: return &v.state case 1: @@ -4075,7 +4270,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DropFlowInput); i { + switch v := v.(*QRepPartition); i { case 0: return &v.state case 1: @@ -4087,7 +4282,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeltaAddedColumn); i { + switch v := v.(*QRepPartitionBatch); i { case 0: return &v.state case 1: @@ -4099,7 +4294,7 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TableSchemaDelta); i { + switch v := 
v.(*QRepParitionResult); i { case 0: return &v.state case 1: @@ -4111,6 +4306,42 @@ func file_flow_proto_init() { } } file_flow_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DropFlowInput); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_flow_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeltaAddedColumn); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_flow_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TableSchemaDelta); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_flow_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReplayTableSchemaDeltaInput); i { case 0: return &v.state @@ -4123,14 +4354,13 @@ func file_flow_proto_init() { } } } - file_flow_proto_msgTypes[13].OneofWrappers = []interface{}{ + file_flow_proto_msgTypes[17].OneofWrappers = []interface{}{ (*TableIdentifier_PostgresTableIdentifier)(nil), } - file_flow_proto_msgTypes[32].OneofWrappers = []interface{}{ + file_flow_proto_msgTypes[35].OneofWrappers = []interface{}{ (*PartitionRange_IntRange)(nil), (*PartitionRange_TimestampRange)(nil), (*PartitionRange_TidRange)(nil), - (*PartitionRange_XminRange)(nil), } type x struct{} out := protoimpl.TypeBuilder{ @@ -4138,7 +4368,7 @@ func file_flow_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_flow_proto_rawDesc, NumEnums: 2, - NumMessages: 54, + NumMessages: 56, NumExtensions: 0, NumServices: 0, }, diff --git a/flow/generated/protos/google/api/annotations.pb.go b/flow/generated/protos/google/api/annotations.pb.go new file mode 100644 index 000000000..15175f0e8 --- /dev/null +++ 
b/flow/generated/protos/google/api/annotations.pb.go @@ -0,0 +1,120 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.31.0 +// protoc (unknown) +// source: google/api/annotations.proto + +package api + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + descriptorpb "google.golang.org/protobuf/types/descriptorpb" + reflect "reflect" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +var file_google_api_annotations_proto_extTypes = []protoimpl.ExtensionInfo{ + { + ExtendedType: (*descriptorpb.MethodOptions)(nil), + ExtensionType: (*HttpRule)(nil), + Field: 72295728, + Name: "google.api.http", + Tag: "bytes,72295728,opt,name=http", + Filename: "google/api/annotations.proto", + }, +} + +// Extension fields to descriptorpb.MethodOptions. +var ( + // See `HttpRule`. 
+ // + // optional google.api.HttpRule http = 72295728; + E_Http = &file_google_api_annotations_proto_extTypes[0] +) + +var File_google_api_annotations_proto protoreflect.FileDescriptor + +var file_google_api_annotations_proto_rawDesc = []byte{ + 0x0a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, + 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x70, 0x69, 0x1a, 0x15, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x68, 0x74, 0x74, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x1a, 0x20, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x3a, 0x4b, 0x0a, 0x04, 0x68, 0x74, 0x74, 0x70, 0x12, 0x1e, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x4d, 0x65, + 0x74, 0x68, 0x6f, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xb0, 0xca, 0xbc, 0x22, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x48, 0x74, 0x74, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x52, 0x04, 0x68, 0x74, 0x74, 0x70, + 0x42, 0x88, 0x01, 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x61, 0x70, 0x69, 0x42, 0x10, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x1b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2f, 0x61, 0x70, 0x69, 0xa2, 0x02, 0x03, 0x47, 0x41, 0x58, 0xaa, 0x02, 0x0a, 0x47, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x41, 0x70, 0x69, 0xca, 0x02, 0x0a, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x5c, 0x41, 0x70, 0x69, 0xe2, 0x02, 0x16, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x5c, 
0x41, 0x70, + 0x69, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0b, + 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x41, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var file_google_api_annotations_proto_goTypes = []interface{}{ + (*descriptorpb.MethodOptions)(nil), // 0: google.protobuf.MethodOptions + (*HttpRule)(nil), // 1: google.api.HttpRule +} +var file_google_api_annotations_proto_depIdxs = []int32{ + 0, // 0: google.api.http:extendee -> google.protobuf.MethodOptions + 1, // 1: google.api.http:type_name -> google.api.HttpRule + 2, // [2:2] is the sub-list for method output_type + 2, // [2:2] is the sub-list for method input_type + 1, // [1:2] is the sub-list for extension type_name + 0, // [0:1] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_google_api_annotations_proto_init() } +func file_google_api_annotations_proto_init() { + if File_google_api_annotations_proto != nil { + return + } + file_google_api_http_proto_init() + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_google_api_annotations_proto_rawDesc, + NumEnums: 0, + NumMessages: 0, + NumExtensions: 1, + NumServices: 0, + }, + GoTypes: file_google_api_annotations_proto_goTypes, + DependencyIndexes: file_google_api_annotations_proto_depIdxs, + ExtensionInfos: file_google_api_annotations_proto_extTypes, + }.Build() + File_google_api_annotations_proto = out.File + file_google_api_annotations_proto_rawDesc = nil + file_google_api_annotations_proto_goTypes = nil + file_google_api_annotations_proto_depIdxs = nil +} diff --git a/flow/generated/protos/google/api/http.pb.go b/flow/generated/protos/google/api/http.pb.go new file mode 100644 index 000000000..f2f45fe89 --- /dev/null +++ b/flow/generated/protos/google/api/http.pb.go @@ -0,0 +1,783 @@ +// Copyright 2023 Google LLC 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.31.0 +// protoc (unknown) +// source: google/api/http.proto + +package api + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +type Http struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + Rules []*HttpRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. 
+ // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + FullyDecodeReservedExpansion bool `protobuf:"varint,2,opt,name=fully_decode_reserved_expansion,json=fullyDecodeReservedExpansion,proto3" json:"fully_decode_reserved_expansion,omitempty"` +} + +func (x *Http) Reset() { + *x = Http{} + if protoimpl.UnsafeEnabled { + mi := &file_google_api_http_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Http) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Http) ProtoMessage() {} + +func (x *Http) ProtoReflect() protoreflect.Message { + mi := &file_google_api_http_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Http.ProtoReflect.Descriptor instead. +func (*Http) Descriptor() ([]byte, []int) { + return file_google_api_http_proto_rawDescGZIP(), []int{0} +} + +func (x *Http) GetRules() []*HttpRule { + if x != nil { + return x.Rules + } + return nil +} + +func (x *Http) GetFullyDecodeReservedExpansion() bool { + if x != nil { + return x.FullyDecodeReservedExpansion + } + return false +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. 
The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. `HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. The path +// template may refer to one or more fields in the gRPC request message, as long +// as each field is a non-repeated field with a primitive (non-message) type. +// The path template controls how fields of the request message are mapped to +// the URL path. +// +// Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/{name=messages/*}" +// }; +// } +// } +// message GetMessageRequest { +// string name = 1; // Mapped to URL path. +// } +// message Message { +// string text = 1; // The resource content. +// } +// +// This enables an HTTP REST to gRPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` +// +// Any fields in the request message which are not bound by the path template +// automatically become HTTP query parameters if there is no HTTP request body. +// For example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get:"/v1/messages/{message_id}" +// }; +// } +// } +// message GetMessageRequest { +// message SubMessage { +// string subfield = 1; +// } +// string message_id = 1; // Mapped to URL path. +// int64 revision = 2; // Mapped to URL query parameter `revision`. +// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. 
+// } +// +// This enables a HTTP JSON to RPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | +// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: +// "foo"))` +// +// Note that fields which are mapped to URL query parameters must have a +// primitive type or a repeated primitive type or a non-repeated message type. +// In the case of a repeated type, the parameter can be repeated in the URL +// as `...?param=A¶m=B`. In the case of a message type, each field of the +// message is mapped to a separate parameter, such as +// `...?foo.a=A&foo.b=B&foo.c=C`. +// +// For HTTP methods that allow a request body, the `body` field +// specifies the mapping. Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" message { text: "Hi!" })` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. 
This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They +// are passed via the HTTP +// request body. 
+// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL +// query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP +// request body, all +// fields are passed via URL path and URL query parameters. +// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. 
+// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. 
The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +type HttpRule struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. 
+ // + // Types that are assignable to Pattern: + // + // *HttpRule_Get + // *HttpRule_Put + // *HttpRule_Post + // *HttpRule_Delete + // *HttpRule_Patch + // *HttpRule_Custom + Pattern isHttpRule_Pattern `protobuf_oneof:"pattern"` + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + Body string `protobuf:"bytes,7,opt,name=body,proto3" json:"body,omitempty"` + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + ResponseBody string `protobuf:"bytes,12,opt,name=response_body,json=responseBody,proto3" json:"response_body,omitempty"` + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). 
+ AdditionalBindings []*HttpRule `protobuf:"bytes,11,rep,name=additional_bindings,json=additionalBindings,proto3" json:"additional_bindings,omitempty"` +} + +func (x *HttpRule) Reset() { + *x = HttpRule{} + if protoimpl.UnsafeEnabled { + mi := &file_google_api_http_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *HttpRule) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*HttpRule) ProtoMessage() {} + +func (x *HttpRule) ProtoReflect() protoreflect.Message { + mi := &file_google_api_http_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use HttpRule.ProtoReflect.Descriptor instead. +func (*HttpRule) Descriptor() ([]byte, []int) { + return file_google_api_http_proto_rawDescGZIP(), []int{1} +} + +func (x *HttpRule) GetSelector() string { + if x != nil { + return x.Selector + } + return "" +} + +func (m *HttpRule) GetPattern() isHttpRule_Pattern { + if m != nil { + return m.Pattern + } + return nil +} + +func (x *HttpRule) GetGet() string { + if x, ok := x.GetPattern().(*HttpRule_Get); ok { + return x.Get + } + return "" +} + +func (x *HttpRule) GetPut() string { + if x, ok := x.GetPattern().(*HttpRule_Put); ok { + return x.Put + } + return "" +} + +func (x *HttpRule) GetPost() string { + if x, ok := x.GetPattern().(*HttpRule_Post); ok { + return x.Post + } + return "" +} + +func (x *HttpRule) GetDelete() string { + if x, ok := x.GetPattern().(*HttpRule_Delete); ok { + return x.Delete + } + return "" +} + +func (x *HttpRule) GetPatch() string { + if x, ok := x.GetPattern().(*HttpRule_Patch); ok { + return x.Patch + } + return "" +} + +func (x *HttpRule) GetCustom() *CustomHttpPattern { + if x, ok := x.GetPattern().(*HttpRule_Custom); ok { + return x.Custom + } + return nil +} + +func (x 
*HttpRule) GetBody() string { + if x != nil { + return x.Body + } + return "" +} + +func (x *HttpRule) GetResponseBody() string { + if x != nil { + return x.ResponseBody + } + return "" +} + +func (x *HttpRule) GetAdditionalBindings() []*HttpRule { + if x != nil { + return x.AdditionalBindings + } + return nil +} + +type isHttpRule_Pattern interface { + isHttpRule_Pattern() +} + +type HttpRule_Get struct { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + Get string `protobuf:"bytes,2,opt,name=get,proto3,oneof"` +} + +type HttpRule_Put struct { + // Maps to HTTP PUT. Used for replacing a resource. + Put string `protobuf:"bytes,3,opt,name=put,proto3,oneof"` +} + +type HttpRule_Post struct { + // Maps to HTTP POST. Used for creating a resource or performing an action. + Post string `protobuf:"bytes,4,opt,name=post,proto3,oneof"` +} + +type HttpRule_Delete struct { + // Maps to HTTP DELETE. Used for deleting a resource. + Delete string `protobuf:"bytes,5,opt,name=delete,proto3,oneof"` +} + +type HttpRule_Patch struct { + // Maps to HTTP PATCH. Used for updating a resource. + Patch string `protobuf:"bytes,6,opt,name=patch,proto3,oneof"` +} + +type HttpRule_Custom struct { + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. + Custom *CustomHttpPattern `protobuf:"bytes,8,opt,name=custom,proto3,oneof"` +} + +func (*HttpRule_Get) isHttpRule_Pattern() {} + +func (*HttpRule_Put) isHttpRule_Pattern() {} + +func (*HttpRule_Post) isHttpRule_Pattern() {} + +func (*HttpRule_Delete) isHttpRule_Pattern() {} + +func (*HttpRule_Patch) isHttpRule_Pattern() {} + +func (*HttpRule_Custom) isHttpRule_Pattern() {} + +// A custom pattern is used for defining custom HTTP verb. 
+type CustomHttpPattern struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The name of this custom HTTP verb. + Kind string `protobuf:"bytes,1,opt,name=kind,proto3" json:"kind,omitempty"` + // The path matched by this custom verb. + Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` +} + +func (x *CustomHttpPattern) Reset() { + *x = CustomHttpPattern{} + if protoimpl.UnsafeEnabled { + mi := &file_google_api_http_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CustomHttpPattern) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CustomHttpPattern) ProtoMessage() {} + +func (x *CustomHttpPattern) ProtoReflect() protoreflect.Message { + mi := &file_google_api_http_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CustomHttpPattern.ProtoReflect.Descriptor instead. 
+func (*CustomHttpPattern) Descriptor() ([]byte, []int) { + return file_google_api_http_proto_rawDescGZIP(), []int{2} +} + +func (x *CustomHttpPattern) GetKind() string { + if x != nil { + return x.Kind + } + return "" +} + +func (x *CustomHttpPattern) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +var File_google_api_http_proto protoreflect.FileDescriptor + +var file_google_api_http_proto_rawDesc = []byte{ + 0x0a, 0x15, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x68, 0x74, 0x74, + 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x61, 0x70, 0x69, 0x22, 0x79, 0x0a, 0x04, 0x48, 0x74, 0x74, 0x70, 0x12, 0x2a, 0x0a, 0x05, 0x72, + 0x75, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x74, 0x74, 0x70, 0x52, 0x75, 0x6c, 0x65, + 0x52, 0x05, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x45, 0x0a, 0x1f, 0x66, 0x75, 0x6c, 0x6c, 0x79, + 0x5f, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, + 0x5f, 0x65, 0x78, 0x70, 0x61, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x1c, 0x66, 0x75, 0x6c, 0x6c, 0x79, 0x44, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x52, 0x65, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x64, 0x45, 0x78, 0x70, 0x61, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xda, + 0x02, 0x0a, 0x08, 0x48, 0x74, 0x74, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x03, 0x67, 0x65, 0x74, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x67, 0x65, 0x74, 0x12, 0x12, 0x0a, 0x03, 0x70, + 0x75, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x70, 0x75, 0x74, 0x12, + 0x14, 0x0a, 0x04, 0x70, 0x6f, 0x73, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 
0x52, + 0x04, 0x70, 0x6f, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x06, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, + 0x16, 0x0a, 0x05, 0x70, 0x61, 0x74, 0x63, 0x68, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x05, 0x70, 0x61, 0x74, 0x63, 0x68, 0x12, 0x37, 0x0a, 0x06, 0x63, 0x75, 0x73, 0x74, 0x6f, + 0x6d, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x48, 0x74, 0x74, 0x70, 0x50, + 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x48, 0x00, 0x52, 0x06, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, + 0x12, 0x12, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x62, 0x6f, 0x64, 0x79, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x5f, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x45, 0x0a, 0x13, 0x61, 0x64, 0x64, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, + 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x48, 0x74, 0x74, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x52, 0x12, 0x61, 0x64, + 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, + 0x42, 0x09, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x22, 0x3b, 0x0a, 0x11, 0x43, + 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x48, 0x74, 0x74, 0x70, 0x50, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, + 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x42, 0x84, 0x01, 0x0a, 0x0e, 0x63, 0x6f, 0x6d, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 
0x6c, 0x65, 0x2e, 0x61, 0x70, 0x69, 0x42, 0x09, 0x48, 0x74, 0x74, + 0x70, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x1b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, + 0x74, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2f, 0x61, 0x70, 0x69, 0xf8, 0x01, 0x01, 0xa2, 0x02, 0x03, 0x47, 0x41, 0x58, 0xaa, 0x02, + 0x0a, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x41, 0x70, 0x69, 0xca, 0x02, 0x0a, 0x47, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x5c, 0x41, 0x70, 0x69, 0xe2, 0x02, 0x16, 0x47, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x5c, 0x41, 0x70, 0x69, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0xea, 0x02, 0x0b, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x41, 0x70, 0x69, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_google_api_http_proto_rawDescOnce sync.Once + file_google_api_http_proto_rawDescData = file_google_api_http_proto_rawDesc +) + +func file_google_api_http_proto_rawDescGZIP() []byte { + file_google_api_http_proto_rawDescOnce.Do(func() { + file_google_api_http_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_api_http_proto_rawDescData) + }) + return file_google_api_http_proto_rawDescData +} + +var file_google_api_http_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_google_api_http_proto_goTypes = []interface{}{ + (*Http)(nil), // 0: google.api.Http + (*HttpRule)(nil), // 1: google.api.HttpRule + (*CustomHttpPattern)(nil), // 2: google.api.CustomHttpPattern +} +var file_google_api_http_proto_depIdxs = []int32{ + 1, // 0: google.api.Http.rules:type_name -> google.api.HttpRule + 2, // 1: google.api.HttpRule.custom:type_name -> google.api.CustomHttpPattern + 1, // 2: google.api.HttpRule.additional_bindings:type_name -> google.api.HttpRule + 3, // [3:3] is the sub-list for method output_type + 3, // [3:3] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, 
// [0:3] is the sub-list for field type_name +} + +func init() { file_google_api_http_proto_init() } +func file_google_api_http_proto_init() { + if File_google_api_http_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_google_api_http_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Http); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_api_http_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*HttpRule); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_api_http_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CustomHttpPattern); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_google_api_http_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*HttpRule_Get)(nil), + (*HttpRule_Put)(nil), + (*HttpRule_Post)(nil), + (*HttpRule_Delete)(nil), + (*HttpRule_Patch)(nil), + (*HttpRule_Custom)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_google_api_http_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_google_api_http_proto_goTypes, + DependencyIndexes: file_google_api_http_proto_depIdxs, + MessageInfos: file_google_api_http_proto_msgTypes, + }.Build() + File_google_api_http_proto = out.File + file_google_api_http_proto_rawDesc = nil + file_google_api_http_proto_goTypes = nil + file_google_api_http_proto_depIdxs = nil +} diff --git a/flow/generated/protos/peers.pb.go b/flow/generated/protos/peers.pb.go index 41a872107..7f8bb4a37 100644 --- a/flow/generated/protos/peers.pb.go +++ 
b/flow/generated/protos/peers.pb.go @@ -23,13 +23,14 @@ const ( type DBType int32 const ( - DBType_BIGQUERY DBType = 0 - DBType_SNOWFLAKE DBType = 1 - DBType_MONGO DBType = 2 - DBType_POSTGRES DBType = 3 - DBType_EVENTHUB DBType = 4 - DBType_S3 DBType = 5 - DBType_SQLSERVER DBType = 6 + DBType_BIGQUERY DBType = 0 + DBType_SNOWFLAKE DBType = 1 + DBType_MONGO DBType = 2 + DBType_POSTGRES DBType = 3 + DBType_EVENTHUB DBType = 4 + DBType_S3 DBType = 5 + DBType_SQLSERVER DBType = 6 + DBType_EVENTHUB_GROUP DBType = 7 ) // Enum value maps for DBType. @@ -42,15 +43,17 @@ var ( 4: "EVENTHUB", 5: "S3", 6: "SQLSERVER", + 7: "EVENTHUB_GROUP", } DBType_value = map[string]int32{ - "BIGQUERY": 0, - "SNOWFLAKE": 1, - "MONGO": 2, - "POSTGRES": 3, - "EVENTHUB": 4, - "S3": 5, - "SQLSERVER": 6, + "BIGQUERY": 0, + "SNOWFLAKE": 1, + "MONGO": 2, + "POSTGRES": 3, + "EVENTHUB": 4, + "S3": 5, + "SQLSERVER": 6, + "EVENTHUB_GROUP": 7, } ) @@ -495,6 +498,12 @@ type EventHubConfig struct { ResourceGroup string `protobuf:"bytes,2,opt,name=resource_group,json=resourceGroup,proto3" json:"resource_group,omitempty"` Location string `protobuf:"bytes,3,opt,name=location,proto3" json:"location,omitempty"` MetadataDb *PostgresConfig `protobuf:"bytes,4,opt,name=metadata_db,json=metadataDb,proto3" json:"metadata_db,omitempty"` + // if this is empty PeerDB uses `AZURE_SUBSCRIPTION_ID` environment variable. 
+ SubscriptionId string `protobuf:"bytes,5,opt,name=subscription_id,json=subscriptionId,proto3" json:"subscription_id,omitempty"` + // defaults to 3 + PartitionCount uint32 `protobuf:"varint,6,opt,name=partition_count,json=partitionCount,proto3" json:"partition_count,omitempty"` + // defaults to 7 + MessageRetentionInDays uint32 `protobuf:"varint,7,opt,name=message_retention_in_days,json=messageRetentionInDays,proto3" json:"message_retention_in_days,omitempty"` } func (x *EventHubConfig) Reset() { @@ -557,18 +566,109 @@ func (x *EventHubConfig) GetMetadataDb() *PostgresConfig { return nil } +func (x *EventHubConfig) GetSubscriptionId() string { + if x != nil { + return x.SubscriptionId + } + return "" +} + +func (x *EventHubConfig) GetPartitionCount() uint32 { + if x != nil { + return x.PartitionCount + } + return 0 +} + +func (x *EventHubConfig) GetMessageRetentionInDays() uint32 { + if x != nil { + return x.MessageRetentionInDays + } + return 0 +} + +type EventHubGroupConfig struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // event hub peer name to event hub config + Eventhubs map[string]*EventHubConfig `protobuf:"bytes,1,rep,name=eventhubs,proto3" json:"eventhubs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + MetadataDb *PostgresConfig `protobuf:"bytes,2,opt,name=metadata_db,json=metadataDb,proto3" json:"metadata_db,omitempty"` + UnnestColumns []string `protobuf:"bytes,3,rep,name=unnest_columns,json=unnestColumns,proto3" json:"unnest_columns,omitempty"` +} + +func (x *EventHubGroupConfig) Reset() { + *x = EventHubGroupConfig{} + if protoimpl.UnsafeEnabled { + mi := &file_peers_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EventHubGroupConfig) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EventHubGroupConfig) ProtoMessage() {} + +func (x 
*EventHubGroupConfig) ProtoReflect() protoreflect.Message { + mi := &file_peers_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EventHubGroupConfig.ProtoReflect.Descriptor instead. +func (*EventHubGroupConfig) Descriptor() ([]byte, []int) { + return file_peers_proto_rawDescGZIP(), []int{5} +} + +func (x *EventHubGroupConfig) GetEventhubs() map[string]*EventHubConfig { + if x != nil { + return x.Eventhubs + } + return nil +} + +func (x *EventHubGroupConfig) GetMetadataDb() *PostgresConfig { + if x != nil { + return x.MetadataDb + } + return nil +} + +func (x *EventHubGroupConfig) GetUnnestColumns() []string { + if x != nil { + return x.UnnestColumns + } + return nil +} + type S3Config struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + AccessKeyId *string `protobuf:"bytes,2,opt,name=access_key_id,json=accessKeyId,proto3,oneof" json:"access_key_id,omitempty"` + SecretAccessKey *string `protobuf:"bytes,3,opt,name=secret_access_key,json=secretAccessKey,proto3,oneof" json:"secret_access_key,omitempty"` + RoleArn *string `protobuf:"bytes,4,opt,name=role_arn,json=roleArn,proto3,oneof" json:"role_arn,omitempty"` + Region *string `protobuf:"bytes,5,opt,name=region,proto3,oneof" json:"region,omitempty"` + Endpoint *string `protobuf:"bytes,6,opt,name=endpoint,proto3,oneof" json:"endpoint,omitempty"` + MetadataDb *PostgresConfig `protobuf:"bytes,7,opt,name=metadata_db,json=metadataDb,proto3" json:"metadata_db,omitempty"` } func (x *S3Config) Reset() { *x = S3Config{} if protoimpl.UnsafeEnabled { - mi := &file_peers_proto_msgTypes[5] + mi := &file_peers_proto_msgTypes[6] ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -581,7 +681,7 @@ func (x *S3Config) String() string { func (*S3Config) ProtoMessage() {} func (x *S3Config) ProtoReflect() protoreflect.Message { - mi := &file_peers_proto_msgTypes[5] + mi := &file_peers_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -594,7 +694,7 @@ func (x *S3Config) ProtoReflect() protoreflect.Message { // Deprecated: Use S3Config.ProtoReflect.Descriptor instead. func (*S3Config) Descriptor() ([]byte, []int) { - return file_peers_proto_rawDescGZIP(), []int{5} + return file_peers_proto_rawDescGZIP(), []int{6} } func (x *S3Config) GetUrl() string { @@ -604,6 +704,48 @@ func (x *S3Config) GetUrl() string { return "" } +func (x *S3Config) GetAccessKeyId() string { + if x != nil && x.AccessKeyId != nil { + return *x.AccessKeyId + } + return "" +} + +func (x *S3Config) GetSecretAccessKey() string { + if x != nil && x.SecretAccessKey != nil { + return *x.SecretAccessKey + } + return "" +} + +func (x *S3Config) GetRoleArn() string { + if x != nil && x.RoleArn != nil { + return *x.RoleArn + } + return "" +} + +func (x *S3Config) GetRegion() string { + if x != nil && x.Region != nil { + return *x.Region + } + return "" +} + +func (x *S3Config) GetEndpoint() string { + if x != nil && x.Endpoint != nil { + return *x.Endpoint + } + return "" +} + +func (x *S3Config) GetMetadataDb() *PostgresConfig { + if x != nil { + return x.MetadataDb + } + return nil +} + type SqlServerConfig struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -619,7 +761,7 @@ type SqlServerConfig struct { func (x *SqlServerConfig) Reset() { *x = SqlServerConfig{} if protoimpl.UnsafeEnabled { - mi := &file_peers_proto_msgTypes[6] + mi := &file_peers_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -632,7 +774,7 @@ func (x *SqlServerConfig) 
String() string { func (*SqlServerConfig) ProtoMessage() {} func (x *SqlServerConfig) ProtoReflect() protoreflect.Message { - mi := &file_peers_proto_msgTypes[6] + mi := &file_peers_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -645,7 +787,7 @@ func (x *SqlServerConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use SqlServerConfig.ProtoReflect.Descriptor instead. func (*SqlServerConfig) Descriptor() ([]byte, []int) { - return file_peers_proto_rawDescGZIP(), []int{6} + return file_peers_proto_rawDescGZIP(), []int{7} } func (x *SqlServerConfig) GetServer() string { @@ -699,13 +841,14 @@ type Peer struct { // *Peer_EventhubConfig // *Peer_S3Config // *Peer_SqlserverConfig + // *Peer_EventhubGroupConfig Config isPeer_Config `protobuf_oneof:"config"` } func (x *Peer) Reset() { *x = Peer{} if protoimpl.UnsafeEnabled { - mi := &file_peers_proto_msgTypes[7] + mi := &file_peers_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -718,7 +861,7 @@ func (x *Peer) String() string { func (*Peer) ProtoMessage() {} func (x *Peer) ProtoReflect() protoreflect.Message { - mi := &file_peers_proto_msgTypes[7] + mi := &file_peers_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -731,7 +874,7 @@ func (x *Peer) ProtoReflect() protoreflect.Message { // Deprecated: Use Peer.ProtoReflect.Descriptor instead. 
func (*Peer) Descriptor() ([]byte, []int) { - return file_peers_proto_rawDescGZIP(), []int{7} + return file_peers_proto_rawDescGZIP(), []int{8} } func (x *Peer) GetName() string { @@ -804,6 +947,13 @@ func (x *Peer) GetSqlserverConfig() *SqlServerConfig { return nil } +func (x *Peer) GetEventhubGroupConfig() *EventHubGroupConfig { + if x, ok := x.GetConfig().(*Peer_EventhubGroupConfig); ok { + return x.EventhubGroupConfig + } + return nil +} + type isPeer_Config interface { isPeer_Config() } @@ -836,6 +986,10 @@ type Peer_SqlserverConfig struct { SqlserverConfig *SqlServerConfig `protobuf:"bytes,9,opt,name=sqlserver_config,json=sqlserverConfig,proto3,oneof"` } +type Peer_EventhubGroupConfig struct { + EventhubGroupConfig *EventHubGroupConfig `protobuf:"bytes,10,opt,name=eventhub_group_config,json=eventhubGroupConfig,proto3,oneof"` +} + func (*Peer_SnowflakeConfig) isPeer_Config() {} func (*Peer_BigqueryConfig) isPeer_Config() {} @@ -850,6 +1004,8 @@ func (*Peer_S3Config) isPeer_Config() {} func (*Peer_SqlserverConfig) isPeer_Config() {} +func (*Peer_EventhubGroupConfig) isPeer_Config() {} + var File_peers_proto protoreflect.FileDescriptor var file_peers_proto_rawDesc = []byte{ @@ -922,7 +1078,7 @@ var file_peers_proto_rawDesc = []byte{ 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x22, - 0xb0, 0x01, 0x0a, 0x0e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, 0x75, 0x62, 0x43, 0x6f, 0x6e, 0x66, + 0xbd, 0x02, 0x0a, 0x0e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, 0x75, 0x62, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 
0x65, 0x5f, 0x67, 0x72, 0x6f, @@ -933,68 +1089,122 @@ var file_peers_proto_rawDesc = []byte{ 0x64, 0x62, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x44, 0x62, 0x22, 0x1c, 0x0a, 0x08, 0x53, 0x33, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x10, - 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, - 0x22, 0x89, 0x01, 0x0a, 0x0f, 0x53, 0x71, 0x6c, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, - 0x70, 0x6f, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x70, 0x6f, 0x72, 0x74, - 0x12, 0x12, 0x0a, 0x04, 0x75, 0x73, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x75, 0x73, 0x65, 0x72, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, - 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x22, 0xb8, 0x04, 0x0a, - 0x04, 0x50, 0x65, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x04, 0x74, 0x79, 0x70, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x14, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, - 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x44, 0x42, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x12, 0x4a, 0x0a, 0x10, 0x73, 0x6e, 0x6f, 0x77, 0x66, 0x6c, 0x61, 0x6b, 0x65, - 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 
0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, - 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x53, 0x6e, 0x6f, - 0x77, 0x66, 0x6c, 0x61, 0x6b, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0f, - 0x73, 0x6e, 0x6f, 0x77, 0x66, 0x6c, 0x61, 0x6b, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, - 0x47, 0x0a, 0x0f, 0x62, 0x69, 0x67, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, - 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x42, 0x69, 0x67, 0x71, 0x75, 0x65, 0x72, 0x79, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0e, 0x62, 0x69, 0x67, 0x71, 0x75, 0x65, - 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x3e, 0x0a, 0x0c, 0x6d, 0x6f, 0x6e, 0x67, - 0x6f, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, - 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x4d, 0x6f, - 0x6e, 0x67, 0x6f, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0b, 0x6d, 0x6f, 0x6e, - 0x67, 0x6f, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x47, 0x0a, 0x0f, 0x70, 0x6f, 0x73, 0x74, - 0x67, 0x72, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, - 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, - 0x00, 0x52, 0x0e, 0x70, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x12, 0x47, 0x0a, 0x0f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x68, 0x75, 0x62, 0x5f, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, + 0x44, 0x62, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 
0x75, 0x62, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x27, 0x0a, 0x0f, 0x70, + 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0e, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x39, 0x0a, 0x19, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, + 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x5f, 0x64, 0x61, 0x79, + 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x16, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x52, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x22, + 0xa7, 0x02, 0x0a, 0x13, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, 0x75, 0x62, 0x47, 0x72, 0x6f, 0x75, + 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4e, 0x0a, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x68, 0x75, 0x62, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, - 0x75, 0x62, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x68, 0x75, 0x62, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x35, 0x0a, 0x09, 0x73, 0x33, - 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, - 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x53, 0x33, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x08, 0x73, 0x33, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x12, 0x4a, 0x0a, 0x10, 0x73, 0x71, 0x6c, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x65, - 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x53, 0x71, 0x6c, 0x53, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0f, 0x73, 0x71, - 0x6c, 0x73, 0x65, 0x72, 
0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x08, 0x0a, - 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2a, 0x63, 0x0a, 0x06, 0x44, 0x42, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x0c, 0x0a, 0x08, 0x42, 0x49, 0x47, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x00, 0x12, - 0x0d, 0x0a, 0x09, 0x53, 0x4e, 0x4f, 0x57, 0x46, 0x4c, 0x41, 0x4b, 0x45, 0x10, 0x01, 0x12, 0x09, - 0x0a, 0x05, 0x4d, 0x4f, 0x4e, 0x47, 0x4f, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x4f, 0x53, - 0x54, 0x47, 0x52, 0x45, 0x53, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x45, 0x56, 0x45, 0x4e, 0x54, - 0x48, 0x55, 0x42, 0x10, 0x04, 0x12, 0x06, 0x0a, 0x02, 0x53, 0x33, 0x10, 0x05, 0x12, 0x0d, 0x0a, - 0x09, 0x53, 0x51, 0x4c, 0x53, 0x45, 0x52, 0x56, 0x45, 0x52, 0x10, 0x06, 0x42, 0x7c, 0x0a, 0x10, - 0x63, 0x6f, 0x6d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, - 0x42, 0x0a, 0x50, 0x65, 0x65, 0x72, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x10, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, - 0xa2, 0x02, 0x03, 0x50, 0x58, 0x58, 0xaa, 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x50, - 0x65, 0x65, 0x72, 0x73, 0xca, 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x50, 0x65, 0x65, - 0x72, 0x73, 0xe2, 0x02, 0x17, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x50, 0x65, 0x65, 0x72, 0x73, - 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0b, 0x50, - 0x65, 0x65, 0x72, 0x64, 0x62, 0x50, 0x65, 0x65, 0x72, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 0x75, 0x62, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x68, 0x75, 0x62, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x68, 0x75, 0x62, 0x73, 0x12, 0x3d, 0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x5f, 0x64, 0x62, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, + 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 
0x65, 0x72, 0x73, 0x2e, 0x50, 0x6f, 0x73, 0x74, + 0x67, 0x72, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x44, 0x62, 0x12, 0x25, 0x0a, 0x0e, 0x75, 0x6e, 0x6e, 0x65, 0x73, 0x74, + 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, + 0x75, 0x6e, 0x6e, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x1a, 0x5a, 0x0a, + 0x0e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x68, 0x75, 0x62, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x32, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, 0x75, 0x62, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xe0, 0x02, 0x0a, 0x08, 0x53, 0x33, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x27, 0x0a, 0x0d, 0x61, 0x63, 0x63, 0x65, + 0x73, 0x73, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x88, 0x01, + 0x01, 0x12, 0x2f, 0x0a, 0x11, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x61, 0x63, 0x63, 0x65, + 0x73, 0x73, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0f, + 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x4b, 0x65, 0x79, 0x88, + 0x01, 0x01, 0x12, 0x1e, 0x0a, 0x08, 0x72, 0x6f, 0x6c, 0x65, 0x5f, 0x61, 0x72, 0x6e, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x07, 0x72, 0x6f, 0x6c, 0x65, 0x41, 0x72, 0x6e, 0x88, + 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x18, 0x05, 
0x20, 0x01, + 0x28, 0x09, 0x48, 0x03, 0x52, 0x06, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, + 0x1f, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x04, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x88, 0x01, 0x01, + 0x12, 0x3d, 0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x64, 0x62, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, + 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x44, 0x62, 0x42, + 0x10, 0x0a, 0x0e, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, + 0x64, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x61, 0x63, 0x63, + 0x65, 0x73, 0x73, 0x5f, 0x6b, 0x65, 0x79, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x72, 0x6f, 0x6c, 0x65, + 0x5f, 0x61, 0x72, 0x6e, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x42, + 0x0b, 0x0a, 0x09, 0x5f, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x22, 0x89, 0x01, 0x0a, + 0x0f, 0x53, 0x71, 0x6c, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x12, 0x16, 0x0a, 0x06, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6f, 0x72, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x12, 0x0a, 0x04, + 0x75, 0x73, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x73, 0x65, 0x72, + 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x1a, 0x0a, 0x08, + 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x64, 0x61, 0x74, 0x61, 
0x62, 0x61, 0x73, 0x65, 0x22, 0x91, 0x05, 0x0a, 0x04, 0x50, 0x65, 0x65, + 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0e, 0x32, 0x14, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, + 0x72, 0x73, 0x2e, 0x44, 0x42, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, + 0x4a, 0x0a, 0x10, 0x73, 0x6e, 0x6f, 0x77, 0x66, 0x6c, 0x61, 0x6b, 0x65, 0x5f, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x53, 0x6e, 0x6f, 0x77, 0x66, 0x6c, 0x61, + 0x6b, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0f, 0x73, 0x6e, 0x6f, 0x77, + 0x66, 0x6c, 0x61, 0x6b, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x47, 0x0a, 0x0f, 0x62, + 0x69, 0x67, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, + 0x65, 0x72, 0x73, 0x2e, 0x42, 0x69, 0x67, 0x71, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x48, 0x00, 0x52, 0x0e, 0x62, 0x69, 0x67, 0x71, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x12, 0x3e, 0x0a, 0x0c, 0x6d, 0x6f, 0x6e, 0x67, 0x6f, 0x5f, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x4d, 0x6f, 0x6e, 0x67, 0x6f, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0b, 0x6d, 0x6f, 0x6e, 0x67, 0x6f, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x12, 0x47, 0x0a, 0x0f, 0x70, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, + 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, + 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 
0x72, 0x73, 0x2e, 0x50, 0x6f, 0x73, + 0x74, 0x67, 0x72, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0e, 0x70, + 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x47, 0x0a, + 0x0f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x68, 0x75, 0x62, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, + 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, 0x75, 0x62, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x68, 0x75, 0x62, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x35, 0x0a, 0x09, 0x73, 0x33, 0x5f, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x53, 0x33, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x48, 0x00, 0x52, 0x08, 0x73, 0x33, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4a, 0x0a, + 0x10, 0x73, 0x71, 0x6c, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, + 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x53, 0x71, 0x6c, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0f, 0x73, 0x71, 0x6c, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x57, 0x0a, 0x15, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x68, 0x75, 0x62, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, + 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, 0x75, 0x62, + 0x47, 0x72, 0x6f, 0x75, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x13, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x68, 0x75, 0x62, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x43, 0x6f, 0x6e, 0x66, + 
0x69, 0x67, 0x42, 0x08, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2a, 0x77, 0x0a, 0x06, + 0x44, 0x42, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0c, 0x0a, 0x08, 0x42, 0x49, 0x47, 0x51, 0x55, 0x45, + 0x52, 0x59, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x4e, 0x4f, 0x57, 0x46, 0x4c, 0x41, 0x4b, + 0x45, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x4f, 0x4e, 0x47, 0x4f, 0x10, 0x02, 0x12, 0x0c, + 0x0a, 0x08, 0x50, 0x4f, 0x53, 0x54, 0x47, 0x52, 0x45, 0x53, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, + 0x45, 0x56, 0x45, 0x4e, 0x54, 0x48, 0x55, 0x42, 0x10, 0x04, 0x12, 0x06, 0x0a, 0x02, 0x53, 0x33, + 0x10, 0x05, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x51, 0x4c, 0x53, 0x45, 0x52, 0x56, 0x45, 0x52, 0x10, + 0x06, 0x12, 0x12, 0x0a, 0x0e, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x48, 0x55, 0x42, 0x5f, 0x47, 0x52, + 0x4f, 0x55, 0x50, 0x10, 0x07, 0x42, 0x7c, 0x0a, 0x10, 0x63, 0x6f, 0x6d, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x42, 0x0a, 0x50, 0x65, 0x65, 0x72, 0x73, + 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x10, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0xa2, 0x02, 0x03, 0x50, 0x58, 0x58, 0xaa, + 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x50, 0x65, 0x65, 0x72, 0x73, 0xca, 0x02, 0x0b, + 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x50, 0x65, 0x65, 0x72, 0x73, 0xe2, 0x02, 0x17, 0x50, 0x65, + 0x65, 0x72, 0x64, 0x62, 0x50, 0x65, 0x65, 0x72, 0x73, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x50, 0x65, + 0x65, 0x72, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1010,33 +1220,40 @@ func file_peers_proto_rawDescGZIP() []byte { } var file_peers_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_peers_proto_msgTypes = make([]protoimpl.MessageInfo, 8) +var file_peers_proto_msgTypes = make([]protoimpl.MessageInfo, 10) var file_peers_proto_goTypes = []interface{}{ - (DBType)(0), // 0: peerdb_peers.DBType - 
(*SnowflakeConfig)(nil), // 1: peerdb_peers.SnowflakeConfig - (*BigqueryConfig)(nil), // 2: peerdb_peers.BigqueryConfig - (*MongoConfig)(nil), // 3: peerdb_peers.MongoConfig - (*PostgresConfig)(nil), // 4: peerdb_peers.PostgresConfig - (*EventHubConfig)(nil), // 5: peerdb_peers.EventHubConfig - (*S3Config)(nil), // 6: peerdb_peers.S3Config - (*SqlServerConfig)(nil), // 7: peerdb_peers.SqlServerConfig - (*Peer)(nil), // 8: peerdb_peers.Peer + (DBType)(0), // 0: peerdb_peers.DBType + (*SnowflakeConfig)(nil), // 1: peerdb_peers.SnowflakeConfig + (*BigqueryConfig)(nil), // 2: peerdb_peers.BigqueryConfig + (*MongoConfig)(nil), // 3: peerdb_peers.MongoConfig + (*PostgresConfig)(nil), // 4: peerdb_peers.PostgresConfig + (*EventHubConfig)(nil), // 5: peerdb_peers.EventHubConfig + (*EventHubGroupConfig)(nil), // 6: peerdb_peers.EventHubGroupConfig + (*S3Config)(nil), // 7: peerdb_peers.S3Config + (*SqlServerConfig)(nil), // 8: peerdb_peers.SqlServerConfig + (*Peer)(nil), // 9: peerdb_peers.Peer + nil, // 10: peerdb_peers.EventHubGroupConfig.EventhubsEntry } var file_peers_proto_depIdxs = []int32{ - 4, // 0: peerdb_peers.EventHubConfig.metadata_db:type_name -> peerdb_peers.PostgresConfig - 0, // 1: peerdb_peers.Peer.type:type_name -> peerdb_peers.DBType - 1, // 2: peerdb_peers.Peer.snowflake_config:type_name -> peerdb_peers.SnowflakeConfig - 2, // 3: peerdb_peers.Peer.bigquery_config:type_name -> peerdb_peers.BigqueryConfig - 3, // 4: peerdb_peers.Peer.mongo_config:type_name -> peerdb_peers.MongoConfig - 4, // 5: peerdb_peers.Peer.postgres_config:type_name -> peerdb_peers.PostgresConfig - 5, // 6: peerdb_peers.Peer.eventhub_config:type_name -> peerdb_peers.EventHubConfig - 6, // 7: peerdb_peers.Peer.s3_config:type_name -> peerdb_peers.S3Config - 7, // 8: peerdb_peers.Peer.sqlserver_config:type_name -> peerdb_peers.SqlServerConfig - 9, // [9:9] is the sub-list for method output_type - 9, // [9:9] is the sub-list for method input_type - 9, // [9:9] is the sub-list for 
extension type_name - 9, // [9:9] is the sub-list for extension extendee - 0, // [0:9] is the sub-list for field type_name + 4, // 0: peerdb_peers.EventHubConfig.metadata_db:type_name -> peerdb_peers.PostgresConfig + 10, // 1: peerdb_peers.EventHubGroupConfig.eventhubs:type_name -> peerdb_peers.EventHubGroupConfig.EventhubsEntry + 4, // 2: peerdb_peers.EventHubGroupConfig.metadata_db:type_name -> peerdb_peers.PostgresConfig + 4, // 3: peerdb_peers.S3Config.metadata_db:type_name -> peerdb_peers.PostgresConfig + 0, // 4: peerdb_peers.Peer.type:type_name -> peerdb_peers.DBType + 1, // 5: peerdb_peers.Peer.snowflake_config:type_name -> peerdb_peers.SnowflakeConfig + 2, // 6: peerdb_peers.Peer.bigquery_config:type_name -> peerdb_peers.BigqueryConfig + 3, // 7: peerdb_peers.Peer.mongo_config:type_name -> peerdb_peers.MongoConfig + 4, // 8: peerdb_peers.Peer.postgres_config:type_name -> peerdb_peers.PostgresConfig + 5, // 9: peerdb_peers.Peer.eventhub_config:type_name -> peerdb_peers.EventHubConfig + 7, // 10: peerdb_peers.Peer.s3_config:type_name -> peerdb_peers.S3Config + 8, // 11: peerdb_peers.Peer.sqlserver_config:type_name -> peerdb_peers.SqlServerConfig + 6, // 12: peerdb_peers.Peer.eventhub_group_config:type_name -> peerdb_peers.EventHubGroupConfig + 5, // 13: peerdb_peers.EventHubGroupConfig.EventhubsEntry.value:type_name -> peerdb_peers.EventHubConfig + 14, // [14:14] is the sub-list for method output_type + 14, // [14:14] is the sub-list for method input_type + 14, // [14:14] is the sub-list for extension type_name + 14, // [14:14] is the sub-list for extension extendee + 0, // [0:14] is the sub-list for field type_name } func init() { file_peers_proto_init() } @@ -1106,7 +1323,7 @@ func file_peers_proto_init() { } } file_peers_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*S3Config); i { + switch v := v.(*EventHubGroupConfig); i { case 0: return &v.state case 1: @@ -1118,7 +1335,7 @@ func file_peers_proto_init() { } } 
file_peers_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SqlServerConfig); i { + switch v := v.(*S3Config); i { case 0: return &v.state case 1: @@ -1130,6 +1347,18 @@ func file_peers_proto_init() { } } file_peers_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SqlServerConfig); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_peers_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Peer); i { case 0: return &v.state @@ -1143,7 +1372,8 @@ func file_peers_proto_init() { } } file_peers_proto_msgTypes[0].OneofWrappers = []interface{}{} - file_peers_proto_msgTypes[7].OneofWrappers = []interface{}{ + file_peers_proto_msgTypes[6].OneofWrappers = []interface{}{} + file_peers_proto_msgTypes[8].OneofWrappers = []interface{}{ (*Peer_SnowflakeConfig)(nil), (*Peer_BigqueryConfig)(nil), (*Peer_MongoConfig)(nil), @@ -1151,6 +1381,7 @@ func file_peers_proto_init() { (*Peer_EventhubConfig)(nil), (*Peer_S3Config)(nil), (*Peer_SqlserverConfig)(nil), + (*Peer_EventhubGroupConfig)(nil), } type x struct{} out := protoimpl.TypeBuilder{ @@ -1158,7 +1389,7 @@ func file_peers_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_peers_proto_rawDesc, NumEnums: 1, - NumMessages: 8, + NumMessages: 10, NumExtensions: 0, NumServices: 0, }, diff --git a/flow/generated/protos/route.pb.go b/flow/generated/protos/route.pb.go index d11e4f6e8..840623f14 100644 --- a/flow/generated/protos/route.pb.go +++ b/flow/generated/protos/route.pb.go @@ -7,9 +7,10 @@ package protos import ( + _ "google.golang.org/genproto/googleapis/api/annotations" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" - _ "google.golang.org/protobuf/types/known/timestamppb" + timestamppb 
"google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" ) @@ -21,12 +22,111 @@ const ( _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) +type ValidatePeerStatus int32 + +const ( + ValidatePeerStatus_CREATION_UNKNOWN ValidatePeerStatus = 0 + ValidatePeerStatus_VALID ValidatePeerStatus = 1 + ValidatePeerStatus_INVALID ValidatePeerStatus = 2 +) + +// Enum value maps for ValidatePeerStatus. +var ( + ValidatePeerStatus_name = map[int32]string{ + 0: "CREATION_UNKNOWN", + 1: "VALID", + 2: "INVALID", + } + ValidatePeerStatus_value = map[string]int32{ + "CREATION_UNKNOWN": 0, + "VALID": 1, + "INVALID": 2, + } +) + +func (x ValidatePeerStatus) Enum() *ValidatePeerStatus { + p := new(ValidatePeerStatus) + *p = x + return p +} + +func (x ValidatePeerStatus) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ValidatePeerStatus) Descriptor() protoreflect.EnumDescriptor { + return file_route_proto_enumTypes[0].Descriptor() +} + +func (ValidatePeerStatus) Type() protoreflect.EnumType { + return &file_route_proto_enumTypes[0] +} + +func (x ValidatePeerStatus) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ValidatePeerStatus.Descriptor instead. +func (ValidatePeerStatus) EnumDescriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{0} +} + +type CreatePeerStatus int32 + +const ( + CreatePeerStatus_VALIDATION_UNKNOWN CreatePeerStatus = 0 + CreatePeerStatus_CREATED CreatePeerStatus = 1 + CreatePeerStatus_FAILED CreatePeerStatus = 2 +) + +// Enum value maps for CreatePeerStatus. 
+var ( + CreatePeerStatus_name = map[int32]string{ + 0: "VALIDATION_UNKNOWN", + 1: "CREATED", + 2: "FAILED", + } + CreatePeerStatus_value = map[string]int32{ + "VALIDATION_UNKNOWN": 0, + "CREATED": 1, + "FAILED": 2, + } +) + +func (x CreatePeerStatus) Enum() *CreatePeerStatus { + p := new(CreatePeerStatus) + *p = x + return p +} + +func (x CreatePeerStatus) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (CreatePeerStatus) Descriptor() protoreflect.EnumDescriptor { + return file_route_proto_enumTypes[1].Descriptor() +} + +func (CreatePeerStatus) Type() protoreflect.EnumType { + return &file_route_proto_enumTypes[1] +} + +func (x CreatePeerStatus) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use CreatePeerStatus.Descriptor instead. +func (CreatePeerStatus) EnumDescriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{1} +} + type CreateCDCFlowRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ConnectionConfigs *FlowConnectionConfigs `protobuf:"bytes,1,opt,name=connection_configs,json=connectionConfigs,proto3" json:"connection_configs,omitempty"` + ConnectionConfigs *FlowConnectionConfigs `protobuf:"bytes,1,opt,name=connection_configs,json=connectionConfigs,proto3" json:"connection_configs,omitempty"` + CreateCatalogEntry bool `protobuf:"varint,2,opt,name=create_catalog_entry,json=createCatalogEntry,proto3" json:"create_catalog_entry,omitempty"` } func (x *CreateCDCFlowRequest) Reset() { @@ -68,6 +168,13 @@ func (x *CreateCDCFlowRequest) GetConnectionConfigs() *FlowConnectionConfigs { return nil } +func (x *CreateCDCFlowRequest) GetCreateCatalogEntry() bool { + if x != nil { + return x.CreateCatalogEntry + } + return false +} + type CreateCDCFlowResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -120,7 +227,8 @@ type CreateQRepFlowRequest struct { 
sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - QrepConfig *QRepConfig `protobuf:"bytes,1,opt,name=qrep_config,json=qrepConfig,proto3" json:"qrep_config,omitempty"` + QrepConfig *QRepConfig `protobuf:"bytes,1,opt,name=qrep_config,json=qrepConfig,proto3" json:"qrep_config,omitempty"` + CreateCatalogEntry bool `protobuf:"varint,2,opt,name=create_catalog_entry,json=createCatalogEntry,proto3" json:"create_catalog_entry,omitempty"` } func (x *CreateQRepFlowRequest) Reset() { @@ -162,6 +270,13 @@ func (x *CreateQRepFlowRequest) GetQrepConfig() *QRepConfig { return nil } +func (x *CreateQRepFlowRequest) GetCreateCatalogEntry() bool { + if x != nil { + return x.CreateCatalogEntry + } + return false +} + type CreateQRepFlowResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -335,14 +450,16 @@ func (x *ShutdownResponse) GetErrorMessage() string { return "" } -type ListPeersRequest struct { +type ValidatePeerRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields + + Peer *Peer `protobuf:"bytes,1,opt,name=peer,proto3" json:"peer,omitempty"` } -func (x *ListPeersRequest) Reset() { - *x = ListPeersRequest{} +func (x *ValidatePeerRequest) Reset() { + *x = ValidatePeerRequest{} if protoimpl.UnsafeEnabled { mi := &file_route_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -350,13 +467,13 @@ func (x *ListPeersRequest) Reset() { } } -func (x *ListPeersRequest) String() string { +func (x *ValidatePeerRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*ListPeersRequest) ProtoMessage() {} +func (*ValidatePeerRequest) ProtoMessage() {} -func (x *ListPeersRequest) ProtoReflect() protoreflect.Message { +func (x *ValidatePeerRequest) ProtoReflect() protoreflect.Message { mi := &file_route_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -368,21 +485,28 @@ func (x 
*ListPeersRequest) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use ListPeersRequest.ProtoReflect.Descriptor instead. -func (*ListPeersRequest) Descriptor() ([]byte, []int) { +// Deprecated: Use ValidatePeerRequest.ProtoReflect.Descriptor instead. +func (*ValidatePeerRequest) Descriptor() ([]byte, []int) { return file_route_proto_rawDescGZIP(), []int{6} } -type ListPeersResponse struct { +func (x *ValidatePeerRequest) GetPeer() *Peer { + if x != nil { + return x.Peer + } + return nil +} + +type CreatePeerRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Peers []*Peer `protobuf:"bytes,1,rep,name=peers,proto3" json:"peers,omitempty"` + Peer *Peer `protobuf:"bytes,1,opt,name=peer,proto3" json:"peer,omitempty"` } -func (x *ListPeersResponse) Reset() { - *x = ListPeersResponse{} +func (x *CreatePeerRequest) Reset() { + *x = CreatePeerRequest{} if protoimpl.UnsafeEnabled { mi := &file_route_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -390,13 +514,13 @@ func (x *ListPeersResponse) Reset() { } } -func (x *ListPeersResponse) String() string { +func (x *CreatePeerRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*ListPeersResponse) ProtoMessage() {} +func (*CreatePeerRequest) ProtoMessage() {} -func (x *ListPeersResponse) ProtoReflect() protoreflect.Message { +func (x *CreatePeerRequest) ProtoReflect() protoreflect.Message { mi := &file_route_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -408,199 +532,1728 @@ func (x *ListPeersResponse) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use ListPeersResponse.ProtoReflect.Descriptor instead. -func (*ListPeersResponse) Descriptor() ([]byte, []int) { +// Deprecated: Use CreatePeerRequest.ProtoReflect.Descriptor instead. 
+func (*CreatePeerRequest) Descriptor() ([]byte, []int) { return file_route_proto_rawDescGZIP(), []int{7} } -func (x *ListPeersResponse) GetPeers() []*Peer { +func (x *CreatePeerRequest) GetPeer() *Peer { if x != nil { - return x.Peers + return x.Peer } return nil } -var File_route_proto protoreflect.FileDescriptor +type DropPeerRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -var file_route_proto_rawDesc = []byte{ - 0x0a, 0x0b, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x70, - 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0b, 0x70, 0x65, - 0x65, 0x72, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0a, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x69, 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, - 0x44, 0x43, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x51, 0x0a, - 0x12, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, - 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x11, 0x63, - 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, - 0x22, 0x36, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, 0x43, 0x46, 0x6c, 0x6f, - 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x77, 0x6f, 0x72, - 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x77, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 
0x49, 0x64, 0x22, 0x51, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x38, 0x0a, 0x0b, 0x71, 0x72, 0x65, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x51, 0x52, 0x65, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, - 0x0a, 0x71, 0x72, 0x65, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x37, 0x0a, 0x16, 0x43, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x77, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x77, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x49, 0x64, 0x22, 0xca, 0x01, 0x0a, 0x0f, 0x53, 0x68, 0x75, 0x74, 0x64, 0x6f, 0x77, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, - 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, - 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, - 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x65, - 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, - 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 
0x50, 0x65, 0x65, 0x72, - 0x52, 0x0f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x65, 0x65, - 0x72, 0x22, 0x47, 0x0a, 0x10, 0x53, 0x68, 0x75, 0x74, 0x64, 0x6f, 0x77, 0x6e, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x6f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x02, 0x6f, 0x6b, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x12, 0x0a, 0x10, 0x4c, 0x69, - 0x73, 0x74, 0x50, 0x65, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x3d, - 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x65, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x28, 0x0a, 0x05, 0x70, 0x65, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, - 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x05, 0x70, 0x65, 0x65, 0x72, 0x73, 0x32, 0xe9, 0x02, - 0x0a, 0x0b, 0x46, 0x6c, 0x6f, 0x77, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x4e, 0x0a, - 0x09, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x65, 0x65, 0x72, 0x73, 0x12, 0x1e, 0x2e, 0x70, 0x65, 0x65, - 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x65, - 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x65, 0x65, - 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x65, - 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5a, 0x0a, - 0x0d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, 0x43, 0x46, 0x6c, 0x6f, 0x77, 0x12, 0x22, - 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x43, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, 0x43, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 
0x1a, 0x23, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, - 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, 0x43, 0x46, 0x6c, 0x6f, 0x77, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5d, 0x0a, 0x0e, 0x43, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x12, 0x23, 0x2e, 0x70, 0x65, - 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x24, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4f, 0x0a, 0x0c, 0x53, 0x68, 0x75, 0x74, - 0x64, 0x6f, 0x77, 0x6e, 0x46, 0x6c, 0x6f, 0x77, 0x12, 0x1d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, - 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x53, 0x68, 0x75, 0x74, 0x64, 0x6f, 0x77, 0x6e, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, - 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x53, 0x68, 0x75, 0x74, 0x64, 0x6f, 0x77, 0x6e, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x7c, 0x0a, 0x10, 0x63, 0x6f, 0x6d, - 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x42, 0x0a, 0x52, - 0x6f, 0x75, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x10, 0x67, 0x65, 0x6e, - 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0xa2, 0x02, 0x03, - 0x50, 0x58, 0x58, 0xaa, 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x52, 0x6f, 0x75, 0x74, - 0x65, 0xca, 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x52, 0x6f, 0x75, 0x74, 0x65, 0xe2, - 0x02, 0x17, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x52, 0x6f, 0x75, 0x74, 0x65, 0x5c, 0x47, 0x50, - 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 
0x61, 0xea, 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, - 0x64, 0x62, 0x52, 0x6f, 0x75, 0x74, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + PeerName string `protobuf:"bytes,1,opt,name=peer_name,json=peerName,proto3" json:"peer_name,omitempty"` } -var ( - file_route_proto_rawDescOnce sync.Once - file_route_proto_rawDescData = file_route_proto_rawDesc -) +func (x *DropPeerRequest) Reset() { + *x = DropPeerRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} -func file_route_proto_rawDescGZIP() []byte { - file_route_proto_rawDescOnce.Do(func() { - file_route_proto_rawDescData = protoimpl.X.CompressGZIP(file_route_proto_rawDescData) - }) - return file_route_proto_rawDescData +func (x *DropPeerRequest) String() string { + return protoimpl.X.MessageStringOf(x) } -var file_route_proto_msgTypes = make([]protoimpl.MessageInfo, 8) -var file_route_proto_goTypes = []interface{}{ - (*CreateCDCFlowRequest)(nil), // 0: peerdb_route.CreateCDCFlowRequest - (*CreateCDCFlowResponse)(nil), // 1: peerdb_route.CreateCDCFlowResponse - (*CreateQRepFlowRequest)(nil), // 2: peerdb_route.CreateQRepFlowRequest - (*CreateQRepFlowResponse)(nil), // 3: peerdb_route.CreateQRepFlowResponse - (*ShutdownRequest)(nil), // 4: peerdb_route.ShutdownRequest - (*ShutdownResponse)(nil), // 5: peerdb_route.ShutdownResponse - (*ListPeersRequest)(nil), // 6: peerdb_route.ListPeersRequest - (*ListPeersResponse)(nil), // 7: peerdb_route.ListPeersResponse - (*FlowConnectionConfigs)(nil), // 8: peerdb_flow.FlowConnectionConfigs - (*QRepConfig)(nil), // 9: peerdb_flow.QRepConfig - (*Peer)(nil), // 10: peerdb_peers.Peer +func (*DropPeerRequest) ProtoMessage() {} + +func (x *DropPeerRequest) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + 
ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var file_route_proto_depIdxs = []int32{ - 8, // 0: peerdb_route.CreateCDCFlowRequest.connection_configs:type_name -> peerdb_flow.FlowConnectionConfigs - 9, // 1: peerdb_route.CreateQRepFlowRequest.qrep_config:type_name -> peerdb_flow.QRepConfig - 10, // 2: peerdb_route.ShutdownRequest.source_peer:type_name -> peerdb_peers.Peer - 10, // 3: peerdb_route.ShutdownRequest.destination_peer:type_name -> peerdb_peers.Peer - 10, // 4: peerdb_route.ListPeersResponse.peers:type_name -> peerdb_peers.Peer - 6, // 5: peerdb_route.FlowService.ListPeers:input_type -> peerdb_route.ListPeersRequest - 0, // 6: peerdb_route.FlowService.CreateCDCFlow:input_type -> peerdb_route.CreateCDCFlowRequest - 2, // 7: peerdb_route.FlowService.CreateQRepFlow:input_type -> peerdb_route.CreateQRepFlowRequest - 4, // 8: peerdb_route.FlowService.ShutdownFlow:input_type -> peerdb_route.ShutdownRequest - 7, // 9: peerdb_route.FlowService.ListPeers:output_type -> peerdb_route.ListPeersResponse - 1, // 10: peerdb_route.FlowService.CreateCDCFlow:output_type -> peerdb_route.CreateCDCFlowResponse - 3, // 11: peerdb_route.FlowService.CreateQRepFlow:output_type -> peerdb_route.CreateQRepFlowResponse - 5, // 12: peerdb_route.FlowService.ShutdownFlow:output_type -> peerdb_route.ShutdownResponse - 9, // [9:13] is the sub-list for method output_type - 5, // [5:9] is the sub-list for method input_type - 5, // [5:5] is the sub-list for extension type_name - 5, // [5:5] is the sub-list for extension extendee - 0, // [0:5] is the sub-list for field type_name + +// Deprecated: Use DropPeerRequest.ProtoReflect.Descriptor instead. 
+func (*DropPeerRequest) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{8} } -func init() { file_route_proto_init() } -func file_route_proto_init() { - if File_route_proto != nil { - return +func (x *DropPeerRequest) GetPeerName() string { + if x != nil { + return x.PeerName } - file_peers_proto_init() - file_flow_proto_init() - if !protoimpl.UnsafeEnabled { - file_route_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateCDCFlowRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } + return "" +} + +type DropPeerResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Ok bool `protobuf:"varint,1,opt,name=ok,proto3" json:"ok,omitempty"` + ErrorMessage string `protobuf:"bytes,2,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` +} + +func (x *DropPeerResponse) Reset() { + *x = DropPeerResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DropPeerResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DropPeerResponse) ProtoMessage() {} + +func (x *DropPeerResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) } - file_route_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateCDCFlowResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DropPeerResponse.ProtoReflect.Descriptor instead. 
+func (*DropPeerResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{9} +} + +func (x *DropPeerResponse) GetOk() bool { + if x != nil { + return x.Ok + } + return false +} + +func (x *DropPeerResponse) GetErrorMessage() string { + if x != nil { + return x.ErrorMessage + } + return "" +} + +type ValidatePeerResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Status ValidatePeerStatus `protobuf:"varint,1,opt,name=status,proto3,enum=peerdb_route.ValidatePeerStatus" json:"status,omitempty"` + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` +} + +func (x *ValidatePeerResponse) Reset() { + *x = ValidatePeerResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ValidatePeerResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ValidatePeerResponse) ProtoMessage() {} + +func (x *ValidatePeerResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) } - file_route_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateQRepFlowRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ValidatePeerResponse.ProtoReflect.Descriptor instead. 
+func (*ValidatePeerResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{10} +} + +func (x *ValidatePeerResponse) GetStatus() ValidatePeerStatus { + if x != nil { + return x.Status + } + return ValidatePeerStatus_CREATION_UNKNOWN +} + +func (x *ValidatePeerResponse) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +type CreatePeerResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Status CreatePeerStatus `protobuf:"varint,1,opt,name=status,proto3,enum=peerdb_route.CreatePeerStatus" json:"status,omitempty"` + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` +} + +func (x *CreatePeerResponse) Reset() { + *x = CreatePeerResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreatePeerResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreatePeerResponse) ProtoMessage() {} + +func (x *CreatePeerResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) } - file_route_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateQRepFlowResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreatePeerResponse.ProtoReflect.Descriptor instead. 
+func (*CreatePeerResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{11} +} + +func (x *CreatePeerResponse) GetStatus() CreatePeerStatus { + if x != nil { + return x.Status + } + return CreatePeerStatus_VALIDATION_UNKNOWN +} + +func (x *CreatePeerResponse) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +type MirrorStatusRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + FlowJobName string `protobuf:"bytes,1,opt,name=flow_job_name,json=flowJobName,proto3" json:"flow_job_name,omitempty"` +} + +func (x *MirrorStatusRequest) Reset() { + *x = MirrorStatusRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MirrorStatusRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MirrorStatusRequest) ProtoMessage() {} + +func (x *MirrorStatusRequest) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MirrorStatusRequest.ProtoReflect.Descriptor instead. 
+func (*MirrorStatusRequest) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{12} +} + +func (x *MirrorStatusRequest) GetFlowJobName() string { + if x != nil { + return x.FlowJobName + } + return "" +} + +type PartitionStatus struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + PartitionId string `protobuf:"bytes,1,opt,name=partition_id,json=partitionId,proto3" json:"partition_id,omitempty"` + StartTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + EndTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + NumRows int32 `protobuf:"varint,4,opt,name=num_rows,json=numRows,proto3" json:"num_rows,omitempty"` +} + +func (x *PartitionStatus) Reset() { + *x = PartitionStatus{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PartitionStatus) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PartitionStatus) ProtoMessage() {} + +func (x *PartitionStatus) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PartitionStatus.ProtoReflect.Descriptor instead. 
+func (*PartitionStatus) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{13} +} + +func (x *PartitionStatus) GetPartitionId() string { + if x != nil { + return x.PartitionId + } + return "" +} + +func (x *PartitionStatus) GetStartTime() *timestamppb.Timestamp { + if x != nil { + return x.StartTime + } + return nil +} + +func (x *PartitionStatus) GetEndTime() *timestamppb.Timestamp { + if x != nil { + return x.EndTime + } + return nil +} + +func (x *PartitionStatus) GetNumRows() int32 { + if x != nil { + return x.NumRows + } + return 0 +} + +type QRepMirrorStatus struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Config *QRepConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + Partitions []*PartitionStatus `protobuf:"bytes,2,rep,name=partitions,proto3" json:"partitions,omitempty"` +} + +func (x *QRepMirrorStatus) Reset() { + *x = QRepMirrorStatus{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QRepMirrorStatus) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QRepMirrorStatus) ProtoMessage() {} + +func (x *QRepMirrorStatus) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QRepMirrorStatus.ProtoReflect.Descriptor instead. 
+func (*QRepMirrorStatus) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{14} +} + +func (x *QRepMirrorStatus) GetConfig() *QRepConfig { + if x != nil { + return x.Config + } + return nil +} + +func (x *QRepMirrorStatus) GetPartitions() []*PartitionStatus { + if x != nil { + return x.Partitions + } + return nil +} + +type CDCSyncStatus struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + StartLsn int64 `protobuf:"varint,1,opt,name=start_lsn,json=startLsn,proto3" json:"start_lsn,omitempty"` + EndLsn int64 `protobuf:"varint,2,opt,name=end_lsn,json=endLsn,proto3" json:"end_lsn,omitempty"` + NumRows int32 `protobuf:"varint,3,opt,name=num_rows,json=numRows,proto3" json:"num_rows,omitempty"` + StartTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + EndTime *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` +} + +func (x *CDCSyncStatus) Reset() { + *x = CDCSyncStatus{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CDCSyncStatus) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CDCSyncStatus) ProtoMessage() {} + +func (x *CDCSyncStatus) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CDCSyncStatus.ProtoReflect.Descriptor instead. 
+func (*CDCSyncStatus) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{15} +} + +func (x *CDCSyncStatus) GetStartLsn() int64 { + if x != nil { + return x.StartLsn + } + return 0 +} + +func (x *CDCSyncStatus) GetEndLsn() int64 { + if x != nil { + return x.EndLsn + } + return 0 +} + +func (x *CDCSyncStatus) GetNumRows() int32 { + if x != nil { + return x.NumRows + } + return 0 +} + +func (x *CDCSyncStatus) GetStartTime() *timestamppb.Timestamp { + if x != nil { + return x.StartTime + } + return nil +} + +func (x *CDCSyncStatus) GetEndTime() *timestamppb.Timestamp { + if x != nil { + return x.EndTime + } + return nil +} + +type PeerSchemasResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Schemas []string `protobuf:"bytes,1,rep,name=schemas,proto3" json:"schemas,omitempty"` +} + +func (x *PeerSchemasResponse) Reset() { + *x = PeerSchemasResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PeerSchemasResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PeerSchemasResponse) ProtoMessage() {} + +func (x *PeerSchemasResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PeerSchemasResponse.ProtoReflect.Descriptor instead. 
+func (*PeerSchemasResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{16} +} + +func (x *PeerSchemasResponse) GetSchemas() []string { + if x != nil { + return x.Schemas + } + return nil +} + +type SchemaTablesRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + PeerName string `protobuf:"bytes,1,opt,name=peer_name,json=peerName,proto3" json:"peer_name,omitempty"` + SchemaName string `protobuf:"bytes,2,opt,name=schema_name,json=schemaName,proto3" json:"schema_name,omitempty"` +} + +func (x *SchemaTablesRequest) Reset() { + *x = SchemaTablesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SchemaTablesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SchemaTablesRequest) ProtoMessage() {} + +func (x *SchemaTablesRequest) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SchemaTablesRequest.ProtoReflect.Descriptor instead. 
+func (*SchemaTablesRequest) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{17} +} + +func (x *SchemaTablesRequest) GetPeerName() string { + if x != nil { + return x.PeerName + } + return "" +} + +func (x *SchemaTablesRequest) GetSchemaName() string { + if x != nil { + return x.SchemaName + } + return "" +} + +type SchemaTablesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tables []string `protobuf:"bytes,1,rep,name=tables,proto3" json:"tables,omitempty"` +} + +func (x *SchemaTablesResponse) Reset() { + *x = SchemaTablesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SchemaTablesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SchemaTablesResponse) ProtoMessage() {} + +func (x *SchemaTablesResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SchemaTablesResponse.ProtoReflect.Descriptor instead. 
+func (*SchemaTablesResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{18} +} + +func (x *SchemaTablesResponse) GetTables() []string { + if x != nil { + return x.Tables + } + return nil +} + +type TableColumnsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + PeerName string `protobuf:"bytes,1,opt,name=peer_name,json=peerName,proto3" json:"peer_name,omitempty"` + SchemaName string `protobuf:"bytes,2,opt,name=schema_name,json=schemaName,proto3" json:"schema_name,omitempty"` + TableName string `protobuf:"bytes,3,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` +} + +func (x *TableColumnsRequest) Reset() { + *x = TableColumnsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TableColumnsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TableColumnsRequest) ProtoMessage() {} + +func (x *TableColumnsRequest) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TableColumnsRequest.ProtoReflect.Descriptor instead. 
+func (*TableColumnsRequest) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{19} +} + +func (x *TableColumnsRequest) GetPeerName() string { + if x != nil { + return x.PeerName + } + return "" +} + +func (x *TableColumnsRequest) GetSchemaName() string { + if x != nil { + return x.SchemaName + } + return "" +} + +func (x *TableColumnsRequest) GetTableName() string { + if x != nil { + return x.TableName + } + return "" +} + +type TableColumnsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Columns []string `protobuf:"bytes,1,rep,name=columns,proto3" json:"columns,omitempty"` +} + +func (x *TableColumnsResponse) Reset() { + *x = TableColumnsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TableColumnsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TableColumnsResponse) ProtoMessage() {} + +func (x *TableColumnsResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TableColumnsResponse.ProtoReflect.Descriptor instead. 
+func (*TableColumnsResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{20} +} + +func (x *TableColumnsResponse) GetColumns() []string { + if x != nil { + return x.Columns + } + return nil +} + +type PostgresPeerActivityInfoRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + PeerName string `protobuf:"bytes,1,opt,name=peer_name,json=peerName,proto3" json:"peer_name,omitempty"` +} + +func (x *PostgresPeerActivityInfoRequest) Reset() { + *x = PostgresPeerActivityInfoRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PostgresPeerActivityInfoRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PostgresPeerActivityInfoRequest) ProtoMessage() {} + +func (x *PostgresPeerActivityInfoRequest) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PostgresPeerActivityInfoRequest.ProtoReflect.Descriptor instead. 
+func (*PostgresPeerActivityInfoRequest) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{21} +} + +func (x *PostgresPeerActivityInfoRequest) GetPeerName() string { + if x != nil { + return x.PeerName + } + return "" +} + +type SlotInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + SlotName string `protobuf:"bytes,1,opt,name=slot_name,json=slotName,proto3" json:"slot_name,omitempty"` + RedoLSN string `protobuf:"bytes,2,opt,name=redo_lSN,json=redoLSN,proto3" json:"redo_lSN,omitempty"` + RestartLSN string `protobuf:"bytes,3,opt,name=restart_lSN,json=restartLSN,proto3" json:"restart_lSN,omitempty"` + Active bool `protobuf:"varint,4,opt,name=active,proto3" json:"active,omitempty"` + LagInMb float32 `protobuf:"fixed32,5,opt,name=lag_in_mb,json=lagInMb,proto3" json:"lag_in_mb,omitempty"` +} + +func (x *SlotInfo) Reset() { + *x = SlotInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SlotInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SlotInfo) ProtoMessage() {} + +func (x *SlotInfo) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SlotInfo.ProtoReflect.Descriptor instead. 
+func (*SlotInfo) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{22} +} + +func (x *SlotInfo) GetSlotName() string { + if x != nil { + return x.SlotName + } + return "" +} + +func (x *SlotInfo) GetRedoLSN() string { + if x != nil { + return x.RedoLSN + } + return "" +} + +func (x *SlotInfo) GetRestartLSN() string { + if x != nil { + return x.RestartLSN + } + return "" +} + +func (x *SlotInfo) GetActive() bool { + if x != nil { + return x.Active + } + return false +} + +func (x *SlotInfo) GetLagInMb() float32 { + if x != nil { + return x.LagInMb + } + return 0 +} + +type StatInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Pid int64 `protobuf:"varint,1,opt,name=pid,proto3" json:"pid,omitempty"` + WaitEvent string `protobuf:"bytes,2,opt,name=wait_event,json=waitEvent,proto3" json:"wait_event,omitempty"` + WaitEventType string `protobuf:"bytes,3,opt,name=wait_event_type,json=waitEventType,proto3" json:"wait_event_type,omitempty"` + QueryStart string `protobuf:"bytes,4,opt,name=query_start,json=queryStart,proto3" json:"query_start,omitempty"` + Query string `protobuf:"bytes,5,opt,name=query,proto3" json:"query,omitempty"` + Duration float32 `protobuf:"fixed32,6,opt,name=duration,proto3" json:"duration,omitempty"` +} + +func (x *StatInfo) Reset() { + *x = StatInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *StatInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*StatInfo) ProtoMessage() {} + +func (x *StatInfo) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
StatInfo.ProtoReflect.Descriptor instead. +func (*StatInfo) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{23} +} + +func (x *StatInfo) GetPid() int64 { + if x != nil { + return x.Pid + } + return 0 +} + +func (x *StatInfo) GetWaitEvent() string { + if x != nil { + return x.WaitEvent + } + return "" +} + +func (x *StatInfo) GetWaitEventType() string { + if x != nil { + return x.WaitEventType + } + return "" +} + +func (x *StatInfo) GetQueryStart() string { + if x != nil { + return x.QueryStart + } + return "" +} + +func (x *StatInfo) GetQuery() string { + if x != nil { + return x.Query + } + return "" +} + +func (x *StatInfo) GetDuration() float32 { + if x != nil { + return x.Duration + } + return 0 +} + +type PeerSlotResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + SlotData []*SlotInfo `protobuf:"bytes,1,rep,name=slot_data,json=slotData,proto3" json:"slot_data,omitempty"` +} + +func (x *PeerSlotResponse) Reset() { + *x = PeerSlotResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PeerSlotResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PeerSlotResponse) ProtoMessage() {} + +func (x *PeerSlotResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PeerSlotResponse.ProtoReflect.Descriptor instead. 
+func (*PeerSlotResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{24} +} + +func (x *PeerSlotResponse) GetSlotData() []*SlotInfo { + if x != nil { + return x.SlotData + } + return nil +} + +type PeerStatResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + StatData []*StatInfo `protobuf:"bytes,1,rep,name=stat_data,json=statData,proto3" json:"stat_data,omitempty"` +} + +func (x *PeerStatResponse) Reset() { + *x = PeerStatResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PeerStatResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PeerStatResponse) ProtoMessage() {} + +func (x *PeerStatResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PeerStatResponse.ProtoReflect.Descriptor instead. 
+func (*PeerStatResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{25} +} + +func (x *PeerStatResponse) GetStatData() []*StatInfo { + if x != nil { + return x.StatData + } + return nil +} + +type SnapshotStatus struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Clones []*QRepMirrorStatus `protobuf:"bytes,1,rep,name=clones,proto3" json:"clones,omitempty"` +} + +func (x *SnapshotStatus) Reset() { + *x = SnapshotStatus{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SnapshotStatus) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SnapshotStatus) ProtoMessage() {} + +func (x *SnapshotStatus) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SnapshotStatus.ProtoReflect.Descriptor instead. 
+func (*SnapshotStatus) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{26} +} + +func (x *SnapshotStatus) GetClones() []*QRepMirrorStatus { + if x != nil { + return x.Clones + } + return nil +} + +type CDCMirrorStatus struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Config *FlowConnectionConfigs `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + SnapshotStatus *SnapshotStatus `protobuf:"bytes,2,opt,name=snapshot_status,json=snapshotStatus,proto3" json:"snapshot_status,omitempty"` + CdcSyncs []*CDCSyncStatus `protobuf:"bytes,3,rep,name=cdc_syncs,json=cdcSyncs,proto3" json:"cdc_syncs,omitempty"` +} + +func (x *CDCMirrorStatus) Reset() { + *x = CDCMirrorStatus{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CDCMirrorStatus) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CDCMirrorStatus) ProtoMessage() {} + +func (x *CDCMirrorStatus) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[27] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CDCMirrorStatus.ProtoReflect.Descriptor instead. 
+func (*CDCMirrorStatus) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{27} +} + +func (x *CDCMirrorStatus) GetConfig() *FlowConnectionConfigs { + if x != nil { + return x.Config + } + return nil +} + +func (x *CDCMirrorStatus) GetSnapshotStatus() *SnapshotStatus { + if x != nil { + return x.SnapshotStatus + } + return nil +} + +func (x *CDCMirrorStatus) GetCdcSyncs() []*CDCSyncStatus { + if x != nil { + return x.CdcSyncs + } + return nil +} + +type MirrorStatusResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + FlowJobName string `protobuf:"bytes,1,opt,name=flow_job_name,json=flowJobName,proto3" json:"flow_job_name,omitempty"` + // Types that are assignable to Status: + // + // *MirrorStatusResponse_QrepStatus + // *MirrorStatusResponse_CdcStatus + Status isMirrorStatusResponse_Status `protobuf_oneof:"status"` + ErrorMessage string `protobuf:"bytes,4,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` +} + +func (x *MirrorStatusResponse) Reset() { + *x = MirrorStatusResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_route_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MirrorStatusResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MirrorStatusResponse) ProtoMessage() {} + +func (x *MirrorStatusResponse) ProtoReflect() protoreflect.Message { + mi := &file_route_proto_msgTypes[28] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MirrorStatusResponse.ProtoReflect.Descriptor instead. 
+func (*MirrorStatusResponse) Descriptor() ([]byte, []int) { + return file_route_proto_rawDescGZIP(), []int{28} +} + +func (x *MirrorStatusResponse) GetFlowJobName() string { + if x != nil { + return x.FlowJobName + } + return "" +} + +func (m *MirrorStatusResponse) GetStatus() isMirrorStatusResponse_Status { + if m != nil { + return m.Status + } + return nil +} + +func (x *MirrorStatusResponse) GetQrepStatus() *QRepMirrorStatus { + if x, ok := x.GetStatus().(*MirrorStatusResponse_QrepStatus); ok { + return x.QrepStatus + } + return nil +} + +func (x *MirrorStatusResponse) GetCdcStatus() *CDCMirrorStatus { + if x, ok := x.GetStatus().(*MirrorStatusResponse_CdcStatus); ok { + return x.CdcStatus + } + return nil +} + +func (x *MirrorStatusResponse) GetErrorMessage() string { + if x != nil { + return x.ErrorMessage + } + return "" +} + +type isMirrorStatusResponse_Status interface { + isMirrorStatusResponse_Status() +} + +type MirrorStatusResponse_QrepStatus struct { + QrepStatus *QRepMirrorStatus `protobuf:"bytes,2,opt,name=qrep_status,json=qrepStatus,proto3,oneof"` +} + +type MirrorStatusResponse_CdcStatus struct { + CdcStatus *CDCMirrorStatus `protobuf:"bytes,3,opt,name=cdc_status,json=cdcStatus,proto3,oneof"` +} + +func (*MirrorStatusResponse_QrepStatus) isMirrorStatusResponse_Status() {} + +func (*MirrorStatusResponse_CdcStatus) isMirrorStatusResponse_Status() {} + +var File_route_proto protoreflect.FileDescriptor + +var file_route_proto_rawDesc = []byte{ + 0x0a, 0x0b, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x70, + 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0b, 0x70, 0x65, 0x65, 0x72, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0a, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x22, 0x9b, 0x01, 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, + 0x43, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x51, 0x0a, 0x12, + 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, + 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x11, 0x63, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, + 0x30, 0x0a, 0x14, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, + 0x67, 0x5f, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x12, 0x63, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x22, 0x36, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, 0x43, 0x46, 0x6c, + 0x6f, 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x77, 0x6f, + 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x77, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x22, 0x83, 0x01, 0x0a, 0x15, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x0b, 0x71, 0x72, 0x65, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, + 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x51, 0x52, 0x65, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x52, 0x0a, 0x71, 0x72, 0x65, 0x70, 0x43, 0x6f, 0x6e, 
0x66, 0x69, 0x67, 0x12, 0x30, 0x0a, + 0x14, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x5f, + 0x65, 0x6e, 0x74, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x12, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x22, + 0x37, 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, + 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x77, 0x6f, 0x72, + 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x77, + 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x22, 0xca, 0x01, 0x0a, 0x0f, 0x53, 0x68, 0x75, + 0x74, 0x64, 0x6f, 0x77, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, + 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x12, 0x22, 0x0a, + 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, + 0x65, 0x12, 0x33, 0x0a, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x65, 0x72, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, + 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x0a, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x50, 0x65, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, + 0x50, 0x65, 0x65, 0x72, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x50, 0x65, 0x65, 0x72, 0x22, 0x47, 0x0a, 0x10, 0x53, 0x68, 0x75, 0x74, 0x64, 0x6f, 0x77, + 
0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x6f, 0x6b, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x02, 0x6f, 0x6b, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3d, + 0x0a, 0x13, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x04, 0x70, 0x65, 0x65, 0x72, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, + 0x72, 0x73, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x52, 0x04, 0x70, 0x65, 0x65, 0x72, 0x22, 0x3b, 0x0a, + 0x11, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x26, 0x0a, 0x04, 0x70, 0x65, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x12, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2e, + 0x50, 0x65, 0x65, 0x72, 0x52, 0x04, 0x70, 0x65, 0x65, 0x72, 0x22, 0x2e, 0x0a, 0x0f, 0x44, 0x72, + 0x6f, 0x70, 0x50, 0x65, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, + 0x09, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x70, 0x65, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x47, 0x0a, 0x10, 0x44, 0x72, + 0x6f, 0x70, 0x50, 0x65, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, + 0x0a, 0x02, 0x6f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x02, 0x6f, 0x6b, 0x12, 0x23, + 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x22, 0x6a, 0x0a, 0x14, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x65, 0x65, 0x72, 0x52, 0x65, 0x73, 
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x38, 0x0a, 0x06, 0x73, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x70, 0x65, + 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, + 0x66, 0x0a, 0x12, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x36, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, + 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, 0x53, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x18, 0x0a, + 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x39, 0x0a, 0x13, 0x4d, 0x69, 0x72, 0x72, 0x6f, + 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x22, + 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, + 0x6d, 0x65, 0x22, 0xc1, 0x01, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x61, + 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 
0x0b, 0x32, 0x1a, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6e, + 0x75, 0x6d, 0x5f, 0x72, 0x6f, 0x77, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x6e, + 0x75, 0x6d, 0x52, 0x6f, 0x77, 0x73, 0x22, 0x82, 0x01, 0x0a, 0x10, 0x51, 0x52, 0x65, 0x70, 0x4d, + 0x69, 0x72, 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x2f, 0x0a, 0x06, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x65, + 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x51, 0x52, 0x65, 0x70, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x3d, 0x0a, 0x0a, + 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x1d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, + 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, + 0x0a, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xd2, 0x01, 0x0a, 0x0d, + 0x43, 0x44, 0x43, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1b, 0x0a, + 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x73, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4c, 0x73, 0x6e, 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, + 0x64, 0x5f, 0x6c, 0x73, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x65, 0x6e, 0x64, + 0x4c, 0x73, 
0x6e, 0x12, 0x19, 0x0a, 0x08, 0x6e, 0x75, 0x6d, 0x5f, 0x72, 0x6f, 0x77, 0x73, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x6e, 0x75, 0x6d, 0x52, 0x6f, 0x77, 0x73, 0x12, 0x39, + 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, + 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, + 0x22, 0x2f, 0x0a, 0x13, 0x50, 0x65, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x73, 0x22, 0x53, 0x0a, 0x13, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x61, 0x62, 0x6c, 0x65, + 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x65, 0x65, 0x72, + 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x65, 0x65, + 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x2e, 0x0a, 0x14, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, + 0x0a, 0x06, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, + 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x22, 0x72, 
0x0a, 0x13, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x43, + 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, + 0x09, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x70, 0x65, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x74, + 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x30, 0x0a, 0x14, 0x54, 0x61, + 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x22, 0x3e, 0x0a, 0x1f, + 0x50, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x50, 0x65, 0x65, 0x72, 0x41, 0x63, 0x74, 0x69, + 0x76, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x1b, 0x0a, 0x09, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x70, 0x65, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x97, 0x01, 0x0a, + 0x08, 0x53, 0x6c, 0x6f, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x6c, 0x6f, + 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x6c, + 0x6f, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x72, 0x65, 0x64, 0x6f, 0x5f, 0x6c, + 0x53, 0x4e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x72, 0x65, 0x64, 0x6f, 0x4c, 0x53, + 0x4e, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6c, 0x53, 0x4e, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x73, 0x74, 0x61, 0x72, 
0x74, 0x4c, + 0x53, 0x4e, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x12, 0x1a, 0x0a, 0x09, 0x6c, 0x61, + 0x67, 0x5f, 0x69, 0x6e, 0x5f, 0x6d, 0x62, 0x18, 0x05, 0x20, 0x01, 0x28, 0x02, 0x52, 0x07, 0x6c, + 0x61, 0x67, 0x49, 0x6e, 0x4d, 0x62, 0x22, 0xb6, 0x01, 0x0a, 0x08, 0x53, 0x74, 0x61, 0x74, 0x49, + 0x6e, 0x66, 0x6f, 0x12, 0x10, 0x0a, 0x03, 0x70, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x03, 0x70, 0x69, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x77, 0x61, 0x69, 0x74, 0x5f, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x77, 0x61, 0x69, 0x74, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x77, 0x61, 0x69, 0x74, 0x5f, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, + 0x61, 0x69, 0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1f, 0x0a, 0x0b, + 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x14, 0x0a, + 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, + 0x65, 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x02, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, + 0x47, 0x0a, 0x10, 0x50, 0x65, 0x65, 0x72, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, + 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x53, 0x6c, 0x6f, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x08, + 0x73, 0x6c, 0x6f, 0x74, 0x44, 0x61, 0x74, 0x61, 0x22, 0x47, 0x0a, 0x10, 0x50, 0x65, 0x65, 0x72, + 0x53, 0x74, 0x61, 0x74, 
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x09, + 0x73, 0x74, 0x61, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x16, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x53, + 0x74, 0x61, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x08, 0x73, 0x74, 0x61, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x22, 0x48, 0x0a, 0x0e, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x12, 0x36, 0x0a, 0x06, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, + 0x74, 0x65, 0x2e, 0x51, 0x52, 0x65, 0x70, 0x4d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x52, 0x06, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x73, 0x22, 0xce, 0x01, 0x0a, 0x0f, + 0x43, 0x44, 0x43, 0x4d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, + 0x3a, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x46, 0x6c, + 0x6f, 0x77, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x73, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x45, 0x0a, 0x0f, 0x73, + 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, + 0x75, 0x74, 0x65, 0x2e, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x0e, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x64, 0x63, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x73, 0x18, + 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, + 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x43, 0x44, 0x43, 0x53, 0x79, 
0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x08, 0x63, 0x64, 0x63, 0x53, 0x79, 0x6e, 0x63, 0x73, 0x22, 0xec, 0x01, 0x0a, + 0x14, 0x4d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x22, 0x0a, 0x0d, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6a, 0x6f, + 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x6c, + 0x6f, 0x77, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x41, 0x0a, 0x0b, 0x71, 0x72, 0x65, + 0x70, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, + 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x51, 0x52, + 0x65, 0x70, 0x4d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48, 0x00, + 0x52, 0x0a, 0x71, 0x72, 0x65, 0x70, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x3e, 0x0a, 0x0a, + 0x63, 0x64, 0x63, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, + 0x43, 0x44, 0x43, 0x4d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48, + 0x00, 0x52, 0x09, 0x63, 0x64, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x23, 0x0a, 0x0d, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x42, 0x08, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2a, 0x42, 0x0a, 0x12, 0x56, + 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x12, 0x14, 0x0a, 0x10, 0x43, 0x52, 0x45, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x55, 0x4e, + 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x56, 0x41, 0x4c, 0x49, 0x44, + 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x02, 0x2a, + 
0x43, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x41, 0x54, 0x49, 0x4f, + 0x4e, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x43, + 0x52, 0x45, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, + 0x45, 0x44, 0x10, 0x02, 0x32, 0xa2, 0x0b, 0x0a, 0x0b, 0x46, 0x6c, 0x6f, 0x77, 0x53, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x12, 0x74, 0x0a, 0x0c, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, + 0x50, 0x65, 0x65, 0x72, 0x12, 0x21, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, + 0x75, 0x74, 0x65, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, + 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x65, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1d, 0x82, 0xd3, 0xe4, + 0x93, 0x02, 0x17, 0x3a, 0x01, 0x2a, 0x22, 0x12, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x65, 0x65, 0x72, + 0x73, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x12, 0x6c, 0x0a, 0x0a, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x50, 0x65, 0x65, 0x72, 0x12, 0x1f, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, + 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x65, + 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, + 0x65, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1b, 0x82, 0xd3, 0xe4, + 0x93, 0x02, 0x15, 0x3a, 0x01, 0x2a, 0x22, 0x10, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x65, 0x65, 0x72, + 0x73, 0x2f, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0x64, 0x0a, 0x08, 0x44, 0x72, 0x6f, 0x70, + 0x50, 0x65, 0x65, 0x72, 0x12, 0x1d, 
0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, + 0x75, 0x74, 0x65, 0x2e, 0x44, 0x72, 0x6f, 0x70, 0x50, 0x65, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, + 0x74, 0x65, 0x2e, 0x44, 0x72, 0x6f, 0x70, 0x50, 0x65, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x19, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x13, 0x3a, 0x01, 0x2a, 0x22, 0x0e, + 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2f, 0x64, 0x72, 0x6f, 0x70, 0x12, 0x79, + 0x0a, 0x0d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, 0x43, 0x46, 0x6c, 0x6f, 0x77, 0x12, + 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, 0x43, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, + 0x74, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x44, 0x43, 0x46, 0x6c, 0x6f, 0x77, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, + 0x3a, 0x01, 0x2a, 0x22, 0x14, 0x2f, 0x76, 0x31, 0x2f, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x2f, 0x63, + 0x64, 0x63, 0x2f, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0x7d, 0x0a, 0x0e, 0x43, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x12, 0x23, 0x2e, 0x70, 0x65, + 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x24, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x51, 0x52, 0x65, 0x70, 0x46, 0x6c, 0x6f, 0x77, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x20, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1a, 0x3a, 0x01, + 0x2a, 0x22, 0x15, 0x2f, 0x76, 0x31, 0x2f, 0x66, 0x6c, 0x6f, 0x77, 0x73, 
0x2f, 0x71, 0x72, 0x65, + 0x70, 0x2f, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0x79, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x2d, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, + 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x50, 0x65, + 0x65, 0x72, 0x41, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, + 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x19, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x13, + 0x12, 0x11, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2f, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x73, 0x12, 0x74, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, + 0x49, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x21, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, + 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x61, + 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x65, + 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x18, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x12, 0x12, 0x10, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x65, 0x65, + 0x72, 0x73, 0x2f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x6e, 0x0a, 0x0a, 0x47, 0x65, 0x74, + 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x21, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, + 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x6f, 0x6c, 0x75, + 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x43, + 0x6f, 0x6c, 
0x75, 0x6d, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x19, + 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x13, 0x12, 0x11, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x65, 0x65, 0x72, + 0x73, 0x2f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x81, 0x01, 0x0a, 0x0b, 0x47, 0x65, + 0x74, 0x53, 0x6c, 0x6f, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x2d, 0x2e, 0x70, 0x65, 0x65, 0x72, + 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, + 0x73, 0x50, 0x65, 0x65, 0x72, 0x41, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x66, + 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, + 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x53, 0x6c, 0x6f, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x23, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1d, + 0x12, 0x1b, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2f, 0x73, 0x6c, 0x6f, 0x74, + 0x73, 0x2f, 0x7b, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x81, 0x01, + 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x2d, 0x2e, + 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x50, 0x6f, 0x73, + 0x74, 0x67, 0x72, 0x65, 0x73, 0x50, 0x65, 0x65, 0x72, 0x41, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, + 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, + 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x50, 0x65, 0x65, 0x72, + 0x53, 0x74, 0x61, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x23, 0x82, 0xd3, + 0xe4, 0x93, 0x02, 0x1d, 0x12, 0x1b, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x65, 0x65, 0x72, 0x73, 0x2f, + 0x73, 0x74, 0x61, 0x74, 0x73, 0x2f, 0x7b, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, + 0x7d, 0x12, 0x6a, 0x0a, 0x0c, 0x53, 0x68, 0x75, 0x74, 0x64, 0x6f, 0x77, 0x6e, 0x46, 0x6c, 0x6f, + 0x77, 0x12, 0x1d, 0x2e, 0x70, 0x65, 0x65, 0x72, 
0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, + 0x2e, 0x53, 0x68, 0x75, 0x74, 0x64, 0x6f, 0x77, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, + 0x53, 0x68, 0x75, 0x74, 0x64, 0x6f, 0x77, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x1b, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x15, 0x3a, 0x01, 0x2a, 0x22, 0x10, 0x2f, 0x76, 0x31, + 0x2f, 0x6d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x64, 0x72, 0x6f, 0x70, 0x12, 0x7a, 0x0a, + 0x0c, 0x4d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x21, 0x2e, + 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, 0x4d, 0x69, 0x72, + 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x22, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x2e, + 0x4d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x23, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1d, 0x12, 0x1b, 0x2f, 0x76, + 0x31, 0x2f, 0x6d, 0x69, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, + 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x42, 0x7c, 0x0a, 0x10, 0x63, 0x6f, 0x6d, + 0x2e, 0x70, 0x65, 0x65, 0x72, 0x64, 0x62, 0x5f, 0x72, 0x6f, 0x75, 0x74, 0x65, 0x42, 0x0a, 0x52, + 0x6f, 0x75, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x10, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0xa2, 0x02, 0x03, + 0x50, 0x58, 0x58, 0xaa, 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x52, 0x6f, 0x75, 0x74, + 0x65, 0xca, 0x02, 0x0b, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x52, 0x6f, 0x75, 0x74, 0x65, 0xe2, + 0x02, 0x17, 0x50, 0x65, 0x65, 0x72, 0x64, 0x62, 0x52, 0x6f, 0x75, 0x74, 0x65, 0x5c, 0x47, 0x50, + 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0b, 0x50, 0x65, 
0x65, 0x72, + 0x64, 0x62, 0x52, 0x6f, 0x75, 0x74, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_route_proto_rawDescOnce sync.Once + file_route_proto_rawDescData = file_route_proto_rawDesc +) + +func file_route_proto_rawDescGZIP() []byte { + file_route_proto_rawDescOnce.Do(func() { + file_route_proto_rawDescData = protoimpl.X.CompressGZIP(file_route_proto_rawDescData) + }) + return file_route_proto_rawDescData +} + +var file_route_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_route_proto_msgTypes = make([]protoimpl.MessageInfo, 29) +var file_route_proto_goTypes = []interface{}{ + (ValidatePeerStatus)(0), // 0: peerdb_route.ValidatePeerStatus + (CreatePeerStatus)(0), // 1: peerdb_route.CreatePeerStatus + (*CreateCDCFlowRequest)(nil), // 2: peerdb_route.CreateCDCFlowRequest + (*CreateCDCFlowResponse)(nil), // 3: peerdb_route.CreateCDCFlowResponse + (*CreateQRepFlowRequest)(nil), // 4: peerdb_route.CreateQRepFlowRequest + (*CreateQRepFlowResponse)(nil), // 5: peerdb_route.CreateQRepFlowResponse + (*ShutdownRequest)(nil), // 6: peerdb_route.ShutdownRequest + (*ShutdownResponse)(nil), // 7: peerdb_route.ShutdownResponse + (*ValidatePeerRequest)(nil), // 8: peerdb_route.ValidatePeerRequest + (*CreatePeerRequest)(nil), // 9: peerdb_route.CreatePeerRequest + (*DropPeerRequest)(nil), // 10: peerdb_route.DropPeerRequest + (*DropPeerResponse)(nil), // 11: peerdb_route.DropPeerResponse + (*ValidatePeerResponse)(nil), // 12: peerdb_route.ValidatePeerResponse + (*CreatePeerResponse)(nil), // 13: peerdb_route.CreatePeerResponse + (*MirrorStatusRequest)(nil), // 14: peerdb_route.MirrorStatusRequest + (*PartitionStatus)(nil), // 15: peerdb_route.PartitionStatus + (*QRepMirrorStatus)(nil), // 16: peerdb_route.QRepMirrorStatus + (*CDCSyncStatus)(nil), // 17: peerdb_route.CDCSyncStatus + (*PeerSchemasResponse)(nil), // 18: peerdb_route.PeerSchemasResponse + (*SchemaTablesRequest)(nil), // 19: peerdb_route.SchemaTablesRequest + 
(*SchemaTablesResponse)(nil), // 20: peerdb_route.SchemaTablesResponse + (*TableColumnsRequest)(nil), // 21: peerdb_route.TableColumnsRequest + (*TableColumnsResponse)(nil), // 22: peerdb_route.TableColumnsResponse + (*PostgresPeerActivityInfoRequest)(nil), // 23: peerdb_route.PostgresPeerActivityInfoRequest + (*SlotInfo)(nil), // 24: peerdb_route.SlotInfo + (*StatInfo)(nil), // 25: peerdb_route.StatInfo + (*PeerSlotResponse)(nil), // 26: peerdb_route.PeerSlotResponse + (*PeerStatResponse)(nil), // 27: peerdb_route.PeerStatResponse + (*SnapshotStatus)(nil), // 28: peerdb_route.SnapshotStatus + (*CDCMirrorStatus)(nil), // 29: peerdb_route.CDCMirrorStatus + (*MirrorStatusResponse)(nil), // 30: peerdb_route.MirrorStatusResponse + (*FlowConnectionConfigs)(nil), // 31: peerdb_flow.FlowConnectionConfigs + (*QRepConfig)(nil), // 32: peerdb_flow.QRepConfig + (*Peer)(nil), // 33: peerdb_peers.Peer + (*timestamppb.Timestamp)(nil), // 34: google.protobuf.Timestamp +} +var file_route_proto_depIdxs = []int32{ + 31, // 0: peerdb_route.CreateCDCFlowRequest.connection_configs:type_name -> peerdb_flow.FlowConnectionConfigs + 32, // 1: peerdb_route.CreateQRepFlowRequest.qrep_config:type_name -> peerdb_flow.QRepConfig + 33, // 2: peerdb_route.ShutdownRequest.source_peer:type_name -> peerdb_peers.Peer + 33, // 3: peerdb_route.ShutdownRequest.destination_peer:type_name -> peerdb_peers.Peer + 33, // 4: peerdb_route.ValidatePeerRequest.peer:type_name -> peerdb_peers.Peer + 33, // 5: peerdb_route.CreatePeerRequest.peer:type_name -> peerdb_peers.Peer + 0, // 6: peerdb_route.ValidatePeerResponse.status:type_name -> peerdb_route.ValidatePeerStatus + 1, // 7: peerdb_route.CreatePeerResponse.status:type_name -> peerdb_route.CreatePeerStatus + 34, // 8: peerdb_route.PartitionStatus.start_time:type_name -> google.protobuf.Timestamp + 34, // 9: peerdb_route.PartitionStatus.end_time:type_name -> google.protobuf.Timestamp + 32, // 10: peerdb_route.QRepMirrorStatus.config:type_name -> 
peerdb_flow.QRepConfig + 15, // 11: peerdb_route.QRepMirrorStatus.partitions:type_name -> peerdb_route.PartitionStatus + 34, // 12: peerdb_route.CDCSyncStatus.start_time:type_name -> google.protobuf.Timestamp + 34, // 13: peerdb_route.CDCSyncStatus.end_time:type_name -> google.protobuf.Timestamp + 24, // 14: peerdb_route.PeerSlotResponse.slot_data:type_name -> peerdb_route.SlotInfo + 25, // 15: peerdb_route.PeerStatResponse.stat_data:type_name -> peerdb_route.StatInfo + 16, // 16: peerdb_route.SnapshotStatus.clones:type_name -> peerdb_route.QRepMirrorStatus + 31, // 17: peerdb_route.CDCMirrorStatus.config:type_name -> peerdb_flow.FlowConnectionConfigs + 28, // 18: peerdb_route.CDCMirrorStatus.snapshot_status:type_name -> peerdb_route.SnapshotStatus + 17, // 19: peerdb_route.CDCMirrorStatus.cdc_syncs:type_name -> peerdb_route.CDCSyncStatus + 16, // 20: peerdb_route.MirrorStatusResponse.qrep_status:type_name -> peerdb_route.QRepMirrorStatus + 29, // 21: peerdb_route.MirrorStatusResponse.cdc_status:type_name -> peerdb_route.CDCMirrorStatus + 8, // 22: peerdb_route.FlowService.ValidatePeer:input_type -> peerdb_route.ValidatePeerRequest + 9, // 23: peerdb_route.FlowService.CreatePeer:input_type -> peerdb_route.CreatePeerRequest + 10, // 24: peerdb_route.FlowService.DropPeer:input_type -> peerdb_route.DropPeerRequest + 2, // 25: peerdb_route.FlowService.CreateCDCFlow:input_type -> peerdb_route.CreateCDCFlowRequest + 4, // 26: peerdb_route.FlowService.CreateQRepFlow:input_type -> peerdb_route.CreateQRepFlowRequest + 23, // 27: peerdb_route.FlowService.GetSchemas:input_type -> peerdb_route.PostgresPeerActivityInfoRequest + 19, // 28: peerdb_route.FlowService.GetTablesInSchema:input_type -> peerdb_route.SchemaTablesRequest + 21, // 29: peerdb_route.FlowService.GetColumns:input_type -> peerdb_route.TableColumnsRequest + 23, // 30: peerdb_route.FlowService.GetSlotInfo:input_type -> peerdb_route.PostgresPeerActivityInfoRequest + 23, // 31: 
peerdb_route.FlowService.GetStatInfo:input_type -> peerdb_route.PostgresPeerActivityInfoRequest + 6, // 32: peerdb_route.FlowService.ShutdownFlow:input_type -> peerdb_route.ShutdownRequest + 14, // 33: peerdb_route.FlowService.MirrorStatus:input_type -> peerdb_route.MirrorStatusRequest + 12, // 34: peerdb_route.FlowService.ValidatePeer:output_type -> peerdb_route.ValidatePeerResponse + 13, // 35: peerdb_route.FlowService.CreatePeer:output_type -> peerdb_route.CreatePeerResponse + 11, // 36: peerdb_route.FlowService.DropPeer:output_type -> peerdb_route.DropPeerResponse + 3, // 37: peerdb_route.FlowService.CreateCDCFlow:output_type -> peerdb_route.CreateCDCFlowResponse + 5, // 38: peerdb_route.FlowService.CreateQRepFlow:output_type -> peerdb_route.CreateQRepFlowResponse + 18, // 39: peerdb_route.FlowService.GetSchemas:output_type -> peerdb_route.PeerSchemasResponse + 20, // 40: peerdb_route.FlowService.GetTablesInSchema:output_type -> peerdb_route.SchemaTablesResponse + 22, // 41: peerdb_route.FlowService.GetColumns:output_type -> peerdb_route.TableColumnsResponse + 26, // 42: peerdb_route.FlowService.GetSlotInfo:output_type -> peerdb_route.PeerSlotResponse + 27, // 43: peerdb_route.FlowService.GetStatInfo:output_type -> peerdb_route.PeerStatResponse + 7, // 44: peerdb_route.FlowService.ShutdownFlow:output_type -> peerdb_route.ShutdownResponse + 30, // 45: peerdb_route.FlowService.MirrorStatus:output_type -> peerdb_route.MirrorStatusResponse + 34, // [34:46] is the sub-list for method output_type + 22, // [22:34] is the sub-list for method input_type + 22, // [22:22] is the sub-list for extension type_name + 22, // [22:22] is the sub-list for extension extendee + 0, // [0:22] is the sub-list for field type_name +} + +func init() { file_route_proto_init() } +func file_route_proto_init() { + if File_route_proto != nil { + return + } + file_peers_proto_init() + file_flow_proto_init() + if !protoimpl.UnsafeEnabled { + file_route_proto_msgTypes[0].Exporter = func(v 
interface{}, i int) interface{} { + switch v := v.(*CreateCDCFlowRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateCDCFlowResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateQRepFlowRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateQRepFlowResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: return &v.unknownFields default: return nil @@ -631,7 +2284,7 @@ func file_route_proto_init() { } } file_route_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPeersRequest); i { + switch v := v.(*ValidatePeerRequest); i { case 0: return &v.state case 1: @@ -643,7 +2296,259 @@ func file_route_proto_init() { } } file_route_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPeersResponse); i { + switch v := v.(*CreatePeerRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DropPeerRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DropPeerResponse); i { + case 0: + return &v.state + case 1: + return 
&v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ValidatePeerResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreatePeerResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MirrorStatusRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PartitionStatus); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QRepMirrorStatus); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CDCSyncStatus); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PeerSchemasResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SchemaTablesRequest); i { + case 0: + 
return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SchemaTablesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TableColumnsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TableColumnsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PostgresPeerActivityInfoRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SlotInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StatInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PeerSlotResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*PeerStatResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SnapshotStatus); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CDCMirrorStatus); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_route_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MirrorStatusResponse); i { case 0: return &v.state case 1: @@ -655,18 +2560,23 @@ func file_route_proto_init() { } } } + file_route_proto_msgTypes[28].OneofWrappers = []interface{}{ + (*MirrorStatusResponse_QrepStatus)(nil), + (*MirrorStatusResponse_CdcStatus)(nil), + } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_route_proto_rawDesc, - NumEnums: 0, - NumMessages: 8, + NumEnums: 2, + NumMessages: 29, NumExtensions: 0, NumServices: 1, }, GoTypes: file_route_proto_goTypes, DependencyIndexes: file_route_proto_depIdxs, + EnumInfos: file_route_proto_enumTypes, MessageInfos: file_route_proto_msgTypes, }.Build() File_route_proto = out.File diff --git a/flow/generated/protos/route.pb.gw.go b/flow/generated/protos/route.pb.gw.go new file mode 100644 index 000000000..020ba2536 --- /dev/null +++ b/flow/generated/protos/route.pb.gw.go @@ -0,0 +1,1166 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: route.proto + +/* +Package protos is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. 
+*/ +package protos + +import ( + "context" + "io" + "net/http" + + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" +) + +// Suppress "imported and not used" errors +var _ codes.Code +var _ io.Reader +var _ status.Status +var _ = runtime.String +var _ = utilities.NewDoubleArray +var _ = metadata.Join + +func request_FlowService_ValidatePeer_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ValidatePeerRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ValidatePeer(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_ValidatePeer_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ValidatePeerRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.ValidatePeer(ctx, 
&protoReq) + return msg, metadata, err + +} + +func request_FlowService_CreatePeer_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreatePeerRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreatePeer(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_CreatePeer_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreatePeerRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.CreatePeer(ctx, &protoReq) + return msg, metadata, err + +} + +func request_FlowService_DropPeer_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq DropPeerRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := 
marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.DropPeer(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_DropPeer_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq DropPeerRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.DropPeer(ctx, &protoReq) + return msg, metadata, err + +} + +func request_FlowService_CreateCDCFlow_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreateCDCFlowRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreateCDCFlow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_CreateCDCFlow_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { 
+ var protoReq CreateCDCFlowRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.CreateCDCFlow(ctx, &protoReq) + return msg, metadata, err + +} + +func request_FlowService_CreateQRepFlow_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreateQRepFlowRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreateQRepFlow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_CreateQRepFlow_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreateQRepFlowRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.CreateQRepFlow(ctx, &protoReq) + return msg, metadata, err + +} + +var ( + 
filter_FlowService_GetSchemas_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_FlowService_GetSchemas_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq PostgresPeerActivityInfoRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FlowService_GetSchemas_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetSchemas(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_GetSchemas_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq PostgresPeerActivityInfoRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FlowService_GetSchemas_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.GetSchemas(ctx, &protoReq) + return msg, metadata, err + +} + +var ( + filter_FlowService_GetTablesInSchema_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_FlowService_GetTablesInSchema_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq SchemaTablesRequest + var metadata 
runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FlowService_GetTablesInSchema_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetTablesInSchema(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_GetTablesInSchema_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq SchemaTablesRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FlowService_GetTablesInSchema_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.GetTablesInSchema(ctx, &protoReq) + return msg, metadata, err + +} + +var ( + filter_FlowService_GetColumns_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_FlowService_GetColumns_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq TableColumnsRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FlowService_GetColumns_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetColumns(ctx, &protoReq, 
grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_GetColumns_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq TableColumnsRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FlowService_GetColumns_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.GetColumns(ctx, &protoReq) + return msg, metadata, err + +} + +func request_FlowService_GetSlotInfo_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq PostgresPeerActivityInfoRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["peer_name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "peer_name") + } + + protoReq.PeerName, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "peer_name", err) + } + + msg, err := client.GetSlotInfo(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_GetSlotInfo_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq PostgresPeerActivityInfoRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = 
err + ) + + val, ok = pathParams["peer_name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "peer_name") + } + + protoReq.PeerName, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "peer_name", err) + } + + msg, err := server.GetSlotInfo(ctx, &protoReq) + return msg, metadata, err + +} + +func request_FlowService_GetStatInfo_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq PostgresPeerActivityInfoRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["peer_name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "peer_name") + } + + protoReq.PeerName, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "peer_name", err) + } + + msg, err := client.GetStatInfo(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_GetStatInfo_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq PostgresPeerActivityInfoRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["peer_name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "peer_name") + } + + protoReq.PeerName, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "peer_name", err) + 
} + + msg, err := server.GetStatInfo(ctx, &protoReq) + return msg, metadata, err + +} + +func request_FlowService_ShutdownFlow_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ShutdownRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ShutdownFlow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_ShutdownFlow_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ShutdownRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.ShutdownFlow(ctx, &protoReq) + return msg, metadata, err + +} + +func request_FlowService_MirrorStatus_0(ctx context.Context, marshaler runtime.Marshaler, client FlowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq MirrorStatusRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["flow_job_name"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "flow_job_name") + } + + protoReq.FlowJobName, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "flow_job_name", err) + } + + msg, err := client.MirrorStatus(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_FlowService_MirrorStatus_0(ctx context.Context, marshaler runtime.Marshaler, server FlowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq MirrorStatusRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["flow_job_name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "flow_job_name") + } + + protoReq.FlowJobName, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "flow_job_name", err) + } + + msg, err := server.MirrorStatus(ctx, &protoReq) + return msg, metadata, err + +} + +// RegisterFlowServiceHandlerServer registers the http handlers for service FlowService to "mux". +// UnaryRPC :call FlowServiceServer directly. +// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. +// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterFlowServiceHandlerFromEndpoint instead. 
+func RegisterFlowServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server FlowServiceServer) error { + + mux.Handle("POST", pattern_FlowService_ValidatePeer_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/ValidatePeer", runtime.WithHTTPPathPattern("/v1/peers/validate")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_ValidatePeer_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_ValidatePeer_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_CreatePeer_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/CreatePeer", runtime.WithHTTPPathPattern("/v1/peers/create")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_CreatePeer_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_CreatePeer_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_DropPeer_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/DropPeer", runtime.WithHTTPPathPattern("/v1/peers/drop")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_DropPeer_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_DropPeer_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_CreateCDCFlow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/CreateCDCFlow", runtime.WithHTTPPathPattern("/v1/flows/cdc/create")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_CreateCDCFlow_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_CreateCDCFlow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_CreateQRepFlow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/CreateQRepFlow", runtime.WithHTTPPathPattern("/v1/flows/qrep/create")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_CreateQRepFlow_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_CreateQRepFlow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetSchemas_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/GetSchemas", runtime.WithHTTPPathPattern("/v1/peers/schemas")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_GetSchemas_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetSchemas_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetTablesInSchema_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/GetTablesInSchema", runtime.WithHTTPPathPattern("/v1/peers/tables")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_GetTablesInSchema_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetTablesInSchema_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetColumns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/GetColumns", runtime.WithHTTPPathPattern("/v1/peers/columns")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_GetColumns_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetColumns_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetSlotInfo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/GetSlotInfo", runtime.WithHTTPPathPattern("/v1/peers/slots/{peer_name}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_GetSlotInfo_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetSlotInfo_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetStatInfo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/GetStatInfo", runtime.WithHTTPPathPattern("/v1/peers/stats/{peer_name}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_GetStatInfo_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetStatInfo_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_ShutdownFlow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/ShutdownFlow", runtime.WithHTTPPathPattern("/v1/mirrors/drop")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_ShutdownFlow_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_ShutdownFlow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_MirrorStatus_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/peerdb_route.FlowService/MirrorStatus", runtime.WithHTTPPathPattern("/v1/mirrors/{flow_job_name}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_FlowService_MirrorStatus_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_MirrorStatus_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +// RegisterFlowServiceHandlerFromEndpoint is same as RegisterFlowServiceHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterFlowServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.DialContext(ctx, endpoint, opts...) 
+ if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterFlowServiceHandler(ctx, mux, conn) +} + +// RegisterFlowServiceHandler registers the http handlers for service FlowService to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterFlowServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterFlowServiceHandlerClient(ctx, mux, NewFlowServiceClient(conn)) +} + +// RegisterFlowServiceHandlerClient registers the http handlers for service FlowService +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "FlowServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "FlowServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "FlowServiceClient" to call the correct interceptors. 
+func RegisterFlowServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client FlowServiceClient) error { + + mux.Handle("POST", pattern_FlowService_ValidatePeer_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/ValidatePeer", runtime.WithHTTPPathPattern("/v1/peers/validate")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_ValidatePeer_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_ValidatePeer_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_CreatePeer_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/CreatePeer", runtime.WithHTTPPathPattern("/v1/peers/create")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_CreatePeer_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_CreatePeer_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_DropPeer_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/DropPeer", runtime.WithHTTPPathPattern("/v1/peers/drop")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_DropPeer_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_DropPeer_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_CreateCDCFlow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/CreateCDCFlow", runtime.WithHTTPPathPattern("/v1/flows/cdc/create")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_CreateCDCFlow_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_CreateCDCFlow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_CreateQRepFlow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/CreateQRepFlow", runtime.WithHTTPPathPattern("/v1/flows/qrep/create")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_CreateQRepFlow_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_CreateQRepFlow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetSchemas_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/GetSchemas", runtime.WithHTTPPathPattern("/v1/peers/schemas")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_GetSchemas_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetSchemas_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetTablesInSchema_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/GetTablesInSchema", runtime.WithHTTPPathPattern("/v1/peers/tables")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_GetTablesInSchema_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetTablesInSchema_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetColumns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/GetColumns", runtime.WithHTTPPathPattern("/v1/peers/columns")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_GetColumns_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetColumns_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetSlotInfo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/GetSlotInfo", runtime.WithHTTPPathPattern("/v1/peers/slots/{peer_name}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_GetSlotInfo_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetSlotInfo_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_GetStatInfo_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/GetStatInfo", runtime.WithHTTPPathPattern("/v1/peers/stats/{peer_name}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_GetStatInfo_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_GetStatInfo_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_FlowService_ShutdownFlow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/ShutdownFlow", runtime.WithHTTPPathPattern("/v1/mirrors/drop")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_ShutdownFlow_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_ShutdownFlow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_FlowService_MirrorStatus_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/peerdb_route.FlowService/MirrorStatus", runtime.WithHTTPPathPattern("/v1/mirrors/{flow_job_name}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_FlowService_MirrorStatus_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_FlowService_MirrorStatus_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + return nil +} + +var ( + pattern_FlowService_ValidatePeer_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "peers", "validate"}, "")) + + pattern_FlowService_CreatePeer_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "peers", "create"}, "")) + + pattern_FlowService_DropPeer_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "peers", "drop"}, "")) + + pattern_FlowService_CreateCDCFlow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"v1", "flows", "cdc", "create"}, "")) + + pattern_FlowService_CreateQRepFlow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"v1", "flows", "qrep", "create"}, "")) + + pattern_FlowService_GetSchemas_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "peers", "schemas"}, "")) + + pattern_FlowService_GetTablesInSchema_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "peers", "tables"}, "")) + + pattern_FlowService_GetColumns_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "peers", "columns"}, "")) + + pattern_FlowService_GetSlotInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"v1", "peers", "slots", "peer_name"}, "")) + + pattern_FlowService_GetStatInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"v1", "peers", "stats", "peer_name"}, "")) + + pattern_FlowService_ShutdownFlow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "mirrors", "drop"}, "")) + + pattern_FlowService_MirrorStatus_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2}, []string{"v1", "mirrors", "flow_job_name"}, "")) +) + +var ( + forward_FlowService_ValidatePeer_0 = runtime.ForwardResponseMessage + + 
forward_FlowService_CreatePeer_0 = runtime.ForwardResponseMessage + + forward_FlowService_DropPeer_0 = runtime.ForwardResponseMessage + + forward_FlowService_CreateCDCFlow_0 = runtime.ForwardResponseMessage + + forward_FlowService_CreateQRepFlow_0 = runtime.ForwardResponseMessage + + forward_FlowService_GetSchemas_0 = runtime.ForwardResponseMessage + + forward_FlowService_GetTablesInSchema_0 = runtime.ForwardResponseMessage + + forward_FlowService_GetColumns_0 = runtime.ForwardResponseMessage + + forward_FlowService_GetSlotInfo_0 = runtime.ForwardResponseMessage + + forward_FlowService_GetStatInfo_0 = runtime.ForwardResponseMessage + + forward_FlowService_ShutdownFlow_0 = runtime.ForwardResponseMessage + + forward_FlowService_MirrorStatus_0 = runtime.ForwardResponseMessage +) diff --git a/flow/generated/protos/route_grpc.pb.go b/flow/generated/protos/route_grpc.pb.go index 9efd24b25..05136fecc 100644 --- a/flow/generated/protos/route_grpc.pb.go +++ b/flow/generated/protos/route_grpc.pb.go @@ -19,20 +19,36 @@ import ( const _ = grpc.SupportPackageIsVersion7 const ( - FlowService_ListPeers_FullMethodName = "/peerdb_route.FlowService/ListPeers" - FlowService_CreateCDCFlow_FullMethodName = "/peerdb_route.FlowService/CreateCDCFlow" - FlowService_CreateQRepFlow_FullMethodName = "/peerdb_route.FlowService/CreateQRepFlow" - FlowService_ShutdownFlow_FullMethodName = "/peerdb_route.FlowService/ShutdownFlow" + FlowService_ValidatePeer_FullMethodName = "/peerdb_route.FlowService/ValidatePeer" + FlowService_CreatePeer_FullMethodName = "/peerdb_route.FlowService/CreatePeer" + FlowService_DropPeer_FullMethodName = "/peerdb_route.FlowService/DropPeer" + FlowService_CreateCDCFlow_FullMethodName = "/peerdb_route.FlowService/CreateCDCFlow" + FlowService_CreateQRepFlow_FullMethodName = "/peerdb_route.FlowService/CreateQRepFlow" + FlowService_GetSchemas_FullMethodName = "/peerdb_route.FlowService/GetSchemas" + FlowService_GetTablesInSchema_FullMethodName = 
"/peerdb_route.FlowService/GetTablesInSchema" + FlowService_GetColumns_FullMethodName = "/peerdb_route.FlowService/GetColumns" + FlowService_GetSlotInfo_FullMethodName = "/peerdb_route.FlowService/GetSlotInfo" + FlowService_GetStatInfo_FullMethodName = "/peerdb_route.FlowService/GetStatInfo" + FlowService_ShutdownFlow_FullMethodName = "/peerdb_route.FlowService/ShutdownFlow" + FlowService_MirrorStatus_FullMethodName = "/peerdb_route.FlowService/MirrorStatus" ) // FlowServiceClient is the client API for FlowService service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. type FlowServiceClient interface { - ListPeers(ctx context.Context, in *ListPeersRequest, opts ...grpc.CallOption) (*ListPeersResponse, error) + ValidatePeer(ctx context.Context, in *ValidatePeerRequest, opts ...grpc.CallOption) (*ValidatePeerResponse, error) + CreatePeer(ctx context.Context, in *CreatePeerRequest, opts ...grpc.CallOption) (*CreatePeerResponse, error) + DropPeer(ctx context.Context, in *DropPeerRequest, opts ...grpc.CallOption) (*DropPeerResponse, error) CreateCDCFlow(ctx context.Context, in *CreateCDCFlowRequest, opts ...grpc.CallOption) (*CreateCDCFlowResponse, error) CreateQRepFlow(ctx context.Context, in *CreateQRepFlowRequest, opts ...grpc.CallOption) (*CreateQRepFlowResponse, error) + GetSchemas(ctx context.Context, in *PostgresPeerActivityInfoRequest, opts ...grpc.CallOption) (*PeerSchemasResponse, error) + GetTablesInSchema(ctx context.Context, in *SchemaTablesRequest, opts ...grpc.CallOption) (*SchemaTablesResponse, error) + GetColumns(ctx context.Context, in *TableColumnsRequest, opts ...grpc.CallOption) (*TableColumnsResponse, error) + GetSlotInfo(ctx context.Context, in *PostgresPeerActivityInfoRequest, opts ...grpc.CallOption) (*PeerSlotResponse, error) + GetStatInfo(ctx context.Context, in *PostgresPeerActivityInfoRequest, opts ...grpc.CallOption) 
(*PeerStatResponse, error) ShutdownFlow(ctx context.Context, in *ShutdownRequest, opts ...grpc.CallOption) (*ShutdownResponse, error) + MirrorStatus(ctx context.Context, in *MirrorStatusRequest, opts ...grpc.CallOption) (*MirrorStatusResponse, error) } type flowServiceClient struct { @@ -43,9 +59,27 @@ func NewFlowServiceClient(cc grpc.ClientConnInterface) FlowServiceClient { return &flowServiceClient{cc} } -func (c *flowServiceClient) ListPeers(ctx context.Context, in *ListPeersRequest, opts ...grpc.CallOption) (*ListPeersResponse, error) { - out := new(ListPeersResponse) - err := c.cc.Invoke(ctx, FlowService_ListPeers_FullMethodName, in, out, opts...) +func (c *flowServiceClient) ValidatePeer(ctx context.Context, in *ValidatePeerRequest, opts ...grpc.CallOption) (*ValidatePeerResponse, error) { + out := new(ValidatePeerResponse) + err := c.cc.Invoke(ctx, FlowService_ValidatePeer_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *flowServiceClient) CreatePeer(ctx context.Context, in *CreatePeerRequest, opts ...grpc.CallOption) (*CreatePeerResponse, error) { + out := new(CreatePeerResponse) + err := c.cc.Invoke(ctx, FlowService_CreatePeer_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *flowServiceClient) DropPeer(ctx context.Context, in *DropPeerRequest, opts ...grpc.CallOption) (*DropPeerResponse, error) { + out := new(DropPeerResponse) + err := c.cc.Invoke(ctx, FlowService_DropPeer_FullMethodName, in, out, opts...) if err != nil { return nil, err } @@ -70,6 +104,51 @@ func (c *flowServiceClient) CreateQRepFlow(ctx context.Context, in *CreateQRepFl return out, nil } +func (c *flowServiceClient) GetSchemas(ctx context.Context, in *PostgresPeerActivityInfoRequest, opts ...grpc.CallOption) (*PeerSchemasResponse, error) { + out := new(PeerSchemasResponse) + err := c.cc.Invoke(ctx, FlowService_GetSchemas_FullMethodName, in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *flowServiceClient) GetTablesInSchema(ctx context.Context, in *SchemaTablesRequest, opts ...grpc.CallOption) (*SchemaTablesResponse, error) { + out := new(SchemaTablesResponse) + err := c.cc.Invoke(ctx, FlowService_GetTablesInSchema_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *flowServiceClient) GetColumns(ctx context.Context, in *TableColumnsRequest, opts ...grpc.CallOption) (*TableColumnsResponse, error) { + out := new(TableColumnsResponse) + err := c.cc.Invoke(ctx, FlowService_GetColumns_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *flowServiceClient) GetSlotInfo(ctx context.Context, in *PostgresPeerActivityInfoRequest, opts ...grpc.CallOption) (*PeerSlotResponse, error) { + out := new(PeerSlotResponse) + err := c.cc.Invoke(ctx, FlowService_GetSlotInfo_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *flowServiceClient) GetStatInfo(ctx context.Context, in *PostgresPeerActivityInfoRequest, opts ...grpc.CallOption) (*PeerStatResponse, error) { + out := new(PeerStatResponse) + err := c.cc.Invoke(ctx, FlowService_GetStatInfo_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + func (c *flowServiceClient) ShutdownFlow(ctx context.Context, in *ShutdownRequest, opts ...grpc.CallOption) (*ShutdownResponse, error) { out := new(ShutdownResponse) err := c.cc.Invoke(ctx, FlowService_ShutdownFlow_FullMethodName, in, out, opts...) 
@@ -79,14 +158,31 @@ func (c *flowServiceClient) ShutdownFlow(ctx context.Context, in *ShutdownReques return out, nil } +func (c *flowServiceClient) MirrorStatus(ctx context.Context, in *MirrorStatusRequest, opts ...grpc.CallOption) (*MirrorStatusResponse, error) { + out := new(MirrorStatusResponse) + err := c.cc.Invoke(ctx, FlowService_MirrorStatus_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // FlowServiceServer is the server API for FlowService service. // All implementations must embed UnimplementedFlowServiceServer // for forward compatibility type FlowServiceServer interface { - ListPeers(context.Context, *ListPeersRequest) (*ListPeersResponse, error) + ValidatePeer(context.Context, *ValidatePeerRequest) (*ValidatePeerResponse, error) + CreatePeer(context.Context, *CreatePeerRequest) (*CreatePeerResponse, error) + DropPeer(context.Context, *DropPeerRequest) (*DropPeerResponse, error) CreateCDCFlow(context.Context, *CreateCDCFlowRequest) (*CreateCDCFlowResponse, error) CreateQRepFlow(context.Context, *CreateQRepFlowRequest) (*CreateQRepFlowResponse, error) + GetSchemas(context.Context, *PostgresPeerActivityInfoRequest) (*PeerSchemasResponse, error) + GetTablesInSchema(context.Context, *SchemaTablesRequest) (*SchemaTablesResponse, error) + GetColumns(context.Context, *TableColumnsRequest) (*TableColumnsResponse, error) + GetSlotInfo(context.Context, *PostgresPeerActivityInfoRequest) (*PeerSlotResponse, error) + GetStatInfo(context.Context, *PostgresPeerActivityInfoRequest) (*PeerStatResponse, error) ShutdownFlow(context.Context, *ShutdownRequest) (*ShutdownResponse, error) + MirrorStatus(context.Context, *MirrorStatusRequest) (*MirrorStatusResponse, error) mustEmbedUnimplementedFlowServiceServer() } @@ -94,8 +190,14 @@ type FlowServiceServer interface { type UnimplementedFlowServiceServer struct { } -func (UnimplementedFlowServiceServer) ListPeers(context.Context, *ListPeersRequest) (*ListPeersResponse, error) 
{ - return nil, status.Errorf(codes.Unimplemented, "method ListPeers not implemented") +func (UnimplementedFlowServiceServer) ValidatePeer(context.Context, *ValidatePeerRequest) (*ValidatePeerResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ValidatePeer not implemented") +} +func (UnimplementedFlowServiceServer) CreatePeer(context.Context, *CreatePeerRequest) (*CreatePeerResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreatePeer not implemented") +} +func (UnimplementedFlowServiceServer) DropPeer(context.Context, *DropPeerRequest) (*DropPeerResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DropPeer not implemented") } func (UnimplementedFlowServiceServer) CreateCDCFlow(context.Context, *CreateCDCFlowRequest) (*CreateCDCFlowResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method CreateCDCFlow not implemented") @@ -103,9 +205,27 @@ func (UnimplementedFlowServiceServer) CreateCDCFlow(context.Context, *CreateCDCF func (UnimplementedFlowServiceServer) CreateQRepFlow(context.Context, *CreateQRepFlowRequest) (*CreateQRepFlowResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method CreateQRepFlow not implemented") } +func (UnimplementedFlowServiceServer) GetSchemas(context.Context, *PostgresPeerActivityInfoRequest) (*PeerSchemasResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetSchemas not implemented") +} +func (UnimplementedFlowServiceServer) GetTablesInSchema(context.Context, *SchemaTablesRequest) (*SchemaTablesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetTablesInSchema not implemented") +} +func (UnimplementedFlowServiceServer) GetColumns(context.Context, *TableColumnsRequest) (*TableColumnsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetColumns not implemented") +} +func (UnimplementedFlowServiceServer) GetSlotInfo(context.Context, 
*PostgresPeerActivityInfoRequest) (*PeerSlotResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetSlotInfo not implemented") +} +func (UnimplementedFlowServiceServer) GetStatInfo(context.Context, *PostgresPeerActivityInfoRequest) (*PeerStatResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetStatInfo not implemented") +} func (UnimplementedFlowServiceServer) ShutdownFlow(context.Context, *ShutdownRequest) (*ShutdownResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method ShutdownFlow not implemented") } +func (UnimplementedFlowServiceServer) MirrorStatus(context.Context, *MirrorStatusRequest) (*MirrorStatusResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method MirrorStatus not implemented") +} func (UnimplementedFlowServiceServer) mustEmbedUnimplementedFlowServiceServer() {} // UnsafeFlowServiceServer may be embedded to opt out of forward compatibility for this service. @@ -119,20 +239,56 @@ func RegisterFlowServiceServer(s grpc.ServiceRegistrar, srv FlowServiceServer) { s.RegisterService(&FlowService_ServiceDesc, srv) } -func _FlowService_ListPeers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListPeersRequest) +func _FlowService_ValidatePeer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ValidatePeerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FlowServiceServer).ValidatePeer(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FlowService_ValidatePeer_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FlowServiceServer).ValidatePeer(ctx, req.(*ValidatePeerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_FlowService_CreatePeer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePeerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FlowServiceServer).CreatePeer(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FlowService_CreatePeer_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FlowServiceServer).CreatePeer(ctx, req.(*CreatePeerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FlowService_DropPeer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DropPeerRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { - return srv.(FlowServiceServer).ListPeers(ctx, in) + return srv.(FlowServiceServer).DropPeer(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: FlowService_ListPeers_FullMethodName, + FullMethod: FlowService_DropPeer_FullMethodName, } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(FlowServiceServer).ListPeers(ctx, req.(*ListPeersRequest)) + return srv.(FlowServiceServer).DropPeer(ctx, req.(*DropPeerRequest)) } return interceptor(ctx, in, info, handler) } @@ -173,6 +329,96 @@ func _FlowService_CreateQRepFlow_Handler(srv interface{}, ctx context.Context, d return interceptor(ctx, in, info, handler) } +func _FlowService_GetSchemas_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PostgresPeerActivityInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FlowServiceServer).GetSchemas(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
FlowService_GetSchemas_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FlowServiceServer).GetSchemas(ctx, req.(*PostgresPeerActivityInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FlowService_GetTablesInSchema_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SchemaTablesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FlowServiceServer).GetTablesInSchema(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FlowService_GetTablesInSchema_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FlowServiceServer).GetTablesInSchema(ctx, req.(*SchemaTablesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FlowService_GetColumns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TableColumnsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FlowServiceServer).GetColumns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FlowService_GetColumns_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FlowServiceServer).GetColumns(ctx, req.(*TableColumnsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FlowService_GetSlotInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PostgresPeerActivityInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FlowServiceServer).GetSlotInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + 
Server: srv, + FullMethod: FlowService_GetSlotInfo_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FlowServiceServer).GetSlotInfo(ctx, req.(*PostgresPeerActivityInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FlowService_GetStatInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PostgresPeerActivityInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FlowServiceServer).GetStatInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FlowService_GetStatInfo_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FlowServiceServer).GetStatInfo(ctx, req.(*PostgresPeerActivityInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + func _FlowService_ShutdownFlow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(ShutdownRequest) if err := dec(in); err != nil { @@ -191,6 +437,24 @@ func _FlowService_ShutdownFlow_Handler(srv interface{}, ctx context.Context, dec return interceptor(ctx, in, info, handler) } +func _FlowService_MirrorStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MirrorStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FlowServiceServer).MirrorStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: FlowService_MirrorStatus_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FlowServiceServer).MirrorStatus(ctx, req.(*MirrorStatusRequest)) + } + return interceptor(ctx, in, 
info, handler) +} + // FlowService_ServiceDesc is the grpc.ServiceDesc for FlowService service. // It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -199,8 +463,16 @@ var FlowService_ServiceDesc = grpc.ServiceDesc{ HandlerType: (*FlowServiceServer)(nil), Methods: []grpc.MethodDesc{ { - MethodName: "ListPeers", - Handler: _FlowService_ListPeers_Handler, + MethodName: "ValidatePeer", + Handler: _FlowService_ValidatePeer_Handler, + }, + { + MethodName: "CreatePeer", + Handler: _FlowService_CreatePeer_Handler, + }, + { + MethodName: "DropPeer", + Handler: _FlowService_DropPeer_Handler, }, { MethodName: "CreateCDCFlow", @@ -210,10 +482,34 @@ var FlowService_ServiceDesc = grpc.ServiceDesc{ MethodName: "CreateQRepFlow", Handler: _FlowService_CreateQRepFlow_Handler, }, + { + MethodName: "GetSchemas", + Handler: _FlowService_GetSchemas_Handler, + }, + { + MethodName: "GetTablesInSchema", + Handler: _FlowService_GetTablesInSchema_Handler, + }, + { + MethodName: "GetColumns", + Handler: _FlowService_GetColumns_Handler, + }, + { + MethodName: "GetSlotInfo", + Handler: _FlowService_GetSlotInfo_Handler, + }, + { + MethodName: "GetStatInfo", + Handler: _FlowService_GetStatInfo_Handler, + }, { MethodName: "ShutdownFlow", Handler: _FlowService_ShutdownFlow_Handler, }, + { + MethodName: "MirrorStatus", + Handler: _FlowService_MirrorStatus_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "route.proto", diff --git a/flow/go.mod b/flow/go.mod index 0ea1b9d71..c1dd4013e 100644 --- a/flow/go.mod +++ b/flow/go.mod @@ -3,101 +3,91 @@ module github.com/PeerDB-io/peer-flow go 1.19 require ( - cloud.google.com/go v0.110.7 - cloud.google.com/go/bigquery v1.54.0 - cloud.google.com/go/storage v1.32.0 - github.com/Azure/azure-amqp-common-go/v4 v4.2.0 - github.com/Azure/azure-event-hubs-go/v3 v3.6.1 - github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1 + cloud.google.com/go v0.110.8 + cloud.google.com/go/bigquery 
v1.56.0 + cloud.google.com/go/storage v1.33.0 + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 + github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs v1.0.1 github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.1.1 - github.com/aws/aws-sdk-go v1.44.332 + github.com/aws/aws-sdk-go v1.45.25 + github.com/cenkalti/backoff/v4 v4.2.1 github.com/google/uuid v1.3.1 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 github.com/hashicorp/go-multierror v1.1.1 - github.com/jackc/pglogrepl v0.0.0-20230810221841-d0818e1fbef7 + github.com/jackc/pglogrepl v0.0.0-20230826184802-9ed16cb201f6 github.com/jackc/pgx/v5 v5.4.3 github.com/jmoiron/sqlx v1.3.5 github.com/joho/godotenv v1.5.1 github.com/lib/pq v1.10.9 github.com/linkedin/goavro/v2 v2.12.0 - github.com/microsoft/go-mssqldb v1.5.0 - github.com/prometheus/client_golang v1.16.0 + github.com/microsoft/go-mssqldb v1.6.0 + github.com/orcaman/concurrent-map/v2 v2.0.1 + github.com/prometheus/client_golang v1.17.0 github.com/sirupsen/logrus v1.9.3 - github.com/snowflakedb/gosnowflake v1.6.24 + github.com/snowflakedb/gosnowflake v1.6.25 github.com/stretchr/testify v1.8.4 - github.com/uber-go/tally/v4 v4.1.7 + github.com/uber-go/tally/v4 v4.1.10 github.com/urfave/cli/v2 v2.25.7 + github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a go.temporal.io/api v1.24.0 - go.temporal.io/sdk v1.24.0 + go.temporal.io/sdk v1.25.0 go.uber.org/automaxprocs v1.5.3 - google.golang.org/api v0.138.0 - google.golang.org/grpc v1.57.0 + google.golang.org/api v0.147.0 + google.golang.org/grpc v1.58.3 google.golang.org/protobuf v1.31.0 ) require ( github.com/golang-jwt/jwt/v5 v5.0.0 // indirect - github.com/grafana/pyroscope-go/godeltaprof v0.1.3 // indirect + github.com/grafana/pyroscope-go/godeltaprof v0.1.5 // indirect github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect - github.com/orcaman/concurrent-map/v2 v2.0.1 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/twmb/murmur3 v1.1.8 // 
indirect - github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect + github.com/twpayne/go-geos v0.13.2 // indirect ) require ( - cloud.google.com/go/compute v1.23.0 // indirect + cloud.google.com/go/compute v1.23.1 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect - cloud.google.com/go/iam v1.1.2 // indirect + cloud.google.com/go/iam v1.1.3 // indirect github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect github.com/99designs/keyring v1.2.2 // indirect - github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.8.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0 // indirect - github.com/Azure/go-amqp v1.0.1 // indirect - github.com/Azure/go-autorest v14.2.0+incompatible // indirect - github.com/Azure/go-autorest/autorest v0.11.29 // indirect - github.com/Azure/go-autorest/autorest/adal v0.9.23 // indirect - github.com/Azure/go-autorest/autorest/azure/auth v0.5.12 // indirect - github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect - github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect - github.com/Azure/go-autorest/autorest/validation v0.3.1 // indirect - github.com/Azure/go-autorest/logger v0.2.1 // indirect - github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 // indirect + github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.0 // indirect + github.com/Azure/go-amqp v1.0.2 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0 // indirect github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c // indirect github.com/andybalholm/brotli v1.0.5 // indirect github.com/apache/arrow/go/v12 v12.0.1 // indirect - github.com/apache/thrift v0.18.1 // indirect - 
github.com/aws/aws-sdk-go-v2 v1.21.0 // indirect - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.13.35 // indirect - github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.81 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.4 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.14 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.36 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.35 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.4 // indirect - github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5 // indirect - github.com/aws/smithy-go v1.14.2 // indirect + github.com/apache/thrift v0.19.0 // indirect + github.com/aws/aws-sdk-go-v2 v1.21.2 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.14 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.13.43 // indirect + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.90 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.43 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.37 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.6 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.15 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.38 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.37 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.6 // indirect + github.com/aws/aws-sdk-go-v2/service/s3 v1.40.2 // indirect + github.com/aws/smithy-go v1.15.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect - github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect + 
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect github.com/danieljoos/wincred v1.2.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/devigned/tab v0.1.1 // indirect github.com/dvsekhvalnov/jose2go v1.5.0 // indirect github.com/facebookgo/clock v0.0.0-20150410010913-600d898af40a // indirect github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect - github.com/gabriel-vasile/mimetype v1.4.2 // indirect + github.com/gabriel-vasile/mimetype v1.4.3 // indirect github.com/goccy/go-json v0.10.2 // indirect github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect github.com/gogo/googleapis v1.4.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/gogo/status v1.1.1 // indirect - github.com/golang-jwt/jwt/v4 v4.5.0 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect github.com/golang-sql/sqlexp v0.1.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect @@ -105,11 +95,11 @@ require ( github.com/golang/protobuf v1.5.3 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/flatbuffers v23.5.26+incompatible // indirect - github.com/google/go-cmp v0.5.9 // indirect - github.com/google/s2a-go v0.1.5 // indirect - github.com/googleapis/enterprise-certificate-proxy v0.2.5 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/google/s2a-go v0.1.7 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.1 // indirect github.com/googleapis/gax-go/v2 v2.12.0 // indirect - github.com/grafana/pyroscope-go v1.0.2 + github.com/grafana/pyroscope-go v1.0.4 github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect github.com/hashicorp/errwrap v1.1.0 // indirect @@ -118,23 +108,21 @@ require ( github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect github.com/jackc/puddle/v2 v2.2.1 // indirect 
github.com/jmespath/go-jmespath v0.4.0 // indirect - github.com/jpillora/backoff v1.0.0 // indirect github.com/klauspost/asmfmt v1.3.2 // indirect - github.com/klauspost/compress v1.16.7 // indirect + github.com/klauspost/compress v1.17.1 // indirect github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mtibben/percent v0.2.1 // indirect github.com/pborman/uuid v1.2.1 // indirect github.com/pierrec/lz4/v4 v4.1.18 // indirect github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/prometheus/client_model v0.4.0 // indirect + github.com/prometheus/client_model v0.5.0 // indirect github.com/prometheus/common v0.44.0 // indirect - github.com/prometheus/procfs v0.11.1 // indirect + github.com/prometheus/procfs v0.12.0 // indirect github.com/robfig/cron v1.2.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/stretchr/objx v0.5.1 // indirect @@ -142,22 +130,22 @@ require ( github.com/zeebo/xxh3 v1.0.2 // indirect go.opencensus.io v0.24.0 // indirect go.temporal.io/sdk/contrib/tally v0.2.0 - go.uber.org/atomic v1.11.0 // indirect - golang.org/x/crypto v0.12.0 // indirect - golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 - golang.org/x/mod v0.12.0 // indirect - golang.org/x/net v0.14.0 // indirect - golang.org/x/oauth2 v0.11.0 // indirect - golang.org/x/sync v0.3.0 // indirect - golang.org/x/sys v0.11.0 // indirect - golang.org/x/term v0.11.0 // indirect - golang.org/x/text v0.12.0 // indirect + go.uber.org/atomic v1.11.0 + golang.org/x/crypto v0.14.0 // indirect + golang.org/x/exp v0.0.0-20231006140011-7918f672742d + golang.org/x/mod v0.13.0 // 
indirect + golang.org/x/net v0.17.0 // indirect + golang.org/x/oauth2 v0.13.0 // indirect + golang.org/x/sync v0.4.0 // indirect + golang.org/x/sys v0.13.0 // indirect + golang.org/x/term v0.13.0 // indirect + golang.org/x/text v0.13.0 // indirect golang.org/x/time v0.3.0 // indirect - golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846 // indirect - golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect - google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20230822172742-b8732ec3820d // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d // indirect + golang.org/x/tools v0.14.0 // indirect + golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect + google.golang.org/appengine v1.6.8 // indirect + google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20231012201019-e917dd12ba7a + google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/flow/go.sum b/flow/go.sum index 8cf261b36..1f643a26f 100644 --- a/flow/go.sum +++ b/flow/go.sum @@ -39,8 +39,9 @@ cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMz cloud.google.com/go v0.110.2/go.mod h1:k04UEeEtb6ZBRTv3dZz4CeJC3jKGxyhl0sAiVVquxiw= cloud.google.com/go v0.110.4/go.mod h1:+EYjdK8e5RME/VY/qLCAtuyALQ9q67dvuum8i+H5xsI= cloud.google.com/go v0.110.6/go.mod h1:+EYjdK8e5RME/VY/qLCAtuyALQ9q67dvuum8i+H5xsI= -cloud.google.com/go v0.110.7 h1:rJyC7nWRg2jWGZ4wSJ5nY65GTdYJkg0cd/uXb+ACI6o= cloud.google.com/go v0.110.7/go.mod h1:+EYjdK8e5RME/VY/qLCAtuyALQ9q67dvuum8i+H5xsI= +cloud.google.com/go v0.110.8 h1:tyNdfIxjzaWctIiLYOTalaLKZ17SI44SKFW26QbOhME= +cloud.google.com/go v0.110.8/go.mod h1:Iz8AkXJf1qmxC3Oxoep8R1T36w8B92yU29PcBhHO5fk= cloud.google.com/go/accessapproval 
v1.4.0/go.mod h1:zybIuC3KpDOvotz59lFe5qxRZx6C75OtwbisN56xYB4= cloud.google.com/go/accessapproval v1.5.0/go.mod h1:HFy3tuiGvMdcd/u+Cu5b9NkO1pEICJ46IR82PoUdplw= cloud.google.com/go/accessapproval v1.6.0/go.mod h1:R0EiYnwV5fsRFiKZkPHr6mwyk2wxUJ30nL4j2pcFY2E= @@ -151,10 +152,11 @@ cloud.google.com/go/bigquery v1.48.0/go.mod h1:QAwSz+ipNgfL5jxiaK7weyOhzdoAy1zFm cloud.google.com/go/bigquery v1.49.0/go.mod h1:Sv8hMmTFFYBlt/ftw2uN6dFdQPzBlREY9yBh7Oy7/4Q= cloud.google.com/go/bigquery v1.50.0/go.mod h1:YrleYEh2pSEbgTBZYMJ5SuSr0ML3ypjRB1zgf7pvQLU= cloud.google.com/go/bigquery v1.52.0/go.mod h1:3b/iXjRQGU4nKa87cXeg6/gogLjO8C6PmuM8i5Bi/u4= -cloud.google.com/go/bigquery v1.53.0 h1:K3wLbjbnSlxhuG5q4pntHv5AEbQM1QqHKGYgwFIqOTg= cloud.google.com/go/bigquery v1.53.0/go.mod h1:3b/iXjRQGU4nKa87cXeg6/gogLjO8C6PmuM8i5Bi/u4= -cloud.google.com/go/bigquery v1.54.0 h1:ify6s7sy+kQuAimRnVTrPUzaeY0+X5GEsKt2C5CiA8w= -cloud.google.com/go/bigquery v1.54.0/go.mod h1:9Y5I3PN9kQWuid6183JFhOGOW3GcirA5LpsKCUn+2ec= +cloud.google.com/go/bigquery v1.55.0 h1:hs44Xxov3XLWQiCx2J8lK5U/ihLqnpm4RVVl5fdtLLI= +cloud.google.com/go/bigquery v1.55.0/go.mod h1:9Y5I3PN9kQWuid6183JFhOGOW3GcirA5LpsKCUn+2ec= +cloud.google.com/go/bigquery v1.56.0 h1:LHIc9E7Kw+ftFpQFKzZYBB88IAFz7qONawXXx0F3QBo= +cloud.google.com/go/bigquery v1.56.0/go.mod h1:KDcsploXTEY7XT3fDQzMUZlpQLHzE4itubHrnmhUrZA= cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY= cloud.google.com/go/billing v1.5.0/go.mod h1:mztb1tBc3QekhjSgmpf/CV4LzWXLzCArwpLmP2Gm88s= cloud.google.com/go/billing v1.6.0/go.mod h1:WoXzguj+BeHXPbKfNWkqVtDdzORazmCjraY+vrxcyvI= @@ -215,6 +217,8 @@ cloud.google.com/go/compute v1.19.3/go.mod h1:qxvISKp/gYnXkSAD1ppcSOveRAmzxicEv/ cloud.google.com/go/compute v1.20.1/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= cloud.google.com/go/compute v1.23.0 h1:tP41Zoavr8ptEqaW6j+LQOnyBBhO7OkOMAGrgLopTwY= cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= 
+cloud.google.com/go/compute v1.23.1 h1:V97tBoDaZHb6leicZ1G6DLK2BAaZLJ/7+9BB/En3hR0= +cloud.google.com/go/compute v1.23.1/go.mod h1:CqB3xpmPKKt3OJpW2ndFIXnA9A4xAy/F3Xp1ixncW78= cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZEXYonfTBHHFPO/4UU= cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= @@ -247,8 +251,9 @@ cloud.google.com/go/datacatalog v1.12.0/go.mod h1:CWae8rFkfp6LzLumKOnmVh4+Zle4A3 cloud.google.com/go/datacatalog v1.13.0/go.mod h1:E4Rj9a5ZtAxcQJlEBTLgMTphfP11/lNaAshpoBgemX8= cloud.google.com/go/datacatalog v1.14.0/go.mod h1:h0PrGtlihoutNMp/uvwhawLQ9+c63Kz65UFqh49Yo+E= cloud.google.com/go/datacatalog v1.14.1/go.mod h1:d2CevwTG4yedZilwe+v3E3ZBDRMobQfSG/a6cCCN5R4= -cloud.google.com/go/datacatalog v1.16.0 h1:qVeQcw1Cz93/cGu2E7TYUPh8Lz5dn5Ws2siIuQ17Vng= cloud.google.com/go/datacatalog v1.16.0/go.mod h1:d2CevwTG4yedZilwe+v3E3ZBDRMobQfSG/a6cCCN5R4= +cloud.google.com/go/datacatalog v1.17.1 h1:qGWrlYvWtK+8jD1jhwq5BsGoSr7S4/LOroV7LwXi00g= +cloud.google.com/go/datacatalog v1.18.0 h1:AZHHhoSEK4n3yMsHFLibUjMX5jQz/0FcKKD4T1vxyGM= cloud.google.com/go/dataflow v0.6.0/go.mod h1:9QwV89cGoxjjSR9/r7eFDqqjtvbKxAK2BaYU6PVk9UM= cloud.google.com/go/dataflow v0.7.0/go.mod h1:PX526vb4ijFMesO1o202EaUmouZKBpjHsTlCtB4parQ= cloud.google.com/go/dataflow v0.8.0/go.mod h1:Rcf5YgTKPtQyYz8bLYhFoIV/vP39eL7fWNcSOyFfLJE= @@ -401,10 +406,11 @@ cloud.google.com/go/iam v0.12.0/go.mod h1:knyHGviacl11zrtZUoDuYpDgLjvr28sLQaG0YB cloud.google.com/go/iam v0.13.0/go.mod h1:ljOg+rcNfzZ5d6f1nAUJ8ZIxOaZUVoS14bKCtaLZ/D0= cloud.google.com/go/iam v1.0.1/go.mod h1:yR3tmSL8BcZB4bxByRv2jkSIahVmCtfKZwLYGBalRE8= cloud.google.com/go/iam v1.1.0/go.mod h1:nxdHjaKfCr7fNYx/HJMM8LgiMugmveWlkatear5gVyk= -cloud.google.com/go/iam v1.1.1 h1:lW7fzj15aVIXYHREOqjRBV9PsH0Z6u8Y46a1YGvQP4Y= cloud.google.com/go/iam v1.1.1/go.mod 
h1:A5avdyVL2tCppe4unb0951eI9jreack+RJ0/d+KUZOU= cloud.google.com/go/iam v1.1.2 h1:gacbrBdWcoVmGLozRuStX45YKvJtzIjJdAolzUs1sm4= cloud.google.com/go/iam v1.1.2/go.mod h1:A5avdyVL2tCppe4unb0951eI9jreack+RJ0/d+KUZOU= +cloud.google.com/go/iam v1.1.3 h1:18tKG7DzydKWUnLjonWcJO6wjSCAtzh4GcRKlH/Hrzc= +cloud.google.com/go/iam v1.1.3/go.mod h1:3khUlaBXfPKKe7huYgEpDn6FtgRyMEqbkvBxrQyY5SE= cloud.google.com/go/iap v1.4.0/go.mod h1:RGFwRJdihTINIe4wZ2iCP0zF/qu18ZwyKxrhMhygBEc= cloud.google.com/go/iap v1.5.0/go.mod h1:UH/CGgKd4KyohZL5Pt0jSKE4m3FR51qg6FKQ/z/Ix9A= cloud.google.com/go/iap v1.6.0/go.mod h1:NSuvI9C/j7UdjGjIde7t7HBz+QTwBcapPE07+sSRcLk= @@ -680,10 +686,8 @@ cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi cloud.google.com/go/storage v1.28.1/go.mod h1:Qnisd4CqDdo6BGs2AD5LLnEsmSQ80wQ5ogcBBKhU86Y= cloud.google.com/go/storage v1.29.0/go.mod h1:4puEjyTKnku6gfKoTfNOU/W+a9JyuVNxjpS5GBrB8h4= cloud.google.com/go/storage v1.30.1/go.mod h1:NfxhC0UJE1aXSx7CIIbCf7y9HKT7BiccwkR7+P7gN8E= -cloud.google.com/go/storage v1.31.0 h1:+S3LjjEN2zZ+L5hOwj4+1OkGCsLVe0NzpXKQ1pSdTCI= -cloud.google.com/go/storage v1.31.0/go.mod h1:81ams1PrhW16L4kF7qg+4mTq7SRs5HsbDTM0bWvrwJ0= -cloud.google.com/go/storage v1.32.0 h1:5w6DxEGOnktmJHarxAOUywxVW9lbNWIzlzzUltG/3+o= -cloud.google.com/go/storage v1.32.0/go.mod h1:Hhh/dogNRGca7IWv1RC2YqEn0c0G77ctA/OxflYkiD8= +cloud.google.com/go/storage v1.33.0 h1:PVrDOkIC8qQVa1P3SXGpQvfuJhN2LHOoyZvWs8D2X5M= +cloud.google.com/go/storage v1.33.0/go.mod h1:Hhh/dogNRGca7IWv1RC2YqEn0c0G77ctA/OxflYkiD8= cloud.google.com/go/storagetransfer v1.5.0/go.mod h1:dxNzUopWy7RQevYFHewchb29POFv3/AaBgnhqzqiK0w= cloud.google.com/go/storagetransfer v1.6.0/go.mod h1:y77xm4CQV/ZhFZH75PLEXY0ROiS7Gh6pSKrM8dJyg6I= cloud.google.com/go/storagetransfer v1.7.0/go.mod h1:8Giuj1QNb1kfLAiWM1bN6dHzfdlDAVC9rv9abHot2W4= @@ -774,63 +778,33 @@ github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMb github.com/99designs/go-keychain 
v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0= github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk= -github.com/Azure/azure-amqp-common-go/v4 v4.2.0 h1:q/jLx1KJ8xeI8XGfkOWMN9XrXzAfVTkyvCxPvHCjd2I= -github.com/Azure/azure-amqp-common-go/v4 v4.2.0/go.mod h1:GD3m/WPPma+621UaU6KNjKEo5Hl09z86viKwQjTpV0Q= -github.com/Azure/azure-event-hubs-go/v3 v3.6.1 h1:vSiMmn3tOwgiLyfnmhT5K6Of/3QWRLaaNZPI0hFvZyU= -github.com/Azure/azure-event-hubs-go/v3 v3.6.1/go.mod h1:i2NByb9Pr2na7y8wi/XefEVKkuA2CDUjCNoWQJtTsGo= -github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= -github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0 h1:8q4SaHjFsClSvuVne0ID/5Ka8u3fcIHyqkLjcFpNRHQ= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1 h1:/iHxaJhsFr0+xVFfbMr5vxz848jyiWuIEDhYq3y5odY= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 h1:vcYCAze6p19qBW7MhZybIsqD8sMV8js0NyQM8JDnVtg= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0/go.mod h1:OQeznEEkTZ9OrhHJoDD8ZDq51FHgXjqtP9z6bEwBq9U= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.2 h1:t5+QXLCK9SVi0PPdaY0PrFvYUo24KwA0QwxnaHRSVd4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.2/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.8.0 h1:9kDVnTz3vbfweTqAUmk/a/pH5pWFCHtvRpHYC0G/dcA= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.8.0/go.mod 
h1:3Ug6Qzto9anB6mGlEdgYMDF5zHQ+wwhEaYR4s17PHMw= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1 h1:LNHhpdK7hzUcx/k1LIcuh5k7k1LGIWLQfCjaneSj7Fc= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1/go.mod h1:uE9zaUfEQT/nbQjVi2IblCG9iaLtZsuYZ8ne+PuQ02M= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 h1:BMAjVKJM0U/CYF27gA0ZMmXGkOcvfFtD0oHVZ1TIPRI= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0/go.mod h1:1fXstnBMas5kzG+S3q8UoJcmyU6nUeunJcMDHcRYHhs= github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY= github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= +github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs v1.0.1 h1:7G4EhZbWFwfgkNfJkNoZmFL8FfWT6P96YVwG71uhNxY= +github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs v1.0.1/go.mod h1:fswVBSaYFoW4XXp3oXG0vuDVdToLr3kRzgp5oePMq5g= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.1.1 h1:gZ1ZZvrVUhDNsGNpbo2N87Y0CJB8p3IS5UH9Z4Ui97g= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.1.1/go.mod h1:7fQVOnRA11ScLE8dOCWanXHQa2NMFOM2i0u/1VRICXA= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.1.2 h1:mLY+pNLjCUeKhgnAJWAKhEUQM+RJQo2H1fuGSw1Ky1E= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0 h1:ECsQtyERDVz3NP3kvDOTLvbQhqWp/x9EsGKtb4ogUr8= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.2.0 h1:Ma67P/GGprNwsslzEH6+Kb8nybI8jpDTm4Wmzu2ReK8= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.0 h1:yfJe15aSwEQ6Oo6J+gdfdulPNoZ3TEhmbhLIoxZcA+U= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v0.8.0 h1:T028gtTPiYt/RMUfs8nVsAL7FDQrfLlrm/NnRG/zcC4= github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0 h1:nVocQV40OQne5613EeLayJiRAJuKlBGy+m22qWG+WRg= github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0/go.mod 
h1:7QJP7dr2wznCMeqIrhMgWGf7XpAQnVrJqDm9nvV3Cu4= -github.com/Azure/go-amqp v1.0.1 h1:Jf8OQCKzRDMZ3pCiH4onM7yrhl5curkRSGkRLTyP35o= -github.com/Azure/go-amqp v1.0.1/go.mod h1:+bg0x3ce5+Q3ahCEXnCsGG3ETpDQe3MEVnOuT2ywPwc= -github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= -github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= -github.com/Azure/go-autorest/autorest v0.11.24/go.mod h1:G6kyRlFnTuSbEYkQGawPfsCswgme4iYf6rfSKUDzbCc= -github.com/Azure/go-autorest/autorest v0.11.29 h1:I4+HL/JDvErx2LjyzaVxllw2lRDB5/BT2Bm4g20iqYw= -github.com/Azure/go-autorest/autorest v0.11.29/go.mod h1:ZtEzC4Jy2JDrZLxvWs8LrBWEBycl1hbT1eknI8MtfAs= -github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= -github.com/Azure/go-autorest/autorest/adal v0.9.22/go.mod h1:XuAbAEUv2Tta//+voMI038TrJBqjKam0me7qR+L8Cmk= -github.com/Azure/go-autorest/autorest/adal v0.9.23 h1:Yepx8CvFxwNKpH6ja7RZ+sKX+DWYNldbLiALMC3BTz8= -github.com/Azure/go-autorest/autorest/adal v0.9.23/go.mod h1:5pcMqFkdPhviJdlEy3kC/v1ZLnQl0MH6XA5YCcMhy4c= -github.com/Azure/go-autorest/autorest/azure/auth v0.5.12 h1:wkAZRgT/pn8HhFyzfe9UnqOjJYqlembgCTi72Bm/xKk= -github.com/Azure/go-autorest/autorest/azure/auth v0.5.12/go.mod h1:84w/uV8E37feW2NCJ08uT9VBfjfUHpgLVnG2InYD6cg= -github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 h1:0W/yGmFdTIT77fvdlGZ0LMISoLHFJ7Tx4U0yeB+uFs4= -github.com/Azure/go-autorest/autorest/azure/cli v0.4.5/go.mod h1:ADQAXrkgm7acgWVUNamOgh8YNrv4p27l3Wc55oVfpzg= -github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw= -github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= -github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= -github.com/Azure/go-autorest/autorest/mocks v0.4.2 h1:PGN4EDXnuQbojHbU0UWoNvmu9AGVwYHG9/fkDYhtAfw= 
-github.com/Azure/go-autorest/autorest/mocks v0.4.2/go.mod h1:Vy7OitM9Kei0i1Oj+LvyAWMXJHeKH1MVlzFugfVrmyU= -github.com/Azure/go-autorest/autorest/to v0.4.0 h1:oXVqrxakqqV1UZdSazDOPOLvOIz+XA683u8EctwboHk= -github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE= -github.com/Azure/go-autorest/autorest/validation v0.3.1 h1:AgyqjAd94fwNAoTjl/WQXg4VvFeRFpO+UhNyRXqF1ac= -github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= -github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg= -github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= -github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= -github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0/go.mod h1:kgDmCTgBzIEPFElEF+FK0SdjAor06dRq2Go927dnQ6o= -github.com/AzureAD/microsoft-authentication-library-for-go v1.1.0 h1:HCc0+LpPfpCKs6LGGLAhwBARt9632unrVcI6i8s/8os= -github.com/AzureAD/microsoft-authentication-library-for-go v1.1.0/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= -github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 h1:WpB/QDNLpMw72xHJc34BNNykqSOeEJDAWkhf0u12/Jk= -github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.0 h1:gggzg0SUMs6SQbEw+3LoSsYf9YMjkupeAnHMX8O9mmY= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.0/go.mod h1:+6KLcKIVgxoBDMqMO/Nvy7bZ9a0nbU3I1DtFQK3YvB4= +github.com/Azure/go-amqp v1.0.2 h1:zHCHId+kKC7fO8IkwyZJnWMvtRXhYC0VJtD0GYkHc6M= +github.com/Azure/go-amqp v1.0.2/go.mod h1:vZAogwdrkbyK3Mla8m/CxSc/aKdnTZ4IbPxl51Y5WZE= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0 
h1:hVeq+yCyUi+MsoO/CU95yqCIcdzra5ovzk8Q2BBpV2M= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c h1:RGWPOewvKIROun94nF7v2cua9qP+thov/7M50KEoeSU= @@ -855,88 +829,88 @@ github.com/apache/arrow/go/v12 v12.0.0/go.mod h1:d+tV/eHZZ7Dz7RPrFKtPK02tpr+c9/P github.com/apache/arrow/go/v12 v12.0.1 h1:JsR2+hzYYjgSUkBSaahpqCetqZMr76djX80fF/DiJbg= github.com/apache/arrow/go/v12 v12.0.1/go.mod h1:weuTY7JvTG/HDPtMQxEUp7pU73vkLWMLpY67QwZ/WWw= github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= -github.com/apache/thrift v0.18.1 h1:lNhK/1nqjbwbiOPDBPFJVKxgDEGSepKuTh6OLiXW8kg= -github.com/apache/thrift v0.18.1/go.mod h1:rdQn/dCcDKEWjjylUeueum4vQEjG2v8v2PqriUnbr+I= -github.com/aws/aws-sdk-go v1.44.317 h1:+8XWrLmGMwPPXSRSLPzhgcGnzJ2mYkgkrcB9C/GnSOU= -github.com/aws/aws-sdk-go v1.44.317/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= -github.com/aws/aws-sdk-go v1.44.332 h1:Ze+98F41+LxoJUdsisAFThV+0yYYLYw17/Vt0++nFYM= -github.com/aws/aws-sdk-go v1.44.332/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= -github.com/aws/aws-sdk-go-v2 v1.20.0 h1:INUDpYLt4oiPOJl0XwZDK2OVAVf0Rzo+MGVTv9f+gy8= -github.com/aws/aws-sdk-go-v2 v1.20.0/go.mod h1:uWOr0m0jDsiWw8nnXiqZ+YG6LdvAlGYDLLf2NmHZoy4= +github.com/apache/thrift v0.19.0 h1:sOqkWPzMj7w6XaYbJQG7m4sGqVolaW/0D28Ln7yPzMk= +github.com/apache/thrift v0.19.0/go.mod h1:SUALL216IiaOw2Oy+5Vs9lboJ/t9g40C+G07Dc0QC1I= +github.com/aws/aws-sdk-go v1.45.15 h1:gYBTVSYuhXdatrLbsPaRgVcc637zzdgThWmsDRwXLOo= +github.com/aws/aws-sdk-go v1.45.15/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go v1.45.25 h1:c4fLlh5sLdK2DCRTY1z0hyuJZU4ygxX8m1FswL6/nF4= 
+github.com/aws/aws-sdk-go v1.45.25/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-sdk-go-v2 v1.21.0 h1:gMT0IW+03wtYJhRqTVYn0wLzwdnK9sRMcxmtfGzRdJc= github.com/aws/aws-sdk-go-v2 v1.21.0/go.mod h1:/RfNgGmRxI+iFOB1OeJUyxiU+9s88k3pfHvDagGEp0M= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.11 h1:/MS8AzqYNAhhRNalOmxUvYs8VEbNGifTnzhPFdcRQkQ= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.11/go.mod h1:va22++AdXht4ccO3kH2SHkHHYvZ2G9Utz+CXKmm2CaU= +github.com/aws/aws-sdk-go-v2 v1.21.2 h1:+LXZ0sgo8quN9UOKXXzAWRT3FWd4NxeXWOZom9pE7GA= +github.com/aws/aws-sdk-go-v2 v1.21.2/go.mod h1:ErQhvNuEMhJjweavOYhxVkn2RUx7kQXVATHrjKtxIpM= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13 h1:OPLEkmhXf6xFPiz0bLeDArZIDx1NNS4oJyG4nv3Gct0= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13/go.mod h1:gpAbvyDGQFozTEmlTFO8XcQKHzubdq0LzRyJpG6MiXM= -github.com/aws/aws-sdk-go-v2/config v1.18.32 h1:tqEOvkbTxwEV7hToRcJ1xZRjcATqwDVsWbAscgRKyNI= -github.com/aws/aws-sdk-go-v2/config v1.18.32/go.mod h1:U3ZF0fQRRA4gnbn9GGvOWLoT2EzzZfAWeKwnVrm1rDc= -github.com/aws/aws-sdk-go-v2/config v1.18.37 h1:RNAfbPqw1CstCooHaTPhScz7z1PyocQj0UL+l95CgzI= -github.com/aws/aws-sdk-go-v2/config v1.18.37/go.mod h1:8AnEFxW9/XGKCbjYDCJy7iltVNyEI9Iu9qC21UzhhgQ= -github.com/aws/aws-sdk-go-v2/credentials v1.13.31 h1:vJyON3lG7R8VOErpJJBclBADiWTwzcwdkQpTKx8D2sk= -github.com/aws/aws-sdk-go-v2/credentials v1.13.31/go.mod h1:T4sESjBtY2lNxLgkIASmeP57b5j7hTQqCbqG0tWnxC4= -github.com/aws/aws-sdk-go-v2/credentials v1.13.35 h1:QpsNitYJu0GgvMBLUIYu9H4yryA5kMksjeIVQfgXrt8= -github.com/aws/aws-sdk-go-v2/credentials v1.13.35/go.mod h1:o7rCaLtvK0hUggAGclf76mNGGkaG5a9KWlp+d9IpcV8= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.7 h1:X3H6+SU21x+76LRglk21dFRgMTJMa5QcpW+SqUf5BBg= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.7/go.mod h1:3we0V09SwcJBzNlnyovrR2wWJhWmVdqAsmVs4uronv8= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.14 
h1:Sc82v7tDQ/vdU1WtuSyzZ1I7y/68j//HJ6uozND1IDs= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.14/go.mod h1:9NCTOURS8OpxvoAVHq79LK81/zC78hfRWFn+aL0SPcY= +github.com/aws/aws-sdk-go-v2/config v1.18.42 h1:28jHROB27xZwU0CB88giDSjz7M1Sba3olb5JBGwina8= +github.com/aws/aws-sdk-go-v2/config v1.18.42/go.mod h1:4AZM3nMMxwlG+eZlxvBKqwVbkDLlnN2a4UGTL6HjaZI= +github.com/aws/aws-sdk-go-v2/config v1.18.45 h1:Aka9bI7n8ysuwPeFdm77nfbyHCAKQ3z9ghB3S/38zes= +github.com/aws/aws-sdk-go-v2/config v1.18.45/go.mod h1:ZwDUgFnQgsazQTnWfeLWk5GjeqTQTL8lMkoE1UXzxdE= +github.com/aws/aws-sdk-go-v2/credentials v1.13.40 h1:s8yOkDh+5b1jUDhMBtngF6zKWLDs84chUk2Vk0c38Og= +github.com/aws/aws-sdk-go-v2/credentials v1.13.40/go.mod h1:VtEHVAAqDWASwdOqj/1huyT6uHbs5s8FUHfDQdky/Rs= +github.com/aws/aws-sdk-go-v2/credentials v1.13.43 h1:LU8vo40zBlo3R7bAvBVy/ku4nxGEyZe9N8MqAeFTzF8= +github.com/aws/aws-sdk-go-v2/credentials v1.13.43/go.mod h1:zWJBz1Yf1ZtX5NGax9ZdNjhhI4rgjfgsyk6vTY1yfVg= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.11 h1:uDZJF1hu0EVT/4bogChk8DyjSF6fof6uL/0Y26Ma7Fg= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.11/go.mod h1:TEPP4tENqBGO99KwVpV9MlOX4NSrSLP8u3KRy2CDwA8= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.76 h1:DJ1kHj0GI9BbX+XhF0kHxlzOVjcncmDUXmCvXdbfdAE= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.76/go.mod h1:/AZCdswMSgwpB2yMSFfY5H4pVeBLnCuPehdmO/r3xSM= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.81 h1:PQ9zoe2GEoTVSVPuNtjNrKeVPvyVPWesETMPb7KB3Fk= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.81/go.mod h1:EztVLIU9xGitjdZ1TyHWL9IcNx4952FlqKJe6GLG2z4= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.37 h1:zr/gxAZkMcvP71ZhQOcvdm8ReLjFgIXnIn0fw5AM7mo= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.37/go.mod h1:Pdn4j43v49Kk6+82spO3Tu5gSeQXRsxo56ePPQAvFiA= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.13 h1:PIktER+hwIG286DqXyvVENjgLTAwGgoeriLDD5C+YlQ= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds 
v1.13.13/go.mod h1:f/Ib/qYjhV2/qdsf79H3QP/eRE4AkVyEf6sk7XfZ1tg= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.86 h1:tnn/U5bz5flqoTCFSgRMEdg93ULR9Q6+tL5LkwjJ0DM= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.86/go.mod h1:TJGNZIhz3fsaQ6PU9roZacAEMMnG89X2UzaDblNoeNw= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.90 h1:mtJRt80k1oGw7QQPluAx8AZ6u16MyCA2di/lMhagZ7I= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.90/go.mod h1:lYwZTkeMQWPvNU+u7oYArdNhQ8EKiSGU76jVv0w2GH4= github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41 h1:22dGT7PneFMx4+b3pz7lMTRyN8ZKH7M2cW4GP9yUS2g= github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41/go.mod h1:CrObHAuPneJBlfEJ5T3szXOUkLEThaGfvnhTf33buas= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.31 h1:0HCMIkAkVY9KMgueD8tf4bRTUanzEYvhw7KkPXIMpO0= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.31/go.mod h1:fTJDMe8LOFYtqiFFFeHA+SVMAwqLhoq0kcInYoLa9Js= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.43 h1:nFBQlGtkbPzp/NjZLuFxRqmT91rLJkgvsEQs68h962Y= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.43/go.mod h1:auo+PiyLl0n1l8A0e8RIeR8tOzYPfZZH/JNlrJ8igTQ= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35 h1:SijA0mgjV8E+8G45ltVHs0fvKpTj8xmZJ3VwhGKtUSI= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35/go.mod h1:SJC1nEVVva1g3pHAIdCp7QsRIkMmLAgoDquQ9Rr8kYw= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.38 h1:+i1DOFrW3YZ3apE45tCal9+aDKK6kNEbW6Ib7e1nFxE= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.38/go.mod h1:1/jLp0OgOaWIetycOmycW+vYTYgTZFPttJQRgsI1PoU= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.42 h1:GPUcE/Yq7Ur8YSUk6lVkoIMWnJNO0HT18GUzCWCgCI0= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.42/go.mod h1:rzfdUlfA+jdgLDmPKjd3Chq9V7LVLYo1Nz++Wb91aRo= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.0 h1:U5yySdwt2HPo/pnQec04DImLzWORbeWML1fJiLkKruI= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.0/go.mod 
h1:EhC/83j8/hL/UB1WmExo3gkElaja/KlmZM/gl1rTfjM= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.37 h1:JRVhO25+r3ar2mKGP7E0LDl8K9/G36gjlqca5iQbaqc= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.37/go.mod h1:Qe+2KtKml+FEsQF/DHmDV+xjtche/hwoF75EG4UlHW8= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.43 h1:g+qlObJH4Kn4n21g69DjspU0hKTjWtq7naZ9OLCv0ew= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.43/go.mod h1:rzfdUlfA+jdgLDmPKjd3Chq9V7LVLYo1Nz++Wb91aRo= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.45 h1:hze8YsjSh8Wl1rYa1CJpRmXP21BvOBuc76YhW0HsuQ4= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.45/go.mod h1:lD5M20o09/LCuQ2mE62Mb/iSdSlCNuj6H5ci7tW7OsE= github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.4 h1:6lJvvkQ9HmbHZ4h/IEwclwv2mrTW8Uq1SOB/kXy0mfw= github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.4/go.mod h1:1PrKYwxTM+zjpw9Y41KFtoJCQrJ34Z47Y4VgVbfndjo= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.12 h1:uAiiHnWihGP2rVp64fHwzLDrswGjEjsPszwRYMiYQPU= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.12/go.mod h1:fUTHpOXqRQpXvEpDPSa3zxCc2fnpW6YnBoba+eQr+Bg= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.6 h1:wmGLw2i8ZTlHLw7a9ULGfQbuccw8uIiNr6sol5bFzc8= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.6/go.mod h1:Q0Hq2X/NuL7z8b1Dww8rmOFl+jzusKEcyvkKspwdpyc= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.14 h1:m0QTSI6pZYJTk5WSKx3fm5cNW/DCicVzULBgU/6IyD0= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.14/go.mod h1:dDilntgHy9WnHXsh7dDtUPgHKEfTJIBUTHM8OWm0f/0= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.32 h1:kvN1jPHr9UffqqG3bSgZ8tx4+1zKVHz/Ktw/BwW6hX8= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.32/go.mod h1:QmMEM7es84EUkbYWcpnkx8i5EW2uERPfrTFeOch128Y= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.15 h1:7R8uRYyXzdD71KWVCL78lJZltah6VVznXBazvKjfH58= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding 
v1.9.15/go.mod h1:26SQUPcTNgV1Tapwdt4a1rOsYRsnBsJHLMPoxK2b0d8= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.36 h1:eev2yZX7esGRjqRbnVk1UxMLw4CyVZDpZXRCcy75oQk= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.36/go.mod h1:lGnOkH9NJATw0XEPcAknFBj3zzNTEGRHtSw+CwC1YTg= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.31 h1:auGDJ0aLZahF5SPvkJ6WcUuX7iQ7kyl2MamV7Tm8QBk= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.31/go.mod h1:3+lloe3sZuBQw1aBc5MyndvodzQlyqCZ7x1QPDHaWP4= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.38 h1:skaFGzv+3kA+v2BPKhuekeb1Hbb105+44r8ASC+q5SE= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.38/go.mod h1:epIZoRSSbRIwLPJU5F+OldHhwZPBdpDeQkRdCeY3+00= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.35 h1:CdzPW9kKitgIiLV1+MHobfR5Xg25iYnyzWZhyQuSlDI= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.35/go.mod h1:QGF2Rs33W5MaN9gYdEQOBBFPLwTZkEhRwI33f7KIG0o= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.0 h1:Wgjft9X4W5pMeuqgPCHIQtbZ87wsgom7S5F8obreg+c= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.0/go.mod h1:FWNzS4+zcWAP05IF7TDYTY1ysZAzIvogxWaDT9p8fsA= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.37 h1:WWZA/I2K4ptBS1kg0kV1JbBtG/umed0vwHRrmcr9z7k= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.37/go.mod h1:vBmDnwWXWxNPFRMmG2m/3MKOe+xEcMDo1tanpaWCcck= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.4 h1:v0jkRigbSD6uOdwcaUQmgEwG1BkPfAPDqaeNt/29ghg= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.4/go.mod h1:LhTyt8J04LL+9cIt7pYJ5lbS/U98ZmXovLOR/4LUsk8= -github.com/aws/aws-sdk-go-v2/service/s3 v1.38.1 h1:mTgFVlfQT8gikc5+/HwD8UL9jnUro5MGv8n/VEYF12I= -github.com/aws/aws-sdk-go-v2/service/s3 v1.38.1/go.mod h1:6SOWLiobcZZshbmECRTADIRYliPL0etqFSigauQEeT0= -github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5 
h1:A42xdtStObqy7NGvzZKpnyNXvoOmm+FENobZ0/ssHWk= -github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5/go.mod h1:rDGMZA7f4pbmTtPOk5v5UM2lmX6UAbRnMDJeDvnH7AM= -github.com/aws/aws-sdk-go-v2/service/sso v1.13.1 h1:DSNpSbfEgFXRV+IfEcKE5kTbqxm+MeF5WgyeRlsLnHY= -github.com/aws/aws-sdk-go-v2/service/sso v1.13.1/go.mod h1:TC9BubuFMVScIU+TLKamO6VZiYTkYoEHqlSQwAe2omw= -github.com/aws/aws-sdk-go-v2/service/sso v1.13.5 h1:oCvTFSDi67AX0pOX3PuPdGFewvLRU2zzFSrTsgURNo0= -github.com/aws/aws-sdk-go-v2/service/sso v1.13.5/go.mod h1:fIAwKQKBFu90pBxx07BFOMJLpRUGu8VOzLJakeY+0K4= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.1 h1:hd0SKLMdOL/Sl6Z0np1PX9LeH2gqNtBe0MhTedA8MGI= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.1/go.mod h1:XO/VcyoQ8nKyKfFW/3DMsRQXsfh/052tHTWmg3xBXRg= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.5 h1:dnInJb4S0oy8aQuri1mV6ipLlnZPfnsDNB9BGO9PDNY= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.5/go.mod h1:yygr8ACQRY2PrEcy3xsUI357stq2AxnFM6DIsR9lij4= -github.com/aws/aws-sdk-go-v2/service/sts v1.21.1 h1:pAOJj+80tC8sPVgSDHzMYD6KLWsaLQ1kZw31PTeORbs= -github.com/aws/aws-sdk-go-v2/service/sts v1.21.1/go.mod h1:G8SbvL0rFk4WOJroU8tKBczhsbhj2p/YY7qeJezJ3CI= -github.com/aws/aws-sdk-go-v2/service/sts v1.21.5 h1:CQBFElb0LS8RojMJlxRSo/HXipvTZW2S44Lt9Mk2aYQ= -github.com/aws/aws-sdk-go-v2/service/sts v1.21.5/go.mod h1:VC7JDqsqiwXukYEDjoHh9U0fOJtNWh04FPQz4ct4GGU= -github.com/aws/smithy-go v1.14.0 h1:+X90sB94fizKjDmwb4vyl2cTTPXTE5E2G/1mjByb0io= -github.com/aws/smithy-go v1.14.0/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.6 h1:9ulSU5ClouoPIYhDQdg9tpl83d5Yb91PXTKK+17q+ow= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.6/go.mod h1:lnc2taBsR9nTlz9meD+lhFZZ9EWY712QHrRflWpTcOA= +github.com/aws/aws-sdk-go-v2/service/s3 v1.39.0 h1:VZ2WMkKLio5tVjYfThcy5+pb6YHGd6B6egq75FfM6hU= +github.com/aws/aws-sdk-go-v2/service/s3 v1.39.0/go.mod h1:rDGMZA7f4pbmTtPOk5v5UM2lmX6UAbRnMDJeDvnH7AM= 
+github.com/aws/aws-sdk-go-v2/service/s3 v1.40.2 h1:Ll5/YVCOzRB+gxPqs2uD0R7/MyATC0w85626glSKmp4= +github.com/aws/aws-sdk-go-v2/service/s3 v1.40.2/go.mod h1:Zjfqt7KhQK+PO1bbOsFNzKgaq7TcxzmEoDWN8lM0qzQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.14.1 h1:YkNzx1RLS0F5qdf9v1Q8Cuv9NXCL2TkosOxhzlUPV64= +github.com/aws/aws-sdk-go-v2/service/sso v1.14.1/go.mod h1:fIAwKQKBFu90pBxx07BFOMJLpRUGu8VOzLJakeY+0K4= +github.com/aws/aws-sdk-go-v2/service/sso v1.15.2 h1:JuPGc7IkOP4AaqcZSIcyqLpFSqBWK32rM9+a1g6u73k= +github.com/aws/aws-sdk-go-v2/service/sso v1.15.2/go.mod h1:gsL4keucRCgW+xA85ALBpRFfdSLH4kHOVSnLMSuBECo= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.1 h1:8lKOidPkmSmfUtiTgtdXWgaKItCZ/g75/jEk6Ql6GsA= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.1/go.mod h1:yygr8ACQRY2PrEcy3xsUI357stq2AxnFM6DIsR9lij4= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.3 h1:HFiiRkf1SdaAmV3/BHOFZ9DjFynPHj8G/UIO1lQS+fk= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.17.3/go.mod h1:a7bHA82fyUXOm+ZSWKU6PIoBxrjSprdLoM8xPYvzYVg= +github.com/aws/aws-sdk-go-v2/service/sts v1.22.0 h1:s4bioTgjSFRwOoyEFzAVCmFmoowBgjTR8gkrF/sQ4wk= +github.com/aws/aws-sdk-go-v2/service/sts v1.22.0/go.mod h1:VC7JDqsqiwXukYEDjoHh9U0fOJtNWh04FPQz4ct4GGU= +github.com/aws/aws-sdk-go-v2/service/sts v1.23.2 h1:0BkLfgeDjfZnZ+MhB3ONb01u9pwFYTCZVhlsSSBvlbU= +github.com/aws/aws-sdk-go-v2/service/sts v1.23.2/go.mod h1:Eows6e1uQEsc4ZaHANmsPRzAKcVDrcmjjWiih2+HUUQ= github.com/aws/smithy-go v1.14.2 h1:MJU9hqBGbvWZdApzpvoF2WAIJDbtjK2NDJSiJP7HblQ= github.com/aws/smithy-go v1.14.2/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= +github.com/aws/smithy-go v1.15.0 h1:PS/durmlzvAFpQHDs4wi4sNNP9ExsqZh6IlfdHXgKK8= +github.com/aws/smithy-go v1.15.0/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= 
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= @@ -946,6 +920,8 @@ github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI= github.com/cactus/go-statsd-client/v5 v5.0.0/go.mod h1:COEvJ1E+/E2L4q6QE5CkjWPi4eeDw9maJBMIuMPBZbY= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= @@ -974,19 +950,15 @@ github.com/cncf/xds/go v0.0.0-20230310173818-32f1caf87195/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM= +github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE= github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 
h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/devigned/tab v0.1.1 h1:3mD6Kb1mUOYeLpJvTVSDwSg5ZsfSxfvxGRTxRsJsITA= -github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= -github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= -github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= -github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI= -github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= @@ -1018,6 +990,8 @@ github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoD github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= +github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g= github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks= @@ -1056,11 +1030,7 @@ github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69 github.com/gogo/status 
v1.1.0/go.mod h1:BFv9nrluPLmrS0EmGVvLaPNmRosr9KapBYd5/hpY1WM= github.com/gogo/status v1.1.1 h1:DuHXlSFHNKqTQ+/ACf5Vs6r4X/dH2EgIzR9Vr+H65kg= github.com/gogo/status v1.1.1/go.mod h1:jpG3dM5QPcqu19Hg8lkUhBFBa3TcLs1DG7+2Jqci7oU= -github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang-jwt/jwt/v4 v4.4.3/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= @@ -1130,6 +1100,8 @@ github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8 github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= @@ -1156,13 +1128,11 @@ github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLe 
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.0/go.mod h1:OJpEgntRZo8ugHpF9hkoLJbS5dSI20XZeXJ9JVywLlM= github.com/google/s2a-go v0.1.3/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= -github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= -github.com/google/s2a-go v0.1.5 h1:8IYp3w9nysqv3JH+NJgXJzGbDHzLOTj43BmSkp+O7qg= -github.com/google/s2a-go v0.1.5/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4= github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -1171,8 +1141,8 @@ github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= github.com/googleapis/enterprise-certificate-proxy v0.2.1/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= -github.com/googleapis/enterprise-certificate-proxy v0.2.5 h1:UR4rDjcgpgEnqpIEvkiqTYKBCKLNmlge2eVjoZfySzM= -github.com/googleapis/enterprise-certificate-proxy v0.2.5/go.mod h1:RxW0N9901Cko1VOCW3SXCpWP+mlIEkk2tP7jnHy9a3w= +github.com/googleapis/enterprise-certificate-proxy v0.3.1 
h1:SBWmZhjUDRorQxrN0nwzf+AHBxnbFjViHQS4P0yVpmQ= +github.com/googleapis/enterprise-certificate-proxy v0.3.1/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -1191,12 +1161,14 @@ github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56 github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= -github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= -github.com/grafana/pyroscope-go v1.0.2 h1:dEFgO9VbhYTwuwpCC5coTpuW0JjISEWDZtvRAW9v5Tw= -github.com/grafana/pyroscope-go v1.0.2/go.mod h1:bShDKsVZdzxq+Ol6no0JKigU9y5FTWUcFditMXaH09o= -github.com/grafana/pyroscope-go/godeltaprof v0.1.3 h1:eunWpv1B3Z7ZK9o4499EmQGlY+CsDmSZ4FbxjRx37uk= -github.com/grafana/pyroscope-go/godeltaprof v0.1.3/go.mod h1:1HSPtjU8vLG0jE9JrTdzjgFqdJ/VgN7fvxBNq3luJko= +github.com/grafana/pyroscope-go v1.0.3 h1:8WWmItzLfg4m8G+j//ElSjMeMr88Y6Lvblar6qeTyKk= +github.com/grafana/pyroscope-go v1.0.3/go.mod h1:0d7ftwSMBV/Awm7CCiYmHQEG8Y44Ma3YSjt+nWcWztY= +github.com/grafana/pyroscope-go v1.0.4 h1:oyQX0BOkL+iARXzHuCdIF5TQ7/sRSel1YFViMHC7Bm0= +github.com/grafana/pyroscope-go v1.0.4/go.mod h1:0d7ftwSMBV/Awm7CCiYmHQEG8Y44Ma3YSjt+nWcWztY= +github.com/grafana/pyroscope-go/godeltaprof v0.1.4 h1:mDsJ3ngul7UfrHibGQpV66PbZ3q1T8glz/tK3bQKKEk= +github.com/grafana/pyroscope-go/godeltaprof v0.1.4/go.mod 
h1:1HSPtjU8vLG0jE9JrTdzjgFqdJ/VgN7fvxBNq3luJko= +github.com/grafana/pyroscope-go/godeltaprof v0.1.5 h1:gkFVqihFRL1Nro2FCC0u6mW47jclef96Zu8I/ykq+4E= +github.com/grafana/pyroscope-go/godeltaprof v0.1.5/go.mod h1:1HSPtjU8vLG0jE9JrTdzjgFqdJ/VgN7fvxBNq3luJko= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= @@ -1204,6 +1176,8 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4 github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 h1:RtRsiaGvWxcwd8y3BiRZxsylPT8hLWZ5SPcfI+3IDNk= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0/go.mod h1:TzP6duP4Py2pHLVPPQp42aoYI92+PCrVotyR5e8Vqlk= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -1211,8 +1185,6 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= 
-github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= @@ -1220,10 +1192,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1: github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= -github.com/jackc/pglogrepl v0.0.0-20230728225306-38e8a4e50913 h1:n35fv0W1w8e7x68zjhoQx8W+N6tEhaImAErPypDQ7ik= -github.com/jackc/pglogrepl v0.0.0-20230728225306-38e8a4e50913/go.mod h1:Y1HIk+uK2wXiU8vuvQh0GaSzVh+MXFn2kfKBMpn6CZg= -github.com/jackc/pglogrepl v0.0.0-20230810221841-d0818e1fbef7 h1:gpfct0XvEOnv5N1sbTo2KnWC2VwrWKJBOqcxtAUEi3g= -github.com/jackc/pglogrepl v0.0.0-20230810221841-d0818e1fbef7/go.mod h1:Y1HIk+uK2wXiU8vuvQh0GaSzVh+MXFn2kfKBMpn6CZg= +github.com/jackc/pglogrepl v0.0.0-20230826184802-9ed16cb201f6 h1:qMz8L5hFgBNF3W72W7OwuUjB04qlic7YPzij+UiFle0= +github.com/jackc/pglogrepl v0.0.0-20230826184802-9ed16cb201f6/go.mod h1:Y1HIk+uK2wXiU8vuvQh0GaSzVh+MXFn2kfKBMpn6CZg= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= @@ -1235,12 +1205,6 @@ github.com/jackc/pgx/v5 v5.4.3/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSlj github.com/jackc/puddle/v2 v2.0.0/go.mod h1:itE7ZJY8xnoo0JqJEpSMprN0f+NQkMCuEV/N9j8h0oc= github.com/jackc/puddle/v2 v2.2.1 
h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= -github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= -github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= -github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= -github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= -github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= -github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= @@ -1250,7 +1214,6 @@ github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= -github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -1267,9 +1230,12 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= github.com/klauspost/asmfmt v1.3.2/go.mod 
h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= -github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= -github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= +github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.17.1 h1:NE3C767s2ak2bweCZo3+rdP4U/HoyVXLv/X9f2gPS5g= +github.com/klauspost/compress v1.17.1/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= @@ -1299,30 +1265,24 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mattn/go-sqlite3 v1.14.14 h1:qZgc/Rwetq+MtyE18WhzjokPD93dNqLGNT3QJuLvBGw= github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= 
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/microsoft/go-mssqldb v1.5.0 h1:CgENxkwtOBNj3Jg6T1X209y2blCfTTcwuOlznd2k9fk= -github.com/microsoft/go-mssqldb v1.5.0/go.mod h1:lmWsjHD8XX/Txr0f8ZqgbEZSC+BZjmEQy/Ms+rLrvho= +github.com/microsoft/go-mssqldb v1.6.0 h1:mM3gYdVwEPFrlg/Dvr2DNVEgYFG7L42l+dGc67NNNpc= +github.com/microsoft/go-mssqldb v1.6.0/go.mod h1:00mDtPbeQCRGC1HwOOR5K/gr30P1NcEG0vx6Kbv2aJU= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= -github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= -github.com/montanaflynn/stats 
v0.7.0/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= @@ -1331,8 +1291,13 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLA github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/orcaman/concurrent-map/v2 v2.0.1 h1:jOJ5Pg2w1oeB6PeDurIYf6k9PQ+aTITr/6lP/L/zp6c= github.com/orcaman/concurrent-map/v2 v2.0.1/go.mod h1:9Eq3TG2oBe5FirmYWQfYO5iH1q0Jv47PLaNK++uCdOM= +github.com/paulmach/orb v0.10.0 h1:guVYVqzxHE/CQ1KpfGO077TR0ATHSNjp4s6XGLn3W9s= +github.com/paulmach/orb v0.10.0/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU= +github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY= github.com/pborman/uuid v1.2.1 h1:+ZZIw58t/ozdjRaXh/3awHfmWRbzYxJoAdNJxe/3pvw= github.com/pborman/uuid v1.2.1/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/peterstace/simplefeatures v0.45.1 h1:V615OgtN8ZXYuM9h1UTtuQFn9AgAC1XQT2ipQyYZWVE= +github.com/peterstace/simplefeatures v0.45.1/go.mod h1:nosSwG+GcVmAUBoxFWoyy1hS1qg0RuX0M9tmqsIzFX8= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= @@ -1357,6 +1322,8 @@ github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= 
github.com/prometheus/client_golang v1.16.0 h1:yk/hx9hDbrGHovbci4BY+pRMfSuuat626eFsHb7tmT8= github.com/prometheus/client_golang v1.16.0/go.mod h1:Zsulrv/L9oM40tJ7T815tM89lFEugiJ9HzIqaAx4LKc= +github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= +github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= @@ -1364,6 +1331,8 @@ github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6T github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY= github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= +github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= @@ -1373,8 +1342,8 @@ github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= 
-github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI= -github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ= @@ -1393,10 +1362,10 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/snowflakedb/gosnowflake v1.6.23 h1:uO+zMTXJcSHzOm6ks5To8ergNjt5Dy6cr5QtStpRFT8= -github.com/snowflakedb/gosnowflake v1.6.23/go.mod h1:KfO4F7bk+aXPUIvBqYxvPhxLlu2/w4TtSC8Rw/yr5Mg= github.com/snowflakedb/gosnowflake v1.6.24 h1:NiBh1WSstNtr12qywmdFMS1XHaYdF5iWWGnjIQb1cEY= github.com/snowflakedb/gosnowflake v1.6.24/go.mod h1:KfO4F7bk+aXPUIvBqYxvPhxLlu2/w4TtSC8Rw/yr5Mg= +github.com/snowflakedb/gosnowflake v1.6.25 h1:o5zUmxTOo0Eo9AdkEj8blCeiMuILrQJ+rjUMAeZhcRE= +github.com/snowflakedb/gosnowflake v1.6.25/go.mod h1:KfO4F7bk+aXPUIvBqYxvPhxLlu2/w4TtSC8Rw/yr5Mg= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= @@ -1405,7 +1374,6 @@ github.com/stretchr/objx v0.1.0/go.mod 
h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.1 h1:4VhoImhV/Bm0ToFkXFi8hXNXwpDRZ/ynw3amt82mzq0= github.com/stretchr/objx v0.5.1/go.mod h1:/iHQpkQwBD6DLUmQ4pE+s1TXdob1mORJ4/UFdrifcy0= @@ -1423,16 +1391,27 @@ github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/twmb/murmur3 v1.1.5/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ= github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg= github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ= +github.com/twpayne/go-geom v1.5.2 h1:LyRfBX2W0LM7XN/bGqX0XxrJ7SZc3XwmxU4aj4kSoxw= +github.com/twpayne/go-geom v1.5.2/go.mod h1:3z6O2sAnGtGCXx4Q+5nPOLCA5e8WI2t3cthdb1P2HH8= +github.com/twpayne/go-geos v0.13.2 h1:kyQenUwpU+mGrNX403KNlRDuIIIL22OHVVj1SGfXUm0= +github.com/twpayne/go-geos v0.13.2/go.mod h1:r5O89NwzDqYqiDF5HnkYjdgJtODwzpjeNlj/gL9ztXk= github.com/uber-go/tally/v4 v4.1.1/go.mod h1:aXeSTDMl4tNosyf6rdU8jlgScHyjEGGtfJ/uwCIf/vM= github.com/uber-go/tally/v4 v4.1.7 h1:YiKvvMKCCXlCKXI0i1hVk+xda8YxdIpjeFXohpvn8Zo= github.com/uber-go/tally/v4 v4.1.7/go.mod h1:pPR56rjthjtLB8xQlEx2I1VwAwRGCh/i4xMUcmG+6z4= +github.com/uber-go/tally/v4 v4.1.10 
h1:2GSX7Tmq26wjAvOtQEc5EvRROIkX2OX4vpROt6mlRLM= +github.com/uber-go/tally/v4 v4.1.10/go.mod h1:pPR56rjthjtLB8xQlEx2I1VwAwRGCh/i4xMUcmG+6z4= github.com/urfave/cli/v2 v2.25.7 h1:VAzn5oq403l5pHjc4OhD54+XGO9cdKVL/7lDjF+iKUs= github.com/urfave/cli/v2 v2.25.7/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= +github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU= github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -1446,6 +1425,7 @@ github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= +go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -1460,13 +1440,13 @@ go.opentelemetry.io/proto/otlp v0.15.0/go.mod 
h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.temporal.io/api v1.5.0/go.mod h1:BqKxEJJYdxb5dqf0ODfzfMxh8UEQ5L3zKS51FiIYYkA= go.temporal.io/api v1.21.0/go.mod h1:xlsUEakkN2vU2/WV7e5NqMG4N93nfuNfvbXdaXUpU8w= -go.temporal.io/api v1.23.0 h1:4y9mTQjEHsE0Du0WJ2ExJUcP/1/a+B/UefzIDm4ALTE= -go.temporal.io/api v1.23.0/go.mod h1:AcJd1+rc1j0zte+ZBIkOHGHjntR/17LnZWFz+gMFHQ0= go.temporal.io/api v1.24.0 h1:WWjMYSXNh4+T4Y4jq1e/d9yCNnWoHhq4bIwflHY6fic= go.temporal.io/api v1.24.0/go.mod h1:4ackgCMjQHMpJYr1UQ6Tr/nknIqFkJ6dZ/SZsGv+St0= go.temporal.io/sdk v1.12.0/go.mod h1:lSp3lH1lI0TyOsus0arnO3FYvjVXBZGi/G7DjnAnm6o= go.temporal.io/sdk v1.24.0 h1:mAk5VFR+z4s8QVzRx3iIpRnHcEO3m10CYNjnRXrhVq4= go.temporal.io/sdk v1.24.0/go.mod h1:S7vWxU01lGcCny0sWx03bkkYw4VtVrpzeqBTn2A6y+E= +go.temporal.io/sdk v1.25.0 h1:urC4CYy3ZJOC4oOWreNfIH08N4qHydOc20pN1bYpmYw= +go.temporal.io/sdk v1.25.0/go.mod h1:X7iFKZpsj90BfszfpFCzLX8lwEJXbnRrl351/HyEgmU= go.temporal.io/sdk/contrib/tally v0.2.0 h1:XnTJIQcjOv+WuCJ1u8Ve2nq+s2H4i/fys34MnWDRrOo= go.temporal.io/sdk/contrib/tally v0.2.0/go.mod h1:1kpSuCms/tHeJQDPuuKkaBsMqfHnIIRnCtUYlPNXxuE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= @@ -1494,16 +1474,17 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod 
h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= -golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= +golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -1519,10 +1500,10 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= -golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b h1:r+vk0EmXNmekl0S0BascoeeoHk/L7wmaW2QF90K+kYI= -golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b/go.mod 
h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= -golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 h1:m64FZMko/V45gv0bNmrNYoDEq8U5YUhetc9cBWKS1TQ= -golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63/go.mod h1:0v4NqG35kSWCMzLaMeX+IQrlSnVE/bqGSyC2cz/9Le8= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -1568,6 +1549,8 @@ golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY= +golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1631,8 +1614,11 @@ golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= 
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= +golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= +golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1663,8 +1649,10 @@ golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= golang.org/x/oauth2 v0.8.0/go.mod h1:yr7u4HXZRm1R1kBWqr/xKNqewf0plRYoB7sla+BCIXE= -golang.org/x/oauth2 v0.11.0 h1:vPL4xzxBM4niKCW6g9whtaWVXTJf1U5e4aZxxFx/gbU= -golang.org/x/oauth2 v0.11.0/go.mod h1:LdF7O/8bLR/qWK9DrpXmbHLTouvRHK0SgJl0GmDBchk= +golang.org/x/oauth2 v0.12.0 h1:smVPGxink+n1ZI5pkQa8y6fZT0RW0MgCO5bFpepy4B4= +golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba4= +golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY= +golang.org/x/oauth2 v0.13.0/go.mod h1:/JMhi4ZRXAf4HG9LiNmxvk+45+96RUlVThiH8FzNBn0= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1684,6 +1672,8 @@ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1775,8 +1765,11 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= 
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1787,8 +1780,11 @@ golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= +golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1805,8 +1801,9 @@ golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time 
v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1880,10 +1877,10 @@ golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc= -golang.org/x/tools v0.11.1 h1:ojD5zOW8+7dOGzdnNgersm8aPfcDjhMp12UfG93NIMc= -golang.org/x/tools v0.11.1/go.mod h1:anzJrxPjNtfgiYQYirP2CPGzGLxrH2u2QBhn6Bf3qY8= -golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846 h1:Vve/L0v7CXXuxUmaMGIEK/dEeq7uiqb5qBgQrZzIE7E= -golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846/go.mod h1:Sc0INKfu04TlqNoRA1hgpFZbhYXHPr4V5DzpSBTPqQM= +golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc= +golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1893,6 +1890,8 @@ golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNq golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod 
h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0= @@ -1964,18 +1963,19 @@ google.golang.org/api v0.122.0/go.mod h1:gcitW0lvnyWjSp9nKxAbdHKIZ6vF4aajGueeslZ google.golang.org/api v0.124.0/go.mod h1:xu2HQurE5gi/3t1aFCvhPD781p0a3p11sdunTJ2BlP4= google.golang.org/api v0.125.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= google.golang.org/api v0.126.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= -google.golang.org/api v0.134.0 h1:ktL4Goua+UBgoP1eL1/60LwZJqa1sIzkLmvoR3hR6Gw= -google.golang.org/api v0.134.0/go.mod h1:sjRL3UnjTx5UqNQS9EWr9N8p7xbHpy1k0XGRLCf3Spk= -google.golang.org/api v0.138.0 h1:K/tVp05MxNVbHShRw9m7e9VJGdagNeTdMzqPH7AUqr0= -google.golang.org/api v0.138.0/go.mod h1:4xyob8CxC+0GChNBvEUAk8VBKNvYOTWM9T3v3UfRxuY= +google.golang.org/api v0.142.0 h1:mf+7EJ94fi5ZcnpPy+m0Yv2dkz8bKm+UL0snTCuwXlY= +google.golang.org/api v0.142.0/go.mod h1:zJAN5o6HRqR7O+9qJUFOWrZkYE66RH+efPBdTLA4xBA= +google.golang.org/api v0.147.0 h1:Can3FaQo9LlVqxJCodNmeZW/ib3/qKAY3rFeXiHo5gc= +google.golang.org/api v0.147.0/go.mod h1:pQ/9j83DcmPd/5C9e2nFOdjjNkDZ1G+zkbK2uvdkJMs= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod 
h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180518175338-11a468237815/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -2118,11 +2118,12 @@ google.golang.org/genproto v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:xZnkP7mR google.golang.org/genproto v0.0.0-20230629202037-9506855d4529/go.mod h1:xZnkP7mREFX5MORlOPEzLMr+90PPZQ2QWzrVTWfAq64= google.golang.org/genproto v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:O9kGHb51iE/nOGvQaDUuadVYqovW56s5emA88lQnj6Y= google.golang.org/genproto v0.0.0-20230726155614-23370e0ffb3e/go.mod h1:0ggbjUrZYpy1q+ANUS30SEoGZ53cdfwtbuG7Ptgy108= -google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5 h1:L6iMMGrtzgHsWofoFcihmDEMYeDR9KN/ThbPWGrh++g= google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5/go.mod h1:oH/ZOT02u4kWEp7oYBGYFFkCdKS/uYR9Z7+0/xuuFp8= google.golang.org/genproto v0.0.0-20230815205213-6bfd019c3878/go.mod h1:yZTlhN0tQnXo3h00fuXNCxJdLdIdnVFVBaRJ5LWBbw4= -google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d h1:VBu5YqKPv6XiJ199exd8Br+Aetz+o08F+PLMnwJQHAY= -google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d/go.mod h1:yZTlhN0tQnXo3h00fuXNCxJdLdIdnVFVBaRJ5LWBbw4= +google.golang.org/genproto v0.0.0-20230920204549-e6e6cdab5c13 h1:vlzZttNJGVqTsRFU9AmdnrcO1Znh8Ew9kCD//yjigk0= 
+google.golang.org/genproto v0.0.0-20230920204549-e6e6cdab5c13/go.mod h1:CCviP9RmpZ1mxVr8MUjCnSiY09IbAXZxhLE6EhHIdPU= +google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a h1:fwgW9j3vHirt4ObdHoYNwuO24BEZjSzbh+zPaNWoiY8= +google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:EMfReVxb80Dq1hhioy0sOsY9jCE46YDgHlJ7fWVUWRE= google.golang.org/genproto/googleapis/api v0.0.0-20230525234020-1aefcd67740a/go.mod h1:ts19tUU+Z0ZShN1y3aPyq2+O3d5FUNNgT6FtOzmrNn8= google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= google.golang.org/genproto/googleapis/api v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= @@ -2130,11 +2131,12 @@ google.golang.org/genproto/googleapis/api v0.0.0-20230530153820-e85fd2cbaebc/go. google.golang.org/genproto/googleapis/api v0.0.0-20230629202037-9506855d4529/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= google.golang.org/genproto/googleapis/api v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:mPBs5jNgx2GuQGvFwUvVKqtn6HsUw9nP64BedgvqEsQ= google.golang.org/genproto/googleapis/api v0.0.0-20230726155614-23370e0ffb3e/go.mod h1:rsr7RhLuwsDKL7RmgDDCUc6yaGr1iqceVb5Wv6f6YvQ= -google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5 h1:nIgk/EEq3/YlnmVVXVnm14rC2oxgs1o0ong4sD/rd44= google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5/go.mod h1:5DZzOUPCLYL3mNkQ0ms0F3EuUNZ7py1Bqeq6sxzI7/Q= google.golang.org/genproto/googleapis/api v0.0.0-20230815205213-6bfd019c3878/go.mod h1:KjSP20unUpOx5kyQUFa7k4OJg0qeJ7DEZflGDu2p6Bk= -google.golang.org/genproto/googleapis/api v0.0.0-20230822172742-b8732ec3820d h1:DoPTO70H+bcDXcd39vOqb2viZxgqeBeSGtZ55yZU4/Q= -google.golang.org/genproto/googleapis/api v0.0.0-20230822172742-b8732ec3820d/go.mod h1:KjSP20unUpOx5kyQUFa7k4OJg0qeJ7DEZflGDu2p6Bk= +google.golang.org/genproto/googleapis/api v0.0.0-20230920204549-e6e6cdab5c13 
h1:U7+wNaVuSTaUqNvK2+osJ9ejEZxbjHHk8F2b6Hpx0AE= +google.golang.org/genproto/googleapis/api v0.0.0-20230920204549-e6e6cdab5c13/go.mod h1:RdyHbowztCGQySiCvQPgWQWgWhGnouTdCflKoDBt32U= +google.golang.org/genproto/googleapis/api v0.0.0-20231012201019-e917dd12ba7a h1:myvhA4is3vrit1a6NZCWBIwN0kNEnX21DJOJX/NvIfI= +google.golang.org/genproto/googleapis/api v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:SUBoKXbI1Efip18FClrQVGjWcyd0QZd8KkvdP34t7ww= google.golang.org/genproto/googleapis/bytestream v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:ylj+BE99M198VPbBh6A8d9n3w8fChvyLK3wwBOjXBFA= google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234015-3fc162c6f38a/go.mod h1:xURIpW9ES5+/GZhnV6beoEtxQrnkRGIfP5VQG2tCBLc= google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= @@ -2143,11 +2145,12 @@ google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go. google.golang.org/genproto/googleapis/rpc v0.0.0-20230629202037-9506855d4529/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:8mL13HKkDa+IuJ8yruA3ci0q+0vsUz4m//+ottjwS5o= google.golang.org/genproto/googleapis/rpc v0.0.0-20230731190214-cbb8c96f2d6d/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5 h1:eSaPbMR4T7WfH9FvABk36NBMacoTUKdWCvV0dx+KfOg= google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5/go.mod h1:zBEcrKX2ZOcEkHWxBPAIvYUWOKKMIhYcmNiUIu2ji3I= google.golang.org/genproto/googleapis/rpc v0.0.0-20230815205213-6bfd019c3878/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d h1:uvYuEyMHKNt+lT4K3bN6fGswmK8qSvcreM3BwjDh+y4= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= 
+google.golang.org/genproto/googleapis/rpc v0.0.0-20230920204549-e6e6cdab5c13 h1:N3bU/SQDCDyD6R528GJ/PwW9KjYcJA3dgyH+MovAkIM= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230920204549-e6e6cdab5c13/go.mod h1:KSqppvjFjtoCI+KGd4PELB0qLNxdJHRGqRI09mB6pQA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a h1:a2MQQVoTo96JC9PMGtGBymLp7+/RzpFc2yX/9WfFg1c= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:4cYg8o5yUbm77w8ZX00LhMVNl/YVBFJRYWDc0uYWMs0= google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -2192,8 +2195,11 @@ google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwS google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= google.golang.org/grpc v1.55.0/go.mod h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8= google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw= google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo= +google.golang.org/grpc v1.58.2 h1:SXUpjxeVF3FKrTYQI4f4KvbGD5u2xccdYdurwowix5I= +google.golang.org/grpc v1.58.2/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0= +google.golang.org/grpc v1.58.3 h1:BjnpXut1btbtgN/6sp+brB2Kbm2LjNXnidYujAVbSoQ= +google.golang.org/grpc v1.58.3/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -2296,6 +2302,7 
@@ modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/token v1.0.1/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/z v1.5.1/go.mod h1:eWFB510QWW5Th9YGZT81s+LwvaAs3Q2yr4sP0rmLkv8= +nhooyr.io/websocket v1.8.7 h1:usjR2uOr/zjjkVMy0lW+PPohFok7PCow5sDjLgX4P4g= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= diff --git a/flow/model/column.go b/flow/model/column.go new file mode 100644 index 000000000..5cbf25dc2 --- /dev/null +++ b/flow/model/column.go @@ -0,0 +1,8 @@ +package model + +type ColumnInformation struct { + // This is a mapping from column name to column type + // Example: "name" -> "VARCHAR" + ColumnMap map[string]string + Columns []string // List of column names +} diff --git a/flow/model/model.go b/flow/model/model.go index 64582127a..754277ea6 100644 --- a/flow/model/model.go +++ b/flow/model/model.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "math/big" + "sync" "time" "github.com/PeerDB-io/peer-flow/generated/protos" @@ -32,6 +33,8 @@ type PullRecordsRequest struct { OverrideReplicationSlotName string // for supporting schema changes RelationMessageMapping RelationMessageMapping + // record batch for pushing changes into + RecordStream *CDCRecordStream } type Record interface { @@ -110,10 +113,11 @@ func (r *RecordItems) Len() int { return len(r.values) } -func (r *RecordItems) ToJSON() (string, error) { +func (r *RecordItems) toMap() (map[string]interface{}, error) { if r.colToValIdx == nil { - return "", errors.New("colToValIdx is nil") + return nil, errors.New("colToValIdx is nil") } + jsonStruct := make(map[string]interface{}) for col, idx := range r.colToValIdx { v := r.values[idx] @@ -129,18 +133,60 @@ func (r *RecordItems) ToJSON() 
(string, error) { qvalue.QValueKindTime, qvalue.QValueKindTimeTZ: jsonStruct[col], err = v.GoTimeConvert() if err != nil { - return "", err + return nil, err } case qvalue.QValueKindNumeric: bigRat, ok := v.Value.(*big.Rat) if !ok { - return "", errors.New("expected *big.Rat value") + return nil, errors.New("expected *big.Rat value") } jsonStruct[col] = bigRat.FloatString(9) default: jsonStruct[col] = v.Value } } + + return jsonStruct, nil +} + +type ToJSONOptions struct { + UnnestColumns map[string]bool +} + +func NewToJSONOptions(unnestCols []string) *ToJSONOptions { + unnestColumns := make(map[string]bool) + for _, col := range unnestCols { + unnestColumns[col] = true + } + return &ToJSONOptions{ + UnnestColumns: unnestColumns, + } +} + +func (r *RecordItems) ToJSONWithOpts(opts *ToJSONOptions) (string, error) { + jsonStruct, err := r.toMap() + if err != nil { + return "", err + } + + for col, idx := range r.colToValIdx { + v := r.values[idx] + if v.Kind == qvalue.QValueKindJSON { + if _, ok := opts.UnnestColumns[col]; ok { + var unnestStruct map[string]interface{} + err := json.Unmarshal([]byte(v.Value.(string)), &unnestStruct) + if err != nil { + return "", err + } + + for k, v := range unnestStruct { + jsonStruct[k] = v + } + delete(jsonStruct, col) + } + } + } + jsonBytes, err := json.Marshal(jsonStruct) if err != nil { return "", err @@ -149,6 +195,11 @@ func (r *RecordItems) ToJSON() (string, error) { return string(jsonBytes), nil } +func (r *RecordItems) ToJSON() (string, error) { + unnestCols := make([]string, 0) + return r.ToJSONWithOpts(NewToJSONOptions(unnestCols)) +} + type InsertRecord struct { // Name of the source table SourceTableName string @@ -160,8 +211,6 @@ type InsertRecord struct { CommitID int64 // Items is a map of column name to value. Items *RecordItems - // unchanged toast columns - UnchangedToastColumns map[string]bool } // Implement Record interface for InsertRecord. 
@@ -189,7 +238,7 @@ type UpdateRecord struct { // NewItems is a map of column name to value. NewItems *RecordItems // unchanged toast columns - UnchangedToastColumns map[string]bool + UnchangedToastColumns map[string]struct{} } // Implement Record interface for UpdateRecord. @@ -215,8 +264,6 @@ type DeleteRecord struct { CheckPointID int64 // Items is a map of column name to value. Items *RecordItems - // unchanged toast columns - UnchangedToastColumns map[string]bool } // Implement Record interface for DeleteRecord. @@ -234,22 +281,110 @@ func (r *DeleteRecord) GetItems() *RecordItems { type TableWithPkey struct { TableName string - PkeyColVal qvalue.QValue + PkeyColVal string } -type RecordBatch struct { +type CDCRecordStream struct { // Records are a list of json objects. - Records []Record - // FirstCheckPointID is the first ID that was pulled. - FirstCheckPointID int64 - // LastCheckPointID is the last ID of the commit that corresponds to this batch. - LastCheckPointID int64 - //TablePkey to record index mapping - TablePKeyLastSeen map[TableWithPkey]int + records chan Record + // Schema changes from the slot + SchemaDeltas chan *protos.TableSchemaDelta + // Relation message mapping + RelationMessageMapping chan *RelationMessageMapping + // Mutex for synchronizing access to the checkpoint fields + checkpointMutex sync.Mutex + // firstCheckPointID is the first ID of the commit that corresponds to this batch. + firstCheckPointID int64 + // Indicates if the last checkpoint has been set. + lastCheckpointSet bool + // lastCheckPointID is the last ID of the commit that corresponds to this batch. + lastCheckPointID int64 + // empty signal to indicate if the records are going to be empty or not. + emptySignal chan bool +} + +func NewCDCRecordStream() *CDCRecordStream { + return &CDCRecordStream{ + records: make(chan Record, 1<<18), + // TODO (kaushik): more than 1024 schema deltas can cause problems! 
+ SchemaDeltas: make(chan *protos.TableSchemaDelta, 1<<10), + emptySignal: make(chan bool, 1), + RelationMessageMapping: make(chan *RelationMessageMapping, 1), + lastCheckpointSet: false, + lastCheckPointID: 0, + firstCheckPointID: 0, + } +} + +func (r *CDCRecordStream) UpdateLatestCheckpoint(val int64) { + r.checkpointMutex.Lock() + defer r.checkpointMutex.Unlock() + + if r.firstCheckPointID == 0 { + r.firstCheckPointID = val + } + + if val > r.lastCheckPointID { + r.lastCheckPointID = val + } +} + +func (r *CDCRecordStream) GetFirstCheckpoint() int64 { + r.checkpointMutex.Lock() + defer r.checkpointMutex.Unlock() + + return r.firstCheckPointID +} + +func (r *CDCRecordStream) GetLastCheckpoint() (int64, error) { + r.checkpointMutex.Lock() + defer r.checkpointMutex.Unlock() + + if !r.lastCheckpointSet { + return 0, errors.New("last checkpoint not set, stream is still active") + } + return r.lastCheckPointID, nil +} + +func (r *CDCRecordStream) AddRecord(record Record) { + r.records <- record +} + +func (r *CDCRecordStream) SignalAsEmpty() { + r.emptySignal <- true +} + +func (r *CDCRecordStream) SignalAsNotEmpty() { + r.emptySignal <- false +} + +func (r *CDCRecordStream) WaitAndCheckEmpty() bool { + isEmpty := <-r.emptySignal + return isEmpty +} + +func (r *CDCRecordStream) WaitForSchemaDeltas() []*protos.TableSchemaDelta { + schemaDeltas := make([]*protos.TableSchemaDelta, 0) + for delta := range r.SchemaDeltas { + schemaDeltas = append(schemaDeltas, delta) + } + return schemaDeltas +} + +func (r *CDCRecordStream) Close() { + close(r.emptySignal) + close(r.records) + close(r.SchemaDeltas) + close(r.RelationMessageMapping) + r.lastCheckpointSet = true +} + +func (r *CDCRecordStream) GetRecords() chan Record { + return r.records } type SyncRecordsRequest struct { - Records *RecordBatch + Records *CDCRecordStream // FlowJobName is the name of the flow job. 
FlowJobName string // SyncMode to use for pushing raw records @@ -278,10 +413,10 @@ type SyncResponse struct { CurrentSyncBatchID int64 // TableNameRowsMapping tells how many records need to be synced to each destination table. TableNameRowsMapping map[string]uint32 - // to be carried to NormalizeFlow - TableSchemaDelta *protos.TableSchemaDelta + // to be carried to parent WorkFlow + TableSchemaDeltas []*protos.TableSchemaDelta // to be stored in state for future PullFlows - RelationMessageMapping RelationMessageMapping + RelationMessageMapping *RelationMessageMapping } type NormalizeResponse struct { @@ -291,13 +426,6 @@ type NormalizeResponse struct { EndBatchID int64 } -// sync all the records normally, then apply the schema delta after NormalizeFlow. -type RecordsWithTableSchemaDelta struct { - RecordBatch *RecordBatch - TableSchemaDelta *protos.TableSchemaDelta - RelationMessageMapping RelationMessageMapping -} - // being clever and passing the delta back as a regular record instead of heavy CDC refactoring. 
type RelationRecord struct { CheckPointID int64 diff --git a/flow/model/qrecord_stream.go b/flow/model/qrecord_stream.go index 1fb22826e..721ab58c6 100644 --- a/flow/model/qrecord_stream.go +++ b/flow/model/qrecord_stream.go @@ -19,6 +19,32 @@ type QRecordStream struct { schemaCache *QRecordSchema } +type RecordsToStreamRequest struct { + records chan Record + TableMapping map[string]uint32 + BatchID int64 +} + +func NewRecordsToStreamRequest( + records chan Record, + tableMapping map[string]uint32, + batchID int64, +) *RecordsToStreamRequest { + return &RecordsToStreamRequest{ + records: records, + TableMapping: tableMapping, + BatchID: batchID, + } +} + +func (r *RecordsToStreamRequest) GetRecords() chan Record { + return r.records +} + +type RecordsToStreamResponse struct { + Stream *QRecordStream +} + func NewQRecordStream(buffer int) *QRecordStream { return &QRecordStream{ schema: make(chan *QRecordSchemaOrError, 1), diff --git a/flow/model/qvalue/avro_converter.go b/flow/model/qvalue/avro_converter.go index 62fefe698..52aaa8750 100644 --- a/flow/model/qvalue/avro_converter.go +++ b/flow/model/qvalue/avro_converter.go @@ -1,7 +1,6 @@ package qvalue import ( - "errors" "fmt" "math/big" "time" @@ -36,6 +35,10 @@ func GetAvroSchemaFromQValueKind(kind QValueKind, nullable bool) (*QValueKindAvr return &QValueKindAvroSchema{ AvroLogicalSchema: "string", }, nil + case QValueKindGeometry, QValueKindGeography, QValueKindPoint: + return &QValueKindAvroSchema{ + AvroLogicalSchema: "string", + }, nil case QValueKindInt16, QValueKindInt32, QValueKindInt64: return &QValueKindAvroSchema{ AvroLogicalSchema: "long", @@ -119,7 +122,7 @@ func GetAvroSchemaFromQValueKind(kind QValueKind, nullable bool) (*QValueKindAvr AvroLogicalSchema: "string", }, nil default: - return nil, errors.New("unsupported QValueKind type") + return nil, fmt.Errorf("unsupported QValueKind type: %s", kind) } } @@ -202,6 +205,8 @@ func (c *QValueAvroConverter) ToAvroValue() (interface{}, error) { return 
c.processArrayString() case QValueKindUUID: return c.processUUID() + case QValueKindGeography, QValueKindGeometry, QValueKindPoint: + return c.processGeospatial() default: return nil, fmt.Errorf("[toavro] unsupported QValueKind: %s", c.Value.Kind) } @@ -330,6 +335,22 @@ func (c *QValueAvroConverter) processUUID() (interface{}, error) { return uuidString, nil } +func (c *QValueAvroConverter) processGeospatial() (interface{}, error) { + if c.Value.Value == nil { + return nil, nil + } + + geoString, ok := c.Value.Value.(string) + if !ok { + return nil, fmt.Errorf("[conversion] invalid geospatial value %v", c.Value.Value) + } + + if c.Nullable { + return goavro.Union("string", geoString), nil + } + return geoString, nil +} + func (c *QValueAvroConverter) processArrayInt32() (interface{}, error) { if c.Value.Value == nil && c.Nullable { return nil, nil diff --git a/flow/model/qvalue/kind.go b/flow/model/qvalue/kind.go index 1def70861..b2eb1b1e4 100644 --- a/flow/model/qvalue/kind.go +++ b/flow/model/qvalue/kind.go @@ -1,5 +1,7 @@ package qvalue +import "fmt" + type QValueKind string const ( @@ -23,6 +25,9 @@ const ( QValueKindJSON QValueKind = "json" QValueKindBit QValueKind = "bit" QValueKindHStore QValueKind = "hstore" + QValueKindGeography QValueKind = "geography" + QValueKindGeometry QValueKind = "geometry" + QValueKindPoint QValueKind = "point" // array types QValueKindArrayFloat32 QValueKind = "array_float32" @@ -44,3 +49,48 @@ func QValueKindIsArray(kind QValueKind) bool { return false } } + +var QValueKindToSnowflakeTypeMap = map[QValueKind]string{ + QValueKindBoolean: "BOOLEAN", + QValueKindInt16: "INTEGER", + QValueKindInt32: "INTEGER", + QValueKindInt64: "INTEGER", + QValueKindFloat32: "FLOAT", + QValueKindFloat64: "FLOAT", + QValueKindNumeric: "NUMBER(38, 9)", + QValueKindString: "STRING", + QValueKindJSON: "VARIANT", + QValueKindTimestamp: "TIMESTAMP_NTZ", + QValueKindTimestampTZ: "TIMESTAMP_TZ", + QValueKindTime: "TIME", + QValueKindDate: "DATE", + 
QValueKindBit: "BINARY", + QValueKindBytes: "BINARY", + QValueKindStruct: "STRING", + QValueKindUUID: "STRING", + QValueKindTimeTZ: "STRING", + QValueKindInvalid: "STRING", + QValueKindHStore: "STRING", + QValueKindGeography: "GEOGRAPHY", + QValueKindGeometry: "GEOMETRY", + QValueKindPoint: "GEOMETRY", + + // array types will be mapped to VARIANT + QValueKindArrayFloat32: "VARIANT", + QValueKindArrayFloat64: "VARIANT", + QValueKindArrayInt32: "VARIANT", + QValueKindArrayInt64: "VARIANT", + QValueKindArrayString: "VARIANT", +} + +func (kind QValueKind) ToDWHColumnType(dwhType QDWHType) (string, error) { + if dwhType != QDWHTypeSnowflake { + return "", fmt.Errorf("unsupported DWH type: %v", dwhType) + } + + if val, ok := QValueKindToSnowflakeTypeMap[kind]; ok { + return val, nil + } else { + return "STRING", nil + } +} diff --git a/flow/workflows/cdc_flow.go b/flow/workflows/cdc_flow.go index 46d44ad51..9ac0ade82 100644 --- a/flow/workflows/cdc_flow.go +++ b/flow/workflows/cdc_flow.go @@ -2,6 +2,7 @@ package peerflow import ( "fmt" + "strings" "time" "github.com/PeerDB-io/peer-flow/generated/protos" @@ -68,7 +69,7 @@ type CDCFlowState struct { NormalizeFlowErrors error // Global mapping of relation IDs to RelationMessages sent as a part of logical replication. // Needed to support schema changes. 
- RelationMessageMapping model.RelationMessageMapping + RelationMessageMapping *model.RelationMessageMapping } // returns a new empty PeerFlowState @@ -82,7 +83,7 @@ func NewCDCFlowState() *CDCFlowState { SyncFlowErrors: nil, NormalizeFlowErrors: nil, // WORKAROUND: empty maps are protobufed into nil maps for reasons beyond me - RelationMessageMapping: model.RelationMessageMapping{ + RelationMessageMapping: &model.RelationMessageMapping{ 0: &protos.RelationMessage{ RelationId: 0, RelationName: "protobuf_workaround", @@ -102,6 +103,28 @@ func (s *CDCFlowState) TruncateProgress() { if len(s.NormalizeFlowStatuses) > 10 { s.NormalizeFlowStatuses = s.NormalizeFlowStatuses[len(s.NormalizeFlowStatuses)-10:] } + + if s.SyncFlowErrors != nil { + fmt.Println("SyncFlowErrors: ", s.SyncFlowErrors) + s.SyncFlowErrors = nil + } + + if s.NormalizeFlowErrors != nil { + fmt.Println("NormalizeFlowErrors: ", s.NormalizeFlowErrors) + s.NormalizeFlowErrors = nil + } +} + +func (s *CDCFlowState) SendWALHeartbeat(ctx workflow.Context, cfg *protos.FlowConnectionConfigs) error { + walHeartbeatCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + StartToCloseTimeout: 5 * time.Minute, + }) + + if err := workflow.ExecuteActivity(walHeartbeatCtx, flowable.SendWALHeartbeat, cfg).Get(ctx, nil); err != nil { + return fmt.Errorf("failed to send WAL heartbeat: %w", err) + } + + return nil } // CDCFlowWorkflowExecution represents the state for execution of a peer flow. @@ -181,6 +204,16 @@ func CDCFlowWorkflowWithConfig( }) if !state.SetupComplete { + // if resync is true, alter the table name schema mapping to temporarily add + // a suffix to the table names. 
+ if cfg.Resync { + for _, mapping := range cfg.TableMappings { + oldName := mapping.DestinationTableIdentifier + newName := fmt.Sprintf("%s_resync", oldName) + mapping.DestinationTableIdentifier = newName + } + } + // start the SetupFlow workflow as a child workflow, and wait for it to complete // it should return the table schema for the source peer setupFlowID, err := GetChildWorkflowID(ctx, "setup-flow", cfg.FlowJobName) @@ -219,6 +252,30 @@ func CDCFlowWorkflowWithConfig( return state, fmt.Errorf("failed to execute child workflow: %w", err) } + if cfg.Resync { + renameOpts := &protos.RenameTablesInput{} + renameOpts.FlowJobName = cfg.FlowJobName + renameOpts.Peer = cfg.Destination + for _, mapping := range cfg.TableMappings { + oldName := mapping.DestinationTableIdentifier + newName := strings.TrimSuffix(oldName, "_resync") + renameOpts.RenameTableOptions = append(renameOpts.RenameTableOptions, &protos.RenameTableOption{ + CurrentName: oldName, + NewName: newName, + }) + mapping.DestinationTableIdentifier = newName + } + + renameTablesCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + StartToCloseTimeout: 12 * time.Hour, + HeartbeatTimeout: 1 * time.Hour, + }) + renameTablesFuture := workflow.ExecuteActivity(renameTablesCtx, flowable.RenameTables, renameOpts) + if err := renameTablesFuture.Get(renameTablesCtx, nil); err != nil { + return state, fmt.Errorf("failed to execute rename tables activity: %w", err) + } + } + state.SetupComplete = true state.Progress = append(state.Progress, "executed setup flow and snapshot flow") } @@ -258,7 +315,7 @@ func CDCFlowWorkflowWithConfig( }, } ctx = workflow.WithChildOptions(ctx, childSyncFlowOpts) - syncFlowOptions.RelationMessageMapping = state.RelationMessageMapping + syncFlowOptions.RelationMessageMapping = *state.RelationMessageMapping childSyncFlowFuture := workflow.ExecuteChildWorkflow( ctx, SyncFlowWorkflow, @@ -291,38 +348,28 @@ func CDCFlowWorkflowWithConfig( } ctx = 
workflow.WithChildOptions(ctx, childNormalizeFlowOpts) - var tableSchemaDelta *protos.TableSchemaDelta = nil + var tableSchemaDeltas []*protos.TableSchemaDelta = nil if childSyncFlowRes != nil { - tableSchemaDelta = childSyncFlowRes.TableSchemaDelta + tableSchemaDeltas = childSyncFlowRes.TableSchemaDeltas } - childNormalizeFlowFuture := workflow.ExecuteChildWorkflow( - ctx, - NormalizeFlowWorkflow, - cfg, - tableSchemaDelta, - ) + // slightly hacky: table schema mapping is cached, so we need to manually update it if schema changes. + if tableSchemaDeltas != nil { + modifiedSrcTables := make([]string, 0) + modifiedDstTables := make([]string, 0) - selector.AddFuture(childNormalizeFlowFuture, func(f workflow.Future) { - var childNormalizeFlowRes *model.NormalizeResponse - if err := f.Get(ctx, &childNormalizeFlowRes); err != nil { - w.logger.Error("failed to execute normalize flow: ", err) - state.NormalizeFlowErrors = multierror.Append(state.NormalizeFlowErrors, err) - } else { - state.NormalizeFlowStatuses = append(state.NormalizeFlowStatuses, childNormalizeFlowRes) + for _, tableSchemaDelta := range tableSchemaDeltas { + modifiedSrcTables = append(modifiedSrcTables, tableSchemaDelta.SrcTableName) + modifiedDstTables = append(modifiedDstTables, tableSchemaDelta.DstTableName) } - }) - selector.Select(ctx) - // slightly hacky: table schema mapping is cached, so we need to manually update it if schema changes. 
- if tableSchemaDelta != nil { getModifiedSchemaCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ StartToCloseTimeout: 5 * time.Minute, }) getModifiedSchemaFuture := workflow.ExecuteActivity(getModifiedSchemaCtx, flowable.GetTableSchema, &protos.GetTableSchemaBatchInput{ PeerConnectionConfig: cfg.Source, - TableIdentifiers: []string{tableSchemaDelta.SrcTableName}, + TableIdentifiers: modifiedSrcTables, }) var getModifiedSchemaRes *protos.GetTableSchemaBatchOutput @@ -330,10 +377,34 @@ func CDCFlowWorkflowWithConfig( w.logger.Error("failed to execute schema update at source: ", err) state.SyncFlowErrors = multierror.Append(state.SyncFlowErrors, err) } else { - cfg.TableNameSchemaMapping[tableSchemaDelta.DstTableName] = - getModifiedSchemaRes.TableNameSchemaMapping[tableSchemaDelta.SrcTableName] + for i := range modifiedSrcTables { + cfg.TableNameSchemaMapping[modifiedDstTables[i]] = + getModifiedSchemaRes.TableNameSchemaMapping[modifiedSrcTables[i]] + } } } + + childNormalizeFlowFuture := workflow.ExecuteChildWorkflow( + ctx, + NormalizeFlowWorkflow, + cfg, + ) + + selector.AddFuture(childNormalizeFlowFuture, func(f workflow.Future) { + var childNormalizeFlowRes *model.NormalizeResponse + if err := f.Get(ctx, &childNormalizeFlowRes); err != nil { + w.logger.Error("failed to execute normalize flow: ", err) + state.NormalizeFlowErrors = multierror.Append(state.NormalizeFlowErrors, err) + } else { + state.NormalizeFlowStatuses = append(state.NormalizeFlowStatuses, childNormalizeFlowRes) + } + }) + selector.Select(ctx) + } + + // send WAL heartbeat + if err := state.SendWALHeartbeat(ctx, cfg); err != nil { + return state, err } state.TruncateProgress() diff --git a/flow/workflows/qrep_flow.go b/flow/workflows/qrep_flow.go index bceb1fa7f..9a5a8641e 100644 --- a/flow/workflows/qrep_flow.go +++ b/flow/workflows/qrep_flow.go @@ -6,6 +6,7 @@ import ( "time" "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/PeerDB-io/peer-flow/shared" 
"github.com/google/uuid" "go.temporal.io/api/enums/v1" "go.temporal.io/sdk/log" @@ -46,6 +47,46 @@ func (q *QRepFlowExecution) SetupMetadataTables(ctx workflow.Context) error { return nil } +func (q *QRepFlowExecution) SetupWatermarkTableOnDestination(ctx workflow.Context) error { + if q.config.SetupWatermarkTableOnDestination { + q.logger.Info("setting up watermark table on destination for qrep flow: ", q.config.FlowJobName) + + ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + StartToCloseTimeout: 5 * time.Minute, + }) + + tableSchemaInput := &protos.GetTableSchemaBatchInput{ + PeerConnectionConfig: q.config.SourcePeer, + TableIdentifiers: []string{q.config.WatermarkTable}, + } + + future := workflow.ExecuteActivity(ctx, flowable.GetTableSchema, tableSchemaInput) + + var tblSchemaOutput *protos.GetTableSchemaBatchOutput + if err := future.Get(ctx, &tblSchemaOutput); err != nil { + q.logger.Error("failed to fetch schema for watermark table: ", err) + return fmt.Errorf("failed to fetch schema for watermark table %s: %w", q.config.WatermarkTable, err) + } + + // now setup the normalized tables on the destination peer + setupConfig := &protos.SetupNormalizedTableBatchInput{ + PeerConnectionConfig: q.config.DestinationPeer, + TableNameSchemaMapping: map[string]*protos.TableSchema{ + q.config.DestinationTableIdentifier: tblSchemaOutput.TableNameSchemaMapping[q.config.WatermarkTable], + }, + } + + future = workflow.ExecuteActivity(ctx, flowable.CreateNormalizedTable, setupConfig) + var createNormalizedTablesOutput *protos.SetupNormalizedTableBatchOutput + if err := future.Get(ctx, &createNormalizedTablesOutput); err != nil { + q.logger.Error("failed to create watermark table: ", err) + return fmt.Errorf("failed to create watermark table: %w", err) + } + q.logger.Info("finished setting up watermark table for qrep flow: ", q.config.FlowJobName) + } + return nil +} + // GetPartitions returns the partitions to replicate. 
func (q *QRepFlowExecution) GetPartitions( ctx workflow.Context, @@ -197,6 +238,22 @@ func (q *QRepFlowExecution) consolidatePartitions(ctx workflow.Context) error { return nil } +func (q *QRepFlowExecution) waitForNewRows(ctx workflow.Context, lastPartition *protos.QRepPartition) error { + q.logger.Info("idling until new rows are detected") + + ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + StartToCloseTimeout: 16 * 365 * 24 * time.Hour, // 16 years + HeartbeatTimeout: 5 * time.Minute, + }) + + if err := workflow.ExecuteActivity(ctx, flowable.QRepWaitUntilNewRows, q.config, + lastPartition).Get(ctx, nil); err != nil { + return fmt.Errorf("failed while idling for new rows: %w", err) + } + + return nil +} + func QRepFlowWorkflow( ctx workflow.Context, config *protos.QRepConfig, @@ -216,29 +273,19 @@ func QRepFlowWorkflow( maxParallelWorkers = int(config.MaxParallelWorkers) } - waitBetweenBatches := 5 * time.Second - if config.WaitBetweenBatchesSeconds > 0 { - waitBetweenBatches = time.Duration(config.WaitBetweenBatchesSeconds) * time.Second - } - - if config.BatchDurationSeconds == 0 { - config.BatchDurationSeconds = 60 - } - - if config.BatchSizeInt == 0 { - config.BatchSizeInt = 10000 - } - // register a signal handler to terminate the workflow terminateWorkflow := false - signalChan := workflow.GetSignalChannel(ctx, "terminate") + signalChan := workflow.GetSignalChannel(ctx, shared.CDCFlowSignalName) s := workflow.NewSelector(ctx) s.AddReceive(signalChan, func(c workflow.ReceiveChannel, _ bool) { - var signal string - c.Receive(ctx, &signal) - logger.Info("Received signal to terminate workflow", "Signal", signal) - terminateWorkflow = true + var signalVal shared.CDCFlowSignal + c.Receive(ctx, &signalVal) + logger.Info("received signal", "signal", signalVal) + if signalVal == shared.ShutdownSignal { + logger.Info("received shutdown signal") + terminateWorkflow = true + } }) // register a query to get the number of partitions processed @@ 
-267,6 +314,11 @@ func QRepFlowWorkflow( } q.logger.Info("metadata tables setup for peer flow - ", config.FlowJobName) + err = q.SetupWatermarkTableOnDestination(ctx) + if err != nil { + return fmt.Errorf("failed to setup watermark table: %w", err) + } + logger.Info("fetching partitions to replicate for peer flow - ", config.FlowJobName) partitions, err := q.GetPartitions(ctx, lastPartition) if err != nil { @@ -304,9 +356,9 @@ func QRepFlowWorkflow( } // sleep for a while and continue the workflow - err = workflow.Sleep(ctx, waitBetweenBatches) + err = q.waitForNewRows(ctx, lastPartition) if err != nil { - return fmt.Errorf("failed to sleep: %w", err) + return err } workflow.GetLogger(ctx).Info("Continuing as new workflow", diff --git a/flow/workflows/setup_flow.go b/flow/workflows/setup_flow.go index cf5d39569..4702fef8e 100644 --- a/flow/workflows/setup_flow.go +++ b/flow/workflows/setup_flow.go @@ -30,8 +30,9 @@ import ( // - creating the normalized table on the destination peer type SetupFlowState struct { - CDCFlowName string - Progress []string + tableNameMapping map[string]string + CDCFlowName string + Progress []string } type SetupFlowExecution struct { @@ -102,7 +103,7 @@ func (s *SetupFlowExecution) ensurePullability( }) tmpMap := make(map[uint32]string) - srcTblIdentifiers := maps.Keys(config.TableNameMapping) + srcTblIdentifiers := maps.Keys(s.tableNameMapping) sort.Strings(srcTblIdentifiers) // create EnsurePullabilityInput for the srcTableName @@ -148,7 +149,7 @@ func (s *SetupFlowExecution) createRawTable( createRawTblInput := &protos.CreateRawTableInput{ PeerConnectionConfig: config.Destination, FlowJobName: s.CDCFlowName, - TableNameMapping: config.TableNameMapping, + TableNameMapping: s.tableNameMapping, CdcSyncMode: config.CdcSyncMode, } @@ -171,7 +172,7 @@ func (s *SetupFlowExecution) fetchTableSchemaAndSetupNormalizedTables( HeartbeatTimeout: 5 * time.Minute, }) - sourceTables := maps.Keys(flowConnectionConfigs.TableNameMapping) + sourceTables 
:= maps.Keys(s.tableNameMapping) sort.Strings(sourceTables) tableSchemaInput := &protos.GetTableSchemaBatchInput{ @@ -195,7 +196,7 @@ func (s *SetupFlowExecution) fetchTableSchemaAndSetupNormalizedTables( normalizedTableMapping := make(map[string]*protos.TableSchema) for _, srcTableName := range sortedSourceTables { tableSchema := tableNameSchemaMapping[srcTableName] - normalizedTableName := flowConnectionConfigs.TableNameMapping[srcTableName] + normalizedTableName := s.tableNameMapping[srcTableName] normalizedTableMapping[normalizedTableName] = tableSchema s.logger.Info("normalized table schema: ", normalizedTableName, " -> ", tableSchema) } @@ -251,9 +252,15 @@ func (s *SetupFlowExecution) executeSetupFlow( // SetupFlowWorkflow is the workflow that sets up the flow. func SetupFlowWorkflow(ctx workflow.Context, config *protos.FlowConnectionConfigs) (*protos.FlowConnectionConfigs, error) { + tblNameMapping := make(map[string]string) + for _, v := range config.TableMappings { + tblNameMapping[v.SourceTableIdentifier] = v.DestinationTableIdentifier + } + setupFlowState := &SetupFlowState{ - CDCFlowName: config.FlowJobName, - Progress: []string{}, + tableNameMapping: tblNameMapping, + CDCFlowName: config.FlowJobName, + Progress: []string{}, } // create the setup flow execution diff --git a/flow/workflows/snapshot_flow.go b/flow/workflows/snapshot_flow.go index 8d4d5faf9..fab06d3ce 100644 --- a/flow/workflows/snapshot_flow.go +++ b/flow/workflows/snapshot_flow.go @@ -3,10 +3,10 @@ package peerflow import ( "fmt" "regexp" - "sort" "time" "github.com/PeerDB-io/peer-flow/concurrency" + "github.com/PeerDB-io/peer-flow/connectors/utils" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/shared" "github.com/google/uuid" @@ -14,7 +14,6 @@ import ( "go.temporal.io/sdk/log" "go.temporal.io/sdk/temporal" "go.temporal.io/sdk/workflow" - "golang.org/x/exp/maps" ) type SnapshotFlowExecution struct { @@ -36,10 +35,15 @@ func (s *SnapshotFlowExecution) 
setupReplication( }, }) + tblNameMapping := make(map[string]string) + for _, v := range s.config.TableMappings { + tblNameMapping[v.SourceTableIdentifier] = v.DestinationTableIdentifier + } + setupReplicationInput := &protos.SetupReplicationInput{ PeerConnectionConfig: s.config.Source, FlowJobName: flowName, - TableNameMapping: s.config.TableNameMapping, + TableNameMapping: tblNameMapping, DoInitialCopy: s.config.DoInitialCopy, ExistingPublicationName: s.config.PublicationName, ExistingReplicationSlotName: s.config.ReplicationSlotName, @@ -79,12 +83,11 @@ func (s *SnapshotFlowExecution) cloneTable( boundSelector *concurrency.BoundSelector, childCtx workflow.Context, snapshotName string, - sourceTableName string, - destinationTableName string, + mapping *protos.TableMapping, ) error { flowName := s.config.FlowJobName - srcName := sourceTableName - dstName := destinationTableName + srcName := mapping.SourceTableIdentifier + dstName := mapping.DestinationTableIdentifier childWorkflowIDSideEffect := workflow.SideEffect(childCtx, func(ctx workflow.Context) interface{} { childWorkflowID := fmt.Sprintf("clone_%s_%s_%s", flowName, dstName, uuid.New().String()) reg := regexp.MustCompile("[^a-zA-Z0-9]+") @@ -123,7 +126,21 @@ func (s *SnapshotFlowExecution) cloneTable( sourcePostgres := s.config.Source sourcePostgres.GetPostgresConfig().TransactionSnapshot = snapshotName - query := fmt.Sprintf("SELECT * FROM %s WHERE ctid BETWEEN {{.start}} AND {{.end}}", srcName) + partitionCol := "ctid" + if mapping.PartitionKey != "" { + partitionCol = mapping.PartitionKey + } + + parsedSrcTable, err := utils.ParseSchemaTable(srcName) + if err != nil { + logrus.WithFields(logrus.Fields{ + "flowName": flowName, + "snapshotName": snapshotName, + }).Errorf("unable to parse source table") + return fmt.Errorf("unable to parse source table: %w", err) + } + query := fmt.Sprintf("SELECT * FROM %s WHERE %s BETWEEN {{.start}} AND {{.end}}", + parsedSrcTable.String(), partitionCol) numWorkers := 
uint32(8) if s.config.SnapshotMaxParallelWorkers > 0 { @@ -140,7 +157,7 @@ func (s *SnapshotFlowExecution) cloneTable( SourcePeer: sourcePostgres, DestinationPeer: s.config.Destination, Query: query, - WatermarkColumn: "ctid", + WatermarkColumn: partitionCol, WatermarkTable: srcName, InitialCopyOnly: true, DestinationTableIdentifier: dstName, @@ -148,6 +165,9 @@ func (s *SnapshotFlowExecution) cloneTable( SyncMode: s.config.SnapshotSyncMode, MaxParallelWorkers: numWorkers, StagingPath: s.config.SnapshotStagingPath, + WriteMode: &protos.QRepWriteMode{ + WriteType: protos.QRepWriteType_QREP_WRITE_MODE_APPEND, + }, } numPartitionsProcessed := 0 @@ -165,14 +185,12 @@ func (s *SnapshotFlowExecution) cloneTables( logrus.Infof("cloning tables for slot name %s and snapshotName %s", slotInfo.SlotName, slotInfo.SnapshotName) - srcTables := maps.Keys(s.config.TableNameMapping) - sort.Strings(srcTables) - - boundSelector := concurrency.NewBoundSelector(maxParallelClones, len(srcTables), ctx) + numTables := len(s.config.TableMappings) + boundSelector := concurrency.NewBoundSelector(maxParallelClones, numTables, ctx) - for _, srcTbl := range srcTables { - source := srcTbl - destination := s.config.TableNameMapping[source] + for _, v := range s.config.TableMappings { + source := v.SourceTableIdentifier + destination := v.DestinationTableIdentifier snapshotName := slotInfo.SnapshotName logrus.WithFields(logrus.Fields{ "snapshotName": snapshotName, @@ -180,7 +198,7 @@ func (s *SnapshotFlowExecution) cloneTables( "Cloning table with source table %s and destination table name %s", source, destination, ) - err := s.cloneTable(boundSelector, ctx, snapshotName, source, destination) + err := s.cloneTable(boundSelector, ctx, snapshotName, v) if err != nil { s.logger.Error("failed to start clone child workflow: ", err) continue @@ -237,7 +255,10 @@ func SnapshotFlowWorkflow(ctx workflow.Context, config *protos.FlowConnectionCon numTablesInParallel = 1 } + logger.Info("cloning tables in 
parallel: ", numTablesInParallel) se.cloneTables(ctx, slotInfo, numTablesInParallel) + } else { + logger.Info("skipping initial copy as 'doInitialCopy' is false") } if err := se.closeSlotKeepAlive(replCtx); err != nil { diff --git a/flow/workflows/sync_flow.go b/flow/workflows/sync_flow.go index fe7a28583..d59de7cf6 100644 --- a/flow/workflows/sync_flow.go +++ b/flow/workflows/sync_flow.go @@ -82,10 +82,8 @@ func (s *SyncFlowExecution) executeSyncFlow( } startFlowCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ - StartToCloseTimeout: 24 * time.Hour, - // TODO: activity needs to call heartbeat. - // see https://github.com/PeerDB-io/nexus/issues/216 - HeartbeatTimeout: 30 * time.Second, + StartToCloseTimeout: 72 * time.Hour, + HeartbeatTimeout: 5 * time.Minute, }) // execute StartFlow on the peers to start the flow @@ -102,6 +100,20 @@ func (s *SyncFlowExecution) executeSyncFlow( return nil, fmt.Errorf("failed to flow: %w", err) } + replayTableSchemaDeltaCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ + StartToCloseTimeout: 30 * time.Minute, + }) + replayTableSchemaInput := &protos.ReplayTableSchemaDeltaInput{ + FlowConnectionConfigs: config, + TableSchemaDeltas: syncRes.TableSchemaDeltas, + } + + fReplayTableSchemaDelta := workflow.ExecuteActivity(replayTableSchemaDeltaCtx, + flowable.ReplayTableSchemaDeltas, replayTableSchemaInput) + if err := fReplayTableSchemaDelta.Get(replayTableSchemaDeltaCtx, nil); err != nil { + return nil, fmt.Errorf("failed to replay schema delta: %w", err) + } + return syncRes, nil } @@ -122,20 +134,18 @@ func SyncFlowWorkflow(ctx workflow.Context, func NormalizeFlowWorkflow(ctx workflow.Context, config *protos.FlowConnectionConfigs, - tableSchemaDelta *protos.TableSchemaDelta, ) (*model.NormalizeResponse, error) { s := NewNormalizeFlowExecution(ctx, &NormalizeFlowState{ CDCFlowName: config.FlowJobName, Progress: []string{}, }) - return s.executeNormalizeFlow(ctx, config, tableSchemaDelta) + return 
s.executeNormalizeFlow(ctx, config) } func (s *NormalizeFlowExecution) executeNormalizeFlow( ctx workflow.Context, config *protos.FlowConnectionConfigs, - tableSchemaDelta *protos.TableSchemaDelta, ) (*model.NormalizeResponse, error) { s.logger.Info("executing normalize flow - ", s.CDCFlowName) @@ -155,16 +165,5 @@ func (s *NormalizeFlowExecution) executeNormalizeFlow( return nil, fmt.Errorf("failed to flow: %w", err) } - replayTableSchemaInput := &protos.ReplayTableSchemaDeltaInput{ - FlowConnectionConfigs: config, - TableSchemaDelta: tableSchemaDelta, - } - - fReplayTableSchemaDelta := workflow.ExecuteActivity(normalizeFlowCtx, flowable.ReplayTableSchemaDelta, - replayTableSchemaInput) - if err := fReplayTableSchemaDelta.Get(normalizeFlowCtx, nil); err != nil { - return nil, fmt.Errorf("failed to replay schema delta: %w", err) - } - return normalizeResponse, nil } diff --git a/nexus/Cargo.lock b/nexus/Cargo.lock index 3876b4be1..2ea6421fe 100644 --- a/nexus/Cargo.lock +++ b/nexus/Cargo.lock @@ -1989,7 +1989,7 @@ dependencies = [ "anyhow", "async-recursion", "async-trait", - "base64 0.13.1", + "base64 0.21.4", "catalog", "chrono", "dashmap", diff --git a/nexus/analyzer/src/lib.rs b/nexus/analyzer/src/lib.rs index e1b396998..a06abb34a 100644 --- a/nexus/analyzer/src/lib.rs +++ b/nexus/analyzer/src/lib.rs @@ -88,8 +88,15 @@ impl<'a> StatementAnalyzer for PeerExistanceAnalyzer<'a> { /// PeerDDLAnalyzer is a statement analyzer that checks if the given /// statement is a PeerDB DDL statement. If it is, it returns the type of /// DDL statement. 
-#[derive(Default)] -pub struct PeerDDLAnalyzer; +pub struct PeerDDLAnalyzer<'a> { + peers: &'a HashMap, +} + +impl<'a> PeerDDLAnalyzer<'a> { + pub fn new(peers: &'a HashMap) -> Self { + Self { peers } + } +} #[derive(Debug, Clone)] pub enum PeerDDL { @@ -97,6 +104,10 @@ pub enum PeerDDL { peer: Box, if_not_exists: bool, }, + DropPeer { + peer_name: String, + if_exists: bool, + }, CreateMirrorForCDC { if_not_exists: bool, flow_job: FlowJob, @@ -114,7 +125,7 @@ pub enum PeerDDL { }, } -impl StatementAnalyzer for PeerDDLAnalyzer { +impl<'a> StatementAnalyzer for PeerDDLAnalyzer<'a> { type Output = Option; fn analyze(&self, statement: &Statement) -> anyhow::Result { @@ -126,7 +137,7 @@ impl StatementAnalyzer for PeerDDLAnalyzer { with_options, } => { let db_type = DbType::from(peer_type.clone()); - let config = parse_db_options(db_type, with_options.clone())?; + let config = parse_db_options(self.peers, db_type, with_options.clone())?; let peer = Peer { name: peer_name.to_string().to_lowercase(), r#type: db_type as i32, @@ -145,16 +156,14 @@ impl StatementAnalyzer for PeerDDLAnalyzer { match create_mirror { CDC(cdc) => { let mut flow_job_table_mappings = vec![]; - for table_mapping in &cdc.table_mappings { + for table_mapping in &cdc.mapping_options { flow_job_table_mappings.push(FlowJobTableMapping { - source_table_identifier: table_mapping - .source_table_identifier - .to_string() - .to_lowercase(), - target_table_identifier: table_mapping - .target_table_identifier - .to_string() - .to_lowercase(), + source_table_identifier: table_mapping.source.to_string(), + destination_table_identifier: table_mapping.destination.to_string(), + partition_key: table_mapping + .partition_key + .clone() + .map(|s| s.to_string()), }); } @@ -165,6 +174,32 @@ impl StatementAnalyzer for PeerDDLAnalyzer { } let do_initial_copy = match raw_options.remove("do_initial_copy") { Some(sqlparser::ast::Value::Boolean(b)) => *b, + // also support "true" and "false" as strings + 
Some(sqlparser::ast::Value::SingleQuotedString(s)) => { + match s.as_ref() { + "true" => true, + "false" => false, + _ => { + return Err(anyhow::anyhow!( + "do_initial_copy must be a boolean" + )) + } + } + } + _ => return Err(anyhow::anyhow!("do_initial_copy must be a boolean")), + }; + + // bool resync true or false, default to false if not in opts + let resync = match raw_options.remove("resync") { + Some(sqlparser::ast::Value::Boolean(b)) => *b, + // also support "true" and "false" as strings + Some(sqlparser::ast::Value::SingleQuotedString(s)) => { + match s.as_ref() { + "true" => true, + "false" => false, + _ => return Err(anyhow::anyhow!("resync must be a boolean")), + } + } _ => false, }; @@ -275,6 +310,7 @@ impl StatementAnalyzer for PeerDDLAnalyzer { push_batch_size, push_parallelism, max_batch_size, + resync, }; // Error reporting @@ -335,6 +371,13 @@ impl StatementAnalyzer for PeerDDLAnalyzer { if_exists: *if_exists, flow_job_name: mirror_name.to_string().to_lowercase(), })), + Statement::DropPeer { + if_exists, + peer_name, + } => Ok(Some(PeerDDL::DropPeer { + if_exists: *if_exists, + peer_name: peer_name.to_string().to_lowercase(), + })), _ => Ok(None), } } @@ -395,6 +438,7 @@ impl StatementAnalyzer for PeerCursorAnalyzer { } fn parse_db_options( + peers: &HashMap, db_type: DbType, with_options: Vec, ) -> anyhow::Result> { @@ -403,6 +447,8 @@ fn parse_db_options( let key = opt.name.value; let val = match opt.value { sqlparser::ast::Value::SingleQuotedString(str) => str, + sqlparser::ast::Value::Number(v, _) => v, + sqlparser::ast::Value::Boolean(v) => v.to_string(), _ => panic!("invalid option type for peer"), }; opts.insert(key, val); @@ -545,33 +591,32 @@ fn parse_db_options( Some(config) } DbType::Eventhub => { - let mut metadata_db = PostgresConfig::default(); - let conn_str = opts + let conn_str: String = opts .get("metadata_db") - .context("no metadata db specified")?; - let param_pairs: Vec<&str> = conn_str.split_whitespace().collect(); - match 
param_pairs.len() { - 5 => Ok(true), - _ => Err(anyhow::Error::msg("Invalid connection string. Check formatting and if the required parameters have been specified.")), - }?; - for pair in param_pairs { - let key_value: Vec<&str> = pair.trim().split('=').collect(); - match key_value.len() { - 2 => Ok(true), - _ => Err(anyhow::Error::msg( - "Invalid config setting for PG. Check the formatting", - )), - }?; - let value = key_value[1].to_string(); - match key_value[0] { - "host" => metadata_db.host = value, - "port" => metadata_db.port = value.parse().context("Invalid PG Port")?, - "database" => metadata_db.database = value, - "user" => metadata_db.user = value, - "password" => metadata_db.password = value, - _ => (), - }; - } + .map(|s| s.to_string()) + .unwrap_or_default(); + let metadata_db = parse_metadata_db_info(&conn_str)?; + let subscription_id = opts + .get("subscription_id") + .map(|s| s.to_string()) + .unwrap_or_default(); + + // partition_count default to 3 if not set, parse as int + let partition_count = opts + .get("partition_count") + .map(|s| s.to_string()) + .unwrap_or_else(|| "3".to_string()) + .parse::() + .context("unable to parse partition_count as valid int")?; + + // message_retention_in_days default to 7 if not set, parse as int + let message_retention_in_days = opts + .get("message_retention_in_days") + .map(|s| s.to_string()) + .unwrap_or_else(|| "7".to_string()) + .parse::() + .context("unable to parse message_retention_in_days as valid int")?; + let eventhub_config = EventHubConfig { namespace: opts .get("namespace") @@ -585,17 +630,31 @@ fn parse_db_options( .get("location") .context("location not specified")? 
.to_string(), - metadata_db: Some(metadata_db), + metadata_db, + subscription_id, + partition_count, + message_retention_in_days, }; let config = Config::EventhubConfig(eventhub_config); Some(config) } DbType::S3 => { + let s3_conn_str: String = opts + .get("metadata_db") + .map(|s| s.to_string()) + .unwrap_or_default(); + let metadata_db = parse_metadata_db_info(&s3_conn_str)?; let s3_config = S3Config { url: opts .get("url") .context("S3 bucket url not specified")? .to_string(), + access_key_id: opts.get("access_key_id").map(|s| s.to_string()), + secret_access_key: opts.get("secret_access_key").map(|s| s.to_string()), + region: opts.get("region").map(|s| s.to_string()), + role_arn: opts.get("role_arn").map(|s| s.to_string()), + endpoint: opts.get("endpoint").map(|s| s.to_string()), + metadata_db, }; let config = Config::S3Config(s3_config); Some(config) @@ -622,7 +681,96 @@ fn parse_db_options( let config = Config::SqlserverConfig(sqlserver_config); Some(config) } + DbType::EventhubGroup => { + let conn_str = opts + .get("metadata_db") + .context("no metadata db specified")?; + let metadata_db = parse_metadata_db_info(conn_str)?; + + // metadata_db is required for eventhub group + if metadata_db.is_none() { + anyhow::bail!("metadata_db is required for eventhub group"); + } + + // split comma separated list of columns and trim + let unnest_columns = opts + .get("unnest_columns") + .map(|columns| { + columns + .split(',') + .map(|column| column.trim().to_string()) + .collect::>() + }) + .unwrap_or_default(); + + let keys_to_ignore: HashSet = vec!["metadata_db", "unnest_columns"] + .into_iter() + .map(|s| s.to_string()) + .collect(); + + let mut eventhubs: HashMap = HashMap::new(); + for (key, _) in opts { + if keys_to_ignore.contains(&key) { + continue; + } + + // check if peers contains key and if it does + // then add it to the eventhubs hashmap, if not error + if let Some(peer) = peers.get(&key) { + let eventhub_config = peer.config.clone().unwrap(); + if let 
Config::EventhubConfig(eventhub_config) = eventhub_config { + eventhubs.insert(key.to_string(), eventhub_config); + } else { + anyhow::bail!("Peer '{}' is not an eventhub", key); + } + } else { + anyhow::bail!("Peer '{}' does not exist", key); + } + } + + let eventhub_group_config = pt::peerdb_peers::EventHubGroupConfig { + eventhubs, + metadata_db, + unnest_columns, + }; + let config = Config::EventhubGroupConfig(eventhub_group_config); + Some(config) + } }; Ok(config) } + +fn parse_metadata_db_info(conn_str: &str) -> anyhow::Result> { + if conn_str.is_empty() { + return Ok(None); + } + + let mut metadata_db = PostgresConfig::default(); + let param_pairs: Vec<&str> = conn_str.split_whitespace().collect(); + match param_pairs.len() { + 5 => Ok(true), + _ => Err(anyhow::Error::msg("Invalid connection string. Check formatting and if the required parameters have been specified.")), + }?; + + for pair in param_pairs { + let key_value: Vec<&str> = pair.trim().split('=').collect(); + match key_value.len() { + 2 => Ok(true), + _ => Err(anyhow::Error::msg( + "Invalid config setting for PG. Check the formatting", + )), + }?; + let value = key_value[1].to_string(); + match key_value[0] { + "host" => metadata_db.host = value, + "port" => metadata_db.port = value.parse().context("Invalid PG Port")?, + "database" => metadata_db.database = value, + "user" => metadata_db.user = value, + "password" => metadata_db.password = value, + _ => (), + }; + } + + Ok(Some(metadata_db)) +} diff --git a/nexus/analyzer/src/qrep.rs b/nexus/analyzer/src/qrep.rs index e11242203..7107d2728 100644 --- a/nexus/analyzer/src/qrep.rs +++ b/nexus/analyzer/src/qrep.rs @@ -80,35 +80,27 @@ lazy_static::lazy_static! 
{ default_value: 10, required: false, }, - QRepOptionType::Int { - name: "batch_size_int", - min_value: Some(1), - default_value: 1000, - required: false, - }, - QRepOptionType::Int { - name: "batch_duration_timestamp", - min_value: Some(1), - default_value: 60, - required: false, - }, QRepOptionType::Int { name: "num_rows_per_partition", - min_value: Some(0), - default_value: 0, - required: false, + min_value: Some(1), + default_value: 50000, + required: true, }, QRepOptionType::Boolean { name: "initial_copy_only", default_value: false, required: false, }, - ] + QRepOptionType::Boolean { + name: "setup_watermark_table_on_destination", + default_value: false, + required: false + }] }; } pub fn process_options( - raw_opts: HashMap<&str, &SqlValue>, + mut raw_opts: HashMap<&str, &SqlValue>, ) -> anyhow::Result> { let mut opts: HashMap = HashMap::new(); @@ -120,7 +112,7 @@ pub fn process_options( required, accepted_values, } => { - if let Some(raw_value) = raw_opts.get(*name) { + if let Some(raw_value) = raw_opts.remove(*name) { if let SqlValue::SingleQuotedString(str) = raw_value { if let Some(values) = accepted_values { if !values.contains(&str.as_str()) { @@ -143,7 +135,7 @@ pub fn process_options( default_value, required, } => { - if let Some(raw_value) = raw_opts.get(*name) { + if let Some(raw_value) = raw_opts.remove(*name) { if let SqlValue::Number(num_str, _) = raw_value { let num = num_str.parse::()?; if let Some(min) = min_value { @@ -164,7 +156,7 @@ pub fn process_options( } QRepOptionType::StringArray { name } => { // read it as a string and split on comma - if let Some(raw_value) = raw_opts.get(*name) { + if let Some(raw_value) = raw_opts.remove(*name) { if let SqlValue::SingleQuotedString(str) = raw_value { let values: Vec = str .split(',') @@ -181,7 +173,7 @@ pub fn process_options( default_value, required, } => { - if let Some(raw_value) = raw_opts.get(*name) { + if let Some(raw_value) = raw_opts.remove(*name) { if let SqlValue::Boolean(b) = raw_value 
{ opts.insert((*name).to_string(), Value::Bool(*b)); } else { @@ -197,5 +189,21 @@ pub fn process_options( } } + // all options processed have been removed from the map + // so any leftover keys are options that shouldn't be here + if !raw_opts.is_empty() { + anyhow::bail!( + "Unknown options for QRep mirrors: {:#?}", + raw_opts.into_keys().collect::>() + ); + } + + // If mode is upsert, we need unique key columns + if opts.get("mode") == Some(&Value::String(String::from("upsert"))) + && (opts.get("unique_key_columns").is_none() + || opts.get("unique_key_columns") == Some(&Value::Array(vec![]))) + { + anyhow::bail!("For upsert mode, unique_key_columns must be specified"); + } Ok(opts) } diff --git a/nexus/catalog/migrations/V10__mirror_drop_bad_constraints.sql b/nexus/catalog/migrations/V10__mirror_drop_bad_constraints.sql new file mode 100644 index 000000000..7bf7de164 --- /dev/null +++ b/nexus/catalog/migrations/V10__mirror_drop_bad_constraints.sql @@ -0,0 +1,19 @@ +-- Drop the foreign key constraint from qrep_partitions to qrep_runs +ALTER TABLE peerdb_stats.qrep_partitions +DROP CONSTRAINT fk_qrep_partitions_run_uuid; + +-- Drop the unique constraint for flow_name from qrep_runs +ALTER TABLE peerdb_stats.qrep_runs +DROP CONSTRAINT uq_qrep_runs_flow_name; + +-- Add unique constraint to qrep_runs for (flow_name, run_uuid) +ALTER TABLE peerdb_stats.qrep_runs +ADD CONSTRAINT uq_qrep_runs_flow_run +UNIQUE (flow_name, run_uuid); + +-- Add foreign key from qrep_partitions to qrep_runs +ALTER TABLE peerdb_stats.qrep_partitions +ADD CONSTRAINT fk_qrep_partitions_run +FOREIGN KEY (flow_name, run_uuid) +REFERENCES peerdb_stats.qrep_runs(flow_name, run_uuid) +ON DELETE CASCADE; diff --git a/nexus/catalog/migrations/V11__qrep_runs_start_time_nullable.sql b/nexus/catalog/migrations/V11__qrep_runs_start_time_nullable.sql new file mode 100644 index 000000000..3e7824911 --- /dev/null +++ b/nexus/catalog/migrations/V11__qrep_runs_start_time_nullable.sql @@ -0,0 +1,5 @@ +ALTER 
TABLE peerdb_stats.qrep_runs +ALTER COLUMN start_time DROP NOT NULL; + +ALTER TABLE peerdb_stats.qrep_partitions +ALTER COLUMN start_time DROP NOT NULL; diff --git a/nexus/catalog/migrations/V7__store_flow_config.sql b/nexus/catalog/migrations/V7__store_flow_config.sql new file mode 100644 index 000000000..f9e6d4c0f --- /dev/null +++ b/nexus/catalog/migrations/V7__store_flow_config.sql @@ -0,0 +1,2 @@ +ALTER TABLE flows +ADD COLUMN config_proto BYTEA; diff --git a/nexus/catalog/migrations/V8__qrep_runs_config.sql b/nexus/catalog/migrations/V8__qrep_runs_config.sql new file mode 100644 index 000000000..65091d0db --- /dev/null +++ b/nexus/catalog/migrations/V8__qrep_runs_config.sql @@ -0,0 +1,2 @@ +ALTER TABLE peerdb_stats.qrep_runs +ADD COLUMN config_proto BYTEA; diff --git a/nexus/catalog/migrations/V9__mirror_stats_rels.sql b/nexus/catalog/migrations/V9__mirror_stats_rels.sql new file mode 100644 index 000000000..e462258f8 --- /dev/null +++ b/nexus/catalog/migrations/V9__mirror_stats_rels.sql @@ -0,0 +1,59 @@ +-- For the cdc_batches, set batch_id as the primary key +ALTER TABLE peerdb_stats.cdc_batches +ADD COLUMN id SERIAL PRIMARY KEY; + +-- add incrementing id column to cdc_batch_table, make this the primary key +ALTER TABLE peerdb_stats.cdc_batch_table +ADD COLUMN id SERIAL PRIMARY KEY; + +-- add incrementing id column to qrep_runs, make this the primary key +ALTER TABLE peerdb_stats.qrep_runs +ADD COLUMN id SERIAL PRIMARY KEY; + +-- add unique for flow_name to qrep_runs +ALTER TABLE peerdb_stats.qrep_runs +ADD CONSTRAINT uq_qrep_runs_flow_name +UNIQUE (flow_name); + +-- add incrementing id column to qrep_partitions, make this the primary key +ALTER TABLE peerdb_stats.qrep_partitions +ADD COLUMN id SERIAL PRIMARY KEY; + +-- For peerdb_stats.cdc_batches +CREATE INDEX idx_cdc_batches_flow_name ON peerdb_stats.cdc_batches USING HASH(flow_name); +CREATE INDEX idx_cdc_batches_batch_id ON peerdb_stats.cdc_batches(batch_id); +CREATE INDEX idx_cdc_batches_start_time ON 
peerdb_stats.cdc_batches(start_time); + +-- For peerdb_stats.cdc_batch_table +CREATE INDEX idx_cdc_batch_table_flow_name_batch_id ON peerdb_stats.cdc_batch_table(flow_name, batch_id); + +-- For peerdb_stats.qrep_runs +CREATE INDEX idx_qrep_runs_flow_name ON peerdb_stats.qrep_runs USING HASH(flow_name); +CREATE INDEX idx_qrep_runs_run_uuid ON peerdb_stats.qrep_runs USING HASH(run_uuid); +CREATE INDEX idx_qrep_runs_start_time ON peerdb_stats.qrep_runs(start_time); + +-- For peerdb_stats.qrep_partitions +CREATE INDEX idx_qrep_partitions_flow_name_run_uuid ON peerdb_stats.qrep_partitions(flow_name, run_uuid); +CREATE INDEX idx_qrep_partitions_partition_uuid ON peerdb_stats.qrep_partitions USING HASH(partition_uuid); +CREATE INDEX idx_qrep_partitions_start_time ON peerdb_stats.qrep_partitions(start_time); + +-- add fkey from cdc_batches to cdc_flows +ALTER TABLE peerdb_stats.cdc_batches +ADD CONSTRAINT fk_cdc_batches_flow_name +FOREIGN KEY (flow_name) +REFERENCES peerdb_stats.cdc_flows (flow_name) +ON DELETE CASCADE; + +-- add fkey from cdc_batch_table to cdc_flows +ALTER TABLE peerdb_stats.cdc_batch_table +ADD CONSTRAINT fk_cdc_batch_table_flow_name +FOREIGN KEY (flow_name) +REFERENCES peerdb_stats.cdc_flows (flow_name) +ON DELETE CASCADE; + +-- add fkey from qrep_partitions to qrep_runs +ALTER TABLE peerdb_stats.qrep_partitions +ADD CONSTRAINT fk_qrep_partitions_run_uuid +FOREIGN KEY (flow_name) +REFERENCES peerdb_stats.qrep_runs (flow_name) +ON DELETE CASCADE; diff --git a/nexus/catalog/src/lib.rs b/nexus/catalog/src/lib.rs index bab49b945..fb4c94296 100644 --- a/nexus/catalog/src/lib.rs +++ b/nexus/catalog/src/lib.rs @@ -143,6 +143,11 @@ impl Catalog { buf.reserve(config_len); sqlserver_config.encode(&mut buf)?; } + Config::EventhubGroupConfig(eventhub_group_config) => { + let config_len = eventhub_group_config.encoded_len(); + buf.reserve(config_len); + eventhub_group_config.encode(&mut buf)?; + } }; buf @@ -331,6 +336,16 @@ impl Catalog { 
pt::peerdb_peers::SqlServerConfig::decode(options.as_slice()).context(err)?; Ok(Some(Config::SqlserverConfig(sqlserver_config))) } + Some(DbType::EventhubGroup) => { + let err = format!( + "unable to decode {} options for peer {}", + "eventhub_group", name + ); + let eventhub_group_config = + pt::peerdb_peers::EventHubGroupConfig::decode(options.as_slice()) + .context(err)?; + Ok(Some(Config::EventhubGroupConfig(eventhub_group_config))) + } None => Ok(None), } } @@ -388,7 +403,7 @@ impl Catalog { .await?, &self .normalize_schema_for_table_identifier( - &table_mapping.target_table_identifier, + &table_mapping.destination_table_identifier, destination_peer_id, ) .await?, @@ -512,7 +527,14 @@ impl Catalog { } let first_row = rows.get(0).unwrap(); - let workflow_id: String = first_row.get(0); + let workflow_id: Option = first_row.get(0); + if workflow_id.is_none() { + return Err(anyhow!( + "workflow id not found for existing flow job {}", + flow_job_name + )); + } + let workflow_id = workflow_id.unwrap(); let source_peer_id: i32 = first_row.get(1); let destination_peer_id: i32 = first_row.get(2); @@ -542,4 +564,13 @@ impl Catalog { } Ok(()) } + + pub async fn check_peer_entry(&self, peer_name: &str) -> anyhow::Result { + let peer_check = self + .pg + .query_one("SELECT COUNT(*) FROM PEERS WHERE NAME = $1", &[&peer_name]) + .await?; + let peer_count: i64 = peer_check.get(0); + Ok(peer_count) + } } diff --git a/nexus/flow-rs/src/grpc.rs b/nexus/flow-rs/src/grpc.rs index f15b43106..cc4a6153f 100644 --- a/nexus/flow-rs/src/grpc.rs +++ b/nexus/flow-rs/src/grpc.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, time::Duration}; +use std::time::Duration; use anyhow::Context; use catalog::WorkflowDetails; @@ -10,6 +10,11 @@ use pt::{ use serde_json::Value; use tonic_health::pb::health_client; +pub enum PeerValidationResult { + Valid, + Invalid(String), +} + pub struct FlowGrpcClient { client: peerdb_route::flow_service_client::FlowServiceClient, health_client: 
health_client::HealthClient, @@ -76,18 +81,38 @@ impl FlowGrpcClient { ) -> anyhow::Result { let create_qrep_flow_req = pt::peerdb_route::CreateQRepFlowRequest { qrep_config: Some(qrep_config.clone()), + create_catalog_entry: false, }; let response = self.client.create_q_rep_flow(create_qrep_flow_req).await?; let workflow_id = response.into_inner().worflow_id; Ok(workflow_id) } + pub async fn validate_peer( + &mut self, + validate_request: &pt::peerdb_route::ValidatePeerRequest, + ) -> anyhow::Result { + let validate_peer_req = pt::peerdb_route::ValidatePeerRequest { + peer: validate_request.peer.clone(), + }; + let response = self.client.validate_peer(validate_peer_req).await?; + let response_body = &response.into_inner(); + let message = response_body.message.clone(); + let status = response_body.status; + if status == pt::peerdb_route::ValidatePeerStatus::Valid as i32 { + Ok(PeerValidationResult::Valid) + } else { + Ok(PeerValidationResult::Invalid(message)) + } + } + async fn start_peer_flow( &mut self, peer_flow_config: pt::peerdb_flow::FlowConnectionConfigs, ) -> anyhow::Result { let create_peer_flow_req = pt::peerdb_route::CreateCdcFlowRequest { connection_configs: Some(peer_flow_config), + create_catalog_entry: false, }; let response = self.client.create_cdc_flow(create_peer_flow_req).await?; let workflow_id = response.into_inner().worflow_id; @@ -117,18 +142,35 @@ impl FlowGrpcClient { } } + pub async fn drop_peer(&mut self, peer_name: &str) -> anyhow::Result<()> { + let drop_peer_req = pt::peerdb_route::DropPeerRequest { + peer_name: String::from(peer_name), + }; + let response = self.client.drop_peer(drop_peer_req).await?; + let drop_response = response.into_inner(); + if drop_response.ok { + Ok(()) + } else { + Err(anyhow::anyhow!(format!( + "failed to drop peer: {:?}", + drop_response.error_message + ))) + } + } + pub async fn start_peer_flow_job( &mut self, job: &FlowJob, src: pt::peerdb_peers::Peer, dst: pt::peerdb_peers::Peer, ) -> anyhow::Result { 
- let mut src_dst_name_map: HashMap = HashMap::new(); + let mut table_mappings: Vec = vec![]; job.table_mappings.iter().for_each(|mapping| { - src_dst_name_map.insert( - mapping.source_table_identifier.clone(), - mapping.target_table_identifier.clone(), - ); + table_mappings.push(pt::peerdb_flow::TableMapping { + source_table_identifier: mapping.source_table_identifier.clone(), + destination_table_identifier: mapping.destination_table_identifier.clone(), + partition_key: mapping.partition_key.clone().unwrap_or_default(), + }); }); let do_initial_copy = job.do_initial_copy; @@ -142,7 +184,7 @@ impl FlowGrpcClient { source: Some(src), destination: Some(dst), flow_job_name: job.name.clone(), - table_name_mapping: src_dst_name_map, + table_mappings, do_initial_copy, publication_name: publication_name.unwrap_or_default(), snapshot_num_rows_per_partition: snapshot_num_rows_per_partition.unwrap_or(0), @@ -165,6 +207,7 @@ impl FlowGrpcClient { push_batch_size: job.push_batch_size.unwrap_or_default(), push_parallelism: job.push_parallelism.unwrap_or_default(), max_batch_size: job.max_batch_size.unwrap_or_default(), + resync: job.resync, ..Default::default() }; @@ -218,13 +261,6 @@ impl FlowGrpcClient { } "append" => cfg.write_mode = Some(wm), "overwrite" => { - if !cfg.initial_copy_only { - return anyhow::Result::Err( - anyhow::anyhow!( - "write mode overwrite can only be set with initial_copy_only = true" - ) - ); - } wm.write_type = QRepWriteType::QrepWriteModeOverwrite as i32; cfg.write_mode = Some(wm); } @@ -245,16 +281,6 @@ impl FlowGrpcClient { cfg.wait_between_batches_seconds = n as u32; } } - "batch_size_int" => { - if let Some(n) = n.as_i64() { - cfg.batch_size_int = n as u32; - } - } - "batch_duration_timestamp" => { - if let Some(n) = n.as_i64() { - cfg.batch_duration_seconds = n as u32; - } - } "num_rows_per_partition" => { if let Some(n) = n.as_i64() { cfg.num_rows_per_partition = n as u32; @@ -265,6 +291,8 @@ impl FlowGrpcClient { Value::Bool(v) => { if key == 
"initial_copy_only" { cfg.initial_copy_only = *v; + } else if key == "setup_watermark_table_on_destination" { + cfg.setup_watermark_table_on_destination = *v; } else { return anyhow::Result::Err(anyhow::anyhow!("invalid bool option {}", key)); } @@ -274,7 +302,19 @@ impl FlowGrpcClient { } } } - + if !cfg.initial_copy_only { + if let Some(QRepWriteMode { + write_type: wt, + upsert_key_columns: _, + }) = cfg.write_mode + { + if wt == QRepWriteType::QrepWriteModeOverwrite as i32 { + return anyhow::Result::Err(anyhow::anyhow!( + "write mode overwrite can only be set with initial_copy_only = true" + )); + } + } + } self.start_query_replication_flow(&cfg).await } diff --git a/nexus/parser/src/lib.rs b/nexus/parser/src/lib.rs index 1233ab8bf..f5b2aac34 100644 --- a/nexus/parser/src/lib.rs +++ b/nexus/parser/src/lib.rs @@ -23,7 +23,7 @@ pub struct NexusQueryParser { pub enum NexusStatement { PeerDDL { stmt: Statement, - ddl: PeerDDL, + ddl: Box, }, PeerQuery { stmt: Statement, @@ -42,7 +42,7 @@ impl NexusStatement { stmt: &Statement, ) -> PgWireResult { let ddl = { - let pdl: PeerDDLAnalyzer = Default::default(); + let pdl: PeerDDLAnalyzer = PeerDDLAnalyzer::new(&peers); pdl.analyze(stmt).map_err(|e| { PgWireError::UserError(Box::new(ErrorInfo::new( "ERROR".to_owned(), @@ -55,7 +55,7 @@ impl NexusStatement { if let Some(ddl) = ddl { return Ok(NexusStatement::PeerDDL { stmt: stmt.clone(), - ddl, + ddl: Box::new(ddl), }); } @@ -100,8 +100,7 @@ impl NexusQueryParser { let peers = tokio::task::block_in_place(move || { tokio::runtime::Handle::current().block_on(async move { let catalog = self.catalog.lock().await; - let peers = catalog.get_peers().await; - peers + catalog.get_peers().await }) }); diff --git a/nexus/peer-bigquery/src/ast.rs b/nexus/peer-bigquery/src/ast.rs index dd020a668..075bcc09c 100644 --- a/nexus/peer-bigquery/src/ast.rs +++ b/nexus/peer-bigquery/src/ast.rs @@ -78,17 +78,19 @@ impl BigqueryAst { visit_function_arg_mut(query, |node| { if let 
FunctionArgExpr::Expr(arg_expr) = node { - if let Expr::Cast { expr: _, data_type } = arg_expr { - if let DataType::Array(_) = data_type { - let list = self - .flatten_expr_to_in_list(&arg_expr) - .expect("failed to flatten in function"); - let rewritten_array = Array { - elem: list, - named: true, - }; - *node = FunctionArgExpr::Expr(Expr::Array(rewritten_array)); - } + if let Expr::Cast { + expr: _, + data_type: DataType::Array(_), + } = arg_expr + { + let list = self + .flatten_expr_to_in_list(arg_expr) + .expect("failed to flatten in function"); + let rewritten_array = Array { + elem: list, + named: true, + }; + *node = FunctionArgExpr::Expr(Expr::Array(rewritten_array)); } } diff --git a/nexus/peer-bigquery/src/cursor.rs b/nexus/peer-bigquery/src/cursor.rs index ab591dcc3..23812a382 100644 --- a/nexus/peer-bigquery/src/cursor.rs +++ b/nexus/peer-bigquery/src/cursor.rs @@ -9,7 +9,6 @@ use sqlparser::ast::Statement; use crate::BigQueryQueryExecutor; pub struct BigQueryCursor { - stmt: Statement, position: usize, stream: Mutex, schema: SchemaRef, @@ -42,7 +41,6 @@ impl BigQueryCursorManager { // Create a new cursor let cursor = BigQueryCursor { - stmt: stmt.clone(), position: 0, stream: Mutex::new(stream), schema, diff --git a/nexus/peer-bigquery/src/stream.rs b/nexus/peer-bigquery/src/stream.rs index 0edf35757..fc4867b4f 100644 --- a/nexus/peer-bigquery/src/stream.rs +++ b/nexus/peer-bigquery/src/stream.rs @@ -4,7 +4,7 @@ use std::{ task::{Context, Poll}, }; -use chrono::{DateTime, NaiveDateTime, Utc}; +use chrono::{NaiveDateTime, TimeZone, Utc}; use futures::Stream; use gcp_bigquery_client::model::{ field_type::FieldType, query_response::ResultSet, table_field_schema::TableFieldSchema, @@ -149,10 +149,7 @@ impl BqRecordStream { if let Some(ts) = timestamp { let naive_datetime = NaiveDateTime::from_timestamp_opt(ts, 0) .ok_or(anyhow::Error::msg("Invalid naive datetime"))?; - Some(Value::Timestamp(DateTime::::from_utc( - naive_datetime, - Utc, - ))) + 
Some(Value::Timestamp(Utc.from_utc_datetime(&naive_datetime))) } else { None } diff --git a/nexus/peer-cursor/src/util.rs b/nexus/peer-cursor/src/util.rs index e3e4405aa..e87478d67 100644 --- a/nexus/peer-cursor/src/util.rs +++ b/nexus/peer-cursor/src/util.rs @@ -88,7 +88,7 @@ pub fn records_to_query_response<'a>(records: Records) -> PgWireResult> = Arc::new(records.schema.fields.clone()); let schema_copy = pg_schema.clone(); - let data_row_stream = stream::iter(records.records.into_iter()) + let data_row_stream = stream::iter(records.records) .map(move |record| { let mut encoder = DataRowEncoder::new(schema_copy.clone()); for value in record.values.iter() { diff --git a/nexus/peer-snowflake/Cargo.toml b/nexus/peer-snowflake/Cargo.toml index c8e5f673b..ce61c7977 100644 --- a/nexus/peer-snowflake/Cargo.toml +++ b/nexus/peer-snowflake/Cargo.toml @@ -15,7 +15,7 @@ pgerror = { path = "../pgerror" } secrecy = { version = "0.8.0" } async-trait = "0.1.57" jsonwebtoken = { version = "8.0", features = ["use_pem"] } -base64 = "0.13" +base64 = "0.21" dashmap = "5.0" pgwire = "0.15" sha2 = "0.10" diff --git a/nexus/peer-snowflake/src/auth.rs b/nexus/peer-snowflake/src/auth.rs index e71c6f897..10eb2e32e 100644 --- a/nexus/peer-snowflake/src/auth.rs +++ b/nexus/peer-snowflake/src/auth.rs @@ -4,7 +4,7 @@ use std::{ }; use anyhow::Context; -use base64::encode as base64_encode; +use base64::prelude::{Engine as _, BASE64_STANDARD}; use jsonwebtoken::{encode as jwt_encode, Algorithm, EncodingKey, Header}; use pkcs1::EncodeRsaPrivateKey; use pkcs8::{DecodePrivateKey, EncodePublicKey}; @@ -47,9 +47,8 @@ impl SnowflakeAuth { expiry_threshold: u64, ) -> anyhow::Result { let pkey = match password { - Some(pw) => { - DecodePrivateKey::from_pkcs8_encrypted_pem(&private_key, pw).context("Invalid private key or decryption failed")? 
- }, + Some(pw) => DecodePrivateKey::from_pkcs8_encrypted_pem(&private_key, pw) + .context("Invalid private key or decryption failed")?, None => { DecodePrivateKey::from_pkcs8_pem(&private_key).context("Invalid private key")? } @@ -77,16 +76,15 @@ impl SnowflakeAuth { // Normalize the account identifer to a form that is embedded into the JWT. // Logic adapted from Snowflake's example Python code for key-pair authentication "sql-api-generate-jwt.py". fn normalize_account_identifier(raw_account: &str) -> String { - let split_index: usize; - if !raw_account.contains(".global") { - split_index = *raw_account - .find(".") - .get_or_insert(raw_account.chars().count()); + let split_index = if !raw_account.contains(".global") { + *raw_account + .find('.') + .get_or_insert(raw_account.chars().count()) } else { - split_index = *raw_account - .find("-") - .get_or_insert(raw_account.chars().count()); - } + *raw_account + .find('-') + .get_or_insert(raw_account.chars().count()) + }; raw_account .to_uppercase() .chars() @@ -99,7 +97,7 @@ impl SnowflakeAuth { let public_key = EncodePublicKey::to_public_key_der(&RsaPublicKey::from(private_key))?; let res = format!( "SHA256:{}", - base64_encode(Sha256::new_with_prefix(public_key.as_bytes()).finalize()) + BASE64_STANDARD.encode(Sha256::new_with_prefix(public_key.as_bytes()).finalize()) ); Ok(res) } diff --git a/nexus/peer-snowflake/src/cursor.rs b/nexus/peer-snowflake/src/cursor.rs index b1a0ecc9a..475a2d7f3 100644 --- a/nexus/peer-snowflake/src/cursor.rs +++ b/nexus/peer-snowflake/src/cursor.rs @@ -7,7 +7,6 @@ use sqlparser::ast::Statement; use tokio::sync::Mutex; pub struct SnowflakeCursor { - stmt: Statement, position: usize, stream: Mutex, schema: SchemaRef, @@ -39,7 +38,6 @@ impl SnowflakeCursorManager { // Create a new cursor let cursor = SnowflakeCursor { - stmt: stmt.clone(), position: 0, stream: Mutex::new(stream), schema, diff --git a/nexus/peer-snowflake/src/lib.rs b/nexus/peer-snowflake/src/lib.rs index 
c2e4fbd9d..86f7b5854 100644 --- a/nexus/peer-snowflake/src/lib.rs +++ b/nexus/peer-snowflake/src/lib.rs @@ -28,13 +28,13 @@ mod stream; const DEFAULT_REFRESH_THRESHOLD: u64 = 3000; const DEFAULT_EXPIRY_THRESHOLD: u64 = 3600; -const SNOWFLAKE_URL_PREFIX: &'static str = "https://"; -const SNOWFLAKE_URL_SUFFIX: &'static str = ".snowflakecomputing.com/api/v2/statements"; +const SNOWFLAKE_URL_PREFIX: &str = "https://"; +const SNOWFLAKE_URL_SUFFIX: &str = ".snowflakecomputing.com/api/v2/statements"; -const DATE_OUTPUT_FORMAT: &'static str = "YYYY/MM/DD"; -const TIME_OUTPUT_FORMAT: &'static str = "HH:MI:SS.FF"; -const TIMESTAMP_OUTPUT_FORMAT: &'static str = "YYYY-MM-DDTHH24:MI:SS.FF"; -const TIMESTAMP_TZ_OUTPUT_FORMAT: &'static str = "YYYY-MM-DDTHH24:MI:SS.FFTZHTZM"; +const DATE_OUTPUT_FORMAT: &str = "YYYY/MM/DD"; +const TIME_OUTPUT_FORMAT: &str = "HH:MI:SS.FF"; +const TIMESTAMP_OUTPUT_FORMAT: &str = "YYYY-MM-DDTHH24:MI:SS.FF"; +const TIMESTAMP_TZ_OUTPUT_FORMAT: &str = "YYYY-MM-DDTHH24:MI:SS.FFTZHTZM"; #[derive(Debug, Serialize)] struct SQLStatement<'a> { @@ -59,7 +59,7 @@ pub(crate) struct ResultSetRowType { r#type: SnowflakeDataType, } -#[allow(non_snake_case)] +#[allow(non_snake_case, dead_code)] #[derive(Deserialize, Debug)] struct ResultSetPartitionInfo { rowCount: u64, @@ -207,7 +207,7 @@ impl SnowflakeQueryExecutor { }) } - pub async fn query(&self, query: &Box) -> PgWireResult { + pub async fn query(&self, query: &Query) -> PgWireResult { let mut query = query.clone(); let ast = ast::SnowflakeAst::default(); diff --git a/nexus/peer-snowflake/src/stream.rs b/nexus/peer-snowflake/src/stream.rs index 041716395..3434b70df 100644 --- a/nexus/peer-snowflake/src/stream.rs +++ b/nexus/peer-snowflake/src/stream.rs @@ -1,5 +1,5 @@ use crate::{auth::SnowflakeAuth, PartitionResult, ResultSet}; -use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc}; +use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Utc}; use futures::Stream; use 
peer_cursor::Schema; use peer_cursor::{Record, RecordStream, SchemaRef}; @@ -13,7 +13,6 @@ use pgwire::{ }; use secrecy::ExposeSecret; use serde::Deserialize; -use serde_json; use std::{ pin::Pin, task::{Context, Poll}, @@ -146,19 +145,21 @@ impl SnowflakeRecordStream { // really hacky workaround for parsing the UTC timezone specifically. SnowflakeDataType::TimestampLtz => { match DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT) { - Ok(_) => TimestampWithTimeZone(DateTime::::from_utc( - DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT)? + Ok(_) => TimestampWithTimeZone( + Utc.from_utc_datetime( + &DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT)? + .naive_utc(), + ), + ), + Err(_) => TimestampWithTimeZone( + Utc.from_utc_datetime( + &DateTime::parse_from_str( + &elem.replace('Z', "+0000"), + TIMESTAMP_TZ_PARSE_FORMAT, + )? .naive_utc(), - Utc, - )), - Err(_) => TimestampWithTimeZone(DateTime::::from_utc( - DateTime::parse_from_str( - &elem.replace("Z", "+0000"), - TIMESTAMP_TZ_PARSE_FORMAT, - )? - .naive_utc(), - Utc, - )), + ), + ), } } SnowflakeDataType::TimestampNtz => PostgresTimestamp( @@ -166,23 +167,25 @@ impl SnowflakeRecordStream { ), SnowflakeDataType::TimestampTz => { match DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT) { - Ok(_) => TimestampWithTimeZone(DateTime::::from_utc( - DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT)? + Ok(_) => TimestampWithTimeZone( + Utc.from_utc_datetime( + &DateTime::parse_from_str(elem, TIMESTAMP_TZ_PARSE_FORMAT)? + .naive_utc(), + ), + ), + Err(_) => TimestampWithTimeZone( + Utc.from_utc_datetime( + &DateTime::parse_from_str( + &elem.replace('Z', "+0000"), + TIMESTAMP_TZ_PARSE_FORMAT, + )? .naive_utc(), - Utc, - )), - Err(_) => TimestampWithTimeZone(DateTime::::from_utc( - DateTime::parse_from_str( - &elem.replace("Z", "+0000"), - TIMESTAMP_TZ_PARSE_FORMAT, - )? 
- .naive_utc(), - Utc, - )), + ), + ), } } SnowflakeDataType::Variant => { - let jsonb: serde_json::Value = serde_json::from_str(&elem)?; + let jsonb: serde_json::Value = serde_json::from_str(elem)?; Value::JsonB(jsonb) } }, @@ -192,7 +195,7 @@ impl SnowflakeRecordStream { row_values.push(row_value.unwrap_or(Value::Null)); } - self.partition_index = self.partition_index + 1; + self.partition_index += 1; Ok(Record { values: row_values, @@ -204,7 +207,7 @@ impl SnowflakeRecordStream { if (self.partition_number + 1) == self.result_set.resultSetMetaData.partitionInfo.len() { return Ok(false); } - self.partition_number = self.partition_number + 1; + self.partition_number += 1; self.partition_index = 0; let partition_number = self.partition_number; let secret = self.auth.get_jwt()?.expose_secret().clone(); diff --git a/nexus/postgres-connection/src/lib.rs b/nexus/postgres-connection/src/lib.rs index 21ba7d082..58e9ecc79 100644 --- a/nexus/postgres-connection/src/lib.rs +++ b/nexus/postgres-connection/src/lib.rs @@ -17,7 +17,7 @@ pub fn get_pg_connection_string(config: &PostgresConfig) -> String { connection_string.push('/'); connection_string.push_str(&config.database); - // Add the timeout as a query parameter + // Add the timeout as a query parameter, sslmode changes here appear to be useless connection_string.push_str("?connect_timeout=15"); connection_string @@ -27,6 +27,8 @@ pub async fn connect_postgres(config: &PostgresConfig) -> anyhow::Result, } #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] @@ -76,6 +77,7 @@ pub struct FlowJob { pub push_parallelism: Option, pub push_batch_size: Option, pub max_batch_size: Option, + pub resync: bool, } #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] diff --git a/nexus/pt/src/google.api.rs b/nexus/pt/src/google.api.rs new file mode 100644 index 000000000..c758f27ec --- /dev/null +++ b/nexus/pt/src/google.api.rs @@ -0,0 +1,374 @@ +// @generated +/// Defines the HTTP configuration for an API service. 
It contains a list of +/// \[HttpRule][google.api.HttpRule\], each specifying the mapping of an RPC method +/// to one or more HTTP REST API methods. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Http { + /// A list of HTTP configuration rules that apply to individual API methods. + /// + /// **NOTE:** All service configuration rules follow "last one wins" order. + #[prost(message, repeated, tag="1")] + pub rules: ::prost::alloc::vec::Vec, + /// When set to true, URL path parameters will be fully URI-decoded except in + /// cases of single segment matches in reserved expansion, where "%2F" will be + /// left encoded. + /// + /// The default behavior is to not decode RFC 6570 reserved characters in multi + /// segment matches. + #[prost(bool, tag="2")] + pub fully_decode_reserved_expansion: bool, +} +/// # gRPC Transcoding +/// +/// gRPC Transcoding is a feature for mapping between a gRPC method and one or +/// more HTTP REST endpoints. It allows developers to build a single API service +/// that supports both gRPC APIs and REST APIs. Many systems, including [Google +/// APIs](), +/// [Cloud Endpoints](), [gRPC +/// Gateway](), +/// and \[Envoy\]() proxy support this feature +/// and use it for large scale production services. +/// +/// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +/// how different portions of the gRPC request message are mapped to the URL +/// path, URL query parameters, and HTTP request body. It also controls how the +/// gRPC response message is mapped to the HTTP response body. `HttpRule` is +/// typically specified as an `google.api.http` annotation on the gRPC method. +/// +/// Each mapping specifies a URL path template and an HTTP method. The path +/// template may refer to one or more fields in the gRPC request message, as long +/// as each field is a non-repeated field with a primitive (non-message) type. 
+/// The path template controls how fields of the request message are mapped to +/// the URL path. +/// +/// Example: +/// +/// service Messaging { +/// rpc GetMessage(GetMessageRequest) returns (Message) { +/// option (google.api.http) = { +/// get: "/v1/{name=messages/*}" +/// }; +/// } +/// } +/// message GetMessageRequest { +/// string name = 1; // Mapped to URL path. +/// } +/// message Message { +/// string text = 1; // The resource content. +/// } +/// +/// This enables an HTTP REST to gRPC mapping as below: +/// +/// HTTP | gRPC +/// -----|----- +/// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` +/// +/// Any fields in the request message which are not bound by the path template +/// automatically become HTTP query parameters if there is no HTTP request body. +/// For example: +/// +/// service Messaging { +/// rpc GetMessage(GetMessageRequest) returns (Message) { +/// option (google.api.http) = { +/// get:"/v1/messages/{message_id}" +/// }; +/// } +/// } +/// message GetMessageRequest { +/// message SubMessage { +/// string subfield = 1; +/// } +/// string message_id = 1; // Mapped to URL path. +/// int64 revision = 2; // Mapped to URL query parameter `revision`. +/// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +/// } +/// +/// This enables a HTTP JSON to RPC mapping as below: +/// +/// HTTP | gRPC +/// -----|----- +/// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | +/// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: +/// "foo"))` +/// +/// Note that fields which are mapped to URL query parameters must have a +/// primitive type or a repeated primitive type or a non-repeated message type. +/// In the case of a repeated type, the parameter can be repeated in the URL +/// as `...?param=A¶m=B`. In the case of a message type, each field of the +/// message is mapped to a separate parameter, such as +/// `...?foo.a=A&foo.b=B&foo.c=C`. 
+/// +/// For HTTP methods that allow a request body, the `body` field +/// specifies the mapping. Consider a REST update method on the +/// message resource collection: +/// +/// service Messaging { +/// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +/// option (google.api.http) = { +/// patch: "/v1/messages/{message_id}" +/// body: "message" +/// }; +/// } +/// } +/// message UpdateMessageRequest { +/// string message_id = 1; // mapped to the URL +/// Message message = 2; // mapped to the body +/// } +/// +/// The following HTTP JSON to RPC mapping is enabled, where the +/// representation of the JSON in the request body is determined by +/// protos JSON encoding: +/// +/// HTTP | gRPC +/// -----|----- +/// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +/// "123456" message { text: "Hi!" })` +/// +/// The special name `*` can be used in the body mapping to define that +/// every field not bound by the path template should be mapped to the +/// request body. This enables the following alternative definition of +/// the update method: +/// +/// service Messaging { +/// rpc UpdateMessage(Message) returns (Message) { +/// option (google.api.http) = { +/// patch: "/v1/messages/{message_id}" +/// body: "*" +/// }; +/// } +/// } +/// message Message { +/// string message_id = 1; +/// string text = 2; +/// } +/// +/// +/// The following HTTP JSON to RPC mapping is enabled: +/// +/// HTTP | gRPC +/// -----|----- +/// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +/// "123456" text: "Hi!")` +/// +/// Note that when using `*` in the body mapping, it is not possible to +/// have HTTP parameters, as all fields not bound by the path end in +/// the body. This makes this option more rarely used in practice when +/// defining REST APIs. The common usage of `*` is in custom methods +/// which don't use the URL at all for transferring data. 
+/// +/// It is possible to define multiple HTTP methods for one RPC by using +/// the `additional_bindings` option. Example: +/// +/// service Messaging { +/// rpc GetMessage(GetMessageRequest) returns (Message) { +/// option (google.api.http) = { +/// get: "/v1/messages/{message_id}" +/// additional_bindings { +/// get: "/v1/users/{user_id}/messages/{message_id}" +/// } +/// }; +/// } +/// } +/// message GetMessageRequest { +/// string message_id = 1; +/// string user_id = 2; +/// } +/// +/// This enables the following two alternative HTTP JSON to RPC mappings: +/// +/// HTTP | gRPC +/// -----|----- +/// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +/// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +/// "123456")` +/// +/// ## Rules for HTTP mapping +/// +/// 1. Leaf request fields (recursive expansion nested messages in the request +/// message) are classified into three categories: +/// - Fields referred by the path template. They are passed via the URL path. +/// - Fields referred by the \[HttpRule.body][google.api.HttpRule.body\]. They +/// are passed via the HTTP +/// request body. +/// - All other fields are passed via the URL query parameters, and the +/// parameter name is the field path in the request message. A repeated +/// field can be represented as multiple query parameters under the same +/// name. +/// 2. If \[HttpRule.body][google.api.HttpRule.body\] is "*", there is no URL +/// query parameter, all fields +/// are passed via URL path and HTTP request body. +/// 3. If \[HttpRule.body][google.api.HttpRule.body\] is omitted, there is no HTTP +/// request body, all +/// fields are passed via URL path and URL query parameters. +/// +/// ### Path template syntax +/// +/// Template = "/" Segments [ Verb ] ; +/// Segments = Segment { "/" Segment } ; +/// Segment = "*" | "**" | LITERAL | Variable ; +/// Variable = "{" FieldPath [ "=" Segments ] "}" ; +/// FieldPath = IDENT { "." 
IDENT } ; +/// Verb = ":" LITERAL ; +/// +/// The syntax `*` matches a single URL path segment. The syntax `**` matches +/// zero or more URL path segments, which must be the last part of the URL path +/// except the `Verb`. +/// +/// The syntax `Variable` matches part of the URL path as specified by its +/// template. A variable template must not contain other variables. If a variable +/// matches a single path segment, its template may be omitted, e.g. `{var}` +/// is equivalent to `{var=*}`. +/// +/// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +/// contains any reserved character, such characters should be percent-encoded +/// before the matching. +/// +/// If a variable contains exactly one path segment, such as `"{var}"` or +/// `"{var=*}"`, when such a variable is expanded into a URL path on the client +/// side, all characters except `\[-_.~0-9a-zA-Z\]` are percent-encoded. The +/// server side does the reverse decoding. Such variables show up in the +/// [Discovery +/// Document]() as +/// `{var}`. +/// +/// If a variable contains multiple path segments, such as `"{var=foo/*}"` +/// or `"{var=**}"`, when such a variable is expanded into a URL path on the +/// client side, all characters except `\[-_.~/0-9a-zA-Z\]` are percent-encoded. +/// The server side does the reverse decoding, except "%2F" and "%2f" are left +/// unchanged. Such variables show up in the +/// [Discovery +/// Document]() as +/// `{+var}`. +/// +/// ## Using gRPC API Service Configuration +/// +/// gRPC API Service Configuration (service config) is a configuration language +/// for configuring a gRPC service to become a user-facing product. The +/// service config is simply the YAML representation of the `google.api.Service` +/// proto message. +/// +/// As an alternative to annotating your proto file, you can configure gRPC +/// transcoding in your service config YAML files. 
You do this by specifying a +/// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +/// effect as the proto annotation. This can be particularly useful if you +/// have a proto that is reused in multiple services. Note that any transcoding +/// specified in the service config will override any matching transcoding +/// configuration in the proto. +/// +/// Example: +/// +/// http: +/// rules: +/// # Selects a gRPC method and applies HttpRule to it. +/// - selector: example.v1.Messaging.GetMessage +/// get: /v1/messages/{message_id}/{sub.subfield} +/// +/// ## Special notes +/// +/// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +/// proto to JSON conversion must follow the [proto3 +/// specification](). +/// +/// While the single segment variable follows the semantics of +/// [RFC 6570]() Section 3.2.2 Simple String +/// Expansion, the multi segment variable **does not** follow RFC 6570 Section +/// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +/// does not expand special characters like `?` and `#`, which would lead +/// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +/// for multi segment variables. +/// +/// The path variables **must not** refer to any repeated or mapped field, +/// because client libraries are not capable of handling such variable expansion. +/// +/// The path variables **must not** capture the leading "/" character. The reason +/// is that the most common use case "{var}" does not capture the leading "/" +/// character. For consistency, all path variables must share the same behavior. +/// +/// Repeated message fields must not be mapped to URL query parameters, because +/// no client library can support such complicated mapping. +/// +/// If an API needs to use a JSON array for request or response body, it can map +/// the request or response body to a repeated field. 
However, some gRPC +/// Transcoding implementations may not support this feature. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct HttpRule { + /// Selects a method to which this rule applies. + /// + /// Refer to \[selector][google.api.DocumentationRule.selector\] for syntax + /// details. + #[prost(string, tag="1")] + pub selector: ::prost::alloc::string::String, + /// The name of the request field whose value is mapped to the HTTP request + /// body, or `*` for mapping all request fields not captured by the path + /// pattern to the HTTP body, or omitted for not having any HTTP request body. + /// + /// NOTE: the referred field must be present at the top-level of the request + /// message type. + #[prost(string, tag="7")] + pub body: ::prost::alloc::string::String, + /// Optional. The name of the response field whose value is mapped to the HTTP + /// response body. When omitted, the entire response message will be used + /// as the HTTP response body. + /// + /// NOTE: The referred field must be present at the top-level of the response + /// message type. + #[prost(string, tag="12")] + pub response_body: ::prost::alloc::string::String, + /// Additional HTTP bindings for the selector. Nested bindings must + /// not contain an `additional_bindings` field themselves (that is, + /// the nesting may only be one level deep). + #[prost(message, repeated, tag="11")] + pub additional_bindings: ::prost::alloc::vec::Vec, + /// Determines the URL pattern is matched by this rules. This pattern can be + /// used with any of the {get|put|post|delete|patch} methods. A custom method + /// can be defined using the 'custom' field. + #[prost(oneof="http_rule::Pattern", tags="2, 3, 4, 5, 6, 8")] + pub pattern: ::core::option::Option, +} +/// Nested message and enum types in `HttpRule`. +pub mod http_rule { + /// Determines the URL pattern is matched by this rules. 
This pattern can be + /// used with any of the {get|put|post|delete|patch} methods. A custom method + /// can be defined using the 'custom' field. + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Pattern { + /// Maps to HTTP GET. Used for listing and getting information about + /// resources. + #[prost(string, tag="2")] + Get(::prost::alloc::string::String), + /// Maps to HTTP PUT. Used for replacing a resource. + #[prost(string, tag="3")] + Put(::prost::alloc::string::String), + /// Maps to HTTP POST. Used for creating a resource or performing an action. + #[prost(string, tag="4")] + Post(::prost::alloc::string::String), + /// Maps to HTTP DELETE. Used for deleting a resource. + #[prost(string, tag="5")] + Delete(::prost::alloc::string::String), + /// Maps to HTTP PATCH. Used for updating a resource. + #[prost(string, tag="6")] + Patch(::prost::alloc::string::String), + /// The custom pattern is used for specifying an HTTP method that is not + /// included in the `pattern` field, such as HEAD, or "*" to leave the + /// HTTP method unspecified for this rule. The wild-card rule is useful + /// for services that provide content to Web (HTML) clients. + #[prost(message, tag="8")] + Custom(super::CustomHttpPattern), + } +} +/// A custom pattern is used for defining custom HTTP verb. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CustomHttpPattern { + /// The name of this custom HTTP verb. + #[prost(string, tag="1")] + pub kind: ::prost::alloc::string::String, + /// The path matched by this custom verb. 
+ #[prost(string, tag="2")] + pub path: ::prost::alloc::string::String, +} +include!("google.api.serde.rs"); +// @@protoc_insertion_point(module) \ No newline at end of file diff --git a/nexus/pt/src/google.api.serde.rs b/nexus/pt/src/google.api.serde.rs new file mode 100644 index 000000000..60a18fb85 --- /dev/null +++ b/nexus/pt/src/google.api.serde.rs @@ -0,0 +1,456 @@ +// @generated +impl serde::Serialize for CustomHttpPattern { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.kind.is_empty() { + len += 1; + } + if !self.path.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("google.api.CustomHttpPattern", len)?; + if !self.kind.is_empty() { + struct_ser.serialize_field("kind", &self.kind)?; + } + if !self.path.is_empty() { + struct_ser.serialize_field("path", &self.path)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for CustomHttpPattern { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "kind", + "path", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Kind, + Path, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "kind" => Ok(GeneratedField::Kind), + "path" => Ok(GeneratedField::Path), + _ => Ok(GeneratedField::__SkipField__), 
+ } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = CustomHttpPattern; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct google.api.CustomHttpPattern") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut kind__ = None; + let mut path__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::Kind => { + if kind__.is_some() { + return Err(serde::de::Error::duplicate_field("kind")); + } + kind__ = Some(map.next_value()?); + } + GeneratedField::Path => { + if path__.is_some() { + return Err(serde::de::Error::duplicate_field("path")); + } + path__ = Some(map.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(CustomHttpPattern { + kind: kind__.unwrap_or_default(), + path: path__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("google.api.CustomHttpPattern", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for Http { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.rules.is_empty() { + len += 1; + } + if self.fully_decode_reserved_expansion { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("google.api.Http", len)?; + if !self.rules.is_empty() { + struct_ser.serialize_field("rules", &self.rules)?; + } + if self.fully_decode_reserved_expansion { + struct_ser.serialize_field("fullyDecodeReservedExpansion", &self.fully_decode_reserved_expansion)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for Http { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + 
"rules", + "fully_decode_reserved_expansion", + "fullyDecodeReservedExpansion", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Rules, + FullyDecodeReservedExpansion, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "rules" => Ok(GeneratedField::Rules), + "fullyDecodeReservedExpansion" | "fully_decode_reserved_expansion" => Ok(GeneratedField::FullyDecodeReservedExpansion), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = Http; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct google.api.Http") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut rules__ = None; + let mut fully_decode_reserved_expansion__ = None; + while let Some(k) = map.next_key()? 
{ + match k { + GeneratedField::Rules => { + if rules__.is_some() { + return Err(serde::de::Error::duplicate_field("rules")); + } + rules__ = Some(map.next_value()?); + } + GeneratedField::FullyDecodeReservedExpansion => { + if fully_decode_reserved_expansion__.is_some() { + return Err(serde::de::Error::duplicate_field("fullyDecodeReservedExpansion")); + } + fully_decode_reserved_expansion__ = Some(map.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(Http { + rules: rules__.unwrap_or_default(), + fully_decode_reserved_expansion: fully_decode_reserved_expansion__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("google.api.Http", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for HttpRule { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.selector.is_empty() { + len += 1; + } + if !self.body.is_empty() { + len += 1; + } + if !self.response_body.is_empty() { + len += 1; + } + if !self.additional_bindings.is_empty() { + len += 1; + } + if self.pattern.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("google.api.HttpRule", len)?; + if !self.selector.is_empty() { + struct_ser.serialize_field("selector", &self.selector)?; + } + if !self.body.is_empty() { + struct_ser.serialize_field("body", &self.body)?; + } + if !self.response_body.is_empty() { + struct_ser.serialize_field("responseBody", &self.response_body)?; + } + if !self.additional_bindings.is_empty() { + struct_ser.serialize_field("additionalBindings", &self.additional_bindings)?; + } + if let Some(v) = self.pattern.as_ref() { + match v { + http_rule::Pattern::Get(v) => { + struct_ser.serialize_field("get", v)?; + } + http_rule::Pattern::Put(v) => { + struct_ser.serialize_field("put", v)?; + } + http_rule::Pattern::Post(v) => { + struct_ser.serialize_field("post", v)?; 
+ } + http_rule::Pattern::Delete(v) => { + struct_ser.serialize_field("delete", v)?; + } + http_rule::Pattern::Patch(v) => { + struct_ser.serialize_field("patch", v)?; + } + http_rule::Pattern::Custom(v) => { + struct_ser.serialize_field("custom", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for HttpRule { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "selector", + "body", + "response_body", + "responseBody", + "additional_bindings", + "additionalBindings", + "get", + "put", + "post", + "delete", + "patch", + "custom", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Selector, + Body, + ResponseBody, + AdditionalBindings, + Get, + Put, + Post, + Delete, + Patch, + Custom, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "selector" => Ok(GeneratedField::Selector), + "body" => Ok(GeneratedField::Body), + "responseBody" | "response_body" => Ok(GeneratedField::ResponseBody), + "additionalBindings" | "additional_bindings" => Ok(GeneratedField::AdditionalBindings), + "get" => Ok(GeneratedField::Get), + "put" => Ok(GeneratedField::Put), + "post" => Ok(GeneratedField::Post), + "delete" => Ok(GeneratedField::Delete), + "patch" => Ok(GeneratedField::Patch), + "custom" => Ok(GeneratedField::Custom), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + 
deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = HttpRule; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct google.api.HttpRule") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut selector__ = None; + let mut body__ = None; + let mut response_body__ = None; + let mut additional_bindings__ = None; + let mut pattern__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::Selector => { + if selector__.is_some() { + return Err(serde::de::Error::duplicate_field("selector")); + } + selector__ = Some(map.next_value()?); + } + GeneratedField::Body => { + if body__.is_some() { + return Err(serde::de::Error::duplicate_field("body")); + } + body__ = Some(map.next_value()?); + } + GeneratedField::ResponseBody => { + if response_body__.is_some() { + return Err(serde::de::Error::duplicate_field("responseBody")); + } + response_body__ = Some(map.next_value()?); + } + GeneratedField::AdditionalBindings => { + if additional_bindings__.is_some() { + return Err(serde::de::Error::duplicate_field("additionalBindings")); + } + additional_bindings__ = Some(map.next_value()?); + } + GeneratedField::Get => { + if pattern__.is_some() { + return Err(serde::de::Error::duplicate_field("get")); + } + pattern__ = map.next_value::<::std::option::Option<_>>()?.map(http_rule::Pattern::Get); + } + GeneratedField::Put => { + if pattern__.is_some() { + return Err(serde::de::Error::duplicate_field("put")); + } + pattern__ = map.next_value::<::std::option::Option<_>>()?.map(http_rule::Pattern::Put); + } + GeneratedField::Post => { + if pattern__.is_some() { + return Err(serde::de::Error::duplicate_field("post")); + } + pattern__ = map.next_value::<::std::option::Option<_>>()?.map(http_rule::Pattern::Post); + } + GeneratedField::Delete 
=> { + if pattern__.is_some() { + return Err(serde::de::Error::duplicate_field("delete")); + } + pattern__ = map.next_value::<::std::option::Option<_>>()?.map(http_rule::Pattern::Delete); + } + GeneratedField::Patch => { + if pattern__.is_some() { + return Err(serde::de::Error::duplicate_field("patch")); + } + pattern__ = map.next_value::<::std::option::Option<_>>()?.map(http_rule::Pattern::Patch); + } + GeneratedField::Custom => { + if pattern__.is_some() { + return Err(serde::de::Error::duplicate_field("custom")); + } + pattern__ = map.next_value::<::std::option::Option<_>>()?.map(http_rule::Pattern::Custom) +; + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(HttpRule { + selector: selector__.unwrap_or_default(), + body: body__.unwrap_or_default(), + response_body: response_body__.unwrap_or_default(), + additional_bindings: additional_bindings__.unwrap_or_default(), + pattern: pattern__, + }) + } + } + deserializer.deserialize_struct("google.api.HttpRule", FIELDS, GeneratedVisitor) + } +} diff --git a/nexus/pt/src/lib.rs b/nexus/pt/src/lib.rs index 87ca1d699..a12e67d67 100644 --- a/nexus/pt/src/lib.rs +++ b/nexus/pt/src/lib.rs @@ -1,3 +1,5 @@ +#![allow(clippy::all)] + use peerdb_peers::DbType; use sqlparser::ast::PeerType; @@ -16,6 +18,7 @@ impl From for DbType { PeerType::EventHub => DbType::Eventhub, PeerType::S3 => DbType::S3, PeerType::SQLServer => DbType::Sqlserver, + PeerType::EventHubGroup => DbType::EventhubGroup, PeerType::Kafka => todo!("Add Kafka support"), } } diff --git a/nexus/pt/src/peerdb_flow.rs b/nexus/pt/src/peerdb_flow.rs index 8e996e644..2a2cb7e49 100644 --- a/nexus/pt/src/peerdb_flow.rs +++ b/nexus/pt/src/peerdb_flow.rs @@ -2,393 +2,429 @@ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TableNameMapping { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub source_table_name: ::prost::alloc::string::String, - #[prost(string, tag="2")] + 
#[prost(string, tag = "2")] pub destination_table_name: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RelationMessageColumn { - #[prost(uint32, tag="1")] + #[prost(uint32, tag = "1")] pub flags: u32, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub name: ::prost::alloc::string::String, - #[prost(uint32, tag="3")] + #[prost(uint32, tag = "3")] pub data_type: u32, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RelationMessage { - #[prost(uint32, tag="1")] + #[prost(uint32, tag = "1")] pub relation_id: u32, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub relation_name: ::prost::alloc::string::String, - #[prost(message, repeated, tag="3")] + #[prost(message, repeated, tag = "3")] pub columns: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] +pub struct TableMapping { + #[prost(string, tag = "1")] + pub source_table_identifier: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub destination_table_identifier: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub partition_key: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct FlowConnectionConfigs { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub source: ::core::option::Option, - #[prost(message, optional, tag="2")] + #[prost(message, optional, tag = "2")] pub destination: ::core::option::Option, - #[prost(string, tag="3")] + #[prost(string, tag = "3")] pub flow_job_name: ::prost::alloc::string::String, - #[prost(message, optional, tag="4")] + #[prost(message, optional, tag = "4")] pub table_schema: ::core::option::Option, - #[prost(map="string, string", tag="5")] - pub table_name_mapping: 
::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, - #[prost(map="uint32, string", tag="6")] + #[prost(message, repeated, tag = "5")] + pub table_mappings: ::prost::alloc::vec::Vec, + #[prost(map = "uint32, string", tag = "6")] pub src_table_id_name_mapping: ::std::collections::HashMap, - #[prost(map="string, message", tag="7")] - pub table_name_schema_mapping: ::std::collections::HashMap<::prost::alloc::string::String, TableSchema>, + #[prost(map = "string, message", tag = "7")] + pub table_name_schema_mapping: + ::std::collections::HashMap<::prost::alloc::string::String, TableSchema>, /// This is an optional peer that will be used to hold metadata in cases where /// the destination isn't ideal for holding metadata. - #[prost(message, optional, tag="8")] + #[prost(message, optional, tag = "8")] pub metadata_peer: ::core::option::Option, - #[prost(uint32, tag="9")] + #[prost(uint32, tag = "9")] pub max_batch_size: u32, - #[prost(bool, tag="10")] + #[prost(bool, tag = "10")] pub do_initial_copy: bool, - #[prost(string, tag="11")] + #[prost(string, tag = "11")] pub publication_name: ::prost::alloc::string::String, - #[prost(uint32, tag="12")] + #[prost(uint32, tag = "12")] pub snapshot_num_rows_per_partition: u32, /// max parallel workers is per table - #[prost(uint32, tag="13")] + #[prost(uint32, tag = "13")] pub snapshot_max_parallel_workers: u32, - #[prost(uint32, tag="14")] + #[prost(uint32, tag = "14")] pub snapshot_num_tables_in_parallel: u32, - #[prost(enumeration="QRepSyncMode", tag="15")] + #[prost(enumeration = "QRepSyncMode", tag = "15")] pub snapshot_sync_mode: i32, - #[prost(enumeration="QRepSyncMode", tag="16")] + #[prost(enumeration = "QRepSyncMode", tag = "16")] pub cdc_sync_mode: i32, - #[prost(string, tag="17")] + #[prost(string, tag = "17")] pub snapshot_staging_path: ::prost::alloc::string::String, - #[prost(string, tag="18")] + #[prost(string, tag = "18")] pub cdc_staging_path: 
::prost::alloc::string::String, /// currently only works for snowflake - #[prost(bool, tag="19")] + #[prost(bool, tag = "19")] pub soft_delete: bool, - #[prost(string, tag="20")] + #[prost(string, tag = "20")] pub replication_slot_name: ::prost::alloc::string::String, /// the below two are for eventhub only - #[prost(int64, tag="21")] + #[prost(int64, tag = "21")] pub push_batch_size: i64, - #[prost(int64, tag="22")] + #[prost(int64, tag = "22")] pub push_parallelism: i64, + /// if true, then the flow will be resynced + #[prost(bool, tag = "23")] + pub resync: bool, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RenameTableOption { + #[prost(string, tag = "1")] + pub current_name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub new_name: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RenameTablesInput { + #[prost(string, tag = "1")] + pub flow_job_name: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub peer: ::core::option::Option, + #[prost(message, repeated, tag = "3")] + pub rename_table_options: ::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RenameTablesOutput { + #[prost(string, tag = "1")] + pub flow_job_name: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SyncFlowOptions { - #[prost(int32, tag="1")] + #[prost(int32, tag = "1")] pub batch_size: i32, - #[prost(map="uint32, message", tag="2")] + #[prost(map = "uint32, message", tag = "2")] pub relation_message_mapping: ::std::collections::HashMap, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NormalizeFlowOptions { - #[prost(int32, tag="1")] + #[prost(int32, tag = "1")] 
pub batch_size: i32, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LastSyncState { - #[prost(int64, tag="1")] + #[prost(int64, tag = "1")] pub checkpoint: i64, - #[prost(message, optional, tag="2")] + #[prost(message, optional, tag = "2")] pub last_synced_at: ::core::option::Option<::pbjson_types::Timestamp>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct StartFlowInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub last_sync_state: ::core::option::Option, - #[prost(message, optional, tag="2")] + #[prost(message, optional, tag = "2")] pub flow_connection_configs: ::core::option::Option, - #[prost(message, optional, tag="3")] + #[prost(message, optional, tag = "3")] pub sync_flow_options: ::core::option::Option, - #[prost(map="uint32, message", tag="4")] + #[prost(map = "uint32, message", tag = "4")] pub relation_message_mapping: ::std::collections::HashMap, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct StartNormalizeInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub flow_connection_configs: ::core::option::Option, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetLastSyncedIdInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub peer_connection_config: ::core::option::Option, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub flow_job_name: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EnsurePullabilityInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub peer_connection_config: ::core::option::Option, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub flow_job_name: 
::prost::alloc::string::String, - #[prost(string, tag="3")] + #[prost(string, tag = "3")] pub source_table_identifier: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EnsurePullabilityBatchInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub peer_connection_config: ::core::option::Option, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub flow_job_name: ::prost::alloc::string::String, - #[prost(string, repeated, tag="3")] + #[prost(string, repeated, tag = "3")] pub source_table_identifiers: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PostgresTableIdentifier { - #[prost(uint32, tag="1")] + #[prost(uint32, tag = "1")] pub rel_id: u32, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TableIdentifier { - #[prost(oneof="table_identifier::TableIdentifier", tags="1")] + #[prost(oneof = "table_identifier::TableIdentifier", tags = "1")] pub table_identifier: ::core::option::Option, } /// Nested message and enum types in `TableIdentifier`. 
pub mod table_identifier { #[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Oneof)] + #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum TableIdentifier { - #[prost(message, tag="1")] + #[prost(message, tag = "1")] PostgresTableIdentifier(super::PostgresTableIdentifier), } } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EnsurePullabilityOutput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub table_identifier: ::core::option::Option, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EnsurePullabilityBatchOutput { - #[prost(map="string, message", tag="1")] - pub table_identifier_mapping: ::std::collections::HashMap<::prost::alloc::string::String, TableIdentifier>, + #[prost(map = "string, message", tag = "1")] + pub table_identifier_mapping: + ::std::collections::HashMap<::prost::alloc::string::String, TableIdentifier>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SetupReplicationInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub peer_connection_config: ::core::option::Option, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub flow_job_name: ::prost::alloc::string::String, - #[prost(map="string, string", tag="3")] - pub table_name_mapping: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, + #[prost(map = "string, string", tag = "3")] + pub table_name_mapping: + ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, /// replicate to destination using ctid - #[prost(message, optional, tag="4")] + #[prost(message, optional, tag = "4")] pub destination_peer: ::core::option::Option, - #[prost(bool, tag="5")] + #[prost(bool, tag = "5")] pub do_initial_copy: bool, - #[prost(string, tag="6")] + 
#[prost(string, tag = "6")] pub existing_publication_name: ::prost::alloc::string::String, - #[prost(string, tag="7")] + #[prost(string, tag = "7")] pub existing_replication_slot_name: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SetupReplicationOutput { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub slot_name: ::prost::alloc::string::String, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub snapshot_name: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateRawTableInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub peer_connection_config: ::core::option::Option, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub flow_job_name: ::prost::alloc::string::String, - #[prost(map="string, string", tag="3")] - pub table_name_mapping: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, - #[prost(enumeration="QRepSyncMode", tag="4")] + #[prost(map = "string, string", tag = "3")] + pub table_name_mapping: + ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, + #[prost(enumeration = "QRepSyncMode", tag = "4")] pub cdc_sync_mode: i32, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateRawTableOutput { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub table_identifier: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TableSchema { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub table_identifier: ::prost::alloc::string::String, /// list of column names and types, types can be one of the following: /// "string", "int", "float", "bool", "timestamp". 
- #[prost(map="string, string", tag="2")] - pub columns: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, - #[prost(string, tag="3")] - pub primary_key_column: ::prost::alloc::string::String, - #[prost(bool, tag="4")] + #[prost(map = "string, string", tag = "2")] + pub columns: + ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, + #[prost(string, repeated, tag = "3")] + pub primary_key_columns: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(bool, tag = "4")] pub is_replica_identity_full: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTableSchemaBatchInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub peer_connection_config: ::core::option::Option, - #[prost(string, repeated, tag="2")] + #[prost(string, repeated, tag = "2")] pub table_identifiers: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTableSchemaBatchOutput { - #[prost(map="string, message", tag="1")] - pub table_name_schema_mapping: ::std::collections::HashMap<::prost::alloc::string::String, TableSchema>, + #[prost(map = "string, message", tag = "1")] + pub table_name_schema_mapping: + ::std::collections::HashMap<::prost::alloc::string::String, TableSchema>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SetupNormalizedTableInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub peer_connection_config: ::core::option::Option, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub table_identifier: ::prost::alloc::string::String, - #[prost(message, optional, tag="3")] + #[prost(message, optional, tag = "3")] pub source_table_schema: ::core::option::Option, } 
#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SetupNormalizedTableBatchInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub peer_connection_config: ::core::option::Option, - #[prost(map="string, message", tag="2")] - pub table_name_schema_mapping: ::std::collections::HashMap<::prost::alloc::string::String, TableSchema>, + #[prost(map = "string, message", tag = "2")] + pub table_name_schema_mapping: + ::std::collections::HashMap<::prost::alloc::string::String, TableSchema>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SetupNormalizedTableOutput { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub table_identifier: ::prost::alloc::string::String, - #[prost(bool, tag="2")] + #[prost(bool, tag = "2")] pub already_exists: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SetupNormalizedTableBatchOutput { - #[prost(map="string, bool", tag="1")] + #[prost(map = "string, bool", tag = "1")] pub table_exists_mapping: ::std::collections::HashMap<::prost::alloc::string::String, bool>, } /// partition ranges [start, end] inclusive #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct IntPartitionRange { - #[prost(int64, tag="1")] + #[prost(int64, tag = "1")] pub start: i64, - #[prost(int64, tag="2")] + #[prost(int64, tag = "2")] pub end: i64, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TimestampPartitionRange { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub start: ::core::option::Option<::pbjson_types::Timestamp>, - #[prost(message, optional, tag="2")] + #[prost(message, optional, tag = "2")] pub end: ::core::option::Option<::pbjson_types::Timestamp>, } #[allow(clippy::derive_partial_eq_without_eq)] 
#[derive(Clone, PartialEq, ::prost::Message)] pub struct Tid { - #[prost(uint32, tag="1")] + #[prost(uint32, tag = "1")] pub block_number: u32, - #[prost(uint32, tag="2")] + #[prost(uint32, tag = "2")] pub offset_number: u32, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TidPartitionRange { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub start: ::core::option::Option, - #[prost(message, optional, tag="2")] + #[prost(message, optional, tag = "2")] pub end: ::core::option::Option, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] -pub struct XminPartitionRange { - #[prost(uint32, tag="1")] - pub start: u32, - #[prost(uint32, tag="2")] - pub end: u32, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] pub struct PartitionRange { /// can be a timestamp range or an integer range - #[prost(oneof="partition_range::Range", tags="1, 2, 3, 4")] + #[prost(oneof = "partition_range::Range", tags = "1, 2, 3")] pub range: ::core::option::Option, } /// Nested message and enum types in `PartitionRange`. 
pub mod partition_range { /// can be a timestamp range or an integer range #[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Oneof)] + #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Range { - #[prost(message, tag="1")] + #[prost(message, tag = "1")] IntRange(super::IntPartitionRange), - #[prost(message, tag="2")] + #[prost(message, tag = "2")] TimestampRange(super::TimestampPartitionRange), - #[prost(message, tag="3")] + #[prost(message, tag = "3")] TidRange(super::TidPartitionRange), - #[prost(message, tag="4")] - XminRange(super::XminPartitionRange), } } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QRepWriteMode { - #[prost(enumeration="QRepWriteType", tag="1")] + #[prost(enumeration = "QRepWriteType", tag = "1")] pub write_type: i32, - #[prost(string, repeated, tag="2")] + #[prost(string, repeated, tag = "2")] pub upsert_key_columns: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QRepConfig { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub flow_job_name: ::prost::alloc::string::String, - #[prost(message, optional, tag="2")] + #[prost(message, optional, tag = "2")] pub source_peer: ::core::option::Option, - #[prost(message, optional, tag="3")] + #[prost(message, optional, tag = "3")] pub destination_peer: ::core::option::Option, - #[prost(string, tag="4")] + #[prost(string, tag = "4")] pub destination_table_identifier: ::prost::alloc::string::String, - #[prost(string, tag="5")] + #[prost(string, tag = "5")] pub query: ::prost::alloc::string::String, - #[prost(string, tag="6")] + #[prost(string, tag = "6")] pub watermark_table: ::prost::alloc::string::String, - #[prost(string, tag="7")] + #[prost(string, tag = "7")] pub watermark_column: ::prost::alloc::string::String, - #[prost(bool, tag="8")] + #[prost(bool, tag = "8")] pub 
initial_copy_only: bool, - #[prost(enumeration="QRepSyncMode", tag="9")] + #[prost(enumeration = "QRepSyncMode", tag = "9")] pub sync_mode: i32, - #[prost(uint32, tag="10")] + /// DEPRECATED: eliminate when breaking changes are allowed. + #[prost(uint32, tag = "10")] pub batch_size_int: u32, - #[prost(uint32, tag="11")] + /// DEPRECATED: eliminate when breaking changes are allowed. + #[prost(uint32, tag = "11")] pub batch_duration_seconds: u32, - #[prost(uint32, tag="12")] + #[prost(uint32, tag = "12")] pub max_parallel_workers: u32, /// time to wait between getting partitions to process - #[prost(uint32, tag="13")] + #[prost(uint32, tag = "13")] pub wait_between_batches_seconds: u32, - #[prost(message, optional, tag="14")] + #[prost(message, optional, tag = "14")] pub write_mode: ::core::option::Option, /// This is only used when sync_mode is AVRO /// this is the location where the avro files will be written @@ -396,71 +432,72 @@ pub struct QRepConfig { /// if this starts with s3:// then it will be written to S3 /// if nothing is specified then it will be written to local disk /// if using GCS or S3 make sure your instance has the correct permissions. - #[prost(string, tag="15")] + #[prost(string, tag = "15")] pub staging_path: ::prost::alloc::string::String, /// This setting overrides batch_size_int and batch_duration_seconds /// and instead uses the number of rows per partition to determine /// how many rows to process per batch. - #[prost(uint32, tag="16")] + #[prost(uint32, tag = "16")] pub num_rows_per_partition: u32, + /// Creates the watermark table on the destination as-is, can be used for some queries. 
+ #[prost(bool, tag = "17")] + pub setup_watermark_table_on_destination: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QRepPartition { - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub partition_id: ::prost::alloc::string::String, - #[prost(message, optional, tag="3")] + #[prost(message, optional, tag = "3")] pub range: ::core::option::Option, - #[prost(bool, tag="4")] + #[prost(bool, tag = "4")] pub full_table_partition: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QRepPartitionBatch { - #[prost(int32, tag="1")] + #[prost(int32, tag = "1")] pub batch_id: i32, - #[prost(message, repeated, tag="2")] + #[prost(message, repeated, tag = "2")] pub partitions: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QRepParitionResult { - #[prost(message, repeated, tag="1")] + #[prost(message, repeated, tag = "1")] pub partitions: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DropFlowInput { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub flow_name: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeltaAddedColumn { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub column_name: ::prost::alloc::string::String, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub column_type: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TableSchemaDelta { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub src_table_name: ::prost::alloc::string::String, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub dst_table_name: ::prost::alloc::string::String, - 
#[prost(message, repeated, tag="3")] + #[prost(message, repeated, tag = "3")] pub added_columns: ::prost::alloc::vec::Vec, - #[prost(string, repeated, tag="4")] - pub dropped_columns: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ReplayTableSchemaDeltaInput { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub flow_connection_configs: ::core::option::Option, - #[prost(message, optional, tag="2")] - pub table_schema_delta: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub table_schema_deltas: ::prost::alloc::vec::Vec, } /// protos for qrep #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] @@ -520,4 +557,4 @@ impl QRepWriteType { } } include!("peerdb_flow.serde.rs"); -// @@protoc_insertion_point(module) \ No newline at end of file +// @@protoc_insertion_point(module) diff --git a/nexus/pt/src/peerdb_flow.serde.rs b/nexus/pt/src/peerdb_flow.serde.rs index c8ad147bd..36acb19f8 100644 --- a/nexus/pt/src/peerdb_flow.serde.rs +++ b/nexus/pt/src/peerdb_flow.serde.rs @@ -937,7 +937,7 @@ impl serde::Serialize for FlowConnectionConfigs { if self.table_schema.is_some() { len += 1; } - if !self.table_name_mapping.is_empty() { + if !self.table_mappings.is_empty() { len += 1; } if !self.src_table_id_name_mapping.is_empty() { @@ -991,6 +991,9 @@ impl serde::Serialize for FlowConnectionConfigs { if self.push_parallelism != 0 { len += 1; } + if self.resync { + len += 1; + } let mut struct_ser = serializer.serialize_struct("peerdb_flow.FlowConnectionConfigs", len)?; if let Some(v) = self.source.as_ref() { struct_ser.serialize_field("source", v)?; @@ -1004,8 +1007,8 @@ impl serde::Serialize for FlowConnectionConfigs { if let Some(v) = self.table_schema.as_ref() { struct_ser.serialize_field("tableSchema", v)?; } - if !self.table_name_mapping.is_empty() { - 
struct_ser.serialize_field("tableNameMapping", &self.table_name_mapping)?; + if !self.table_mappings.is_empty() { + struct_ser.serialize_field("tableMappings", &self.table_mappings)?; } if !self.src_table_id_name_mapping.is_empty() { struct_ser.serialize_field("srcTableIdNameMapping", &self.src_table_id_name_mapping)?; @@ -1062,6 +1065,9 @@ impl serde::Serialize for FlowConnectionConfigs { if self.push_parallelism != 0 { struct_ser.serialize_field("pushParallelism", ToString::to_string(&self.push_parallelism).as_str())?; } + if self.resync { + struct_ser.serialize_field("resync", &self.resync)?; + } struct_ser.end() } } @@ -1078,8 +1084,8 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { "flowJobName", "table_schema", "tableSchema", - "table_name_mapping", - "tableNameMapping", + "table_mappings", + "tableMappings", "src_table_id_name_mapping", "srcTableIdNameMapping", "table_name_schema_mapping", @@ -1114,6 +1120,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { "pushBatchSize", "push_parallelism", "pushParallelism", + "resync", ]; #[allow(clippy::enum_variant_names)] @@ -1122,7 +1129,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { Destination, FlowJobName, TableSchema, - TableNameMapping, + TableMappings, SrcTableIdNameMapping, TableNameSchemaMapping, MetadataPeer, @@ -1140,6 +1147,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { ReplicationSlotName, PushBatchSize, PushParallelism, + Resync, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -1166,7 +1174,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { "destination" => Ok(GeneratedField::Destination), "flowJobName" | "flow_job_name" => Ok(GeneratedField::FlowJobName), "tableSchema" | "table_schema" => Ok(GeneratedField::TableSchema), - "tableNameMapping" | "table_name_mapping" => Ok(GeneratedField::TableNameMapping), + "tableMappings" | "table_mappings" => Ok(GeneratedField::TableMappings), 
"srcTableIdNameMapping" | "src_table_id_name_mapping" => Ok(GeneratedField::SrcTableIdNameMapping), "tableNameSchemaMapping" | "table_name_schema_mapping" => Ok(GeneratedField::TableNameSchemaMapping), "metadataPeer" | "metadata_peer" => Ok(GeneratedField::MetadataPeer), @@ -1184,6 +1192,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { "replicationSlotName" | "replication_slot_name" => Ok(GeneratedField::ReplicationSlotName), "pushBatchSize" | "push_batch_size" => Ok(GeneratedField::PushBatchSize), "pushParallelism" | "push_parallelism" => Ok(GeneratedField::PushParallelism), + "resync" => Ok(GeneratedField::Resync), _ => Ok(GeneratedField::__SkipField__), } } @@ -1207,7 +1216,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { let mut destination__ = None; let mut flow_job_name__ = None; let mut table_schema__ = None; - let mut table_name_mapping__ = None; + let mut table_mappings__ = None; let mut src_table_id_name_mapping__ = None; let mut table_name_schema_mapping__ = None; let mut metadata_peer__ = None; @@ -1225,6 +1234,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { let mut replication_slot_name__ = None; let mut push_batch_size__ = None; let mut push_parallelism__ = None; + let mut resync__ = None; while let Some(k) = map.next_key()? { match k { GeneratedField::Source => { @@ -1251,13 +1261,11 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { } table_schema__ = map.next_value()?; } - GeneratedField::TableNameMapping => { - if table_name_mapping__.is_some() { - return Err(serde::de::Error::duplicate_field("tableNameMapping")); + GeneratedField::TableMappings => { + if table_mappings__.is_some() { + return Err(serde::de::Error::duplicate_field("tableMappings")); } - table_name_mapping__ = Some( - map.next_value::>()? 
- ); + table_mappings__ = Some(map.next_value()?); } GeneratedField::SrcTableIdNameMapping => { if src_table_id_name_mapping__.is_some() { @@ -1378,6 +1386,12 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } + GeneratedField::Resync => { + if resync__.is_some() { + return Err(serde::de::Error::duplicate_field("resync")); + } + resync__ = Some(map.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } @@ -1388,7 +1402,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { destination: destination__, flow_job_name: flow_job_name__.unwrap_or_default(), table_schema: table_schema__, - table_name_mapping: table_name_mapping__.unwrap_or_default(), + table_mappings: table_mappings__.unwrap_or_default(), src_table_id_name_mapping: src_table_id_name_mapping__.unwrap_or_default(), table_name_schema_mapping: table_name_schema_mapping__.unwrap_or_default(), metadata_peer: metadata_peer__, @@ -1406,6 +1420,7 @@ impl<'de> serde::Deserialize<'de> for FlowConnectionConfigs { replication_slot_name: replication_slot_name__.unwrap_or_default(), push_batch_size: push_batch_size__.unwrap_or_default(), push_parallelism: push_parallelism__.unwrap_or_default(), + resync: resync__.unwrap_or_default(), }) } } @@ -2090,9 +2105,6 @@ impl serde::Serialize for PartitionRange { partition_range::Range::TidRange(v) => { struct_ser.serialize_field("tidRange", v)?; } - partition_range::Range::XminRange(v) => { - struct_ser.serialize_field("xminRange", v)?; - } } } struct_ser.end() @@ -2111,8 +2123,6 @@ impl<'de> serde::Deserialize<'de> for PartitionRange { "timestampRange", "tid_range", "tidRange", - "xmin_range", - "xminRange", ]; #[allow(clippy::enum_variant_names)] @@ -2120,7 +2130,6 @@ impl<'de> serde::Deserialize<'de> for PartitionRange { IntRange, TimestampRange, TidRange, - XminRange, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ 
-2146,7 +2155,6 @@ impl<'de> serde::Deserialize<'de> for PartitionRange { "intRange" | "int_range" => Ok(GeneratedField::IntRange), "timestampRange" | "timestamp_range" => Ok(GeneratedField::TimestampRange), "tidRange" | "tid_range" => Ok(GeneratedField::TidRange), - "xminRange" | "xmin_range" => Ok(GeneratedField::XminRange), _ => Ok(GeneratedField::__SkipField__), } } @@ -2188,13 +2196,6 @@ impl<'de> serde::Deserialize<'de> for PartitionRange { return Err(serde::de::Error::duplicate_field("tidRange")); } range__ = map.next_value::<::std::option::Option<_>>()?.map(partition_range::Range::TidRange) -; - } - GeneratedField::XminRange => { - if range__.is_some() { - return Err(serde::de::Error::duplicate_field("xminRange")); - } - range__ = map.next_value::<::std::option::Option<_>>()?.map(partition_range::Range::XminRange) ; } GeneratedField::__SkipField__ => { @@ -2364,6 +2365,9 @@ impl serde::Serialize for QRepConfig { if self.num_rows_per_partition != 0 { len += 1; } + if self.setup_watermark_table_on_destination { + len += 1; + } let mut struct_ser = serializer.serialize_struct("peerdb_flow.QRepConfig", len)?; if !self.flow_job_name.is_empty() { struct_ser.serialize_field("flowJobName", &self.flow_job_name)?; @@ -2415,6 +2419,9 @@ impl serde::Serialize for QRepConfig { if self.num_rows_per_partition != 0 { struct_ser.serialize_field("numRowsPerPartition", &self.num_rows_per_partition)?; } + if self.setup_watermark_table_on_destination { + struct_ser.serialize_field("setupWatermarkTableOnDestination", &self.setup_watermark_table_on_destination)?; + } struct_ser.end() } } @@ -2456,6 +2463,8 @@ impl<'de> serde::Deserialize<'de> for QRepConfig { "stagingPath", "num_rows_per_partition", "numRowsPerPartition", + "setup_watermark_table_on_destination", + "setupWatermarkTableOnDestination", ]; #[allow(clippy::enum_variant_names)] @@ -2476,6 +2485,7 @@ impl<'de> serde::Deserialize<'de> for QRepConfig { WriteMode, StagingPath, NumRowsPerPartition, + 
SetupWatermarkTableOnDestination, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -2514,6 +2524,7 @@ impl<'de> serde::Deserialize<'de> for QRepConfig { "writeMode" | "write_mode" => Ok(GeneratedField::WriteMode), "stagingPath" | "staging_path" => Ok(GeneratedField::StagingPath), "numRowsPerPartition" | "num_rows_per_partition" => Ok(GeneratedField::NumRowsPerPartition), + "setupWatermarkTableOnDestination" | "setup_watermark_table_on_destination" => Ok(GeneratedField::SetupWatermarkTableOnDestination), _ => Ok(GeneratedField::__SkipField__), } } @@ -2549,6 +2560,7 @@ impl<'de> serde::Deserialize<'de> for QRepConfig { let mut write_mode__ = None; let mut staging_path__ = None; let mut num_rows_per_partition__ = None; + let mut setup_watermark_table_on_destination__ = None; while let Some(k) = map.next_key()? { match k { GeneratedField::FlowJobName => { @@ -2657,6 +2669,12 @@ impl<'de> serde::Deserialize<'de> for QRepConfig { Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } + GeneratedField::SetupWatermarkTableOnDestination => { + if setup_watermark_table_on_destination__.is_some() { + return Err(serde::de::Error::duplicate_field("setupWatermarkTableOnDestination")); + } + setup_watermark_table_on_destination__ = Some(map.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } @@ -2679,6 +2697,7 @@ impl<'de> serde::Deserialize<'de> for QRepConfig { write_mode: write_mode__, staging_path: staging_path__.unwrap_or_default(), num_rows_per_partition: num_rows_per_partition__.unwrap_or_default(), + setup_watermark_table_on_destination: setup_watermark_table_on_destination__.unwrap_or_default(), }) } } @@ -3558,7 +3577,7 @@ impl<'de> serde::Deserialize<'de> for RelationMessageColumn { deserializer.deserialize_struct("peerdb_flow.RelationMessageColumn", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for ReplayTableSchemaDeltaInput { +impl serde::Serialize for RenameTableOption { 
#[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -3566,39 +3585,39 @@ impl serde::Serialize for ReplayTableSchemaDeltaInput { { use serde::ser::SerializeStruct; let mut len = 0; - if self.flow_connection_configs.is_some() { + if !self.current_name.is_empty() { len += 1; } - if self.table_schema_delta.is_some() { + if !self.new_name.is_empty() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_flow.ReplayTableSchemaDeltaInput", len)?; - if let Some(v) = self.flow_connection_configs.as_ref() { - struct_ser.serialize_field("flowConnectionConfigs", v)?; + let mut struct_ser = serializer.serialize_struct("peerdb_flow.RenameTableOption", len)?; + if !self.current_name.is_empty() { + struct_ser.serialize_field("currentName", &self.current_name)?; } - if let Some(v) = self.table_schema_delta.as_ref() { - struct_ser.serialize_field("tableSchemaDelta", v)?; + if !self.new_name.is_empty() { + struct_ser.serialize_field("newName", &self.new_name)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for ReplayTableSchemaDeltaInput { +impl<'de> serde::Deserialize<'de> for RenameTableOption { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "flow_connection_configs", - "flowConnectionConfigs", - "table_schema_delta", - "tableSchemaDelta", + "current_name", + "currentName", + "new_name", + "newName", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - FlowConnectionConfigs, - TableSchemaDelta, + CurrentName, + NewName, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -3621,8 +3640,8 @@ impl<'de> serde::Deserialize<'de> for ReplayTableSchemaDeltaInput { E: serde::de::Error, { match value { - "flowConnectionConfigs" | "flow_connection_configs" => Ok(GeneratedField::FlowConnectionConfigs), - "tableSchemaDelta" | "table_schema_delta" => Ok(GeneratedField::TableSchemaDelta), + "currentName" 
| "current_name" => Ok(GeneratedField::CurrentName), + "newName" | "new_name" => Ok(GeneratedField::NewName), _ => Ok(GeneratedField::__SkipField__), } } @@ -3632,47 +3651,47 @@ impl<'de> serde::Deserialize<'de> for ReplayTableSchemaDeltaInput { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = ReplayTableSchemaDeltaInput; + type Value = RenameTableOption; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_flow.ReplayTableSchemaDeltaInput") + formatter.write_str("struct peerdb_flow.RenameTableOption") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut flow_connection_configs__ = None; - let mut table_schema_delta__ = None; + let mut current_name__ = None; + let mut new_name__ = None; while let Some(k) = map.next_key()? { match k { - GeneratedField::FlowConnectionConfigs => { - if flow_connection_configs__.is_some() { - return Err(serde::de::Error::duplicate_field("flowConnectionConfigs")); + GeneratedField::CurrentName => { + if current_name__.is_some() { + return Err(serde::de::Error::duplicate_field("currentName")); } - flow_connection_configs__ = map.next_value()?; + current_name__ = Some(map.next_value()?); } - GeneratedField::TableSchemaDelta => { - if table_schema_delta__.is_some() { - return Err(serde::de::Error::duplicate_field("tableSchemaDelta")); + GeneratedField::NewName => { + if new_name__.is_some() { + return Err(serde::de::Error::duplicate_field("newName")); } - table_schema_delta__ = map.next_value()?; + new_name__ = Some(map.next_value()?); } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(ReplayTableSchemaDeltaInput { - flow_connection_configs: flow_connection_configs__, - table_schema_delta: table_schema_delta__, + Ok(RenameTableOption { + current_name: current_name__.unwrap_or_default(), + 
new_name: new_name__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_flow.ReplayTableSchemaDeltaInput", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_flow.RenameTableOption", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for SetupNormalizedTableBatchInput { +impl serde::Serialize for RenameTablesInput { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -3680,39 +3699,47 @@ impl serde::Serialize for SetupNormalizedTableBatchInput { { use serde::ser::SerializeStruct; let mut len = 0; - if self.peer_connection_config.is_some() { + if !self.flow_job_name.is_empty() { len += 1; } - if !self.table_name_schema_mapping.is_empty() { + if self.peer.is_some() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_flow.SetupNormalizedTableBatchInput", len)?; - if let Some(v) = self.peer_connection_config.as_ref() { - struct_ser.serialize_field("peerConnectionConfig", v)?; + if !self.rename_table_options.is_empty() { + len += 1; } - if !self.table_name_schema_mapping.is_empty() { - struct_ser.serialize_field("tableNameSchemaMapping", &self.table_name_schema_mapping)?; + let mut struct_ser = serializer.serialize_struct("peerdb_flow.RenameTablesInput", len)?; + if !self.flow_job_name.is_empty() { + struct_ser.serialize_field("flowJobName", &self.flow_job_name)?; + } + if let Some(v) = self.peer.as_ref() { + struct_ser.serialize_field("peer", v)?; + } + if !self.rename_table_options.is_empty() { + struct_ser.serialize_field("renameTableOptions", &self.rename_table_options)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for SetupNormalizedTableBatchInput { +impl<'de> serde::Deserialize<'de> for RenameTablesInput { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "peer_connection_config", - "peerConnectionConfig", - "table_name_schema_mapping", - "tableNameSchemaMapping", + 
"flow_job_name", + "flowJobName", + "peer", + "rename_table_options", + "renameTableOptions", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - PeerConnectionConfig, - TableNameSchemaMapping, + FlowJobName, + Peer, + RenameTableOptions, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -3735,8 +3762,9 @@ impl<'de> serde::Deserialize<'de> for SetupNormalizedTableBatchInput { E: serde::de::Error, { match value { - "peerConnectionConfig" | "peer_connection_config" => Ok(GeneratedField::PeerConnectionConfig), - "tableNameSchemaMapping" | "table_name_schema_mapping" => Ok(GeneratedField::TableNameSchemaMapping), + "flowJobName" | "flow_job_name" => Ok(GeneratedField::FlowJobName), + "peer" => Ok(GeneratedField::Peer), + "renameTableOptions" | "rename_table_options" => Ok(GeneratedField::RenameTableOptions), _ => Ok(GeneratedField::__SkipField__), } } @@ -3746,49 +3774,55 @@ impl<'de> serde::Deserialize<'de> for SetupNormalizedTableBatchInput { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = SetupNormalizedTableBatchInput; + type Value = RenameTablesInput; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_flow.SetupNormalizedTableBatchInput") + formatter.write_str("struct peerdb_flow.RenameTablesInput") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut peer_connection_config__ = None; - let mut table_name_schema_mapping__ = None; + let mut flow_job_name__ = None; + let mut peer__ = None; + let mut rename_table_options__ = None; while let Some(k) = map.next_key()? 
{ match k { - GeneratedField::PeerConnectionConfig => { - if peer_connection_config__.is_some() { - return Err(serde::de::Error::duplicate_field("peerConnectionConfig")); + GeneratedField::FlowJobName => { + if flow_job_name__.is_some() { + return Err(serde::de::Error::duplicate_field("flowJobName")); } - peer_connection_config__ = map.next_value()?; + flow_job_name__ = Some(map.next_value()?); } - GeneratedField::TableNameSchemaMapping => { - if table_name_schema_mapping__.is_some() { - return Err(serde::de::Error::duplicate_field("tableNameSchemaMapping")); + GeneratedField::Peer => { + if peer__.is_some() { + return Err(serde::de::Error::duplicate_field("peer")); } - table_name_schema_mapping__ = Some( - map.next_value::>()? - ); + peer__ = map.next_value()?; + } + GeneratedField::RenameTableOptions => { + if rename_table_options__.is_some() { + return Err(serde::de::Error::duplicate_field("renameTableOptions")); + } + rename_table_options__ = Some(map.next_value()?); } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(SetupNormalizedTableBatchInput { - peer_connection_config: peer_connection_config__, - table_name_schema_mapping: table_name_schema_mapping__.unwrap_or_default(), + Ok(RenameTablesInput { + flow_job_name: flow_job_name__.unwrap_or_default(), + peer: peer__, + rename_table_options: rename_table_options__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_flow.SetupNormalizedTableBatchInput", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_flow.RenameTablesInput", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for SetupNormalizedTableBatchOutput { +impl serde::Serialize for RenameTablesOutput { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -3796,30 +3830,30 @@ impl serde::Serialize for SetupNormalizedTableBatchOutput { { use serde::ser::SerializeStruct; let mut len = 0; - if !self.table_exists_mapping.is_empty() { + if 
!self.flow_job_name.is_empty() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_flow.SetupNormalizedTableBatchOutput", len)?; - if !self.table_exists_mapping.is_empty() { - struct_ser.serialize_field("tableExistsMapping", &self.table_exists_mapping)?; + let mut struct_ser = serializer.serialize_struct("peerdb_flow.RenameTablesOutput", len)?; + if !self.flow_job_name.is_empty() { + struct_ser.serialize_field("flowJobName", &self.flow_job_name)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for SetupNormalizedTableBatchOutput { +impl<'de> serde::Deserialize<'de> for RenameTablesOutput { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "table_exists_mapping", - "tableExistsMapping", + "flow_job_name", + "flowJobName", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - TableExistsMapping, + FlowJobName, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -3842,7 +3876,7 @@ impl<'de> serde::Deserialize<'de> for SetupNormalizedTableBatchOutput { E: serde::de::Error, { match value { - "tableExistsMapping" | "table_exists_mapping" => Ok(GeneratedField::TableExistsMapping), + "flowJobName" | "flow_job_name" => Ok(GeneratedField::FlowJobName), _ => Ok(GeneratedField::__SkipField__), } } @@ -3852,41 +3886,39 @@ impl<'de> serde::Deserialize<'de> for SetupNormalizedTableBatchOutput { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = SetupNormalizedTableBatchOutput; + type Value = RenameTablesOutput; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_flow.SetupNormalizedTableBatchOutput") + formatter.write_str("struct peerdb_flow.RenameTablesOutput") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { 
- let mut table_exists_mapping__ = None; + let mut flow_job_name__ = None; while let Some(k) = map.next_key()? { match k { - GeneratedField::TableExistsMapping => { - if table_exists_mapping__.is_some() { - return Err(serde::de::Error::duplicate_field("tableExistsMapping")); + GeneratedField::FlowJobName => { + if flow_job_name__.is_some() { + return Err(serde::de::Error::duplicate_field("flowJobName")); } - table_exists_mapping__ = Some( - map.next_value::>()? - ); + flow_job_name__ = Some(map.next_value()?); } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(SetupNormalizedTableBatchOutput { - table_exists_mapping: table_exists_mapping__.unwrap_or_default(), + Ok(RenameTablesOutput { + flow_job_name: flow_job_name__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_flow.SetupNormalizedTableBatchOutput", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_flow.RenameTablesOutput", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for SetupNormalizedTableInput { +impl serde::Serialize for ReplayTableSchemaDeltaInput { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -3894,48 +3926,39 @@ impl serde::Serialize for SetupNormalizedTableInput { { use serde::ser::SerializeStruct; let mut len = 0; - if self.peer_connection_config.is_some() { - len += 1; - } - if !self.table_identifier.is_empty() { + if self.flow_connection_configs.is_some() { len += 1; } - if self.source_table_schema.is_some() { + if !self.table_schema_deltas.is_empty() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_flow.SetupNormalizedTableInput", len)?; - if let Some(v) = self.peer_connection_config.as_ref() { - struct_ser.serialize_field("peerConnectionConfig", v)?; - } - if !self.table_identifier.is_empty() { - struct_ser.serialize_field("tableIdentifier", &self.table_identifier)?; + let mut struct_ser = serializer.serialize_struct("peerdb_flow.ReplayTableSchemaDeltaInput", 
len)?; + if let Some(v) = self.flow_connection_configs.as_ref() { + struct_ser.serialize_field("flowConnectionConfigs", v)?; } - if let Some(v) = self.source_table_schema.as_ref() { - struct_ser.serialize_field("sourceTableSchema", v)?; + if !self.table_schema_deltas.is_empty() { + struct_ser.serialize_field("tableSchemaDeltas", &self.table_schema_deltas)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for SetupNormalizedTableInput { +impl<'de> serde::Deserialize<'de> for ReplayTableSchemaDeltaInput { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "peer_connection_config", - "peerConnectionConfig", - "table_identifier", - "tableIdentifier", - "source_table_schema", - "sourceTableSchema", + "flow_connection_configs", + "flowConnectionConfigs", + "table_schema_deltas", + "tableSchemaDeltas", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - PeerConnectionConfig, - TableIdentifier, - SourceTableSchema, + FlowConnectionConfigs, + TableSchemaDeltas, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -3958,9 +3981,8 @@ impl<'de> serde::Deserialize<'de> for SetupNormalizedTableInput { E: serde::de::Error, { match value { - "peerConnectionConfig" | "peer_connection_config" => Ok(GeneratedField::PeerConnectionConfig), - "tableIdentifier" | "table_identifier" => Ok(GeneratedField::TableIdentifier), - "sourceTableSchema" | "source_table_schema" => Ok(GeneratedField::SourceTableSchema), + "flowConnectionConfigs" | "flow_connection_configs" => Ok(GeneratedField::FlowConnectionConfigs), + "tableSchemaDeltas" | "table_schema_deltas" => Ok(GeneratedField::TableSchemaDeltas), _ => Ok(GeneratedField::__SkipField__), } } @@ -3970,55 +3992,47 @@ impl<'de> serde::Deserialize<'de> for SetupNormalizedTableInput { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = SetupNormalizedTableInput; + type 
Value = ReplayTableSchemaDeltaInput; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_flow.SetupNormalizedTableInput") + formatter.write_str("struct peerdb_flow.ReplayTableSchemaDeltaInput") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut peer_connection_config__ = None; - let mut table_identifier__ = None; - let mut source_table_schema__ = None; + let mut flow_connection_configs__ = None; + let mut table_schema_deltas__ = None; while let Some(k) = map.next_key()? { match k { - GeneratedField::PeerConnectionConfig => { - if peer_connection_config__.is_some() { - return Err(serde::de::Error::duplicate_field("peerConnectionConfig")); - } - peer_connection_config__ = map.next_value()?; - } - GeneratedField::TableIdentifier => { - if table_identifier__.is_some() { - return Err(serde::de::Error::duplicate_field("tableIdentifier")); + GeneratedField::FlowConnectionConfigs => { + if flow_connection_configs__.is_some() { + return Err(serde::de::Error::duplicate_field("flowConnectionConfigs")); } - table_identifier__ = Some(map.next_value()?); + flow_connection_configs__ = map.next_value()?; } - GeneratedField::SourceTableSchema => { - if source_table_schema__.is_some() { - return Err(serde::de::Error::duplicate_field("sourceTableSchema")); + GeneratedField::TableSchemaDeltas => { + if table_schema_deltas__.is_some() { + return Err(serde::de::Error::duplicate_field("tableSchemaDeltas")); } - source_table_schema__ = map.next_value()?; + table_schema_deltas__ = Some(map.next_value()?); } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(SetupNormalizedTableInput { - peer_connection_config: peer_connection_config__, - table_identifier: table_identifier__.unwrap_or_default(), - source_table_schema: source_table_schema__, + Ok(ReplayTableSchemaDeltaInput { + 
flow_connection_configs: flow_connection_configs__, + table_schema_deltas: table_schema_deltas__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_flow.SetupNormalizedTableInput", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_flow.ReplayTableSchemaDeltaInput", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for SetupNormalizedTableOutput { +impl serde::Serialize for SetupNormalizedTableBatchInput { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -4026,39 +4040,39 @@ impl serde::Serialize for SetupNormalizedTableOutput { { use serde::ser::SerializeStruct; let mut len = 0; - if !self.table_identifier.is_empty() { + if self.peer_connection_config.is_some() { len += 1; } - if self.already_exists { + if !self.table_name_schema_mapping.is_empty() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_flow.SetupNormalizedTableOutput", len)?; - if !self.table_identifier.is_empty() { - struct_ser.serialize_field("tableIdentifier", &self.table_identifier)?; + let mut struct_ser = serializer.serialize_struct("peerdb_flow.SetupNormalizedTableBatchInput", len)?; + if let Some(v) = self.peer_connection_config.as_ref() { + struct_ser.serialize_field("peerConnectionConfig", v)?; } - if self.already_exists { - struct_ser.serialize_field("alreadyExists", &self.already_exists)?; + if !self.table_name_schema_mapping.is_empty() { + struct_ser.serialize_field("tableNameSchemaMapping", &self.table_name_schema_mapping)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for SetupNormalizedTableOutput { +impl<'de> serde::Deserialize<'de> for SetupNormalizedTableBatchInput { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "table_identifier", - "tableIdentifier", - "already_exists", - "alreadyExists", + "peer_connection_config", + "peerConnectionConfig", + "table_name_schema_mapping", + 
"tableNameSchemaMapping", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - TableIdentifier, - AlreadyExists, + PeerConnectionConfig, + TableNameSchemaMapping, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -4081,8 +4095,8 @@ impl<'de> serde::Deserialize<'de> for SetupNormalizedTableOutput { E: serde::de::Error, { match value { - "tableIdentifier" | "table_identifier" => Ok(GeneratedField::TableIdentifier), - "alreadyExists" | "already_exists" => Ok(GeneratedField::AlreadyExists), + "peerConnectionConfig" | "peer_connection_config" => Ok(GeneratedField::PeerConnectionConfig), + "tableNameSchemaMapping" | "table_name_schema_mapping" => Ok(GeneratedField::TableNameSchemaMapping), _ => Ok(GeneratedField::__SkipField__), } } @@ -4092,18 +4106,364 @@ impl<'de> serde::Deserialize<'de> for SetupNormalizedTableOutput { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = SetupNormalizedTableOutput; + type Value = SetupNormalizedTableBatchInput; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_flow.SetupNormalizedTableOutput") + formatter.write_str("struct peerdb_flow.SetupNormalizedTableBatchInput") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut table_identifier__ = None; - let mut already_exists__ = None; + let mut peer_connection_config__ = None; + let mut table_name_schema_mapping__ = None; + while let Some(k) = map.next_key()? 
{ + match k { + GeneratedField::PeerConnectionConfig => { + if peer_connection_config__.is_some() { + return Err(serde::de::Error::duplicate_field("peerConnectionConfig")); + } + peer_connection_config__ = map.next_value()?; + } + GeneratedField::TableNameSchemaMapping => { + if table_name_schema_mapping__.is_some() { + return Err(serde::de::Error::duplicate_field("tableNameSchemaMapping")); + } + table_name_schema_mapping__ = Some( + map.next_value::>()? + ); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(SetupNormalizedTableBatchInput { + peer_connection_config: peer_connection_config__, + table_name_schema_mapping: table_name_schema_mapping__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_flow.SetupNormalizedTableBatchInput", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for SetupNormalizedTableBatchOutput { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.table_exists_mapping.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_flow.SetupNormalizedTableBatchOutput", len)?; + if !self.table_exists_mapping.is_empty() { + struct_ser.serialize_field("tableExistsMapping", &self.table_exists_mapping)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for SetupNormalizedTableBatchOutput { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "table_exists_mapping", + "tableExistsMapping", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + TableExistsMapping, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> 
serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "tableExistsMapping" | "table_exists_mapping" => Ok(GeneratedField::TableExistsMapping), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = SetupNormalizedTableBatchOutput; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_flow.SetupNormalizedTableBatchOutput") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut table_exists_mapping__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::TableExistsMapping => { + if table_exists_mapping__.is_some() { + return Err(serde::de::Error::duplicate_field("tableExistsMapping")); + } + table_exists_mapping__ = Some( + map.next_value::>()? 
+ ); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(SetupNormalizedTableBatchOutput { + table_exists_mapping: table_exists_mapping__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_flow.SetupNormalizedTableBatchOutput", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for SetupNormalizedTableInput { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.peer_connection_config.is_some() { + len += 1; + } + if !self.table_identifier.is_empty() { + len += 1; + } + if self.source_table_schema.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_flow.SetupNormalizedTableInput", len)?; + if let Some(v) = self.peer_connection_config.as_ref() { + struct_ser.serialize_field("peerConnectionConfig", v)?; + } + if !self.table_identifier.is_empty() { + struct_ser.serialize_field("tableIdentifier", &self.table_identifier)?; + } + if let Some(v) = self.source_table_schema.as_ref() { + struct_ser.serialize_field("sourceTableSchema", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for SetupNormalizedTableInput { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "peer_connection_config", + "peerConnectionConfig", + "table_identifier", + "tableIdentifier", + "source_table_schema", + "sourceTableSchema", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + PeerConnectionConfig, + TableIdentifier, + SourceTableSchema, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; 
+ + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "peerConnectionConfig" | "peer_connection_config" => Ok(GeneratedField::PeerConnectionConfig), + "tableIdentifier" | "table_identifier" => Ok(GeneratedField::TableIdentifier), + "sourceTableSchema" | "source_table_schema" => Ok(GeneratedField::SourceTableSchema), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = SetupNormalizedTableInput; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_flow.SetupNormalizedTableInput") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut peer_connection_config__ = None; + let mut table_identifier__ = None; + let mut source_table_schema__ = None; + while let Some(k) = map.next_key()? 
{ + match k { + GeneratedField::PeerConnectionConfig => { + if peer_connection_config__.is_some() { + return Err(serde::de::Error::duplicate_field("peerConnectionConfig")); + } + peer_connection_config__ = map.next_value()?; + } + GeneratedField::TableIdentifier => { + if table_identifier__.is_some() { + return Err(serde::de::Error::duplicate_field("tableIdentifier")); + } + table_identifier__ = Some(map.next_value()?); + } + GeneratedField::SourceTableSchema => { + if source_table_schema__.is_some() { + return Err(serde::de::Error::duplicate_field("sourceTableSchema")); + } + source_table_schema__ = map.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(SetupNormalizedTableInput { + peer_connection_config: peer_connection_config__, + table_identifier: table_identifier__.unwrap_or_default(), + source_table_schema: source_table_schema__, + }) + } + } + deserializer.deserialize_struct("peerdb_flow.SetupNormalizedTableInput", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for SetupNormalizedTableOutput { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.table_identifier.is_empty() { + len += 1; + } + if self.already_exists { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_flow.SetupNormalizedTableOutput", len)?; + if !self.table_identifier.is_empty() { + struct_ser.serialize_field("tableIdentifier", &self.table_identifier)?; + } + if self.already_exists { + struct_ser.serialize_field("alreadyExists", &self.already_exists)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for SetupNormalizedTableOutput { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "table_identifier", + "tableIdentifier", + "already_exists", + "alreadyExists", 
+ ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + TableIdentifier, + AlreadyExists, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "tableIdentifier" | "table_identifier" => Ok(GeneratedField::TableIdentifier), + "alreadyExists" | "already_exists" => Ok(GeneratedField::AlreadyExists), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = SetupNormalizedTableOutput; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_flow.SetupNormalizedTableOutput") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut table_identifier__ = None; + let mut already_exists__ = None; while let Some(k) = map.next_key()? 
{ match k { GeneratedField::TableIdentifier => { @@ -5151,6 +5511,138 @@ impl<'de> serde::Deserialize<'de> for TableIdentifier { deserializer.deserialize_struct("peerdb_flow.TableIdentifier", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for TableMapping { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.source_table_identifier.is_empty() { + len += 1; + } + if !self.destination_table_identifier.is_empty() { + len += 1; + } + if !self.partition_key.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_flow.TableMapping", len)?; + if !self.source_table_identifier.is_empty() { + struct_ser.serialize_field("sourceTableIdentifier", &self.source_table_identifier)?; + } + if !self.destination_table_identifier.is_empty() { + struct_ser.serialize_field("destinationTableIdentifier", &self.destination_table_identifier)?; + } + if !self.partition_key.is_empty() { + struct_ser.serialize_field("partitionKey", &self.partition_key)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TableMapping { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "source_table_identifier", + "sourceTableIdentifier", + "destination_table_identifier", + "destinationTableIdentifier", + "partition_key", + "partitionKey", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + SourceTableIdentifier, + DestinationTableIdentifier, + PartitionKey, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "sourceTableIdentifier" | "source_table_identifier" => Ok(GeneratedField::SourceTableIdentifier), + "destinationTableIdentifier" | "destination_table_identifier" => Ok(GeneratedField::DestinationTableIdentifier), + "partitionKey" | "partition_key" => Ok(GeneratedField::PartitionKey), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TableMapping; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_flow.TableMapping") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut source_table_identifier__ = None; + let mut destination_table_identifier__ = None; + let mut partition_key__ = None; + while let Some(k) = map.next_key()? 
{ + match k { + GeneratedField::SourceTableIdentifier => { + if source_table_identifier__.is_some() { + return Err(serde::de::Error::duplicate_field("sourceTableIdentifier")); + } + source_table_identifier__ = Some(map.next_value()?); + } + GeneratedField::DestinationTableIdentifier => { + if destination_table_identifier__.is_some() { + return Err(serde::de::Error::duplicate_field("destinationTableIdentifier")); + } + destination_table_identifier__ = Some(map.next_value()?); + } + GeneratedField::PartitionKey => { + if partition_key__.is_some() { + return Err(serde::de::Error::duplicate_field("partitionKey")); + } + partition_key__ = Some(map.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(TableMapping { + source_table_identifier: source_table_identifier__.unwrap_or_default(), + destination_table_identifier: destination_table_identifier__.unwrap_or_default(), + partition_key: partition_key__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_flow.TableMapping", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for TableNameMapping { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -5279,7 +5771,7 @@ impl serde::Serialize for TableSchema { if !self.columns.is_empty() { len += 1; } - if !self.primary_key_column.is_empty() { + if !self.primary_key_columns.is_empty() { len += 1; } if self.is_replica_identity_full { @@ -5292,8 +5784,8 @@ impl serde::Serialize for TableSchema { if !self.columns.is_empty() { struct_ser.serialize_field("columns", &self.columns)?; } - if !self.primary_key_column.is_empty() { - struct_ser.serialize_field("primaryKeyColumn", &self.primary_key_column)?; + if !self.primary_key_columns.is_empty() { + struct_ser.serialize_field("primaryKeyColumns", &self.primary_key_columns)?; } if self.is_replica_identity_full { struct_ser.serialize_field("isReplicaIdentityFull", &self.is_replica_identity_full)?; @@ -5311,8 +5803,8 @@ impl<'de> 
serde::Deserialize<'de> for TableSchema { "table_identifier", "tableIdentifier", "columns", - "primary_key_column", - "primaryKeyColumn", + "primary_key_columns", + "primaryKeyColumns", "is_replica_identity_full", "isReplicaIdentityFull", ]; @@ -5321,7 +5813,7 @@ impl<'de> serde::Deserialize<'de> for TableSchema { enum GeneratedField { TableIdentifier, Columns, - PrimaryKeyColumn, + PrimaryKeyColumns, IsReplicaIdentityFull, __SkipField__, } @@ -5347,7 +5839,7 @@ impl<'de> serde::Deserialize<'de> for TableSchema { match value { "tableIdentifier" | "table_identifier" => Ok(GeneratedField::TableIdentifier), "columns" => Ok(GeneratedField::Columns), - "primaryKeyColumn" | "primary_key_column" => Ok(GeneratedField::PrimaryKeyColumn), + "primaryKeyColumns" | "primary_key_columns" => Ok(GeneratedField::PrimaryKeyColumns), "isReplicaIdentityFull" | "is_replica_identity_full" => Ok(GeneratedField::IsReplicaIdentityFull), _ => Ok(GeneratedField::__SkipField__), } @@ -5370,7 +5862,7 @@ impl<'de> serde::Deserialize<'de> for TableSchema { { let mut table_identifier__ = None; let mut columns__ = None; - let mut primary_key_column__ = None; + let mut primary_key_columns__ = None; let mut is_replica_identity_full__ = None; while let Some(k) = map.next_key()? { match k { @@ -5388,11 +5880,11 @@ impl<'de> serde::Deserialize<'de> for TableSchema { map.next_value::>()? 
); } - GeneratedField::PrimaryKeyColumn => { - if primary_key_column__.is_some() { - return Err(serde::de::Error::duplicate_field("primaryKeyColumn")); + GeneratedField::PrimaryKeyColumns => { + if primary_key_columns__.is_some() { + return Err(serde::de::Error::duplicate_field("primaryKeyColumns")); } - primary_key_column__ = Some(map.next_value()?); + primary_key_columns__ = Some(map.next_value()?); } GeneratedField::IsReplicaIdentityFull => { if is_replica_identity_full__.is_some() { @@ -5408,7 +5900,7 @@ impl<'de> serde::Deserialize<'de> for TableSchema { Ok(TableSchema { table_identifier: table_identifier__.unwrap_or_default(), columns: columns__.unwrap_or_default(), - primary_key_column: primary_key_column__.unwrap_or_default(), + primary_key_columns: primary_key_columns__.unwrap_or_default(), is_replica_identity_full: is_replica_identity_full__.unwrap_or_default(), }) } @@ -5433,9 +5925,6 @@ impl serde::Serialize for TableSchemaDelta { if !self.added_columns.is_empty() { len += 1; } - if !self.dropped_columns.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("peerdb_flow.TableSchemaDelta", len)?; if !self.src_table_name.is_empty() { struct_ser.serialize_field("srcTableName", &self.src_table_name)?; @@ -5446,9 +5935,6 @@ impl serde::Serialize for TableSchemaDelta { if !self.added_columns.is_empty() { struct_ser.serialize_field("addedColumns", &self.added_columns)?; } - if !self.dropped_columns.is_empty() { - struct_ser.serialize_field("droppedColumns", &self.dropped_columns)?; - } struct_ser.end() } } @@ -5465,8 +5951,6 @@ impl<'de> serde::Deserialize<'de> for TableSchemaDelta { "dstTableName", "added_columns", "addedColumns", - "dropped_columns", - "droppedColumns", ]; #[allow(clippy::enum_variant_names)] @@ -5474,7 +5958,6 @@ impl<'de> serde::Deserialize<'de> for TableSchemaDelta { SrcTableName, DstTableName, AddedColumns, - DroppedColumns, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -5500,7 +5983,6 
@@ impl<'de> serde::Deserialize<'de> for TableSchemaDelta { "srcTableName" | "src_table_name" => Ok(GeneratedField::SrcTableName), "dstTableName" | "dst_table_name" => Ok(GeneratedField::DstTableName), "addedColumns" | "added_columns" => Ok(GeneratedField::AddedColumns), - "droppedColumns" | "dropped_columns" => Ok(GeneratedField::DroppedColumns), _ => Ok(GeneratedField::__SkipField__), } } @@ -5523,7 +6005,6 @@ impl<'de> serde::Deserialize<'de> for TableSchemaDelta { let mut src_table_name__ = None; let mut dst_table_name__ = None; let mut added_columns__ = None; - let mut dropped_columns__ = None; while let Some(k) = map.next_key()? { match k { GeneratedField::SrcTableName => { @@ -5544,12 +6025,6 @@ impl<'de> serde::Deserialize<'de> for TableSchemaDelta { } added_columns__ = Some(map.next_value()?); } - GeneratedField::DroppedColumns => { - if dropped_columns__.is_some() { - return Err(serde::de::Error::duplicate_field("droppedColumns")); - } - dropped_columns__ = Some(map.next_value()?); - } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } @@ -5559,7 +6034,6 @@ impl<'de> serde::Deserialize<'de> for TableSchemaDelta { src_table_name: src_table_name__.unwrap_or_default(), dst_table_name: dst_table_name__.unwrap_or_default(), added_columns: added_columns__.unwrap_or_default(), - dropped_columns: dropped_columns__.unwrap_or_default(), }) } } @@ -5678,119 +6152,3 @@ impl<'de> serde::Deserialize<'de> for TimestampPartitionRange { deserializer.deserialize_struct("peerdb_flow.TimestampPartitionRange", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for XminPartitionRange { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.start != 0 { - len += 1; - } - if self.end != 0 { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("peerdb_flow.XMINPartitionRange", len)?; - if self.start != 0 { - 
struct_ser.serialize_field("start", &self.start)?; - } - if self.end != 0 { - struct_ser.serialize_field("end", &self.end)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for XminPartitionRange { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "start", - "end", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - Start, - End, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "start" => Ok(GeneratedField::Start), - "end" => Ok(GeneratedField::End), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = XminPartitionRange; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_flow.XMINPartitionRange") - } - - fn visit_map(self, mut map: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut start__ = None; - let mut end__ = None; - while let Some(k) = map.next_key()? 
{ - match k { - GeneratedField::Start => { - if start__.is_some() { - return Err(serde::de::Error::duplicate_field("start")); - } - start__ = - Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::End => { - if end__.is_some() { - return Err(serde::de::Error::duplicate_field("end")); - } - end__ = - Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::__SkipField__ => { - let _ = map.next_value::()?; - } - } - } - Ok(XminPartitionRange { - start: start__.unwrap_or_default(), - end: end__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("peerdb_flow.XMINPartitionRange", FIELDS, GeneratedVisitor) - } -} diff --git a/nexus/pt/src/peerdb_peers.rs b/nexus/pt/src/peerdb_peers.rs index 8b96963bb..6bff1325f 100644 --- a/nexus/pt/src/peerdb_peers.rs +++ b/nexus/pt/src/peerdb_peers.rs @@ -2,143 +2,177 @@ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SnowflakeConfig { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub account_id: ::prost::alloc::string::String, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub username: ::prost::alloc::string::String, - #[prost(string, tag="3")] + #[prost(string, tag = "3")] pub private_key: ::prost::alloc::string::String, - #[prost(string, tag="4")] + #[prost(string, tag = "4")] pub database: ::prost::alloc::string::String, - #[prost(string, tag="6")] + #[prost(string, tag = "6")] pub warehouse: ::prost::alloc::string::String, - #[prost(string, tag="7")] + #[prost(string, tag = "7")] pub role: ::prost::alloc::string::String, - #[prost(uint64, tag="8")] + #[prost(uint64, tag = "8")] pub query_timeout: u64, - #[prost(string, tag="9")] + #[prost(string, tag = "9")] pub s3_integration: ::prost::alloc::string::String, - #[prost(string, optional, tag="10")] + #[prost(string, optional, tag = "10")] pub password: ::core::option::Option<::prost::alloc::string::String>, 
} #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BigqueryConfig { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub auth_type: ::prost::alloc::string::String, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub project_id: ::prost::alloc::string::String, - #[prost(string, tag="3")] + #[prost(string, tag = "3")] pub private_key_id: ::prost::alloc::string::String, - #[prost(string, tag="4")] + #[prost(string, tag = "4")] pub private_key: ::prost::alloc::string::String, - #[prost(string, tag="5")] + #[prost(string, tag = "5")] pub client_email: ::prost::alloc::string::String, - #[prost(string, tag="6")] + #[prost(string, tag = "6")] pub client_id: ::prost::alloc::string::String, - #[prost(string, tag="7")] + #[prost(string, tag = "7")] pub auth_uri: ::prost::alloc::string::String, - #[prost(string, tag="8")] + #[prost(string, tag = "8")] pub token_uri: ::prost::alloc::string::String, - #[prost(string, tag="9")] + #[prost(string, tag = "9")] pub auth_provider_x509_cert_url: ::prost::alloc::string::String, - #[prost(string, tag="10")] + #[prost(string, tag = "10")] pub client_x509_cert_url: ::prost::alloc::string::String, - #[prost(string, tag="11")] + #[prost(string, tag = "11")] pub dataset_id: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MongoConfig { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub username: ::prost::alloc::string::String, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub password: ::prost::alloc::string::String, - #[prost(string, tag="3")] + #[prost(string, tag = "3")] pub clusterurl: ::prost::alloc::string::String, - #[prost(int32, tag="4")] + #[prost(int32, tag = "4")] pub clusterport: i32, - #[prost(string, tag="5")] + #[prost(string, tag = "5")] pub database: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] 
#[derive(Clone, PartialEq, ::prost::Message)] pub struct PostgresConfig { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub host: ::prost::alloc::string::String, - #[prost(uint32, tag="2")] + #[prost(uint32, tag = "2")] pub port: u32, - #[prost(string, tag="3")] + #[prost(string, tag = "3")] pub user: ::prost::alloc::string::String, - #[prost(string, tag="4")] + #[prost(string, tag = "4")] pub password: ::prost::alloc::string::String, - #[prost(string, tag="5")] + #[prost(string, tag = "5")] pub database: ::prost::alloc::string::String, /// this is used only in query replication mode right now. - #[prost(string, tag="6")] + #[prost(string, tag = "6")] pub transaction_snapshot: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventHubConfig { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub namespace: ::prost::alloc::string::String, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub resource_group: ::prost::alloc::string::String, - #[prost(string, tag="3")] + #[prost(string, tag = "3")] pub location: ::prost::alloc::string::String, - #[prost(message, optional, tag="4")] + #[prost(message, optional, tag = "4")] pub metadata_db: ::core::option::Option, + /// if this is empty PeerDB uses `AZURE_SUBSCRIPTION_ID` environment variable. 
+ #[prost(string, tag = "5")] + pub subscription_id: ::prost::alloc::string::String, + /// defaults to 3 + #[prost(uint32, tag = "6")] + pub partition_count: u32, + /// defaults to 7 + #[prost(uint32, tag = "7")] + pub message_retention_in_days: u32, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EventHubGroupConfig { + /// event hub peer name to event hub config + #[prost(map = "string, message", tag = "1")] + pub eventhubs: ::std::collections::HashMap<::prost::alloc::string::String, EventHubConfig>, + #[prost(message, optional, tag = "2")] + pub metadata_db: ::core::option::Option, + #[prost(string, repeated, tag = "3")] + pub unnest_columns: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct S3Config { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub url: ::prost::alloc::string::String, + #[prost(string, optional, tag = "2")] + pub access_key_id: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag = "3")] + pub secret_access_key: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag = "4")] + pub role_arn: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag = "5")] + pub region: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag = "6")] + pub endpoint: ::core::option::Option<::prost::alloc::string::String>, + #[prost(message, optional, tag = "7")] + pub metadata_db: ::core::option::Option, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SqlServerConfig { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub server: ::prost::alloc::string::String, - #[prost(uint32, tag="2")] + #[prost(uint32, tag = "2")] pub port: u32, - #[prost(string, tag="3")] + #[prost(string, tag = 
"3")] pub user: ::prost::alloc::string::String, - #[prost(string, tag="4")] + #[prost(string, tag = "4")] pub password: ::prost::alloc::string::String, - #[prost(string, tag="5")] + #[prost(string, tag = "5")] pub database: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Peer { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub name: ::prost::alloc::string::String, - #[prost(enumeration="DbType", tag="2")] + #[prost(enumeration = "DbType", tag = "2")] pub r#type: i32, - #[prost(oneof="peer::Config", tags="3, 4, 5, 6, 7, 8, 9")] + #[prost(oneof = "peer::Config", tags = "3, 4, 5, 6, 7, 8, 9, 10")] pub config: ::core::option::Option, } /// Nested message and enum types in `Peer`. pub mod peer { #[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Oneof)] + #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Config { - #[prost(message, tag="3")] + #[prost(message, tag = "3")] SnowflakeConfig(super::SnowflakeConfig), - #[prost(message, tag="4")] + #[prost(message, tag = "4")] BigqueryConfig(super::BigqueryConfig), - #[prost(message, tag="5")] + #[prost(message, tag = "5")] MongoConfig(super::MongoConfig), - #[prost(message, tag="6")] + #[prost(message, tag = "6")] PostgresConfig(super::PostgresConfig), - #[prost(message, tag="7")] + #[prost(message, tag = "7")] EventhubConfig(super::EventHubConfig), - #[prost(message, tag="8")] + #[prost(message, tag = "8")] S3Config(super::S3Config), - #[prost(message, tag="9")] + #[prost(message, tag = "9")] SqlserverConfig(super::SqlServerConfig), + #[prost(message, tag = "10")] + EventhubGroupConfig(super::EventHubGroupConfig), } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] @@ -151,6 +185,7 @@ pub enum DbType { Eventhub = 4, S3 = 5, Sqlserver = 6, + EventhubGroup = 7, } impl DbType { /// String value of the enum field names used in the ProtoBuf definition. 
@@ -166,6 +201,7 @@ impl DbType { DbType::Eventhub => "EVENTHUB", DbType::S3 => "S3", DbType::Sqlserver => "SQLSERVER", + DbType::EventhubGroup => "EVENTHUB_GROUP", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -178,9 +214,10 @@ impl DbType { "EVENTHUB" => Some(Self::Eventhub), "S3" => Some(Self::S3), "SQLSERVER" => Some(Self::Sqlserver), + "EVENTHUB_GROUP" => Some(Self::EventhubGroup), _ => None, } } } include!("peerdb_peers.serde.rs"); -// @@protoc_insertion_point(module) \ No newline at end of file +// @@protoc_insertion_point(module) diff --git a/nexus/pt/src/peerdb_peers.serde.rs b/nexus/pt/src/peerdb_peers.serde.rs index 652e417db..e03dc494e 100644 --- a/nexus/pt/src/peerdb_peers.serde.rs +++ b/nexus/pt/src/peerdb_peers.serde.rs @@ -289,6 +289,7 @@ impl serde::Serialize for DbType { Self::Eventhub => "EVENTHUB", Self::S3 => "S3", Self::Sqlserver => "SQLSERVER", + Self::EventhubGroup => "EVENTHUB_GROUP", }; serializer.serialize_str(variant) } @@ -307,6 +308,7 @@ impl<'de> serde::Deserialize<'de> for DbType { "EVENTHUB", "S3", "SQLSERVER", + "EVENTHUB_GROUP", ]; struct GeneratedVisitor; @@ -356,6 +358,7 @@ impl<'de> serde::Deserialize<'de> for DbType { "EVENTHUB" => Ok(DbType::Eventhub), "S3" => Ok(DbType::S3), "SQLSERVER" => Ok(DbType::Sqlserver), + "EVENTHUB_GROUP" => Ok(DbType::EventhubGroup), _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), } } @@ -383,6 +386,15 @@ impl serde::Serialize for EventHubConfig { if self.metadata_db.is_some() { len += 1; } + if !self.subscription_id.is_empty() { + len += 1; + } + if self.partition_count != 0 { + len += 1; + } + if self.message_retention_in_days != 0 { + len += 1; + } let mut struct_ser = serializer.serialize_struct("peerdb_peers.EventHubConfig", len)?; if !self.namespace.is_empty() { struct_ser.serialize_field("namespace", &self.namespace)?; @@ -396,6 +408,15 @@ impl serde::Serialize for EventHubConfig { if let Some(v) = self.metadata_db.as_ref() { 
struct_ser.serialize_field("metadataDb", v)?; } + if !self.subscription_id.is_empty() { + struct_ser.serialize_field("subscriptionId", &self.subscription_id)?; + } + if self.partition_count != 0 { + struct_ser.serialize_field("partitionCount", &self.partition_count)?; + } + if self.message_retention_in_days != 0 { + struct_ser.serialize_field("messageRetentionInDays", &self.message_retention_in_days)?; + } struct_ser.end() } } @@ -412,6 +433,12 @@ impl<'de> serde::Deserialize<'de> for EventHubConfig { "location", "metadata_db", "metadataDb", + "subscription_id", + "subscriptionId", + "partition_count", + "partitionCount", + "message_retention_in_days", + "messageRetentionInDays", ]; #[allow(clippy::enum_variant_names)] @@ -420,6 +447,9 @@ impl<'de> serde::Deserialize<'de> for EventHubConfig { ResourceGroup, Location, MetadataDb, + SubscriptionId, + PartitionCount, + MessageRetentionInDays, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -446,6 +476,9 @@ impl<'de> serde::Deserialize<'de> for EventHubConfig { "resourceGroup" | "resource_group" => Ok(GeneratedField::ResourceGroup), "location" => Ok(GeneratedField::Location), "metadataDb" | "metadata_db" => Ok(GeneratedField::MetadataDb), + "subscriptionId" | "subscription_id" => Ok(GeneratedField::SubscriptionId), + "partitionCount" | "partition_count" => Ok(GeneratedField::PartitionCount), + "messageRetentionInDays" | "message_retention_in_days" => Ok(GeneratedField::MessageRetentionInDays), _ => Ok(GeneratedField::__SkipField__), } } @@ -469,6 +502,9 @@ impl<'de> serde::Deserialize<'de> for EventHubConfig { let mut resource_group__ = None; let mut location__ = None; let mut metadata_db__ = None; + let mut subscription_id__ = None; + let mut partition_count__ = None; + let mut message_retention_in_days__ = None; while let Some(k) = map.next_key()? 
{ match k { GeneratedField::Namespace => { @@ -495,6 +531,28 @@ impl<'de> serde::Deserialize<'de> for EventHubConfig { } metadata_db__ = map.next_value()?; } + GeneratedField::SubscriptionId => { + if subscription_id__.is_some() { + return Err(serde::de::Error::duplicate_field("subscriptionId")); + } + subscription_id__ = Some(map.next_value()?); + } + GeneratedField::PartitionCount => { + if partition_count__.is_some() { + return Err(serde::de::Error::duplicate_field("partitionCount")); + } + partition_count__ = + Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::MessageRetentionInDays => { + if message_retention_in_days__.is_some() { + return Err(serde::de::Error::duplicate_field("messageRetentionInDays")); + } + message_retention_in_days__ = + Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } @@ -505,12 +563,148 @@ impl<'de> serde::Deserialize<'de> for EventHubConfig { resource_group: resource_group__.unwrap_or_default(), location: location__.unwrap_or_default(), metadata_db: metadata_db__, + subscription_id: subscription_id__.unwrap_or_default(), + partition_count: partition_count__.unwrap_or_default(), + message_retention_in_days: message_retention_in_days__.unwrap_or_default(), }) } } deserializer.deserialize_struct("peerdb_peers.EventHubConfig", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for EventHubGroupConfig { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.eventhubs.is_empty() { + len += 1; + } + if self.metadata_db.is_some() { + len += 1; + } + if !self.unnest_columns.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_peers.EventHubGroupConfig", len)?; + if !self.eventhubs.is_empty() { + struct_ser.serialize_field("eventhubs", 
&self.eventhubs)?; + } + if let Some(v) = self.metadata_db.as_ref() { + struct_ser.serialize_field("metadataDb", v)?; + } + if !self.unnest_columns.is_empty() { + struct_ser.serialize_field("unnestColumns", &self.unnest_columns)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for EventHubGroupConfig { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "eventhubs", + "metadata_db", + "metadataDb", + "unnest_columns", + "unnestColumns", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Eventhubs, + MetadataDb, + UnnestColumns, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "eventhubs" => Ok(GeneratedField::Eventhubs), + "metadataDb" | "metadata_db" => Ok(GeneratedField::MetadataDb), + "unnestColumns" | "unnest_columns" => Ok(GeneratedField::UnnestColumns), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = EventHubGroupConfig; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_peers.EventHubGroupConfig") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut eventhubs__ = None; + let mut metadata_db__ = 
None; + let mut unnest_columns__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::Eventhubs => { + if eventhubs__.is_some() { + return Err(serde::de::Error::duplicate_field("eventhubs")); + } + eventhubs__ = Some( + map.next_value::>()? + ); + } + GeneratedField::MetadataDb => { + if metadata_db__.is_some() { + return Err(serde::de::Error::duplicate_field("metadataDb")); + } + metadata_db__ = map.next_value()?; + } + GeneratedField::UnnestColumns => { + if unnest_columns__.is_some() { + return Err(serde::de::Error::duplicate_field("unnestColumns")); + } + unnest_columns__ = Some(map.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(EventHubGroupConfig { + eventhubs: eventhubs__.unwrap_or_default(), + metadata_db: metadata_db__, + unnest_columns: unnest_columns__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_peers.EventHubGroupConfig", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for MongoConfig { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -725,6 +919,9 @@ impl serde::Serialize for Peer { peer::Config::SqlserverConfig(v) => { struct_ser.serialize_field("sqlserverConfig", v)?; } + peer::Config::EventhubGroupConfig(v) => { + struct_ser.serialize_field("eventhubGroupConfig", v)?; + } } } struct_ser.end() @@ -753,6 +950,8 @@ impl<'de> serde::Deserialize<'de> for Peer { "s3Config", "sqlserver_config", "sqlserverConfig", + "eventhub_group_config", + "eventhubGroupConfig", ]; #[allow(clippy::enum_variant_names)] @@ -766,6 +965,7 @@ impl<'de> serde::Deserialize<'de> for Peer { EventhubConfig, S3Config, SqlserverConfig, + EventhubGroupConfig, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -797,6 +997,7 @@ impl<'de> serde::Deserialize<'de> for Peer { "eventhubConfig" | "eventhub_config" => Ok(GeneratedField::EventhubConfig), "s3Config" | "s3_config" => Ok(GeneratedField::S3Config), 
"sqlserverConfig" | "sqlserver_config" => Ok(GeneratedField::SqlserverConfig), + "eventhubGroupConfig" | "eventhub_group_config" => Ok(GeneratedField::EventhubGroupConfig), _ => Ok(GeneratedField::__SkipField__), } } @@ -880,6 +1081,13 @@ impl<'de> serde::Deserialize<'de> for Peer { return Err(serde::de::Error::duplicate_field("sqlserverConfig")); } config__ = map.next_value::<::std::option::Option<_>>()?.map(peer::Config::SqlserverConfig) +; + } + GeneratedField::EventhubGroupConfig => { + if config__.is_some() { + return Err(serde::de::Error::duplicate_field("eventhubGroupConfig")); + } + config__ = map.next_value::<::std::option::Option<_>>()?.map(peer::Config::EventhubGroupConfig) ; } GeneratedField::__SkipField__ => { @@ -1091,10 +1299,46 @@ impl serde::Serialize for S3Config { if !self.url.is_empty() { len += 1; } + if self.access_key_id.is_some() { + len += 1; + } + if self.secret_access_key.is_some() { + len += 1; + } + if self.role_arn.is_some() { + len += 1; + } + if self.region.is_some() { + len += 1; + } + if self.endpoint.is_some() { + len += 1; + } + if self.metadata_db.is_some() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("peerdb_peers.S3Config", len)?; if !self.url.is_empty() { struct_ser.serialize_field("url", &self.url)?; } + if let Some(v) = self.access_key_id.as_ref() { + struct_ser.serialize_field("accessKeyId", v)?; + } + if let Some(v) = self.secret_access_key.as_ref() { + struct_ser.serialize_field("secretAccessKey", v)?; + } + if let Some(v) = self.role_arn.as_ref() { + struct_ser.serialize_field("roleArn", v)?; + } + if let Some(v) = self.region.as_ref() { + struct_ser.serialize_field("region", v)?; + } + if let Some(v) = self.endpoint.as_ref() { + struct_ser.serialize_field("endpoint", v)?; + } + if let Some(v) = self.metadata_db.as_ref() { + struct_ser.serialize_field("metadataDb", v)?; + } struct_ser.end() } } @@ -1106,11 +1350,27 @@ impl<'de> serde::Deserialize<'de> for S3Config { { const FIELDS: &[&str] = &[ 
"url", + "access_key_id", + "accessKeyId", + "secret_access_key", + "secretAccessKey", + "role_arn", + "roleArn", + "region", + "endpoint", + "metadata_db", + "metadataDb", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { Url, + AccessKeyId, + SecretAccessKey, + RoleArn, + Region, + Endpoint, + MetadataDb, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -1134,6 +1394,12 @@ impl<'de> serde::Deserialize<'de> for S3Config { { match value { "url" => Ok(GeneratedField::Url), + "accessKeyId" | "access_key_id" => Ok(GeneratedField::AccessKeyId), + "secretAccessKey" | "secret_access_key" => Ok(GeneratedField::SecretAccessKey), + "roleArn" | "role_arn" => Ok(GeneratedField::RoleArn), + "region" => Ok(GeneratedField::Region), + "endpoint" => Ok(GeneratedField::Endpoint), + "metadataDb" | "metadata_db" => Ok(GeneratedField::MetadataDb), _ => Ok(GeneratedField::__SkipField__), } } @@ -1154,6 +1420,12 @@ impl<'de> serde::Deserialize<'de> for S3Config { V: serde::de::MapAccess<'de>, { let mut url__ = None; + let mut access_key_id__ = None; + let mut secret_access_key__ = None; + let mut role_arn__ = None; + let mut region__ = None; + let mut endpoint__ = None; + let mut metadata_db__ = None; while let Some(k) = map.next_key()? 
{ match k { GeneratedField::Url => { @@ -1162,6 +1434,42 @@ impl<'de> serde::Deserialize<'de> for S3Config { } url__ = Some(map.next_value()?); } + GeneratedField::AccessKeyId => { + if access_key_id__.is_some() { + return Err(serde::de::Error::duplicate_field("accessKeyId")); + } + access_key_id__ = map.next_value()?; + } + GeneratedField::SecretAccessKey => { + if secret_access_key__.is_some() { + return Err(serde::de::Error::duplicate_field("secretAccessKey")); + } + secret_access_key__ = map.next_value()?; + } + GeneratedField::RoleArn => { + if role_arn__.is_some() { + return Err(serde::de::Error::duplicate_field("roleArn")); + } + role_arn__ = map.next_value()?; + } + GeneratedField::Region => { + if region__.is_some() { + return Err(serde::de::Error::duplicate_field("region")); + } + region__ = map.next_value()?; + } + GeneratedField::Endpoint => { + if endpoint__.is_some() { + return Err(serde::de::Error::duplicate_field("endpoint")); + } + endpoint__ = map.next_value()?; + } + GeneratedField::MetadataDb => { + if metadata_db__.is_some() { + return Err(serde::de::Error::duplicate_field("metadataDb")); + } + metadata_db__ = map.next_value()?; + } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } @@ -1169,6 +1477,12 @@ impl<'de> serde::Deserialize<'de> for S3Config { } Ok(S3Config { url: url__.unwrap_or_default(), + access_key_id: access_key_id__, + secret_access_key: secret_access_key__, + role_arn: role_arn__, + region: region__, + endpoint: endpoint__, + metadata_db: metadata_db__, }) } } diff --git a/nexus/pt/src/peerdb_route.rs b/nexus/pt/src/peerdb_route.rs index fb51949ea..47b35d838 100644 --- a/nexus/pt/src/peerdb_route.rs +++ b/nexus/pt/src/peerdb_route.rs @@ -2,57 +2,314 @@ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateCdcFlowRequest { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub connection_configs: ::core::option::Option, + 
#[prost(bool, tag = "2")] + pub create_catalog_entry: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateCdcFlowResponse { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub worflow_id: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateQRepFlowRequest { - #[prost(message, optional, tag="1")] + #[prost(message, optional, tag = "1")] pub qrep_config: ::core::option::Option, + #[prost(bool, tag = "2")] + pub create_catalog_entry: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreateQRepFlowResponse { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub worflow_id: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ShutdownRequest { - #[prost(string, tag="1")] + #[prost(string, tag = "1")] pub workflow_id: ::prost::alloc::string::String, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub flow_job_name: ::prost::alloc::string::String, - #[prost(message, optional, tag="3")] + #[prost(message, optional, tag = "3")] pub source_peer: ::core::option::Option, - #[prost(message, optional, tag="4")] + #[prost(message, optional, tag = "4")] pub destination_peer: ::core::option::Option, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ShutdownResponse { - #[prost(bool, tag="1")] + #[prost(bool, tag = "1")] pub ok: bool, - #[prost(string, tag="2")] + #[prost(string, tag = "2")] pub error_message: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] -pub struct ListPeersRequest { +pub struct ValidatePeerRequest { + #[prost(message, optional, tag = "1")] + pub peer: ::core::option::Option, } 
#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] -pub struct ListPeersResponse { - #[prost(message, repeated, tag="1")] - pub peers: ::prost::alloc::vec::Vec, +pub struct CreatePeerRequest { + #[prost(message, optional, tag = "1")] + pub peer: ::core::option::Option, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropPeerRequest { + #[prost(string, tag = "1")] + pub peer_name: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropPeerResponse { + #[prost(bool, tag = "1")] + pub ok: bool, + #[prost(string, tag = "2")] + pub error_message: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ValidatePeerResponse { + #[prost(enumeration = "ValidatePeerStatus", tag = "1")] + pub status: i32, + #[prost(string, tag = "2")] + pub message: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreatePeerResponse { + #[prost(enumeration = "CreatePeerStatus", tag = "1")] + pub status: i32, + #[prost(string, tag = "2")] + pub message: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MirrorStatusRequest { + #[prost(string, tag = "1")] + pub flow_job_name: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PartitionStatus { + #[prost(string, tag = "1")] + pub partition_id: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub start_time: ::core::option::Option<::pbjson_types::Timestamp>, + #[prost(message, optional, tag = "3")] + pub end_time: ::core::option::Option<::pbjson_types::Timestamp>, + 
#[prost(int32, tag = "4")] + pub num_rows: i32, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QRepMirrorStatus { + #[prost(message, optional, tag = "1")] + pub config: ::core::option::Option, + /// TODO make note to see if we are still in initial copy + /// or if we are in the continuous streaming mode. + #[prost(message, repeated, tag = "2")] + pub partitions: ::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CdcSyncStatus { + #[prost(int64, tag = "1")] + pub start_lsn: i64, + #[prost(int64, tag = "2")] + pub end_lsn: i64, + #[prost(int32, tag = "3")] + pub num_rows: i32, + #[prost(message, optional, tag = "4")] + pub start_time: ::core::option::Option<::pbjson_types::Timestamp>, + #[prost(message, optional, tag = "5")] + pub end_time: ::core::option::Option<::pbjson_types::Timestamp>, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PeerSchemasResponse { + #[prost(string, repeated, tag = "1")] + pub schemas: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SchemaTablesRequest { + #[prost(string, tag = "1")] + pub peer_name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub schema_name: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SchemaTablesResponse { + #[prost(string, repeated, tag = "1")] + pub tables: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TableColumnsRequest { + #[prost(string, tag = "1")] + pub peer_name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub schema_name: 
::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub table_name: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TableColumnsResponse { + #[prost(string, repeated, tag = "1")] + pub columns: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PostgresPeerActivityInfoRequest { + #[prost(string, tag = "1")] + pub peer_name: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SlotInfo { + #[prost(string, tag = "1")] + pub slot_name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub redo_l_sn: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub restart_l_sn: ::prost::alloc::string::String, + #[prost(bool, tag = "4")] + pub active: bool, + #[prost(float, tag = "5")] + pub lag_in_mb: f32, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct StatInfo { + #[prost(int64, tag = "1")] + pub pid: i64, + #[prost(string, tag = "2")] + pub wait_event: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub wait_event_type: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub query_start: ::prost::alloc::string::String, + #[prost(string, tag = "5")] + pub query: ::prost::alloc::string::String, + #[prost(float, tag = "6")] + pub duration: f32, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PeerSlotResponse { + #[prost(message, repeated, tag = "1")] + pub slot_data: ::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PeerStatResponse { + #[prost(message, repeated, tag = "1")] + pub stat_data: 
::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SnapshotStatus { + #[prost(message, repeated, tag = "1")] + pub clones: ::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CdcMirrorStatus { + #[prost(message, optional, tag = "1")] + pub config: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub snapshot_status: ::core::option::Option, + #[prost(message, repeated, tag = "3")] + pub cdc_syncs: ::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MirrorStatusResponse { + #[prost(string, tag = "1")] + pub flow_job_name: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub error_message: ::prost::alloc::string::String, + #[prost(oneof = "mirror_status_response::Status", tags = "2, 3")] + pub status: ::core::option::Option, +} +/// Nested message and enum types in `MirrorStatusResponse`. +pub mod mirror_status_response { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Status { + #[prost(message, tag = "2")] + QrepStatus(super::QRepMirrorStatus), + #[prost(message, tag = "3")] + CdcStatus(super::CdcMirrorStatus), + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ValidatePeerStatus { + CreationUnknown = 0, + Valid = 1, + Invalid = 2, +} +impl ValidatePeerStatus { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + ValidatePeerStatus::CreationUnknown => "CREATION_UNKNOWN", + ValidatePeerStatus::Valid => "VALID", + ValidatePeerStatus::Invalid => "INVALID", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "CREATION_UNKNOWN" => Some(Self::CreationUnknown), + "VALID" => Some(Self::Valid), + "INVALID" => Some(Self::Invalid), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum CreatePeerStatus { + ValidationUnknown = 0, + Created = 1, + Failed = 2, +} +impl CreatePeerStatus { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + CreatePeerStatus::ValidationUnknown => "VALIDATION_UNKNOWN", + CreatePeerStatus::Created => "CREATED", + CreatePeerStatus::Failed => "FAILED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "VALIDATION_UNKNOWN" => Some(Self::ValidationUnknown), + "CREATED" => Some(Self::Created), + "FAILED" => Some(Self::Failed), + _ => None, + } + } } include!("peerdb_route.tonic.rs"); include!("peerdb_route.serde.rs"); -// @@protoc_insertion_point(module) \ No newline at end of file +// @@protoc_insertion_point(module) diff --git a/nexus/pt/src/peerdb_route.serde.rs b/nexus/pt/src/peerdb_route.serde.rs index e63663448..494216718 100644 --- a/nexus/pt/src/peerdb_route.serde.rs +++ b/nexus/pt/src/peerdb_route.serde.rs @@ -1,5 +1,5 @@ // @generated -impl serde::Serialize for CreateCdcFlowRequest { +impl serde::Serialize for CdcMirrorStatus { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -7,30 +7,47 @@ impl serde::Serialize for CreateCdcFlowRequest { { use serde::ser::SerializeStruct; let mut len = 0; - if self.connection_configs.is_some() { + if self.config.is_some() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_route.CreateCDCFlowRequest", len)?; - if let Some(v) = self.connection_configs.as_ref() { - struct_ser.serialize_field("connectionConfigs", v)?; + if self.snapshot_status.is_some() { + len += 1; + } + if !self.cdc_syncs.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.CDCMirrorStatus", len)?; + if let Some(v) = self.config.as_ref() { + struct_ser.serialize_field("config", v)?; + } + if let Some(v) = self.snapshot_status.as_ref() { + struct_ser.serialize_field("snapshotStatus", v)?; + } + if !self.cdc_syncs.is_empty() { + struct_ser.serialize_field("cdcSyncs", &self.cdc_syncs)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for CreateCdcFlowRequest { +impl<'de> serde::Deserialize<'de> for CdcMirrorStatus { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - 
"connection_configs", - "connectionConfigs", + "config", + "snapshot_status", + "snapshotStatus", + "cdc_syncs", + "cdcSyncs", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - ConnectionConfigs, + Config, + SnapshotStatus, + CdcSyncs, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -53,7 +70,9 @@ impl<'de> serde::Deserialize<'de> for CreateCdcFlowRequest { E: serde::de::Error, { match value { - "connectionConfigs" | "connection_configs" => Ok(GeneratedField::ConnectionConfigs), + "config" => Ok(GeneratedField::Config), + "snapshotStatus" | "snapshot_status" => Ok(GeneratedField::SnapshotStatus), + "cdcSyncs" | "cdc_syncs" => Ok(GeneratedField::CdcSyncs), _ => Ok(GeneratedField::__SkipField__), } } @@ -63,39 +82,55 @@ impl<'de> serde::Deserialize<'de> for CreateCdcFlowRequest { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = CreateCdcFlowRequest; + type Value = CdcMirrorStatus; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_route.CreateCDCFlowRequest") + formatter.write_str("struct peerdb_route.CDCMirrorStatus") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut connection_configs__ = None; + let mut config__ = None; + let mut snapshot_status__ = None; + let mut cdc_syncs__ = None; while let Some(k) = map.next_key()? 
{ match k { - GeneratedField::ConnectionConfigs => { - if connection_configs__.is_some() { - return Err(serde::de::Error::duplicate_field("connectionConfigs")); + GeneratedField::Config => { + if config__.is_some() { + return Err(serde::de::Error::duplicate_field("config")); } - connection_configs__ = map.next_value()?; + config__ = map.next_value()?; + } + GeneratedField::SnapshotStatus => { + if snapshot_status__.is_some() { + return Err(serde::de::Error::duplicate_field("snapshotStatus")); + } + snapshot_status__ = map.next_value()?; + } + GeneratedField::CdcSyncs => { + if cdc_syncs__.is_some() { + return Err(serde::de::Error::duplicate_field("cdcSyncs")); + } + cdc_syncs__ = Some(map.next_value()?); } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(CreateCdcFlowRequest { - connection_configs: connection_configs__, + Ok(CdcMirrorStatus { + config: config__, + snapshot_status: snapshot_status__, + cdc_syncs: cdc_syncs__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_route.CreateCDCFlowRequest", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_route.CDCMirrorStatus", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for CreateCdcFlowResponse { +impl serde::Serialize for CdcSyncStatus { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -103,30 +138,66 @@ impl serde::Serialize for CreateCdcFlowResponse { { use serde::ser::SerializeStruct; let mut len = 0; - if !self.worflow_id.is_empty() { + if self.start_lsn != 0 { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_route.CreateCDCFlowResponse", len)?; - if !self.worflow_id.is_empty() { - struct_ser.serialize_field("worflowId", &self.worflow_id)?; + if self.end_lsn != 0 { + len += 1; + } + if self.num_rows != 0 { + len += 1; + } + if self.start_time.is_some() { + len += 1; + } + if self.end_time.is_some() { + len += 1; + } + let mut struct_ser = 
serializer.serialize_struct("peerdb_route.CDCSyncStatus", len)?; + if self.start_lsn != 0 { + struct_ser.serialize_field("startLsn", ToString::to_string(&self.start_lsn).as_str())?; + } + if self.end_lsn != 0 { + struct_ser.serialize_field("endLsn", ToString::to_string(&self.end_lsn).as_str())?; + } + if self.num_rows != 0 { + struct_ser.serialize_field("numRows", &self.num_rows)?; + } + if let Some(v) = self.start_time.as_ref() { + struct_ser.serialize_field("startTime", v)?; + } + if let Some(v) = self.end_time.as_ref() { + struct_ser.serialize_field("endTime", v)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for CreateCdcFlowResponse { +impl<'de> serde::Deserialize<'de> for CdcSyncStatus { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "worflow_id", - "worflowId", + "start_lsn", + "startLsn", + "end_lsn", + "endLsn", + "num_rows", + "numRows", + "start_time", + "startTime", + "end_time", + "endTime", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - WorflowId, + StartLsn, + EndLsn, + NumRows, + StartTime, + EndTime, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -149,7 +220,11 @@ impl<'de> serde::Deserialize<'de> for CreateCdcFlowResponse { E: serde::de::Error, { match value { - "worflowId" | "worflow_id" => Ok(GeneratedField::WorflowId), + "startLsn" | "start_lsn" => Ok(GeneratedField::StartLsn), + "endLsn" | "end_lsn" => Ok(GeneratedField::EndLsn), + "numRows" | "num_rows" => Ok(GeneratedField::NumRows), + "startTime" | "start_time" => Ok(GeneratedField::StartTime), + "endTime" | "end_time" => Ok(GeneratedField::EndTime), _ => Ok(GeneratedField::__SkipField__), } } @@ -159,39 +234,77 @@ impl<'de> serde::Deserialize<'de> for CreateCdcFlowResponse { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = CreateCdcFlowResponse; + type Value = CdcSyncStatus; fn 
expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_route.CreateCDCFlowResponse") + formatter.write_str("struct peerdb_route.CDCSyncStatus") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut worflow_id__ = None; + let mut start_lsn__ = None; + let mut end_lsn__ = None; + let mut num_rows__ = None; + let mut start_time__ = None; + let mut end_time__ = None; while let Some(k) = map.next_key()? { match k { - GeneratedField::WorflowId => { - if worflow_id__.is_some() { - return Err(serde::de::Error::duplicate_field("worflowId")); + GeneratedField::StartLsn => { + if start_lsn__.is_some() { + return Err(serde::de::Error::duplicate_field("startLsn")); } - worflow_id__ = Some(map.next_value()?); + start_lsn__ = + Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::EndLsn => { + if end_lsn__.is_some() { + return Err(serde::de::Error::duplicate_field("endLsn")); + } + end_lsn__ = + Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::NumRows => { + if num_rows__.is_some() { + return Err(serde::de::Error::duplicate_field("numRows")); + } + num_rows__ = + Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::StartTime => { + if start_time__.is_some() { + return Err(serde::de::Error::duplicate_field("startTime")); + } + start_time__ = map.next_value()?; + } + GeneratedField::EndTime => { + if end_time__.is_some() { + return Err(serde::de::Error::duplicate_field("endTime")); + } + end_time__ = map.next_value()?; } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(CreateCdcFlowResponse { - worflow_id: worflow_id__.unwrap_or_default(), + Ok(CdcSyncStatus { + start_lsn: start_lsn__.unwrap_or_default(), + end_lsn: end_lsn__.unwrap_or_default(), + num_rows: 
num_rows__.unwrap_or_default(), + start_time: start_time__, + end_time: end_time__, }) } } - deserializer.deserialize_struct("peerdb_route.CreateCDCFlowResponse", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_route.CDCSyncStatus", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for CreateQRepFlowRequest { +impl serde::Serialize for CreateCdcFlowRequest { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -199,30 +312,39 @@ impl serde::Serialize for CreateQRepFlowRequest { { use serde::ser::SerializeStruct; let mut len = 0; - if self.qrep_config.is_some() { + if self.connection_configs.is_some() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_route.CreateQRepFlowRequest", len)?; - if let Some(v) = self.qrep_config.as_ref() { - struct_ser.serialize_field("qrepConfig", v)?; + if self.create_catalog_entry { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.CreateCDCFlowRequest", len)?; + if let Some(v) = self.connection_configs.as_ref() { + struct_ser.serialize_field("connectionConfigs", v)?; + } + if self.create_catalog_entry { + struct_ser.serialize_field("createCatalogEntry", &self.create_catalog_entry)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for CreateQRepFlowRequest { +impl<'de> serde::Deserialize<'de> for CreateCdcFlowRequest { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "qrep_config", - "qrepConfig", + "connection_configs", + "connectionConfigs", + "create_catalog_entry", + "createCatalogEntry", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - QrepConfig, + ConnectionConfigs, + CreateCatalogEntry, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -245,7 +367,8 @@ impl<'de> serde::Deserialize<'de> for CreateQRepFlowRequest { E: serde::de::Error, { match value { - "qrepConfig" | 
"qrep_config" => Ok(GeneratedField::QrepConfig), + "connectionConfigs" | "connection_configs" => Ok(GeneratedField::ConnectionConfigs), + "createCatalogEntry" | "create_catalog_entry" => Ok(GeneratedField::CreateCatalogEntry), _ => Ok(GeneratedField::__SkipField__), } } @@ -255,39 +378,47 @@ impl<'de> serde::Deserialize<'de> for CreateQRepFlowRequest { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = CreateQRepFlowRequest; + type Value = CreateCdcFlowRequest; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_route.CreateQRepFlowRequest") + formatter.write_str("struct peerdb_route.CreateCDCFlowRequest") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut qrep_config__ = None; + let mut connection_configs__ = None; + let mut create_catalog_entry__ = None; while let Some(k) = map.next_key()? 
{ match k { - GeneratedField::QrepConfig => { - if qrep_config__.is_some() { - return Err(serde::de::Error::duplicate_field("qrepConfig")); + GeneratedField::ConnectionConfigs => { + if connection_configs__.is_some() { + return Err(serde::de::Error::duplicate_field("connectionConfigs")); } - qrep_config__ = map.next_value()?; + connection_configs__ = map.next_value()?; + } + GeneratedField::CreateCatalogEntry => { + if create_catalog_entry__.is_some() { + return Err(serde::de::Error::duplicate_field("createCatalogEntry")); + } + create_catalog_entry__ = Some(map.next_value()?); } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(CreateQRepFlowRequest { - qrep_config: qrep_config__, + Ok(CreateCdcFlowRequest { + connection_configs: connection_configs__, + create_catalog_entry: create_catalog_entry__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_route.CreateQRepFlowRequest", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_route.CreateCDCFlowRequest", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for CreateQRepFlowResponse { +impl serde::Serialize for CreateCdcFlowResponse { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -298,14 +429,14 @@ impl serde::Serialize for CreateQRepFlowResponse { if !self.worflow_id.is_empty() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_route.CreateQRepFlowResponse", len)?; + let mut struct_ser = serializer.serialize_struct("peerdb_route.CreateCDCFlowResponse", len)?; if !self.worflow_id.is_empty() { struct_ser.serialize_field("worflowId", &self.worflow_id)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for CreateQRepFlowResponse { +impl<'de> serde::Deserialize<'de> for CreateCdcFlowResponse { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where @@ -351,13 +482,13 @@ impl<'de> serde::Deserialize<'de> for CreateQRepFlowResponse { } struct GeneratedVisitor; 
impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = CreateQRepFlowResponse; + type Value = CreateCdcFlowResponse; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_route.CreateQRepFlowResponse") + formatter.write_str("struct peerdb_route.CreateCDCFlowResponse") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { @@ -375,37 +506,45 @@ impl<'de> serde::Deserialize<'de> for CreateQRepFlowResponse { } } } - Ok(CreateQRepFlowResponse { + Ok(CreateCdcFlowResponse { worflow_id: worflow_id__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_route.CreateQRepFlowResponse", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_route.CreateCDCFlowResponse", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for ListPeersRequest { +impl serde::Serialize for CreatePeerRequest { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, { use serde::ser::SerializeStruct; - let len = 0; - let struct_ser = serializer.serialize_struct("peerdb_route.ListPeersRequest", len)?; + let mut len = 0; + if self.peer.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.CreatePeerRequest", len)?; + if let Some(v) = self.peer.as_ref() { + struct_ser.serialize_field("peer", v)?; + } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for ListPeersRequest { +impl<'de> serde::Deserialize<'de> for CreatePeerRequest { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ + "peer", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { + Peer, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -427,7 +566,10 @@ impl<'de> serde::Deserialize<'de> for ListPeersRequest { where 
E: serde::de::Error, { - Ok(GeneratedField::__SkipField__) + match value { + "peer" => Ok(GeneratedField::Peer), + _ => Ok(GeneratedField::__SkipField__), + } } } deserializer.deserialize_identifier(GeneratedVisitor) @@ -435,27 +577,39 @@ impl<'de> serde::Deserialize<'de> for ListPeersRequest { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = ListPeersRequest; + type Value = CreatePeerRequest; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_route.ListPeersRequest") + formatter.write_str("struct peerdb_route.CreatePeerRequest") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - while map.next_key::()?.is_some() { - let _ = map.next_value::()?; + let mut peer__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::Peer => { + if peer__.is_some() { + return Err(serde::de::Error::duplicate_field("peer")); + } + peer__ = map.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } } - Ok(ListPeersRequest { + Ok(CreatePeerRequest { + peer: peer__, }) } } - deserializer.deserialize_struct("peerdb_route.ListPeersRequest", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_route.CreatePeerRequest", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for ListPeersResponse { +impl serde::Serialize for CreatePeerResponse { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -463,29 +617,39 @@ impl serde::Serialize for ListPeersResponse { { use serde::ser::SerializeStruct; let mut len = 0; - if !self.peers.is_empty() { + if self.status != 0 { + len += 1; + } + if !self.message.is_empty() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_route.ListPeersResponse", len)?; - if !self.peers.is_empty() { - 
struct_ser.serialize_field("peers", &self.peers)?; + let mut struct_ser = serializer.serialize_struct("peerdb_route.CreatePeerResponse", len)?; + if self.status != 0 { + let v = CreatePeerStatus::from_i32(self.status) + .ok_or_else(|| serde::ser::Error::custom(format!("Invalid variant {}", self.status)))?; + struct_ser.serialize_field("status", &v)?; + } + if !self.message.is_empty() { + struct_ser.serialize_field("message", &self.message)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for ListPeersResponse { +impl<'de> serde::Deserialize<'de> for CreatePeerResponse { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "peers", + "status", + "message", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - Peers, + Status, + Message, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -508,7 +672,8 @@ impl<'de> serde::Deserialize<'de> for ListPeersResponse { E: serde::de::Error, { match value { - "peers" => Ok(GeneratedField::Peers), + "status" => Ok(GeneratedField::Status), + "message" => Ok(GeneratedField::Message), _ => Ok(GeneratedField::__SkipField__), } } @@ -518,39 +683,123 @@ impl<'de> serde::Deserialize<'de> for ListPeersResponse { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = ListPeersResponse; + type Value = CreatePeerResponse; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_route.ListPeersResponse") + formatter.write_str("struct peerdb_route.CreatePeerResponse") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut peers__ = None; + let mut status__ = None; + let mut message__ = None; while let Some(k) = map.next_key()? 
{ match k { - GeneratedField::Peers => { - if peers__.is_some() { - return Err(serde::de::Error::duplicate_field("peers")); + GeneratedField::Status => { + if status__.is_some() { + return Err(serde::de::Error::duplicate_field("status")); + } + status__ = Some(map.next_value::()? as i32); + } + GeneratedField::Message => { + if message__.is_some() { + return Err(serde::de::Error::duplicate_field("message")); } - peers__ = Some(map.next_value()?); + message__ = Some(map.next_value()?); } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(ListPeersResponse { - peers: peers__.unwrap_or_default(), + Ok(CreatePeerResponse { + status: status__.unwrap_or_default(), + message: message__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_route.ListPeersResponse", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_route.CreatePeerResponse", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for ShutdownRequest { +impl serde::Serialize for CreatePeerStatus { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + let variant = match self { + Self::ValidationUnknown => "VALIDATION_UNKNOWN", + Self::Created => "CREATED", + Self::Failed => "FAILED", + }; + serializer.serialize_str(variant) + } +} +impl<'de> serde::Deserialize<'de> for CreatePeerStatus { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "VALIDATION_UNKNOWN", + "CREATED", + "FAILED", + ]; + + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = CreatePeerStatus; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + fn visit_i64(self, v: i64) -> std::result::Result + where + E: serde::de::Error, + { + use std::convert::TryFrom; + i32::try_from(v) + 
.ok() + .and_then(CreatePeerStatus::from_i32) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) + }) + } + + fn visit_u64(self, v: u64) -> std::result::Result + where + E: serde::de::Error, + { + use std::convert::TryFrom; + i32::try_from(v) + .ok() + .and_then(CreatePeerStatus::from_i32) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) + }) + } + + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "VALIDATION_UNKNOWN" => Ok(CreatePeerStatus::ValidationUnknown), + "CREATED" => Ok(CreatePeerStatus::Created), + "FAILED" => Ok(CreatePeerStatus::Failed), + _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), + } + } + } + deserializer.deserialize_any(GeneratedVisitor) + } +} +impl serde::Serialize for CreateQRepFlowRequest { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -558,57 +807,39 @@ impl serde::Serialize for ShutdownRequest { { use serde::ser::SerializeStruct; let mut len = 0; - if !self.workflow_id.is_empty() { - len += 1; - } - if !self.flow_job_name.is_empty() { - len += 1; - } - if self.source_peer.is_some() { + if self.qrep_config.is_some() { len += 1; } - if self.destination_peer.is_some() { + if self.create_catalog_entry { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_route.ShutdownRequest", len)?; - if !self.workflow_id.is_empty() { - struct_ser.serialize_field("workflowId", &self.workflow_id)?; - } - if !self.flow_job_name.is_empty() { - struct_ser.serialize_field("flowJobName", &self.flow_job_name)?; - } - if let Some(v) = self.source_peer.as_ref() { - struct_ser.serialize_field("sourcePeer", v)?; + let mut struct_ser = serializer.serialize_struct("peerdb_route.CreateQRepFlowRequest", len)?; + if let Some(v) = self.qrep_config.as_ref() { + struct_ser.serialize_field("qrepConfig", v)?; } - if let Some(v) = 
self.destination_peer.as_ref() { - struct_ser.serialize_field("destinationPeer", v)?; + if self.create_catalog_entry { + struct_ser.serialize_field("createCatalogEntry", &self.create_catalog_entry)?; } struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for ShutdownRequest { +impl<'de> serde::Deserialize<'de> for CreateQRepFlowRequest { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { const FIELDS: &[&str] = &[ - "workflow_id", - "workflowId", - "flow_job_name", - "flowJobName", - "source_peer", - "sourcePeer", - "destination_peer", - "destinationPeer", + "qrep_config", + "qrepConfig", + "create_catalog_entry", + "createCatalogEntry", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { - WorkflowId, - FlowJobName, - SourcePeer, - DestinationPeer, + QrepConfig, + CreateCatalogEntry, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -631,10 +862,8 @@ impl<'de> serde::Deserialize<'de> for ShutdownRequest { E: serde::de::Error, { match value { - "workflowId" | "workflow_id" => Ok(GeneratedField::WorkflowId), - "flowJobName" | "flow_job_name" => Ok(GeneratedField::FlowJobName), - "sourcePeer" | "source_peer" => Ok(GeneratedField::SourcePeer), - "destinationPeer" | "destination_peer" => Ok(GeneratedField::DestinationPeer), + "qrepConfig" | "qrep_config" => Ok(GeneratedField::QrepConfig), + "createCatalogEntry" | "create_catalog_entry" => Ok(GeneratedField::CreateCatalogEntry), _ => Ok(GeneratedField::__SkipField__), } } @@ -644,63 +873,239 @@ impl<'de> serde::Deserialize<'de> for ShutdownRequest { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = ShutdownRequest; + type Value = CreateQRepFlowRequest; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_route.ShutdownRequest") + formatter.write_str("struct peerdb_route.CreateQRepFlowRequest") } - fn 
visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut workflow_id__ = None; - let mut flow_job_name__ = None; - let mut source_peer__ = None; - let mut destination_peer__ = None; + let mut qrep_config__ = None; + let mut create_catalog_entry__ = None; while let Some(k) = map.next_key()? { match k { - GeneratedField::WorkflowId => { - if workflow_id__.is_some() { - return Err(serde::de::Error::duplicate_field("workflowId")); + GeneratedField::QrepConfig => { + if qrep_config__.is_some() { + return Err(serde::de::Error::duplicate_field("qrepConfig")); } - workflow_id__ = Some(map.next_value()?); + qrep_config__ = map.next_value()?; } - GeneratedField::FlowJobName => { - if flow_job_name__.is_some() { - return Err(serde::de::Error::duplicate_field("flowJobName")); + GeneratedField::CreateCatalogEntry => { + if create_catalog_entry__.is_some() { + return Err(serde::de::Error::duplicate_field("createCatalogEntry")); } - flow_job_name__ = Some(map.next_value()?); + create_catalog_entry__ = Some(map.next_value()?); } - GeneratedField::SourcePeer => { - if source_peer__.is_some() { - return Err(serde::de::Error::duplicate_field("sourcePeer")); + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(CreateQRepFlowRequest { + qrep_config: qrep_config__, + create_catalog_entry: create_catalog_entry__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_route.CreateQRepFlowRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for CreateQRepFlowResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.worflow_id.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.CreateQRepFlowResponse", len)?; + if !self.worflow_id.is_empty() { + 
struct_ser.serialize_field("worflowId", &self.worflow_id)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for CreateQRepFlowResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "worflow_id", + "worflowId", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + WorflowId, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "worflowId" | "worflow_id" => Ok(GeneratedField::WorflowId), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = CreateQRepFlowResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.CreateQRepFlowResponse") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut worflow_id__ = None; + while let Some(k) = map.next_key()? 
{ + match k { + GeneratedField::WorflowId => { + if worflow_id__.is_some() { + return Err(serde::de::Error::duplicate_field("worflowId")); } - source_peer__ = map.next_value()?; + worflow_id__ = Some(map.next_value()?); } - GeneratedField::DestinationPeer => { - if destination_peer__.is_some() { - return Err(serde::de::Error::duplicate_field("destinationPeer")); + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(CreateQRepFlowResponse { + worflow_id: worflow_id__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_route.CreateQRepFlowResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for DropPeerRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.peer_name.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.DropPeerRequest", len)?; + if !self.peer_name.is_empty() { + struct_ser.serialize_field("peerName", &self.peer_name)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for DropPeerRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "peer_name", + "peerName", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + PeerName, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: 
serde::de::Error, + { + match value { + "peerName" | "peer_name" => Ok(GeneratedField::PeerName), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = DropPeerRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.DropPeerRequest") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut peer_name__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::PeerName => { + if peer_name__.is_some() { + return Err(serde::de::Error::duplicate_field("peerName")); } - destination_peer__ = map.next_value()?; + peer_name__ = Some(map.next_value()?); } GeneratedField::__SkipField__ => { let _ = map.next_value::()?; } } } - Ok(ShutdownRequest { - workflow_id: workflow_id__.unwrap_or_default(), - flow_job_name: flow_job_name__.unwrap_or_default(), - source_peer: source_peer__, - destination_peer: destination_peer__, + Ok(DropPeerRequest { + peer_name: peer_name__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_route.ShutdownRequest", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_route.DropPeerRequest", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for ShutdownResponse { +impl serde::Serialize for DropPeerResponse { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result where @@ -714,7 +1119,7 @@ impl serde::Serialize for ShutdownResponse { if !self.error_message.is_empty() { len += 1; } - let mut struct_ser = serializer.serialize_struct("peerdb_route.ShutdownResponse", len)?; + let mut struct_ser = serializer.serialize_struct("peerdb_route.DropPeerResponse", len)?; if self.ok { struct_ser.serialize_field("ok", &self.ok)?; } @@ -724,7 +1129,7 @@ impl serde::Serialize for 
ShutdownResponse { struct_ser.end() } } -impl<'de> serde::Deserialize<'de> for ShutdownResponse { +impl<'de> serde::Deserialize<'de> for DropPeerResponse { #[allow(deprecated)] fn deserialize(deserializer: D) -> std::result::Result where @@ -773,13 +1178,13 @@ impl<'de> serde::Deserialize<'de> for ShutdownResponse { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = ShutdownResponse; + type Value = DropPeerResponse; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct peerdb_route.ShutdownResponse") + formatter.write_str("struct peerdb_route.DropPeerResponse") } - fn visit_map(self, mut map: V) -> std::result::Result + fn visit_map(self, mut map: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { @@ -804,12 +1209,2341 @@ impl<'de> serde::Deserialize<'de> for ShutdownResponse { } } } - Ok(ShutdownResponse { + Ok(DropPeerResponse { ok: ok__.unwrap_or_default(), error_message: error_message__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("peerdb_route.ShutdownResponse", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("peerdb_route.DropPeerResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for MirrorStatusRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.flow_job_name.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.MirrorStatusRequest", len)?; + if !self.flow_job_name.is_empty() { + struct_ser.serialize_field("flowJobName", &self.flow_job_name)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for MirrorStatusRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "flow_job_name", + "flowJobName", 
+ ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + FlowJobName, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "flowJobName" | "flow_job_name" => Ok(GeneratedField::FlowJobName), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = MirrorStatusRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.MirrorStatusRequest") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut flow_job_name__ = None; + while let Some(k) = map.next_key()? 
{ + match k { + GeneratedField::FlowJobName => { + if flow_job_name__.is_some() { + return Err(serde::de::Error::duplicate_field("flowJobName")); + } + flow_job_name__ = Some(map.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(MirrorStatusRequest { + flow_job_name: flow_job_name__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_route.MirrorStatusRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for MirrorStatusResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.flow_job_name.is_empty() { + len += 1; + } + if !self.error_message.is_empty() { + len += 1; + } + if self.status.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.MirrorStatusResponse", len)?; + if !self.flow_job_name.is_empty() { + struct_ser.serialize_field("flowJobName", &self.flow_job_name)?; + } + if !self.error_message.is_empty() { + struct_ser.serialize_field("errorMessage", &self.error_message)?; + } + if let Some(v) = self.status.as_ref() { + match v { + mirror_status_response::Status::QrepStatus(v) => { + struct_ser.serialize_field("qrepStatus", v)?; + } + mirror_status_response::Status::CdcStatus(v) => { + struct_ser.serialize_field("cdcStatus", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for MirrorStatusResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "flow_job_name", + "flowJobName", + "error_message", + "errorMessage", + "qrep_status", + "qrepStatus", + "cdc_status", + "cdcStatus", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + FlowJobName, + ErrorMessage, + QrepStatus, + CdcStatus, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> 
for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "flowJobName" | "flow_job_name" => Ok(GeneratedField::FlowJobName), + "errorMessage" | "error_message" => Ok(GeneratedField::ErrorMessage), + "qrepStatus" | "qrep_status" => Ok(GeneratedField::QrepStatus), + "cdcStatus" | "cdc_status" => Ok(GeneratedField::CdcStatus), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = MirrorStatusResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.MirrorStatusResponse") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut flow_job_name__ = None; + let mut error_message__ = None; + let mut status__ = None; + while let Some(k) = map.next_key()? 
{ + match k { + GeneratedField::FlowJobName => { + if flow_job_name__.is_some() { + return Err(serde::de::Error::duplicate_field("flowJobName")); + } + flow_job_name__ = Some(map.next_value()?); + } + GeneratedField::ErrorMessage => { + if error_message__.is_some() { + return Err(serde::de::Error::duplicate_field("errorMessage")); + } + error_message__ = Some(map.next_value()?); + } + GeneratedField::QrepStatus => { + if status__.is_some() { + return Err(serde::de::Error::duplicate_field("qrepStatus")); + } + status__ = map.next_value::<::std::option::Option<_>>()?.map(mirror_status_response::Status::QrepStatus) +; + } + GeneratedField::CdcStatus => { + if status__.is_some() { + return Err(serde::de::Error::duplicate_field("cdcStatus")); + } + status__ = map.next_value::<::std::option::Option<_>>()?.map(mirror_status_response::Status::CdcStatus) +; + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(MirrorStatusResponse { + flow_job_name: flow_job_name__.unwrap_or_default(), + error_message: error_message__.unwrap_or_default(), + status: status__, + }) + } + } + deserializer.deserialize_struct("peerdb_route.MirrorStatusResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PartitionStatus { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.partition_id.is_empty() { + len += 1; + } + if self.start_time.is_some() { + len += 1; + } + if self.end_time.is_some() { + len += 1; + } + if self.num_rows != 0 { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.PartitionStatus", len)?; + if !self.partition_id.is_empty() { + struct_ser.serialize_field("partitionId", &self.partition_id)?; + } + if let Some(v) = self.start_time.as_ref() { + struct_ser.serialize_field("startTime", v)?; + } + if let Some(v) = self.end_time.as_ref() { + 
struct_ser.serialize_field("endTime", v)?; + } + if self.num_rows != 0 { + struct_ser.serialize_field("numRows", &self.num_rows)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PartitionStatus { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "partition_id", + "partitionId", + "start_time", + "startTime", + "end_time", + "endTime", + "num_rows", + "numRows", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + PartitionId, + StartTime, + EndTime, + NumRows, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "partitionId" | "partition_id" => Ok(GeneratedField::PartitionId), + "startTime" | "start_time" => Ok(GeneratedField::StartTime), + "endTime" | "end_time" => Ok(GeneratedField::EndTime), + "numRows" | "num_rows" => Ok(GeneratedField::NumRows), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PartitionStatus; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.PartitionStatus") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut partition_id__ = None; + let mut start_time__ = None; + let 
mut end_time__ = None; + let mut num_rows__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::PartitionId => { + if partition_id__.is_some() { + return Err(serde::de::Error::duplicate_field("partitionId")); + } + partition_id__ = Some(map.next_value()?); + } + GeneratedField::StartTime => { + if start_time__.is_some() { + return Err(serde::de::Error::duplicate_field("startTime")); + } + start_time__ = map.next_value()?; + } + GeneratedField::EndTime => { + if end_time__.is_some() { + return Err(serde::de::Error::duplicate_field("endTime")); + } + end_time__ = map.next_value()?; + } + GeneratedField::NumRows => { + if num_rows__.is_some() { + return Err(serde::de::Error::duplicate_field("numRows")); + } + num_rows__ = + Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(PartitionStatus { + partition_id: partition_id__.unwrap_or_default(), + start_time: start_time__, + end_time: end_time__, + num_rows: num_rows__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_route.PartitionStatus", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PeerSchemasResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.schemas.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.PeerSchemasResponse", len)?; + if !self.schemas.is_empty() { + struct_ser.serialize_field("schemas", &self.schemas)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PeerSchemasResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "schemas", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Schemas, + 
__SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "schemas" => Ok(GeneratedField::Schemas), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PeerSchemasResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.PeerSchemasResponse") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut schemas__ = None; + while let Some(k) = map.next_key()? 
{ + match k { + GeneratedField::Schemas => { + if schemas__.is_some() { + return Err(serde::de::Error::duplicate_field("schemas")); + } + schemas__ = Some(map.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(PeerSchemasResponse { + schemas: schemas__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_route.PeerSchemasResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PeerSlotResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.slot_data.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.PeerSlotResponse", len)?; + if !self.slot_data.is_empty() { + struct_ser.serialize_field("slotData", &self.slot_data)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PeerSlotResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "slot_data", + "slotData", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + SlotData, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "slotData" | "slot_data" => Ok(GeneratedField::SlotData), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) 
+ } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PeerSlotResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.PeerSlotResponse") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut slot_data__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::SlotData => { + if slot_data__.is_some() { + return Err(serde::de::Error::duplicate_field("slotData")); + } + slot_data__ = Some(map.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(PeerSlotResponse { + slot_data: slot_data__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_route.PeerSlotResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PeerStatResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.stat_data.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.PeerStatResponse", len)?; + if !self.stat_data.is_empty() { + struct_ser.serialize_field("statData", &self.stat_data)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PeerStatResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "stat_data", + "statData", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + StatData, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; 
+ + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "statData" | "stat_data" => Ok(GeneratedField::StatData), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PeerStatResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.PeerStatResponse") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut stat_data__ = None; + while let Some(k) = map.next_key()? { + match k { + GeneratedField::StatData => { + if stat_data__.is_some() { + return Err(serde::de::Error::duplicate_field("statData")); + } + stat_data__ = Some(map.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map.next_value::()?; + } + } + } + Ok(PeerStatResponse { + stat_data: stat_data__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("peerdb_route.PeerStatResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PostgresPeerActivityInfoRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.peer_name.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("peerdb_route.PostgresPeerActivityInfoRequest", len)?; + if !self.peer_name.is_empty() { + struct_ser.serialize_field("peerName", &self.peer_name)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PostgresPeerActivityInfoRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> 
std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "peer_name", + "peerName", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + PeerName, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "peerName" | "peer_name" => Ok(GeneratedField::PeerName), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PostgresPeerActivityInfoRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct peerdb_route.PostgresPeerActivityInfoRequest") + } + + fn visit_map(self, mut map: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut peer_name__ = None; + while let Some(k) = map.next_key()? 
{
                    match k {
                        GeneratedField::PeerName => {
                            if peer_name__.is_some() {
                                return Err(serde::de::Error::duplicate_field("peerName"));
                            }
                            peer_name__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(PostgresPeerActivityInfoRequest {
                    peer_name: peer_name__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.PostgresPeerActivityInfoRequest", FIELDS, GeneratedVisitor)
    }
}
// NOTE(review): the impls below are pbjson-build generated JSON (de)serialization
// code for peerdb_route protobuf messages. The source dump had lost its line
// structure and all `<...>` generic parameters; they were reconstructed from
// pbjson-build's canonical template — TODO: regenerate from the .proto to confirm.
// Convention throughout: default-valued fields are omitted on serialize; both
// camelCase and snake_case keys are accepted on deserialize; unknown keys are
// skipped via `__SkipField__`.
// peerdb_route.QRepMirrorStatus
impl serde::Serialize for QRepMirrorStatus {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if self.config.is_some() {
            len += 1;
        }
        if !self.partitions.is_empty() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.QRepMirrorStatus", len)?;
        if let Some(v) = self.config.as_ref() {
            struct_ser.serialize_field("config", v)?;
        }
        if !self.partitions.is_empty() {
            struct_ser.serialize_field("partitions", &self.partitions)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for QRepMirrorStatus {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "config",
            "partitions",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            Config,
            Partitions,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "config" => Ok(GeneratedField::Config),
                            "partitions" => Ok(GeneratedField::Partitions),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = QRepMirrorStatus;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.QRepMirrorStatus")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<QRepMirrorStatus, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut config__ = None;
                let mut partitions__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::Config => {
                            if config__.is_some() {
                                return Err(serde::de::Error::duplicate_field("config"));
                            }
                            config__ = map.next_value()?;
                        }
                        GeneratedField::Partitions => {
                            if partitions__.is_some() {
                                return Err(serde::de::Error::duplicate_field("partitions"));
                            }
                            partitions__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(QRepMirrorStatus {
                    config: config__,
                    partitions: partitions__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.QRepMirrorStatus", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.SchemaTablesRequest
impl serde::Serialize for SchemaTablesRequest {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if !self.peer_name.is_empty() {
            len += 1;
        }
        if !self.schema_name.is_empty() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.SchemaTablesRequest", len)?;
        if !self.peer_name.is_empty() {
            struct_ser.serialize_field("peerName", &self.peer_name)?;
        }
        if !self.schema_name.is_empty() {
            struct_ser.serialize_field("schemaName", &self.schema_name)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for SchemaTablesRequest {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "peer_name",
            "peerName",
            "schema_name",
            "schemaName",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            PeerName,
            SchemaName,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "peerName" | "peer_name" => Ok(GeneratedField::PeerName),
                            "schemaName" | "schema_name" => Ok(GeneratedField::SchemaName),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = SchemaTablesRequest;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.SchemaTablesRequest")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<SchemaTablesRequest, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut peer_name__ = None;
                let mut schema_name__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::PeerName => {
                            if peer_name__.is_some() {
                                return Err(serde::de::Error::duplicate_field("peerName"));
                            }
                            peer_name__ = Some(map.next_value()?);
                        }
                        GeneratedField::SchemaName => {
                            if schema_name__.is_some() {
                                return Err(serde::de::Error::duplicate_field("schemaName"));
                            }
                            schema_name__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(SchemaTablesRequest {
                    peer_name: peer_name__.unwrap_or_default(),
                    schema_name: schema_name__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.SchemaTablesRequest", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.SchemaTablesResponse
impl serde::Serialize for SchemaTablesResponse {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if !self.tables.is_empty() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.SchemaTablesResponse", len)?;
        if !self.tables.is_empty() {
            struct_ser.serialize_field("tables", &self.tables)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for SchemaTablesResponse {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "tables",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            Tables,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "tables" => Ok(GeneratedField::Tables),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = SchemaTablesResponse;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.SchemaTablesResponse")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<SchemaTablesResponse, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut tables__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::Tables => {
                            if tables__.is_some() {
                                return Err(serde::de::Error::duplicate_field("tables"));
                            }
                            tables__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(SchemaTablesResponse {
                    tables: tables__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.SchemaTablesResponse", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.ShutdownRequest
impl serde::Serialize for ShutdownRequest {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if !self.workflow_id.is_empty() {
            len += 1;
        }
        if !self.flow_job_name.is_empty() {
            len += 1;
        }
        if self.source_peer.is_some() {
            len += 1;
        }
        if self.destination_peer.is_some() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.ShutdownRequest", len)?;
        if !self.workflow_id.is_empty() {
            struct_ser.serialize_field("workflowId", &self.workflow_id)?;
        }
        if !self.flow_job_name.is_empty() {
            struct_ser.serialize_field("flowJobName", &self.flow_job_name)?;
        }
        if let Some(v) = self.source_peer.as_ref() {
            struct_ser.serialize_field("sourcePeer", v)?;
        }
        if let Some(v) = self.destination_peer.as_ref() {
            struct_ser.serialize_field("destinationPeer", v)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for ShutdownRequest {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "workflow_id",
            "workflowId",
            "flow_job_name",
            "flowJobName",
            "source_peer",
            "sourcePeer",
            "destination_peer",
            "destinationPeer",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            WorkflowId,
            FlowJobName,
            SourcePeer,
            DestinationPeer,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "workflowId" | "workflow_id" => Ok(GeneratedField::WorkflowId),
                            "flowJobName" | "flow_job_name" => Ok(GeneratedField::FlowJobName),
                            "sourcePeer" | "source_peer" => Ok(GeneratedField::SourcePeer),
                            "destinationPeer" | "destination_peer" => Ok(GeneratedField::DestinationPeer),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = ShutdownRequest;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.ShutdownRequest")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<ShutdownRequest, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut workflow_id__ = None;
                let mut flow_job_name__ = None;
                let mut source_peer__ = None;
                let mut destination_peer__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::WorkflowId => {
                            if workflow_id__.is_some() {
                                return Err(serde::de::Error::duplicate_field("workflowId"));
                            }
                            workflow_id__ = Some(map.next_value()?);
                        }
                        GeneratedField::FlowJobName => {
                            if flow_job_name__.is_some() {
                                return Err(serde::de::Error::duplicate_field("flowJobName"));
                            }
                            flow_job_name__ = Some(map.next_value()?);
                        }
                        GeneratedField::SourcePeer => {
                            if source_peer__.is_some() {
                                return Err(serde::de::Error::duplicate_field("sourcePeer"));
                            }
                            source_peer__ = map.next_value()?;
                        }
                        GeneratedField::DestinationPeer => {
                            if destination_peer__.is_some() {
                                return Err(serde::de::Error::duplicate_field("destinationPeer"));
                            }
                            destination_peer__ = map.next_value()?;
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(ShutdownRequest {
                    workflow_id: workflow_id__.unwrap_or_default(),
                    flow_job_name: flow_job_name__.unwrap_or_default(),
                    source_peer: source_peer__,
                    destination_peer: destination_peer__,
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.ShutdownRequest", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.ShutdownResponse
impl serde::Serialize for ShutdownResponse {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if self.ok {
            len += 1;
        }
        if !self.error_message.is_empty() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.ShutdownResponse", len)?;
        if self.ok {
            struct_ser.serialize_field("ok", &self.ok)?;
        }
        if !self.error_message.is_empty() {
            struct_ser.serialize_field("errorMessage", &self.error_message)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for ShutdownResponse {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "ok",
            "error_message",
            "errorMessage",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            Ok,
            ErrorMessage,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "ok" => Ok(GeneratedField::Ok),
                            "errorMessage" | "error_message" => Ok(GeneratedField::ErrorMessage),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = ShutdownResponse;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.ShutdownResponse")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<ShutdownResponse, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut ok__ = None;
                let mut error_message__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::Ok => {
                            if ok__.is_some() {
                                return Err(serde::de::Error::duplicate_field("ok"));
                            }
                            ok__ = Some(map.next_value()?);
                        }
                        GeneratedField::ErrorMessage => {
                            if error_message__.is_some() {
                                return Err(serde::de::Error::duplicate_field("errorMessage"));
                            }
                            error_message__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(ShutdownResponse {
                    ok: ok__.unwrap_or_default(),
                    error_message: error_message__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.ShutdownResponse", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.SlotInfo — float field lagInMb goes through pbjson's NumberDeserialize
// so it also accepts JSON strings, per the proto3 JSON mapping.
impl serde::Serialize for SlotInfo {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if !self.slot_name.is_empty() {
            len += 1;
        }
        if !self.redo_l_sn.is_empty() {
            len += 1;
        }
        if !self.restart_l_sn.is_empty() {
            len += 1;
        }
        if self.active {
            len += 1;
        }
        if self.lag_in_mb != 0. {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.SlotInfo", len)?;
        if !self.slot_name.is_empty() {
            struct_ser.serialize_field("slotName", &self.slot_name)?;
        }
        if !self.redo_l_sn.is_empty() {
            struct_ser.serialize_field("redoLSN", &self.redo_l_sn)?;
        }
        if !self.restart_l_sn.is_empty() {
            struct_ser.serialize_field("restartLSN", &self.restart_l_sn)?;
        }
        if self.active {
            struct_ser.serialize_field("active", &self.active)?;
        }
        if self.lag_in_mb != 0. {
            struct_ser.serialize_field("lagInMb", &self.lag_in_mb)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for SlotInfo {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "slot_name",
            "slotName",
            "redo_lSN",
            "redoLSN",
            "restart_lSN",
            "restartLSN",
            "active",
            "lag_in_mb",
            "lagInMb",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            SlotName,
            RedoLSn,
            RestartLSn,
            Active,
            LagInMb,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "slotName" | "slot_name" => Ok(GeneratedField::SlotName),
                            "redoLSN" | "redo_lSN" => Ok(GeneratedField::RedoLSn),
                            "restartLSN" | "restart_lSN" => Ok(GeneratedField::RestartLSn),
                            "active" => Ok(GeneratedField::Active),
                            "lagInMb" | "lag_in_mb" => Ok(GeneratedField::LagInMb),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = SlotInfo;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.SlotInfo")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<SlotInfo, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut slot_name__ = None;
                let mut redo_l_sn__ = None;
                let mut restart_l_sn__ = None;
                let mut active__ = None;
                let mut lag_in_mb__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::SlotName => {
                            if slot_name__.is_some() {
                                return Err(serde::de::Error::duplicate_field("slotName"));
                            }
                            slot_name__ = Some(map.next_value()?);
                        }
                        GeneratedField::RedoLSn => {
                            if redo_l_sn__.is_some() {
                                return Err(serde::de::Error::duplicate_field("redoLSN"));
                            }
                            redo_l_sn__ = Some(map.next_value()?);
                        }
                        GeneratedField::RestartLSn => {
                            if restart_l_sn__.is_some() {
                                return Err(serde::de::Error::duplicate_field("restartLSN"));
                            }
                            restart_l_sn__ = Some(map.next_value()?);
                        }
                        GeneratedField::Active => {
                            if active__.is_some() {
                                return Err(serde::de::Error::duplicate_field("active"));
                            }
                            active__ = Some(map.next_value()?);
                        }
                        GeneratedField::LagInMb => {
                            if lag_in_mb__.is_some() {
                                return Err(serde::de::Error::duplicate_field("lagInMb"));
                            }
                            lag_in_mb__ =
                                Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0)
                            ;
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(SlotInfo {
                    slot_name: slot_name__.unwrap_or_default(),
                    redo_l_sn: redo_l_sn__.unwrap_or_default(),
                    restart_l_sn: restart_l_sn__.unwrap_or_default(),
                    active: active__.unwrap_or_default(),
                    lag_in_mb: lag_in_mb__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.SlotInfo", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.SnapshotStatus
impl serde::Serialize for SnapshotStatus {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if !self.clones.is_empty() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.SnapshotStatus", len)?;
        if !self.clones.is_empty() {
            struct_ser.serialize_field("clones", &self.clones)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for SnapshotStatus {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "clones",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            Clones,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "clones" => Ok(GeneratedField::Clones),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = SnapshotStatus;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.SnapshotStatus")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<SnapshotStatus, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut clones__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::Clones => {
                            if clones__.is_some() {
                                return Err(serde::de::Error::duplicate_field("clones"));
                            }
                            clones__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(SnapshotStatus {
                    clones: clones__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.SnapshotStatus", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.StatInfo — int64 `pid` serializes as a string per the proto3 JSON
// mapping; `pid` and `duration` deserialize through NumberDeserialize.
impl serde::Serialize for StatInfo {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if self.pid != 0 {
            len += 1;
        }
        if !self.wait_event.is_empty() {
            len += 1;
        }
        if !self.wait_event_type.is_empty() {
            len += 1;
        }
        if !self.query_start.is_empty() {
            len += 1;
        }
        if !self.query.is_empty() {
            len += 1;
        }
        if self.duration != 0. {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.StatInfo", len)?;
        if self.pid != 0 {
            struct_ser.serialize_field("pid", ToString::to_string(&self.pid).as_str())?;
        }
        if !self.wait_event.is_empty() {
            struct_ser.serialize_field("waitEvent", &self.wait_event)?;
        }
        if !self.wait_event_type.is_empty() {
            struct_ser.serialize_field("waitEventType", &self.wait_event_type)?;
        }
        if !self.query_start.is_empty() {
            struct_ser.serialize_field("queryStart", &self.query_start)?;
        }
        if !self.query.is_empty() {
            struct_ser.serialize_field("query", &self.query)?;
        }
        if self.duration != 0. {
            struct_ser.serialize_field("duration", &self.duration)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for StatInfo {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "pid",
            "wait_event",
            "waitEvent",
            "wait_event_type",
            "waitEventType",
            "query_start",
            "queryStart",
            "query",
            "duration",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            Pid,
            WaitEvent,
            WaitEventType,
            QueryStart,
            Query,
            Duration,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "pid" => Ok(GeneratedField::Pid),
                            "waitEvent" | "wait_event" => Ok(GeneratedField::WaitEvent),
                            "waitEventType" | "wait_event_type" => Ok(GeneratedField::WaitEventType),
                            "queryStart" | "query_start" => Ok(GeneratedField::QueryStart),
                            "query" => Ok(GeneratedField::Query),
                            "duration" => Ok(GeneratedField::Duration),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = StatInfo;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.StatInfo")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<StatInfo, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut pid__ = None;
                let mut wait_event__ = None;
                let mut wait_event_type__ = None;
                let mut query_start__ = None;
                let mut query__ = None;
                let mut duration__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::Pid => {
                            if pid__.is_some() {
                                return Err(serde::de::Error::duplicate_field("pid"));
                            }
                            pid__ =
                                Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0)
                            ;
                        }
                        GeneratedField::WaitEvent => {
                            if wait_event__.is_some() {
                                return Err(serde::de::Error::duplicate_field("waitEvent"));
                            }
                            wait_event__ = Some(map.next_value()?);
                        }
                        GeneratedField::WaitEventType => {
                            if wait_event_type__.is_some() {
                                return Err(serde::de::Error::duplicate_field("waitEventType"));
                            }
                            wait_event_type__ = Some(map.next_value()?);
                        }
                        GeneratedField::QueryStart => {
                            if query_start__.is_some() {
                                return Err(serde::de::Error::duplicate_field("queryStart"));
                            }
                            query_start__ = Some(map.next_value()?);
                        }
                        GeneratedField::Query => {
                            if query__.is_some() {
                                return Err(serde::de::Error::duplicate_field("query"));
                            }
                            query__ = Some(map.next_value()?);
                        }
                        GeneratedField::Duration => {
                            if duration__.is_some() {
                                return Err(serde::de::Error::duplicate_field("duration"));
                            }
                            duration__ =
                                Some(map.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0)
                            ;
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(StatInfo {
                    pid: pid__.unwrap_or_default(),
                    wait_event: wait_event__.unwrap_or_default(),
                    wait_event_type: wait_event_type__.unwrap_or_default(),
                    query_start: query_start__.unwrap_or_default(),
                    query: query__.unwrap_or_default(),
                    duration: duration__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.StatInfo", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.TableColumnsRequest
impl serde::Serialize for TableColumnsRequest {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if !self.peer_name.is_empty() {
            len += 1;
        }
        if !self.schema_name.is_empty() {
            len += 1;
        }
        if !self.table_name.is_empty() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.TableColumnsRequest", len)?;
        if !self.peer_name.is_empty() {
            struct_ser.serialize_field("peerName", &self.peer_name)?;
        }
        if !self.schema_name.is_empty() {
            struct_ser.serialize_field("schemaName", &self.schema_name)?;
        }
        if !self.table_name.is_empty() {
            struct_ser.serialize_field("tableName", &self.table_name)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for TableColumnsRequest {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "peer_name",
            "peerName",
            "schema_name",
            "schemaName",
            "table_name",
            "tableName",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            PeerName,
            SchemaName,
            TableName,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "peerName" | "peer_name" => Ok(GeneratedField::PeerName),
                            "schemaName" | "schema_name" => Ok(GeneratedField::SchemaName),
                            "tableName" | "table_name" => Ok(GeneratedField::TableName),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = TableColumnsRequest;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.TableColumnsRequest")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<TableColumnsRequest, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut peer_name__ = None;
                let mut schema_name__ = None;
                let mut table_name__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::PeerName => {
                            if peer_name__.is_some() {
                                return Err(serde::de::Error::duplicate_field("peerName"));
                            }
                            peer_name__ = Some(map.next_value()?);
                        }
                        GeneratedField::SchemaName => {
                            if schema_name__.is_some() {
                                return Err(serde::de::Error::duplicate_field("schemaName"));
                            }
                            schema_name__ = Some(map.next_value()?);
                        }
                        GeneratedField::TableName => {
                            if table_name__.is_some() {
                                return Err(serde::de::Error::duplicate_field("tableName"));
                            }
                            table_name__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(TableColumnsRequest {
                    peer_name: peer_name__.unwrap_or_default(),
                    schema_name: schema_name__.unwrap_or_default(),
                    table_name: table_name__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.TableColumnsRequest", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.TableColumnsResponse
impl serde::Serialize for TableColumnsResponse {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if !self.columns.is_empty() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.TableColumnsResponse", len)?;
        if !self.columns.is_empty() {
            struct_ser.serialize_field("columns", &self.columns)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for TableColumnsResponse {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "columns",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            Columns,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "columns" => Ok(GeneratedField::Columns),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = TableColumnsResponse;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.TableColumnsResponse")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<TableColumnsResponse, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut columns__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::Columns => {
                            if columns__.is_some() {
                                return Err(serde::de::Error::duplicate_field("columns"));
                            }
                            columns__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(TableColumnsResponse {
                    columns: columns__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.TableColumnsResponse", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.ValidatePeerRequest
impl serde::Serialize for ValidatePeerRequest {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if self.peer.is_some() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.ValidatePeerRequest", len)?;
        if let Some(v) = self.peer.as_ref() {
            struct_ser.serialize_field("peer", v)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for ValidatePeerRequest {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "peer",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            Peer,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "peer" => Ok(GeneratedField::Peer),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = ValidatePeerRequest;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.ValidatePeerRequest")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<ValidatePeerRequest, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut peer__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::Peer => {
                            if peer__.is_some() {
                                return Err(serde::de::Error::duplicate_field("peer"));
                            }
                            peer__ = map.next_value()?;
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(ValidatePeerRequest {
                    peer: peer__,
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.ValidatePeerRequest", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.ValidatePeerResponse — enum field `status` is stored as i32 and
// (de)serialized through the ValidatePeerStatus enum's string form.
impl serde::Serialize for ValidatePeerResponse {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeStruct;
        let mut len = 0;
        if self.status != 0 {
            len += 1;
        }
        if !self.message.is_empty() {
            len += 1;
        }
        let mut struct_ser = serializer.serialize_struct("peerdb_route.ValidatePeerResponse", len)?;
        if self.status != 0 {
            let v = ValidatePeerStatus::from_i32(self.status)
                .ok_or_else(|| serde::ser::Error::custom(format!("Invalid variant {}", self.status)))?;
            struct_ser.serialize_field("status", &v)?;
        }
        if !self.message.is_empty() {
            struct_ser.serialize_field("message", &self.message)?;
        }
        struct_ser.end()
    }
}
impl<'de> serde::Deserialize<'de> for ValidatePeerResponse {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "status",
            "message",
        ];

        #[allow(clippy::enum_variant_names)]
        enum GeneratedField {
            Status,
            Message,
            __SkipField__,
        }
        impl<'de> serde::Deserialize<'de> for GeneratedField {
            fn deserialize<D>(deserializer: D) -> std::result::Result<GeneratedField, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                struct GeneratedVisitor;

                impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
                    type Value = GeneratedField;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(formatter, "expected one of: {:?}", &FIELDS)
                    }

                    #[allow(unused_variables)]
                    fn visit_str<E>(self, value: &str) -> std::result::Result<GeneratedField, E>
                    where
                        E: serde::de::Error,
                    {
                        match value {
                            "status" => Ok(GeneratedField::Status),
                            "message" => Ok(GeneratedField::Message),
                            _ => Ok(GeneratedField::__SkipField__),
                        }
                    }
                }
                deserializer.deserialize_identifier(GeneratedVisitor)
            }
        }
        struct GeneratedVisitor;
        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = ValidatePeerResponse;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("struct peerdb_route.ValidatePeerResponse")
            }

            fn visit_map<V>(self, mut map: V) -> std::result::Result<ValidatePeerResponse, V::Error>
                where
                    V: serde::de::MapAccess<'de>,
            {
                let mut status__ = None;
                let mut message__ = None;
                while let Some(k) = map.next_key()? {
                    match k {
                        GeneratedField::Status => {
                            if status__.is_some() {
                                return Err(serde::de::Error::duplicate_field("status"));
                            }
                            status__ = Some(map.next_value::<ValidatePeerStatus>()? as i32);
                        }
                        GeneratedField::Message => {
                            if message__.is_some() {
                                return Err(serde::de::Error::duplicate_field("message"));
                            }
                            message__ = Some(map.next_value()?);
                        }
                        GeneratedField::__SkipField__ => {
                            let _ = map.next_value::<serde::de::IgnoredAny>()?;
                        }
                    }
                }
                Ok(ValidatePeerResponse {
                    status: status__.unwrap_or_default(),
                    message: message__.unwrap_or_default(),
                })
            }
        }
        deserializer.deserialize_struct("peerdb_route.ValidatePeerResponse", FIELDS, GeneratedVisitor)
    }
}
// peerdb_route.ValidatePeerStatus — proto enum: serialized as its SCREAMING_CASE
// name; accepted back as either the name or the numeric value.
impl serde::Serialize for ValidatePeerStatus {
    #[allow(deprecated)]
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let variant = match self {
            Self::CreationUnknown => "CREATION_UNKNOWN",
            Self::Valid => "VALID",
            Self::Invalid => "INVALID",
        };
        serializer.serialize_str(variant)
    }
}
impl<'de> serde::Deserialize<'de> for ValidatePeerStatus {
    #[allow(deprecated)]
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        const FIELDS: &[&str] = &[
            "CREATION_UNKNOWN",
            "VALID",
            "INVALID",
        ];

        struct GeneratedVisitor;

        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
            type Value = ValidatePeerStatus;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(formatter, "expected one of: {:?}", &FIELDS)
            }

            fn visit_i64<E>(self, v: i64) -> std::result::Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                use std::convert::TryFrom;
                i32::try_from(v)
                    .ok()
                    .and_then(ValidatePeerStatus::from_i32)
                    .ok_or_else(|| {
                        serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self)
                    })
            }

            fn visit_u64<E>(self, v: u64) -> std::result::Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                use std::convert::TryFrom;
                i32::try_from(v)
                    .ok()
                    .and_then(ValidatePeerStatus::from_i32)
                    .ok_or_else(|| {
                        serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self)
                    })
            }

            fn visit_str<E>(self, value: &str) -> 
std::result::Result + where + E: serde::de::Error, + { + match value { + "CREATION_UNKNOWN" => Ok(ValidatePeerStatus::CreationUnknown), + "VALID" => Ok(ValidatePeerStatus::Valid), + "INVALID" => Ok(ValidatePeerStatus::Invalid), + _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), + } + } + } + deserializer.deserialize_any(GeneratedVisitor) } } diff --git a/nexus/pt/src/peerdb_route.tonic.rs b/nexus/pt/src/peerdb_route.tonic.rs index 1f75c4889..f2143777c 100644 --- a/nexus/pt/src/peerdb_route.tonic.rs +++ b/nexus/pt/src/peerdb_route.tonic.rs @@ -86,11 +86,11 @@ pub mod flow_service_client { self } /// - pub async fn list_peers( + pub async fn validate_peer( &mut self, - request: impl tonic::IntoRequest, + request: impl tonic::IntoRequest, ) -> std::result::Result< - tonic::Response, + tonic::Response, tonic::Status, > { self.inner @@ -104,11 +104,63 @@ pub mod flow_service_client { })?; let codec = tonic::codec::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static( - "/peerdb_route.FlowService/ListPeers", + "/peerdb_route.FlowService/ValidatePeer", ); let mut req = request.into_request(); req.extensions_mut() - .insert(GrpcMethod::new("peerdb_route.FlowService", "ListPeers")); + .insert(GrpcMethod::new("peerdb_route.FlowService", "ValidatePeer")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn create_peer( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/peerdb_route.FlowService/CreatePeer", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("peerdb_route.FlowService", "CreatePeer")); + self.inner.unary(req, path, codec).await + } + /// + pub async 
fn drop_peer( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/peerdb_route.FlowService/DropPeer", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("peerdb_route.FlowService", "DropPeer")); self.inner.unary(req, path, codec).await } /// @@ -164,6 +216,138 @@ pub mod flow_service_client { self.inner.unary(req, path, codec).await } /// + pub async fn get_schemas( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/peerdb_route.FlowService/GetSchemas", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("peerdb_route.FlowService", "GetSchemas")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn get_tables_in_schema( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/peerdb_route.FlowService/GetTablesInSchema", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("peerdb_route.FlowService", "GetTablesInSchema"), + ); + self.inner.unary(req, path, 
codec).await + } + /// + pub async fn get_columns( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/peerdb_route.FlowService/GetColumns", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("peerdb_route.FlowService", "GetColumns")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn get_slot_info( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/peerdb_route.FlowService/GetSlotInfo", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("peerdb_route.FlowService", "GetSlotInfo")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn get_stat_info( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/peerdb_route.FlowService/GetStatInfo", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("peerdb_route.FlowService", "GetStatInfo")); + self.inner.unary(req, path, codec).await + } + /// pub async fn shutdown_flow( &mut self, request: 
impl tonic::IntoRequest, @@ -189,6 +373,32 @@ pub mod flow_service_client { .insert(GrpcMethod::new("peerdb_route.FlowService", "ShutdownFlow")); self.inner.unary(req, path, codec).await } + /// + pub async fn mirror_status( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/peerdb_route.FlowService/MirrorStatus", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("peerdb_route.FlowService", "MirrorStatus")); + self.inner.unary(req, path, codec).await + } } } /// Generated server implementations. @@ -199,11 +409,27 @@ pub mod flow_service_server { #[async_trait] pub trait FlowService: Send + Sync + 'static { /// - async fn list_peers( + async fn validate_peer( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn create_peer( &self, - request: tonic::Request, + request: tonic::Request, ) -> std::result::Result< - tonic::Response, + tonic::Response, + tonic::Status, + >; + /// + async fn drop_peer( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, tonic::Status, >; /// @@ -223,6 +449,46 @@ pub mod flow_service_server { tonic::Status, >; /// + async fn get_schemas( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn get_tables_in_schema( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn get_columns( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn get_slot_info( + &self, + request: 
tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn get_stat_info( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// async fn shutdown_flow( &self, request: tonic::Request, @@ -230,6 +496,14 @@ pub mod flow_service_server { tonic::Response, tonic::Status, >; + /// + async fn mirror_status( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; } /// #[derive(Debug)] @@ -311,24 +585,114 @@ pub mod flow_service_server { fn call(&mut self, req: http::Request) -> Self::Future { let inner = self.inner.clone(); match req.uri().path() { - "/peerdb_route.FlowService/ListPeers" => { + "/peerdb_route.FlowService/ValidatePeer" => { + #[allow(non_camel_case_types)] + struct ValidatePeerSvc(pub Arc); + impl< + T: FlowService, + > tonic::server::UnaryService + for ValidatePeerSvc { + type Response = super::ValidatePeerResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + (*inner).validate_peer(request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = ValidatePeerSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; 
+ Box::pin(fut) + } + "/peerdb_route.FlowService/CreatePeer" => { + #[allow(non_camel_case_types)] + struct CreatePeerSvc(pub Arc); + impl< + T: FlowService, + > tonic::server::UnaryService + for CreatePeerSvc { + type Response = super::CreatePeerResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { (*inner).create_peer(request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = CreatePeerSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/peerdb_route.FlowService/DropPeer" => { #[allow(non_camel_case_types)] - struct ListPeersSvc(pub Arc); + struct DropPeerSvc(pub Arc); impl< T: FlowService, - > tonic::server::UnaryService - for ListPeersSvc { - type Response = super::ListPeersResponse; + > tonic::server::UnaryService + for DropPeerSvc { + type Response = super::DropPeerResponse; type Future = BoxFuture< tonic::Response, tonic::Status, >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { (*inner).list_peers(request).await }; + let fut = async move { (*inner).drop_peer(request).await }; Box::pin(fut) } } @@ -339,7 +703,7 @@ pub mod 
flow_service_server { let inner = self.inner.clone(); let fut = async move { let inner = inner.0; - let method = ListPeersSvc(inner); + let method = DropPeerSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) .apply_compression_config( @@ -447,6 +811,238 @@ pub mod flow_service_server { }; Box::pin(fut) } + "/peerdb_route.FlowService/GetSchemas" => { + #[allow(non_camel_case_types)] + struct GetSchemasSvc(pub Arc); + impl< + T: FlowService, + > tonic::server::UnaryService + for GetSchemasSvc { + type Response = super::PeerSchemasResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + super::PostgresPeerActivityInfoRequest, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { (*inner).get_schemas(request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = GetSchemasSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/peerdb_route.FlowService/GetTablesInSchema" => { + #[allow(non_camel_case_types)] + struct GetTablesInSchemaSvc(pub Arc); + impl< + T: FlowService, + > tonic::server::UnaryService + for GetTablesInSchemaSvc { + type Response = super::SchemaTablesResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + 
>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + (*inner).get_tables_in_schema(request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = GetTablesInSchemaSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/peerdb_route.FlowService/GetColumns" => { + #[allow(non_camel_case_types)] + struct GetColumnsSvc(pub Arc); + impl< + T: FlowService, + > tonic::server::UnaryService + for GetColumnsSvc { + type Response = super::TableColumnsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { (*inner).get_columns(request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = GetColumnsSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + 
.apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/peerdb_route.FlowService/GetSlotInfo" => { + #[allow(non_camel_case_types)] + struct GetSlotInfoSvc(pub Arc); + impl< + T: FlowService, + > tonic::server::UnaryService + for GetSlotInfoSvc { + type Response = super::PeerSlotResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + super::PostgresPeerActivityInfoRequest, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + (*inner).get_slot_info(request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = GetSlotInfoSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/peerdb_route.FlowService/GetStatInfo" => { + #[allow(non_camel_case_types)] + struct GetStatInfoSvc(pub Arc); + impl< + T: FlowService, + > tonic::server::UnaryService + for GetStatInfoSvc { + type Response = super::PeerStatResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + super::PostgresPeerActivityInfoRequest, + >, + ) -> 
Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + (*inner).get_stat_info(request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = GetStatInfoSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } "/peerdb_route.FlowService/ShutdownFlow" => { #[allow(non_camel_case_types)] struct ShutdownFlowSvc(pub Arc); @@ -493,6 +1089,52 @@ pub mod flow_service_server { }; Box::pin(fut) } + "/peerdb_route.FlowService/MirrorStatus" => { + #[allow(non_camel_case_types)] + struct MirrorStatusSvc(pub Arc); + impl< + T: FlowService, + > tonic::server::UnaryService + for MirrorStatusSvc { + type Response = super::MirrorStatusResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + (*inner).mirror_status(request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = MirrorStatusSvc(inner); + 
let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } _ => { Box::pin(async move { Ok( diff --git a/nexus/server/src/cursor.rs b/nexus/server/src/cursor.rs index 025fdbcf5..36fee27c3 100644 --- a/nexus/server/src/cursor.rs +++ b/nexus/server/src/cursor.rs @@ -24,7 +24,7 @@ impl PeerCursors { self.cursors.remove(&name); } - pub fn get_peer(&self, name: &str) -> Option<&Box> { - self.cursors.get(name) + pub fn get_peer(&self, name: &str) -> Option<&Peer> { + self.cursors.get(name).map(|peer| peer.as_ref()) } } diff --git a/nexus/server/src/main.rs b/nexus/server/src/main.rs index 4b6acec35..c2a9032a2 100644 --- a/nexus/server/src/main.rs +++ b/nexus/server/src/main.rs @@ -7,7 +7,7 @@ use catalog::{Catalog, CatalogConfig, WorkflowDetails}; use clap::Parser; use cursor::PeerCursors; use dashmap::DashMap; -use flow_rs::grpc::FlowGrpcClient; +use flow_rs::grpc::{FlowGrpcClient, PeerValidationResult}; use peer_bigquery::BigQueryQueryExecutor; use peer_connections::{PeerConnectionTracker, PeerConnections}; use peer_cursor::{ @@ -150,20 +150,20 @@ impl NexusBackend { } fn is_peer_validity_supported(peer_type: i32) -> bool { - let unsupported_peer_types = vec![ + let unsupported_peer_types = [ 4, // EVENTHUB 5, // S3 - 6, // SQLSERVER + 7, ]; !unsupported_peer_types.contains(&peer_type) } async fn check_for_mirror( catalog: &MutexGuard<'_, Catalog>, - flow_name: String, + flow_name: &str, ) -> PgWireResult> { let workflow_details = catalog - .get_workflow_details_for_flow_job(&flow_name) + .get_workflow_details_for_flow_job(flow_name) .await .map_err(|err| { PgWireError::ApiError(Box::new(PgError::Internal { @@ -204,33 +204,73 @@ impl NexusBackend { } } + async fn 
validate_peer<'a>(&self, peer_type: i32, peer: &Peer) -> anyhow::Result<()> { + if peer_type != 6 { + let peer_executor = self.get_peer_executor(peer).await.map_err(|err| { + PgWireError::ApiError(Box::new(PgError::Internal { + err_msg: format!("unable to get peer executor: {:?}", err), + })) + })?; + peer_executor.is_connection_valid().await.map_err(|e| { + self.executors.remove(&peer.name); // Otherwise it will keep returning the earlier configured executor + PgWireError::UserError(Box::new(ErrorInfo::new( + "ERROR".to_owned(), + "internal_error".to_owned(), + format!("[peer]: invalid configuration: {}", e), + ))) + })?; + self.executors.remove(&peer.name); + Ok(()) + } else { + let mut flow_handler = self.flow_handler.as_ref().unwrap().lock().await; + let validate_request = pt::peerdb_route::ValidatePeerRequest { + peer: Some(Peer { + name: peer.name.clone(), + r#type: peer.r#type, + config: peer.config.clone(), + }), + }; + let validity = flow_handler + .validate_peer(&validate_request) + .await + .map_err(|err| { + PgWireError::ApiError(Box::new(PgError::Internal { + err_msg: format!("unable to check peer validity: {:?}", err), + })) + })?; + if let PeerValidationResult::Invalid(validation_err) = validity { + Err(PgWireError::UserError(Box::new(ErrorInfo::new( + "ERROR".to_owned(), + "internal_error".to_owned(), + format!("[peer]: invalid configuration: {}", validation_err), + ))) + .into()) + } else { + Ok(()) + } + } + } + async fn handle_query<'a>( &self, nexus_stmt: NexusStatement, ) -> PgWireResult>> { let mut peer_holder: Option> = None; match nexus_stmt { - NexusStatement::PeerDDL { stmt: _, ddl } => match ddl { + NexusStatement::PeerDDL { stmt: _, ddl } => match ddl.as_ref() { PeerDDL::CreatePeer { peer, if_not_exists: _, } => { let peer_type = peer.r#type; if Self::is_peer_validity_supported(peer_type) { - let peer_executor = self.get_peer_executor(&peer).await.map_err(|err| { - PgWireError::ApiError(Box::new(PgError::Internal { - err_msg: 
format!("unable to get peer executor: {:?}", err), - })) - })?; - peer_executor.is_connection_valid().await.map_err(|e| { - self.executors.remove(&peer.name); // Otherwise it will keep returning the earlier configured executor + self.validate_peer(peer_type, peer).await.map_err(|e| { PgWireError::UserError(Box::new(ErrorInfo::new( "ERROR".to_owned(), "internal_error".to_owned(), - format!("[peer]: invalid configuration: {}", e), + e.to_string(), ))) })?; - self.executors.remove(&peer.name); } let catalog = self.catalog.lock().await; @@ -255,11 +295,10 @@ impl NexusBackend { }))); } let catalog = self.catalog.lock().await; - let mirror_details = - Self::check_for_mirror(&catalog, flow_job.name.clone()).await?; + let mirror_details = Self::check_for_mirror(&catalog, &flow_job.name).await?; if mirror_details.is_none() { catalog - .create_flow_job_entry(&flow_job) + .create_flow_job_entry(flow_job) .await .map_err(|err| { PgWireError::ApiError(Box::new(PgError::Internal { @@ -281,7 +320,7 @@ impl NexusBackend { // make a request to the flow service to start the job. 
let mut flow_handler = self.flow_handler.as_ref().unwrap().lock().await; let workflow_id = flow_handler - .start_peer_flow_job(&flow_job, src_peer, dst_peer) + .start_peer_flow_job(flow_job, src_peer, dst_peer) .await .map_err(|err| { PgWireError::ApiError(Box::new(PgError::Internal { @@ -304,7 +343,7 @@ impl NexusBackend { None, ))]) } else { - Self::handle_mirror_existence(if_not_exists, flow_job.name) + Self::handle_mirror_existence(*if_not_exists, flow_job.name.clone()) } } PeerDDL::CreateMirrorForSelect { @@ -320,13 +359,13 @@ impl NexusBackend { { let catalog = self.catalog.lock().await; mirror_details = - Self::check_for_mirror(&catalog, qrep_flow_job.name.clone()).await?; + Self::check_for_mirror(&catalog, &qrep_flow_job.name).await?; } if mirror_details.is_none() { { let catalog = self.catalog.lock().await; catalog - .create_qrep_flow_job_entry(&qrep_flow_job) + .create_qrep_flow_job_entry(qrep_flow_job) .await .map_err(|err| { PgWireError::ApiError(Box::new(PgError::Internal { @@ -347,14 +386,14 @@ impl NexusBackend { ))]); } - let _workflow_id = self.run_qrep_mirror(&qrep_flow_job).await?; + let _workflow_id = self.run_qrep_mirror(qrep_flow_job).await?; let create_mirror_success = format!("CREATE MIRROR {}", qrep_flow_job.name); Ok(vec![Response::Execution(Tag::new_for_execution( &create_mirror_success, None, ))]) } else { - Self::handle_mirror_existence(if_not_exists, qrep_flow_job.name) + Self::handle_mirror_existence(*if_not_exists, qrep_flow_job.name.clone()) } } PeerDDL::ExecuteMirrorForSelect { flow_job_name } => { @@ -367,7 +406,7 @@ impl NexusBackend { if let Some(job) = { let catalog = self.catalog.lock().await; catalog - .get_qrep_flow_job_by_name(&flow_job_name) + .get_qrep_flow_job_by_name(flow_job_name) .await .map_err(|err| { PgWireError::ApiError(Box::new(PgError::Internal { @@ -402,7 +441,7 @@ impl NexusBackend { let catalog = self.catalog.lock().await; tracing::info!("mirror_name: {}, if_exists: {}", flow_job_name, if_exists); let 
workflow_details = catalog - .get_workflow_details_for_flow_job(&flow_job_name) + .get_workflow_details_for_flow_job(flow_job_name) .await .map_err(|err| { PgWireError::ApiError(Box::new(PgError::Internal { @@ -416,11 +455,10 @@ impl NexusBackend { "got workflow id: {:?}", workflow_details.as_ref().map(|w| &w.workflow_id) ); - if workflow_details.is_some() { - let workflow_details = workflow_details.unwrap(); + if let Some(workflow_details) = workflow_details { let mut flow_handler = self.flow_handler.as_ref().unwrap().lock().await; flow_handler - .shutdown_flow_job(&flow_job_name, workflow_details) + .shutdown_flow_job(flow_job_name, workflow_details) .await .map_err(|err| { PgWireError::ApiError(Box::new(PgError::Internal { @@ -428,7 +466,7 @@ impl NexusBackend { })) })?; catalog - .delete_flow_job_entry(&flow_job_name) + .delete_flow_job_entry(flow_job_name) .await .map_err(|err| { PgWireError::ApiError(Box::new(PgError::Internal { @@ -440,7 +478,7 @@ impl NexusBackend { &drop_mirror_success, None, ))]) - } else if if_exists { + } else if *if_exists { let no_mirror_success = "NO SUCH MIRROR"; Ok(vec![Response::Execution(Tag::new_for_execution( no_mirror_success, @@ -454,6 +492,54 @@ impl NexusBackend { )))) } } + PeerDDL::DropPeer { + if_exists, + peer_name, + } => { + if self.flow_handler.is_none() { + return Err(PgWireError::ApiError(Box::new(PgError::Internal { + err_msg: "flow service is not configured".to_owned(), + }))); + } + + let catalog = self.catalog.lock().await; + tracing::info!("drop peer_name: {}, if_exists: {}", peer_name, if_exists); + let peer_exists = + catalog.check_peer_entry(peer_name).await.map_err(|err| { + PgWireError::ApiError(Box::new(PgError::Internal { + err_msg: format!( + "unable to query catalog for peer metadata: {:?}", + err + ), + })) + })?; + tracing::info!("peer exist count: {}", peer_exists); + if peer_exists != 0 { + let mut flow_handler = self.flow_handler.as_ref().unwrap().lock().await; + 
flow_handler.drop_peer(peer_name).await.map_err(|err| { + PgWireError::ApiError(Box::new(PgError::Internal { + err_msg: format!("unable to drop peer: {:?}", err), + })) + })?; + let drop_peer_success = format!("DROP PEER {}", peer_name); + Ok(vec![Response::Execution(Tag::new_for_execution( + &drop_peer_success, + None, + ))]) + } else if *if_exists { + let no_peer_success = "NO SUCH PEER"; + Ok(vec![Response::Execution(Tag::new_for_execution( + no_peer_success, + None, + ))]) + } else { + Err(PgWireError::UserError(Box::new(ErrorInfo::new( + "ERROR".to_owned(), + "error".to_owned(), + format!("no such peer: {:?}", peer_name), + )))) + } + } }, NexusStatement::PeerQuery { stmt, assoc } => { // get the query executor diff --git a/nexus/sqlparser-rs b/nexus/sqlparser-rs index 92bc0e62d..096cac913 160000 --- a/nexus/sqlparser-rs +++ b/nexus/sqlparser-rs @@ -1 +1 @@ -Subproject commit 92bc0e62d83a957911a3b22a869208fa822a840b +Subproject commit 096cac91355948cffcdb0f5c9273293ae4505760 diff --git a/nexus/value/src/array.rs b/nexus/value/src/array.rs index 9b50c8c67..2fa299bb3 100644 --- a/nexus/value/src/array.rs +++ b/nexus/value/src/array.rs @@ -128,7 +128,7 @@ impl ArrayValue { } } -impl<'a> ToSql for ArrayValue { +impl ToSql for ArrayValue { fn to_sql( &self, ty: &Type, @@ -235,7 +235,6 @@ impl ToSqlText for ArrayValue { ArrayValue::Timestamp(arr) => array_to_sql_text!(arr, ty, out), ArrayValue::TimestampWithTimeZone(arr) => array_to_sql_text!(arr, ty, out), ArrayValue::Empty => {} - _ => todo!(), } // remove trailing comma diff --git a/nexus/value/src/lib.rs b/nexus/value/src/lib.rs index 8627a50d4..f6dbe0687 100644 --- a/nexus/value/src/lib.rs +++ b/nexus/value/src/lib.rs @@ -1,4 +1,5 @@ use array::ArrayValue; +use base64::prelude::{Engine as _, BASE64_STANDARD}; use bytes::Bytes; use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc}; use rust_decimal::Decimal; @@ -236,8 +237,8 @@ impl Value { Value::Char(c) => serde_json::Value::String(c.to_string()), 
Value::VarChar(s) => serde_json::Value::String(s.clone()), Value::Text(s) => serde_json::Value::String(s.clone()), - Value::Binary(b) => serde_json::Value::String(base64::encode(b)), - Value::VarBinary(b) => serde_json::Value::String(base64::encode(b)), + Value::Binary(b) => serde_json::Value::String(BASE64_STANDARD.encode(b)), + Value::VarBinary(b) => serde_json::Value::String(BASE64_STANDARD.encode(b)), Value::Date(d) => serde_json::Value::String(d.to_string()), Value::Time(t) => serde_json::Value::String(t.to_string()), Value::TimeWithTimeZone(t) => serde_json::Value::String(t.to_string()), diff --git a/protos/buf.lock b/protos/buf.lock new file mode 100644 index 000000000..6c4355d41 --- /dev/null +++ b/protos/buf.lock @@ -0,0 +1,8 @@ +# Generated by buf. DO NOT EDIT. +version: v1 +deps: + - remote: buf.build + owner: googleapis + repository: googleapis + commit: 28151c0d0a1641bf938a7672c500e01d + digest: shake256:49215edf8ef57f7863004539deff8834cfb2195113f0b890dd1f67815d9353e28e668019165b9d872395871eeafcbab3ccfdb2b5f11734d3cca95be9e8d139de diff --git a/protos/buf.yaml b/protos/buf.yaml index 1a5194568..69baf0fd4 100644 --- a/protos/buf.yaml +++ b/protos/buf.yaml @@ -1,4 +1,6 @@ version: v1 +deps: + - buf.build/googleapis/googleapis breaking: use: - FILE diff --git a/protos/flow.proto b/protos/flow.proto index 428b8ae25..d01aa7fd4 100644 --- a/protos/flow.proto +++ b/protos/flow.proto @@ -22,12 +22,18 @@ message RelationMessage { repeated RelationMessageColumn columns = 3; } +message TableMapping { + string source_table_identifier = 1; + string destination_table_identifier = 2; + string partition_key = 3; +} + message FlowConnectionConfigs { peerdb_peers.Peer source = 1; peerdb_peers.Peer destination = 2; string flow_job_name = 3; TableSchema table_schema = 4; - map table_name_mapping = 5; + repeated TableMapping table_mappings = 5; map src_table_id_name_mapping = 6; map table_name_schema_mapping = 7; @@ -56,9 +62,27 @@ message FlowConnectionConfigs { // the 
below two are for eventhub only int64 push_batch_size = 21; int64 push_parallelism = 22; + + // if true, then the flow will be resynced + bool resync = 23; +} + +message RenameTableOption { + string current_name = 1; + string new_name = 2; +} + +message RenameTablesInput { + string flow_job_name = 1; + peerdb_peers.Peer peer = 2; + repeated RenameTableOption rename_table_options = 3; +} + +message RenameTablesOutput { + string flow_job_name = 1; } -message SyncFlowOptions { +message SyncFlowOptions { int32 batch_size = 1; map relation_message_mapping = 2; } @@ -148,7 +172,7 @@ message TableSchema { // list of column names and types, types can be one of the following: // "string", "int", "float", "bool", "timestamp". map columns = 2; - string primary_key_column = 3; + repeated string primary_key_columns = 3; bool is_replica_identity_full = 4; } @@ -202,18 +226,12 @@ message TIDPartitionRange { TID end = 2; } -message XMINPartitionRange { - uint32 start = 1; - uint32 end = 2; -} - message PartitionRange { // can be a timestamp range or an integer range oneof range { IntPartitionRange int_range = 1; TimestampPartitionRange timestamp_range = 2; TIDPartitionRange tid_range = 3; - XMINPartitionRange xmin_range = 4; } } @@ -251,7 +269,9 @@ message QRepConfig { bool initial_copy_only = 8; QRepSyncMode sync_mode = 9; + // DEPRECATED: eliminate when breaking changes are allowed. uint32 batch_size_int = 10; + // DEPRECATED: eliminate when breaking changes are allowed. uint32 batch_duration_seconds = 11; uint32 max_parallel_workers = 12; @@ -273,6 +293,9 @@ message QRepConfig { // and instead uses the number of rows per partition to determine // how many rows to process per batch. uint32 num_rows_per_partition = 16; + + // Creates the watermark table on the destination as-is, can be used for some queries. 
+ bool setup_watermark_table_on_destination = 17; } message QRepPartition { @@ -303,10 +326,9 @@ message TableSchemaDelta { string src_table_name = 1; string dst_table_name = 2; repeated DeltaAddedColumn added_columns = 3; - repeated string dropped_columns = 4; } message ReplayTableSchemaDeltaInput { FlowConnectionConfigs flow_connection_configs = 1; - TableSchemaDelta table_schema_delta = 2; + repeated TableSchemaDelta table_schema_deltas = 2; } diff --git a/protos/peers.proto b/protos/peers.proto index 1f58a3ea7..162a63f3a 100644 --- a/protos/peers.proto +++ b/protos/peers.proto @@ -52,10 +52,29 @@ message EventHubConfig { string resource_group = 2; string location = 3; PostgresConfig metadata_db = 4; + // if this is empty PeerDB uses `AZURE_SUBSCRIPTION_ID` environment variable. + string subscription_id = 5; + // defaults to 3 + uint32 partition_count = 6; + // defaults to 7 + uint32 message_retention_in_days = 7; +} + +message EventHubGroupConfig { + // event hub peer name to event hub config + map eventhubs = 1; + PostgresConfig metadata_db = 2; + repeated string unnest_columns = 3; } message S3Config { string url = 1; + optional string access_key_id = 2; + optional string secret_access_key = 3; + optional string role_arn = 4; + optional string region = 5; + optional string endpoint = 6; + PostgresConfig metadata_db = 7; } message SqlServerConfig { @@ -74,6 +93,7 @@ enum DBType { EVENTHUB = 4; S3 = 5; SQLSERVER = 6; + EVENTHUB_GROUP = 7; } message Peer { @@ -87,5 +107,6 @@ message Peer { EventHubConfig eventhub_config = 7; S3Config s3_config = 8; SqlServerConfig sqlserver_config = 9; + EventHubGroupConfig eventhub_group_config = 10; } } diff --git a/protos/route.proto b/protos/route.proto index d7cd10f08..907973c50 100644 --- a/protos/route.proto +++ b/protos/route.proto @@ -1,6 +1,8 @@ syntax = "proto3"; +import "google/api/annotations.proto"; import "google/protobuf/timestamp.proto"; + import "peers.proto"; import "flow.proto"; @@ -8,6 +10,7 @@ package 
peerdb_route; message CreateCDCFlowRequest { peerdb_flow.FlowConnectionConfigs connection_configs = 1; + bool create_catalog_entry = 2; } message CreateCDCFlowResponse { @@ -16,6 +19,7 @@ message CreateCDCFlowResponse { message CreateQRepFlowRequest { peerdb_flow.QRepConfig qrep_config = 1; + bool create_catalog_entry = 2; } message CreateQRepFlowResponse { @@ -34,16 +38,196 @@ message ShutdownResponse { string error_message = 2; } -message ListPeersRequest { +message ValidatePeerRequest { + peerdb_peers.Peer peer = 1; +} + +message CreatePeerRequest { + peerdb_peers.Peer peer = 1; +} + +message DropPeerRequest { + string peer_name = 1; +} + +message DropPeerResponse { + bool ok = 1; + string error_message = 2; +} + +enum ValidatePeerStatus { + CREATION_UNKNOWN = 0; + VALID = 1; + INVALID = 2; +} + +enum CreatePeerStatus { + VALIDATION_UNKNOWN = 0; + CREATED = 1; + FAILED = 2; +} + +message ValidatePeerResponse { + ValidatePeerStatus status = 1; + string message = 2; +} + +message CreatePeerResponse { + CreatePeerStatus status = 1; + string message = 2; +} + +message MirrorStatusRequest { + string flow_job_name = 1; +} + +message PartitionStatus { + string partition_id = 1; + google.protobuf.Timestamp start_time = 2; + google.protobuf.Timestamp end_time = 3; + int32 num_rows = 4; +} + +message QRepMirrorStatus { + peerdb_flow.QRepConfig config = 1; + repeated PartitionStatus partitions = 2; + // TODO make note to see if we are still in initial copy + // or if we are in the continuous streaming mode. 
+} + +message CDCSyncStatus { + int64 start_lsn = 1; + int64 end_lsn = 2; + int32 num_rows = 3; + google.protobuf.Timestamp start_time = 4; + google.protobuf.Timestamp end_time = 5; +} + +message PeerSchemasResponse { + repeated string schemas = 1; } -message ListPeersResponse { - repeated peerdb_peers.Peer peers = 1; +message SchemaTablesRequest { + string peer_name = 1; + string schema_name = 2; +} + +message SchemaTablesResponse { + repeated string tables = 1; +} + +message TableColumnsRequest { + string peer_name = 1; + string schema_name = 2; + string table_name = 3; +} + +message TableColumnsResponse { + repeated string columns = 1; +} + +message PostgresPeerActivityInfoRequest { + string peer_name = 1; +} + +message SlotInfo { + string slot_name = 1; + string redo_lSN = 2; + string restart_lSN = 3; + bool active = 4; + float lag_in_mb = 5; +} + +message StatInfo { + int64 pid = 1; + string wait_event = 2; + string wait_event_type = 3; + string query_start = 4; + string query = 5; + float duration = 6; +} + +message PeerSlotResponse { + repeated SlotInfo slot_data = 1; +} + +message PeerStatResponse { + repeated StatInfo stat_data = 1; +} + +message SnapshotStatus { + repeated QRepMirrorStatus clones = 1; +} + +message CDCMirrorStatus { + peerdb_flow.FlowConnectionConfigs config = 1; + SnapshotStatus snapshot_status = 2; + repeated CDCSyncStatus cdc_syncs = 3; +} + +message MirrorStatusResponse { + string flow_job_name = 1; + oneof status { + QRepMirrorStatus qrep_status = 2; + CDCMirrorStatus cdc_status = 3; + } + string error_message = 4; } service FlowService { - rpc ListPeers(ListPeersRequest) returns (ListPeersResponse) {} - rpc CreateCDCFlow(CreateCDCFlowRequest) returns (CreateCDCFlowResponse) {} - rpc CreateQRepFlow(CreateQRepFlowRequest) returns (CreateQRepFlowResponse) {} - rpc ShutdownFlow(ShutdownRequest) returns (ShutdownResponse) {} + rpc ValidatePeer(ValidatePeerRequest) returns (ValidatePeerResponse) { + option (google.api.http) = { + post: 
"/v1/peers/validate", + body: "*" + }; + } + rpc CreatePeer(CreatePeerRequest) returns (CreatePeerResponse) { + option (google.api.http) = { + post: "/v1/peers/create", + body: "*" + }; + } + rpc DropPeer(DropPeerRequest) returns (DropPeerResponse) { + option (google.api.http) = { + post: "/v1/peers/drop", + body: "*" + }; + } + rpc CreateCDCFlow(CreateCDCFlowRequest) returns (CreateCDCFlowResponse) { + option (google.api.http) = { + post: "/v1/flows/cdc/create", + body: "*" + }; + } + rpc CreateQRepFlow(CreateQRepFlowRequest) returns (CreateQRepFlowResponse) { + option (google.api.http) = { + post: "/v1/flows/qrep/create", + body: "*" + }; + } + + rpc GetSchemas(PostgresPeerActivityInfoRequest) returns (PeerSchemasResponse) { + option (google.api.http) = { get: "/v1/peers/schemas" }; + } + + rpc GetTablesInSchema(SchemaTablesRequest) returns (SchemaTablesResponse) { + option (google.api.http) = { get: "/v1/peers/tables" }; + } + + rpc GetColumns(TableColumnsRequest) returns (TableColumnsResponse) { + option (google.api.http) = { get: "/v1/peers/columns" }; + } + + rpc GetSlotInfo(PostgresPeerActivityInfoRequest) returns (PeerSlotResponse) { + option (google.api.http) = { get: "/v1/peers/slots/{peer_name}" }; + } + rpc GetStatInfo(PostgresPeerActivityInfoRequest) returns (PeerStatResponse) { + option (google.api.http) = { get: "/v1/peers/stats/{peer_name}" }; + } + rpc ShutdownFlow(ShutdownRequest) returns (ShutdownResponse) { + option (google.api.http) = { post: "/v1/mirrors/drop", body: "*" }; + } + rpc MirrorStatus(MirrorStatusRequest) returns (MirrorStatusResponse) { + option (google.api.http) = { get: "/v1/mirrors/{flow_job_name}" }; + } } diff --git a/run-peerdb.sh b/run-peerdb.sh new file mode 100755 index 000000000..74b1dfda7 --- /dev/null +++ b/run-peerdb.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -Eeuo pipefail + +if ! 
command -v docker &> /dev/null +then + echo "docker could not be found on PATH" + exit 1 +fi + +docker compose pull +docker compose -f docker-compose.yml up --no-attach catalog --no-attach temporal --no-attach temporal-ui --no-attach temporal-admin-tools diff --git a/stacks/flow-api.Dockerfile b/stacks/flow-api.Dockerfile deleted file mode 100644 index f84c5da35..000000000 --- a/stacks/flow-api.Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -# syntax=docker/dockerfile:1.2 - -# Start from the latest Golang base image -FROM golang:1.21-alpine AS builder -WORKDIR /root/flow - -# first copy only go.mod and go.sum to cache dependencies -COPY flow/go.mod . -COPY flow/go.sum . - -# download all the dependencies -RUN --mount=type=cache,target=/go/pkg/mod \ - go mod download - -# Copy all the code -COPY flow . - -# build the binary from cmd folder -WORKDIR /root/flow/cmd -RUN --mount=type=cache,target=/root/.cache/go-build \ - CGO_ENABLED=0 go build -ldflags="-s -w" -o /root/peer-flow . - -FROM ubuntu:20.04 -RUN apt-get update && apt-get install -y ca-certificates curl -WORKDIR /root -COPY --from=builder /root/peer-flow . -EXPOSE 8112 -ENTRYPOINT ["./peer-flow", "api", "--port", "8112"] diff --git a/stacks/flow-snapshot-worker.Dockerfile b/stacks/flow-snapshot-worker.Dockerfile deleted file mode 100644 index cf3a78881..000000000 --- a/stacks/flow-snapshot-worker.Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -# syntax=docker/dockerfile:1.2 - -# Start from the latest Golang base image -FROM golang:1.21-alpine AS builder - -WORKDIR /root/ - -# first copy only go.mod and go.sum to cache dependencies -COPY flow/go.mod . -COPY flow/go.sum . - -# download all the dependencies -RUN --mount=type=cache,target=/go/pkg/mod \ - go mod download - -COPY flow . - -# build the binary from cmd folder -WORKDIR /root/cmd -RUN --mount=type=cache,target=/root/.cache/go-build \ - CGO_ENABLED=0 go build -ldflags="-s -w" -o /root/peer-flow . 
- -FROM ubuntu:20.04 -RUN apt-get update && apt-get install -y ca-certificates -WORKDIR /root -COPY --from=builder /root/peer-flow . -EXPOSE 8112 -ENTRYPOINT ["./peer-flow", "snapshot-worker"] diff --git a/stacks/flow-worker.Dockerfile b/stacks/flow-worker.Dockerfile deleted file mode 100644 index cd6725b9f..000000000 --- a/stacks/flow-worker.Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -# syntax=docker/dockerfile:1.2 - -# Start from the latest Golang base image -FROM golang:1.21-alpine AS builder - -WORKDIR /root/ - -# first copy only go.mod and go.sum to cache dependencies -COPY flow/go.mod . -COPY flow/go.sum . - -# download all the dependencies -RUN --mount=type=cache,target=/go/pkg/mod \ - go mod download - -COPY flow . - -# build the binary from cmd folder -WORKDIR /root/cmd -RUN --mount=type=cache,target=/root/.cache/go-build \ - CGO_ENABLED=0 go build -ldflags="-s -w" -o /root/peer-flow . - -FROM ubuntu:20.04 -RUN apt-get update && apt-get install -y ca-certificates -WORKDIR /root -COPY --from=builder /root/peer-flow . -EXPOSE 8112 -ENTRYPOINT ["./peer-flow", "worker"] diff --git a/stacks/flow.Dockerfile b/stacks/flow.Dockerfile new file mode 100644 index 000000000..d04b30739 --- /dev/null +++ b/stacks/flow.Dockerfile @@ -0,0 +1,49 @@ +# syntax=docker/dockerfile:1.2 + +FROM golang:1.21.3-bookworm AS builder +RUN apt-get update && apt-get install -y gcc libgeos-dev +WORKDIR /root/flow + +# first copy only go.mod and go.sum to cache dependencies +COPY flow/go.mod . +COPY flow/go.sum . + +# download all the dependencies +RUN go mod download + +# Copy all the code +COPY flow . + +# build the binary from cmd folder +WORKDIR /root/flow/cmd +ENV CGO_ENABLED=1 +RUN go build -ldflags="-s -w" -o /root/peer-flow . + +FROM debian:bookworm-slim AS flow-base +RUN apt-get update && apt-get install -y ca-certificates gcc libgeos-dev +WORKDIR /root +COPY --from=builder /root/peer-flow . 
+ +FROM flow-base AS flow-api +EXPOSE 8112 +EXPOSE 8113 +ENTRYPOINT [\ + "./peer-flow",\ + "api",\ + "--port",\ + "8112",\ + "--gateway-port",\ + "8113"\ + ] + +FROM flow-base AS flow-worker +ENTRYPOINT [\ + "./peer-flow",\ + "worker"\ + ] + +FROM flow-base AS flow-snapshot-worker +ENTRYPOINT [\ + "./peer-flow",\ + "snapshot-worker"\ + ] diff --git a/stacks/grafana/flow_monitoring_dashboard.json b/stacks/grafana/flow_monitoring_dashboard.json index 722f757fc..0c1477ff8 100644 --- a/stacks/grafana/flow_monitoring_dashboard.json +++ b/stacks/grafana/flow_monitoring_dashboard.json @@ -1,585 +1,536 @@ { - "__inputs": [ - { - "name": "peerdb_prometheus", - "description": "", - "type": "datasource", - "pluginId": "prometheus", - "pluginName": "Prometheus" - } - ], - "__elements": {}, - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "10.0.2" - }, - { - "type": "datasource", - "id": "prometheus", - "name": "Prometheus", - "version": "1.0.0" + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] }, - { - "type": "panel", - "id": "timeseries", - "name": "Time series", - "version": "" - } - ], - "annotations": { - "list": [ + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "links": [], + "liveNow": false, + "panels": [ { - "builtIn": 1, "datasource": { - "type": "grafana", - "uid": "-- Grafana --" + "type": "prometheus", + "uid": "peerdb_prometheus" }, - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - "fiscalYearStartMonth": 0, - "graphTooltip": 0, - "id": null, - "links": [], - "liveNow": false, - "panels": [ - { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" - }, - 
"fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } }, - "thresholdsStyle": { - "mode": "off" + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] } }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - } + "overrides": [] }, - "overrides": [] - }, - "gridPos": { - "h": 10, - "w": 11, - "x": 0, - "y": 0 - }, - "id": 4, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true + "gridPos": { + "h": 10, + "w": 11, + "x": 0, + "y": 0 }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "targets": [ - { - "datasource": { - "type": "prometheus", - 
"uid": "peerdb_prometheus" - }, - "editorMode": "code", - "expr": "flow_worker_${job_type}_${job_name}_records_synced_per_second", - "instant": false, - "legendFormat": "{{__name__}}", - "range": true, - "refId": "A" - } - ], - "title": "records synced / second", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" + "id": 4, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true }, - "custom": { - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" }, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_records_synced_per_second", + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "A" + } + ], + "title": "records synced / second", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "P8C2E8E0157474F52" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + 
"pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } }, - "thresholdsStyle": { - "mode": "off" + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] } }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "flow_worker_cdcflow_schemaa_records_throughput" + ], + "prefix": "All except:", + "readOnly": true + } }, - { - "color": "red", - "value": 80 - } - ] + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] + }, + "gridPos": { + "h": 10, + "w": 13, + "x": 11, + "y": 0 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" } }, - "overrides": [ + "targets": [ { - "__systemRef": "hideSeriesFrom", - "matcher": { - "id": "byNames", - "options": { - "mode": "exclude", - "names": [ - "difference_in_record_counts" - ], - "prefix": "All except:", - "readOnly": true - } + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" }, - "properties": [ - { - "id": "custom.hideFrom", - "value": { - "legend": false, - "tooltip": false, - "viz": true - } - } - ] + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_records_throughput", + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "A" } - ] - }, - "gridPos": { - "h": 10, - "w": 13, - "x": 11, - "y": 0 + ], + "title": "overall mirror throughput", + "transformations": [], + "type": "timeseries" }, - "id": 
2, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } }, - "editorMode": "code", - "expr": "flow_worker_${job_type}_${job_name}_records_at_source", - "instant": false, - "legendFormat": "{{__name__}}", - "range": true, - "refId": "A" + "overrides": [] }, - { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" + "gridPos": { + "h": 10, + "w": 11, + "x": 0, + "y": 10 + }, + "id": 3, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true }, - "editorMode": "code", - "expr": "flow_worker_${job_type}_${job_name}_records_at_target", - "hide": false, - "instant": false, - "legendFormat": "{{__name__}}", - "range": true, - "refId": "B" - } - ], - "title": "difference in records between source and target", - "transformations": [ - { - "id": "calculateField", - "options": { - "alias": 
"difference_in_record_counts", - "binary": { - "left": "flow_worker_${job_type}_${job_name}_records_at_source", - "operator": "-", - "reducer": "sum", - "right": "flow_worker_${job_type}_${job_name}_records_at_target" + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" }, - "mode": "binary", - "reduce": { - "reducer": "sum" - } + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_records_normalized_per_second", + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "A" } - } - ], - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" + ], + "title": "records normalized / second", + "type": "timeseries" }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": 
false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } }, - "thresholdsStyle": { - "mode": "off" + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] } }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - } + "overrides": [] }, - "overrides": [] - }, - "gridPos": { - "h": 10, - "w": 11, - "x": 0, - "y": 10 - }, - "id": 3, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true + "gridPos": { + "h": 10, + "w": 13, + "x": 11, + "y": 10 }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" - }, - "editorMode": "code", - "expr": "flow_worker_${job_type}_${job_name}_records_normalized_per_second", - "instant": false, - "legendFormat": "{{__name__}}", - "range": true, - "refId": "A" - } - ], - "title": "records normalized / second", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true }, - "custom": { - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 0, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" }, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": 
{ - "type": "linear" + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_insert_records_pulled", + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" }, - "showPoints": "auto", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_update_records_pulled", + "hide": false, + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" }, - "thresholdsStyle": { - "mode": "off" - } + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_delete_records_pulled", + "hide": false, + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "C" }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_total_records_pulled", + "hide": false, + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "D" } - }, - "overrides": [] - }, - "gridPos": { - "h": 10, - "w": 13, - "x": 11, - "y": 10 - }, - "id": 1, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "targets": [ + ], + "title": "records pulled / second with types", + "type": "timeseries" + } + ], + "refresh": "10s", + "schemaVersion": 38, + "style": "dark", + "tags": [], + "templating": { + "list": [ { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" + "current": { + "selected": false, + "text": "schemaa", + "value": "schemaa" }, - 
"editorMode": "code", - "expr": "flow_worker_${job_type}_${job_name}_insert_records_pulled", - "instant": false, - "legendFormat": "{{__name__}}", - "range": true, - "refId": "A" - }, - { "datasource": { "type": "prometheus", "uid": "peerdb_prometheus" }, - "editorMode": "code", - "expr": "flow_worker_${job_type}_${job_name}_update_records_pulled", - "hide": false, - "instant": false, - "legendFormat": "{{__name__}}", - "range": true, - "refId": "B" - }, - { - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" + "definition": "metrics(flow_worker_${job_type}_.*_total_records_pulled)", + "hide": 0, + "includeAll": false, + "multi": false, + "name": "job_name", + "options": [], + "query": { + "query": "metrics(flow_worker_${job_type}_.*_total_records_pulled)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" }, - "editorMode": "code", - "expr": "flow_worker_${job_type}_${job_name}_delete_records_pulled", - "hide": false, - "instant": false, - "legendFormat": "{{__name__}}", - "range": true, - "refId": "C" + "refresh": 1, + "regex": "flow_worker_${job_type}_(?.*)_total_records_pulled", + "skipUrlSync": false, + "sort": 0, + "type": "query" }, { + "current": { + "selected": false, + "text": "cdcflow", + "value": "cdcflow" + }, "datasource": { "type": "prometheus", "uid": "peerdb_prometheus" }, - "editorMode": "code", - "expr": "flow_worker_${job_type}_${job_name}_total_records_pulled", - "hide": false, - "instant": false, - "legendFormat": "{{__name__}}", - "range": true, - "refId": "D" + "definition": "metrics(flow_worker_.*_total_records_pulled)", + "hide": 0, + "includeAll": false, + "multi": false, + "name": "job_type", + "options": [], + "query": { + "query": "metrics(flow_worker_.*_total_records_pulled)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "flow_worker_(?.*flow)_.*", + "skipUrlSync": false, + "sort": 0, + "type": "query" } - ], - "title": "records pulled / second with types", - 
"type": "timeseries" - } - ], - "refresh": "10s", - "schemaVersion": 38, - "style": "dark", - "tags": [], - "templating": { - "list": [ - { - "current": {}, - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" - }, - "definition": "metrics(flow_worker_${job_type}_.*_total_records_pulled)", - "hide": 0, - "includeAll": false, - "multi": false, - "name": "job_name", - "options": [], - "query": { - "query": "metrics(flow_worker_${job_type}_.*_total_records_pulled)", - "refId": "PrometheusVariableQueryEditor-VariableQuery" - }, - "refresh": 1, - "regex": "flow_worker_${job_type}_(?.*)_total_records_pulled", - "skipUrlSync": false, - "sort": 0, - "type": "query" - }, - { - "current": {}, - "datasource": { - "type": "prometheus", - "uid": "peerdb_prometheus" - }, - "definition": "metrics(flow_worker_.*_total_records_pulled)", - "hide": 0, - "includeAll": false, - "multi": false, - "name": "job_type", - "options": [], - "query": { - "query": "metrics(flow_worker_.*_total_records_pulled)", - "refId": "PrometheusVariableQueryEditor-VariableQuery" - }, - "refresh": 1, - "regex": "flow_worker_(?.*flow)_.*", - "skipUrlSync": false, - "sort": 0, - "type": "query" - } - ] - }, - "time": { - "from": "now-6h", - "to": "now" - }, - "timepicker": {}, - "timezone": "", - "title": "PeerDB mirror monitoring dashboard", - "uid": "cac849d7-5353-4bd2-8f4f-925ad428cf1d", - "version": 11, - "weekStart": "" -} \ No newline at end of file + ] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "PeerDB mirror monitoring dashboard", + "uid": "cac849d7-5353-4bd2-8f4f-925ad428cf1d", + "version": 1, + "weekStart": "" + } \ No newline at end of file diff --git a/stacks/grafana/flow_monitoring_dashboard_v0.json b/stacks/grafana/flow_monitoring_dashboard_v0.json new file mode 100644 index 000000000..722f757fc --- /dev/null +++ b/stacks/grafana/flow_monitoring_dashboard_v0.json @@ -0,0 +1,585 @@ +{ + "__inputs": [ + { + "name": 
"peerdb_prometheus", + "description": "", + "type": "datasource", + "pluginId": "prometheus", + "pluginName": "Prometheus" + } + ], + "__elements": {}, + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "10.0.2" + }, + { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "timeseries", + "name": "Time series", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 11, + "x": 0, + "y": 0 + }, + "id": 4, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } 
+ }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_records_synced_per_second", + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "A" + } + ], + "title": "records synced / second", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "difference_in_record_counts" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] + }, + "gridPos": { + "h": 10, + "w": 13, + "x": 11, + "y": 0 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": 
"flow_worker_${job_type}_${job_name}_records_at_source", + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_records_at_target", + "hide": false, + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "B" + } + ], + "title": "difference in records between source and target", + "transformations": [ + { + "id": "calculateField", + "options": { + "alias": "difference_in_record_counts", + "binary": { + "left": "flow_worker_${job_type}_${job_name}_records_at_source", + "operator": "-", + "reducer": "sum", + "right": "flow_worker_${job_type}_${job_name}_records_at_target" + }, + "mode": "binary", + "reduce": { + "reducer": "sum" + } + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 11, + "x": 0, + "y": 10 + }, + "id": 3, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": 
{ + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_records_normalized_per_second", + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "A" + } + ], + "title": "records normalized / second", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 10, + "w": 13, + "x": 11, + "y": 10 + }, + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_insert_records_pulled", + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": 
"flow_worker_${job_type}_${job_name}_update_records_pulled", + "hide": false, + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_delete_records_pulled", + "hide": false, + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "C" + }, + { + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "editorMode": "code", + "expr": "flow_worker_${job_type}_${job_name}_total_records_pulled", + "hide": false, + "instant": false, + "legendFormat": "{{__name__}}", + "range": true, + "refId": "D" + } + ], + "title": "records pulled / second with types", + "type": "timeseries" + } + ], + "refresh": "10s", + "schemaVersion": 38, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "definition": "metrics(flow_worker_${job_type}_.*_total_records_pulled)", + "hide": 0, + "includeAll": false, + "multi": false, + "name": "job_name", + "options": [], + "query": { + "query": "metrics(flow_worker_${job_type}_.*_total_records_pulled)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "flow_worker_${job_type}_(?.*)_total_records_pulled", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "current": {}, + "datasource": { + "type": "prometheus", + "uid": "peerdb_prometheus" + }, + "definition": "metrics(flow_worker_.*_total_records_pulled)", + "hide": 0, + "includeAll": false, + "multi": false, + "name": "job_type", + "options": [], + "query": { + "query": "metrics(flow_worker_.*_total_records_pulled)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, + "refresh": 1, + "regex": "flow_worker_(?.*flow)_.*", + "skipUrlSync": false, + "sort": 0, + "type": "query" + } + ] + }, + "time": 
{ + "from": "now-6h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "PeerDB mirror monitoring dashboard", + "uid": "cac849d7-5353-4bd2-8f4f-925ad428cf1d", + "version": 11, + "weekStart": "" +} \ No newline at end of file diff --git a/stacks/nexus.Dockerfile b/stacks/peerdb-server.Dockerfile similarity index 100% rename from stacks/nexus.Dockerfile rename to stacks/peerdb-server.Dockerfile diff --git a/stacks/ui.Dockerfile b/stacks/peerdb-ui.Dockerfile similarity index 90% rename from stacks/ui.Dockerfile rename to stacks/peerdb-ui.Dockerfile index 3e58b5e64..249636146 100644 --- a/stacks/ui.Dockerfile +++ b/stacks/peerdb-ui.Dockerfile @@ -2,6 +2,7 @@ # Base stage FROM node:18-bookworm-slim AS base +RUN apt-get update && apt-get install -y openssl WORKDIR /app # Dependencies stage @@ -17,6 +18,9 @@ WORKDIR /app COPY --from=deps /app/node_modules ./node_modules COPY ui/ ./ +# Prisma +RUN yarn prisma generate + ENV NEXT_TELEMETRY_DISABLED 1 RUN yarn build @@ -38,7 +42,7 @@ RUN chown nextjs:nodejs .next # https://nextjs.org/docs/advanced-features/output-file-tracing COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static -COPY stacks/ui-entrypoint.sh /app/entrypoint.sh +COPY stacks/ui/ui-entrypoint.sh /app/entrypoint.sh # allow permissions for nextjs user to do anything in /app RUN chown -R nextjs:nodejs /app diff --git a/stacks/ui-entrypoint.sh b/stacks/ui/ui-entrypoint.sh similarity index 100% rename from stacks/ui-entrypoint.sh rename to stacks/ui/ui-entrypoint.sh diff --git a/ui/README.md b/ui/README.md index 5352c77e2..0cf45cf15 100644 --- a/ui/README.md +++ b/ui/README.md @@ -36,15 +36,3 @@ yarn storybook Open [http://localhost:6000](http://localhost:6000) with your browser to see the result. The stories and their corresponding components resides inside the `lib` folder. 
- -## Storybook Github pages - -The Storybook in this repositories Github pages at [Storybook](https://peerdb-io.github.io/peerdb-cloud-template). - -To deploy a new version of Storybook to Github pages run the script - -```bash -yarn storybook:deploy -``` - -It will automatically run the storybook build, push the content to the branch `gh-pages` to automatically deply the newly built Storybook to Github pages. diff --git a/ui/app/api/mirrors/cdc/route.ts b/ui/app/api/mirrors/cdc/route.ts new file mode 100644 index 000000000..025dfc149 --- /dev/null +++ b/ui/app/api/mirrors/cdc/route.ts @@ -0,0 +1,32 @@ +import { UCreateMirrorResponse } from '@/app/dto/MirrorsDTO'; +import { + CreateCDCFlowRequest, + CreateCDCFlowResponse, +} from '@/grpc_generated/route'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; + +export async function POST(request: Request) { + const body = await request.json(); + const { config } = body; + console.log('/mirrors/cdc config: ', config); + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const req: CreateCDCFlowRequest = { + connectionConfigs: config, + createCatalogEntry: true, + }; + const createStatus: CreateCDCFlowResponse = await fetch( + `${flowServiceAddr}/v1/flows/cdc/create`, + { + method: 'POST', + body: JSON.stringify(req), + } + ).then((res) => { + return res.json(); + }); + + let response: UCreateMirrorResponse = { + created: !!createStatus.worflowId, + }; + + return new Response(JSON.stringify(response)); +} diff --git a/ui/app/api/mirrors/drop/route.ts b/ui/app/api/mirrors/drop/route.ts new file mode 100644 index 000000000..4a10f26ad --- /dev/null +++ b/ui/app/api/mirrors/drop/route.ts @@ -0,0 +1,31 @@ +import { UDropMirrorResponse } from '@/app/dto/MirrorsDTO'; +import { ShutdownRequest, ShutdownResponse } from '@/grpc_generated/route'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; + +export async function POST(request: Request) { + const body = await request.json(); + const { workflowId, 
flowJobName, sourcePeer, destinationPeer } = body; + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const req: ShutdownRequest = { + workflowId, + flowJobName, + sourcePeer, + destinationPeer, + }; + console.log('/drop/mirror: req:', req); + const dropStatus: ShutdownResponse = await fetch( + `${flowServiceAddr}/v1/mirrors/drop`, + { + method: 'POST', + body: JSON.stringify(req), + } + ).then((res) => { + return res.json(); + }); + let response: UDropMirrorResponse = { + dropped: dropStatus.ok, + errorMessage: dropStatus.errorMessage, + }; + + return new Response(JSON.stringify(response)); +} diff --git a/ui/app/api/mirrors/qrep/route.ts b/ui/app/api/mirrors/qrep/route.ts new file mode 100644 index 000000000..ff160e703 --- /dev/null +++ b/ui/app/api/mirrors/qrep/route.ts @@ -0,0 +1,31 @@ +import { UCreateMirrorResponse } from '@/app/dto/MirrorsDTO'; +import { + CreateQRepFlowRequest, + CreateQRepFlowResponse, +} from '@/grpc_generated/route'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; + +export async function POST(request: Request) { + const body = await request.json(); + const { config } = body; + console.log('/qrep/post config:', config); + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const req: CreateQRepFlowRequest = { + qrepConfig: config, + createCatalogEntry: true, + }; + const createStatus: CreateQRepFlowResponse = await fetch( + `${flowServiceAddr}/v1/flows/qrep/create`, + { + method: 'POST', + body: JSON.stringify(req), + } + ).then((res) => { + return res.json(); + }); + let response: UCreateMirrorResponse = { + created: !!createStatus.worflowId, + }; + + return new Response(JSON.stringify(response)); +} diff --git a/ui/app/api/peers/columns/route.ts b/ui/app/api/peers/columns/route.ts new file mode 100644 index 000000000..ce944907f --- /dev/null +++ b/ui/app/api/peers/columns/route.ts @@ -0,0 +1,18 @@ +import { UColumnsResponse } from '@/app/dto/PeersDTO'; +import { TableColumnsResponse } from '@/grpc_generated/route'; 
+import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; + +export async function POST(request: Request) { + const body = await request.json(); + const { peerName, schemaName, tableName } = body; + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const columnsList: TableColumnsResponse = await fetch( + `${flowServiceAddr}/v1/peers/columns?peer_name=${peerName}&schema_name=${schemaName}&table_name=${tableName}` + ).then((res) => { + return res.json(); + }); + let response: UColumnsResponse = { + columns: columnsList.columns, + }; + return new Response(JSON.stringify(response)); +} diff --git a/ui/app/api/peers/drop/route.ts b/ui/app/api/peers/drop/route.ts new file mode 100644 index 000000000..0a7eb2541 --- /dev/null +++ b/ui/app/api/peers/drop/route.ts @@ -0,0 +1,28 @@ +import { UDropPeerResponse } from '@/app/dto/PeersDTO'; +import { DropPeerRequest, DropPeerResponse } from '@/grpc_generated/route'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; + +export async function POST(request: Request) { + const body = await request.json(); + const { peerName } = body; + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const req: DropPeerRequest = { + peerName, + }; + console.log('/drop/peer: req:', req); + const dropStatus: DropPeerResponse = await fetch( + `${flowServiceAddr}/v1/peers/drop`, + { + method: 'POST', + body: JSON.stringify(req), + } + ).then((res) => { + return res.json(); + }); + let response: UDropPeerResponse = { + dropped: dropStatus.ok, + errorMessage: dropStatus.errorMessage, + }; + + return new Response(JSON.stringify(response)); +} diff --git a/ui/app/api/peers/route.ts b/ui/app/api/peers/route.ts new file mode 100644 index 000000000..dc1292c98 --- /dev/null +++ b/ui/app/api/peers/route.ts @@ -0,0 +1,152 @@ +import { + CatalogPeer, + PeerConfig, + UCreatePeerResponse, + UValidatePeerResponse, +} from '@/app/dto/PeersDTO'; +import prisma from '@/app/utils/prisma'; +import { + BigqueryConfig, + DBType, + EventHubConfig, + 
EventHubGroupConfig, + Peer, + PostgresConfig, + S3Config, + SnowflakeConfig, + SqlServerConfig, +} from '@/grpc_generated/peers'; +import { + CreatePeerRequest, + CreatePeerResponse, + CreatePeerStatus, + ValidatePeerRequest, + ValidatePeerResponse, + ValidatePeerStatus, + createPeerStatusFromJSON, + validatePeerStatusFromJSON, +} from '@/grpc_generated/route'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; + +const constructPeer = ( + name: string, + type: string, + config: PeerConfig +): Peer | undefined => { + switch (type) { + case 'POSTGRES': + return { + name, + type: DBType.POSTGRES, + postgresConfig: config as PostgresConfig, + }; + case 'SNOWFLAKE': + return { + name, + type: DBType.SNOWFLAKE, + snowflakeConfig: config as SnowflakeConfig, + }; + default: + return; + } +}; + +export async function POST(request: Request) { + const body = await request.json(); + const { name, type, config, mode } = body; + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const peer = constructPeer(name, type, config); + if (mode === 'validate') { + const validateReq: ValidatePeerRequest = { peer }; + const validateStatus: ValidatePeerResponse = await fetch( + `${flowServiceAddr}/v1/peers/validate`, + { + method: 'POST', + body: JSON.stringify(validateReq), + } + ).then((res) => { + return res.json(); + }); + let response: UValidatePeerResponse = { + valid: + validatePeerStatusFromJSON(validateStatus.status) === + ValidatePeerStatus.VALID, + message: validateStatus.message, + }; + return new Response(JSON.stringify(response)); + } else if (mode === 'create') { + const req: CreatePeerRequest = { peer }; + const createStatus: CreatePeerResponse = await fetch( + `${flowServiceAddr}/v1/peers/create`, + { + method: 'POST', + body: JSON.stringify(req), + } + ).then((res) => { + return res.json(); + }); + let response: UCreatePeerResponse = { + created: + createPeerStatusFromJSON(createStatus.status) === + CreatePeerStatus.CREATED, + message: createStatus.message, + 
}; + return new Response(JSON.stringify(response)); + } +} + +export const getTruePeer = (peer: CatalogPeer) => { + const newPeer: Peer = { + name: peer.name, + type: peer.type, + }; + const options = peer.options; + let config: + | BigqueryConfig + | SnowflakeConfig + | PostgresConfig + | EventHubConfig + | S3Config + | SqlServerConfig + | EventHubGroupConfig; + switch (peer.type) { + case 0: + config = BigqueryConfig.decode(options); + newPeer.bigqueryConfig = config; + break; + case 1: + config = SnowflakeConfig.decode(options); + newPeer.snowflakeConfig = config; + break; + case 3: + config = PostgresConfig.decode(options); + newPeer.postgresConfig = config; + break; + case 4: + config = EventHubConfig.decode(options); + newPeer.eventhubConfig = config; + break; + case 5: + config = S3Config.decode(options); + newPeer.s3Config = config; + break; + case 6: + config = SqlServerConfig.decode(options); + newPeer.sqlserverConfig = config; + break; + case 7: + config = EventHubGroupConfig.decode(options); + newPeer.eventhubGroupConfig = config; + break; + default: + return newPeer; + } + return newPeer; +}; + +// GET all the peers from the database +export async function GET(request: Request) { + const peers = await prisma.peers.findMany(); + const truePeers: Peer[] = peers.map((peer) => getTruePeer(peer)); + return new Response(JSON.stringify(truePeers)); +} diff --git a/ui/app/api/peers/schemas/route.ts b/ui/app/api/peers/schemas/route.ts new file mode 100644 index 000000000..084869270 --- /dev/null +++ b/ui/app/api/peers/schemas/route.ts @@ -0,0 +1,22 @@ +import { USchemasResponse } from '@/app/dto/PeersDTO'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; + +export async function POST(request: Request) { + const body = await request.json(); + const { peerName } = body; + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const schemaList = await fetch( + `${flowServiceAddr}/v1/peers/schemas?peer_name=${peerName}` + ).then((res) => { + return 
res.json(); + }); + let response: USchemasResponse = { + schemas: schemaList.schemas, + }; + if (schemaList.message === 'no rows in result set') { + response = { + schemas: [], + }; + } + return new Response(JSON.stringify(response)); +} diff --git a/ui/app/api/peers/tables/route.ts b/ui/app/api/peers/tables/route.ts new file mode 100644 index 000000000..53f606d48 --- /dev/null +++ b/ui/app/api/peers/tables/route.ts @@ -0,0 +1,18 @@ +import { UTablesResponse } from '@/app/dto/PeersDTO'; +import { SchemaTablesResponse } from '@/grpc_generated/route'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; + +export async function POST(request: Request) { + const body = await request.json(); + const { peerName, schemaName } = body; + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const tableList: SchemaTablesResponse = await fetch( + `${flowServiceAddr}/v1/peers/tables?peer_name=${peerName}&schema_name=${schemaName}` + ).then((res) => { + return res.json(); + }); + let response: UTablesResponse = { + tables: tableList.tables, + }; + return new Response(JSON.stringify(response)); +} diff --git a/ui/app/connectors/create/page.tsx b/ui/app/connectors/create/page.tsx deleted file mode 100644 index f55ba52cb..000000000 --- a/ui/app/connectors/create/page.tsx +++ /dev/null @@ -1,41 +0,0 @@ -import { Action } from '@/lib/Action'; -import { Button } from '@/lib/Button'; -import { ButtonGroup } from '@/lib/ButtonGroup'; -import { Icon } from '@/lib/Icon'; -import { Label } from '@/lib/Label'; -import { LayoutMain, RowWithSelect } from '@/lib/Layout'; -import { Panel } from '@/lib/Panel'; -import { Select } from '@/lib/Select'; - -export default function CreateConnector() { - return ( - - - - - }>Learn about connectors - - - - - Data source - - } - action={ - - - - - - - - - + handleChange(val, { + label: 'Source Peer', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + source: value as Peer, + })), + }) + } + > + {(props.peers ?? 
[]) + .map((peer) => peer.name) + .map((peerName, id) => { + return ( + + {peerName} + + ); + })} + + + + } + /> + {props.mirrorConfig.source && ( + + )} + {props.settings.map((setting, id) => { + return ( + paramDisplayCondition(setting) && + (setting.type === 'switch' ? ( + {setting.label}} + action={ +
+ + handleChange(state, setting) + } + /> + {setting.tips && ( + + )} +
+ } + /> + ) : setting.type === 'select' ? ( + + {setting.label} + {RequiredIndicator(setting.required)} + + } + action={ +
+ + {setting.tips && ( + + )} +
+ } + /> + ) : ( + + {setting.label} + {RequiredIndicator(setting.required)} + + } + action={ +
+ ) => + handleChange(e.target.value, setting) + } + /> + {setting.tips && ( + + )} +
+ } + /> + )) + ); + })} + + ); +} diff --git a/ui/app/mirrors/create/columns.tsx b/ui/app/mirrors/create/columns.tsx new file mode 100644 index 000000000..20dca1a5e --- /dev/null +++ b/ui/app/mirrors/create/columns.tsx @@ -0,0 +1,132 @@ +'use client'; +import { Button } from '@/lib/Button'; +import { Dispatch, SetStateAction, useState } from 'react'; +import { PulseLoader } from 'react-spinners'; +import { fetchColumns } from './handlers'; + +interface ColumnsDisplayProps { + setColumns: Dispatch< + SetStateAction< + { + tableName: string; + columns: string[]; + }[] + > + >; + columns?: { + tableName: string; + columns: string[]; + }[]; + peerName: string; + schemaName: string; + tableName: string; +} + +const ColumnsDisplay = (props: ColumnsDisplayProps) => { + const [loading, setLoading] = useState(false); + const addTableColumns = (table: string) => { + // add table to columns + fetchColumns( + props.peerName, + props.schemaName, + props.tableName, + setLoading + ).then((res) => + props.setColumns((prev) => { + return [...prev, { tableName: table, columns: res }]; + }) + ); + }; + + const removeTableColumns = (table: string) => { + // remove table from columns + props.setColumns((prev) => { + return prev.filter((column) => column.tableName !== table); + }); + }; + + const getTableColumns = (tableName: string) => { + // get table columns + return props.columns?.find((column) => column.tableName === tableName) + ?.columns; + }; + return ( +
+ + +
+ {getTableColumns(props.tableName)?.map((column, id) => { + const columnName = column.split(':')[0]; + const columnType = column.split(':')[1]; + return ( +
+
+ {columnName} +
+
+ {columnType} +
+
+ ); + })} +
+
+ ); +}; + +export default ColumnsDisplay; diff --git a/ui/app/mirrors/create/handlers.ts b/ui/app/mirrors/create/handlers.ts new file mode 100644 index 000000000..cd08210f2 --- /dev/null +++ b/ui/app/mirrors/create/handlers.ts @@ -0,0 +1,214 @@ +import { UCreateMirrorResponse } from '@/app/dto/MirrorsDTO'; +import { + UColumnsResponse, + USchemasResponse, + UTablesResponse, +} from '@/app/dto/PeersDTO'; +import { QRepConfig, QRepWriteType } from '@/grpc_generated/flow'; +import { Dispatch, SetStateAction } from 'react'; +import { CDCConfig, TableMapRow } from '../../dto/MirrorsDTO'; +import { cdcSchema, qrepSchema, tableMappingSchema } from './schema'; + +const validateCDCFields = ( + tableMapping: TableMapRow[], + setMsg: Dispatch>, + config: CDCConfig +): boolean => { + let validationErr: string | undefined; + const tablesValidity = tableMappingSchema.safeParse(tableMapping); + if (!tablesValidity.success) { + validationErr = tablesValidity.error.issues[0].message; + setMsg({ ok: false, msg: validationErr }); + return false; + } + const configValidity = cdcSchema.safeParse(config); + if (!configValidity.success) { + validationErr = configValidity.error.issues[0].message; + setMsg({ ok: false, msg: validationErr }); + return false; + } + setMsg({ ok: true, msg: '' }); + return true; +}; + +const validateQRepFields = ( + query: string, + setMsg: Dispatch>, + config: QRepConfig +): boolean => { + if (query.length < 5) { + setMsg({ ok: false, msg: 'Query is invalid' }); + return false; + } + + let validationErr: string | undefined; + const configValidity = qrepSchema.safeParse(config); + if (!configValidity.success) { + validationErr = configValidity.error.issues[0].message; + setMsg({ ok: false, msg: validationErr }); + return false; + } + setMsg({ ok: true, msg: '' }); + return true; +}; + +const reformattedTableMapping = (tableMapping: TableMapRow[]) => { + const mapping = tableMapping.map((row) => { + return { + sourceTableIdentifier: row.source, + 
destinationTableIdentifier: row.destination, + partitionKey: row.partitionKey, + }; + }); + return mapping; +}; + +export const handleCreateCDC = async ( + flowJobName: string, + rows: TableMapRow[], + config: CDCConfig, + setMsg: Dispatch< + SetStateAction<{ + ok: boolean; + msg: string; + }> + >, + setLoading: Dispatch>, + route: RouteCallback +) => { + if (!flowJobName) { + setMsg({ ok: false, msg: 'Mirror name is required' }); + return; + } + const isValid = validateCDCFields(rows, setMsg, config); + if (!isValid) return; + const tableNameMapping = reformattedTableMapping(rows); + config['tableMappings'] = tableNameMapping; + config['flowJobName'] = flowJobName; + setLoading(true); + const statusMessage: UCreateMirrorResponse = await fetch('/api/mirrors/cdc', { + method: 'POST', + body: JSON.stringify({ + config, + }), + }).then((res) => res.json()); + if (!statusMessage.created) { + setMsg({ ok: false, msg: 'unable to create mirror.' }); + setLoading(false); + return; + } + setMsg({ ok: true, msg: 'CDC Mirror created successfully' }); + route(); + setLoading(false); +}; + +export const handleCreateQRep = async ( + flowJobName: string, + query: string, + config: QRepConfig, + setMsg: Dispatch< + SetStateAction<{ + ok: boolean; + msg: string; + }> + >, + setLoading: Dispatch>, + route: RouteCallback, + xmin?: boolean +) => { + if (!flowJobName) { + setMsg({ ok: false, msg: 'Mirror name is required' }); + return; + } + if (xmin == true) { + config.watermarkColumn = 'xmin'; + config.query = `SELECT * FROM ${config.watermarkTable} WHERE xmin::text::bigint BETWEEN {{.start}} AND {{.end}}`; + query = config.query; + config.initialCopyOnly = false; + } + + if ( + config.writeMode?.writeType == QRepWriteType.QREP_WRITE_MODE_UPSERT && + !config.writeMode?.upsertKeyColumns + ) { + setMsg({ + ok: false, + msg: 'For upsert mode, unique key columns cannot be empty.', + }); + return; + } + const isValid = validateQRepFields(query, setMsg, config); + if (!isValid) return; + 
config.flowJobName = flowJobName; + config.query = query; + setLoading(true); + const statusMessage: UCreateMirrorResponse = await fetch( + '/api/mirrors/qrep', + { + method: 'POST', + body: JSON.stringify({ + config, + }), + } + ).then((res) => res.json()); + if (!statusMessage.created) { + setMsg({ ok: false, msg: 'unable to create mirror.' }); + setLoading(false); + return; + } + setMsg({ ok: true, msg: 'Query Replication Mirror created successfully' }); + route(); + setLoading(false); +}; + +export const fetchSchemas = async ( + peerName: string, + setLoading: Dispatch> +) => { + setLoading(true); + const schemasRes: USchemasResponse = await fetch('/api/peers/schemas', { + method: 'POST', + body: JSON.stringify({ + peerName, + }), + }).then((res) => res.json()); + setLoading(false); + return schemasRes.schemas; +}; + +export const fetchTables = async ( + peerName: string, + schemaName: string, + setLoading: Dispatch> +) => { + if (schemaName.length === 0) return []; + setLoading(true); + const tablesRes: UTablesResponse = await fetch('/api/peers/tables', { + method: 'POST', + body: JSON.stringify({ + peerName, + schemaName, + }), + }).then((res) => res.json()); + setLoading(false); + return tablesRes.tables; +}; + +export const fetchColumns = async ( + peerName: string, + schemaName: string, + tableName: string, + setLoading: Dispatch> +) => { + setLoading(true); + const columnsRes: UColumnsResponse = await fetch('/api/peers/columns', { + method: 'POST', + body: JSON.stringify({ + peerName, + schemaName, + tableName, + }), + }).then((res) => res.json()); + setLoading(false); + return columnsRes.columns; +}; diff --git a/ui/app/mirrors/create/helpers/cdc.ts b/ui/app/mirrors/create/helpers/cdc.ts new file mode 100644 index 000000000..b1b03e8bd --- /dev/null +++ b/ui/app/mirrors/create/helpers/cdc.ts @@ -0,0 +1,128 @@ +import { QRepSyncMode } from '@/grpc_generated/flow'; +import { Peer } from '@/grpc_generated/peers'; +import { CDCConfig } from 
'../../../dto/MirrorsDTO'; +import { MirrorSetting } from './common'; +export const cdcSettings: MirrorSetting[] = [ + { + label: 'Destination Peer', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ ...curr, destination: value as Peer })), + tips: 'The peer to which data will be replicated.', + type: 'select', + required: true, + }, + { + label: 'Initial Copy', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + doInitialCopy: (value as boolean) || false, + })), + tips: 'Specify if you want initial load to happen for your tables.', + type: 'switch', + }, + { + label: 'Publication Name', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + publicationName: (value as string) || '', + })), + tips: 'If set, PeerDB will use this publication for the mirror.', + }, + { + label: 'Replication Slot Name', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + replicationSlotName: (value as string) || '', + })), + tips: 'If set, PeerDB will use this slot for the mirror.', + }, + { + label: 'Snapshot Number of Rows Per Partition', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + snapshotNumRowsPerPartition: parseInt(value as string, 10) || 500000, + })), + tips: 'PeerDB splits up table data into partitions for increased performance. This setting controls the number of rows per partition. The default value is 500000.', + default: '500000', + type: 'number', + }, + { + label: 'Snapshot Maximum Parallel Workers', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + snapshotMaxParallelWorkers: parseInt(value as string, 10) || 8, + })), + tips: 'PeerDB spins up parallel threads for each partition. This setting controls the number of partitions to sync in parallel. 
The default value is 8.', + default: '8', + type: 'number', + }, + { + label: 'Snapshot Number of Tables In Parallel', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + snapshotNumTablesInParallel: parseInt(value as string, 10) || 1, + })), + tips: 'Specify the number of tables to sync perform initial load for, in parallel. The default value is 1.', + default: '1', + type: 'number', + }, + { + label: 'Snapshot Sync Mode', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + snapshotSyncMode: + (value as QRepSyncMode) || QRepSyncMode.QREP_SYNC_MODE_MULTI_INSERT, + })), + tips: 'Specify whether you want the sync mode for initial load to be via SQL or by staging AVRO files. The default mode is SQL.', + default: 'SQL', + type: 'select', + }, + { + label: 'CDC Sync Mode', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + cdcSyncMode: + (value as QRepSyncMode) || QRepSyncMode.QREP_SYNC_MODE_MULTI_INSERT, + })), + tips: 'Specify whether you want the sync mode for CDC to be via SQL or by staging AVRO files. The default mode is SQL.', + default: 'SQL', + type: 'select', + }, + { + label: 'Snapshot Staging Path', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + snapshotStagingPath: value as string | '', + })), + tips: 'You can specify staging path if you have set the Snapshot sync mode as AVRO. For Snowflake as destination peer, this must be either empty or an S3 bucket URL.', + }, + { + label: 'CDC Staging Path', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + cdcStagingPath: (value as string) || '', + })), + tips: 'You can specify staging path if you have set the CDC sync mode as AVRO. 
For Snowflake as destination peer, this must be either empty or an S3 bucket url', + }, + { + label: 'Soft Delete', + stateHandler: (value, setter) => + setter((curr: CDCConfig) => ({ + ...curr, + softDelete: (value as boolean) || false, + })), + tips: 'Allows you to mark some records as deleted without actual erasure from the database', + default: 'SQL', + type: 'switch', + }, +]; diff --git a/ui/app/mirrors/create/helpers/common.ts b/ui/app/mirrors/create/helpers/common.ts new file mode 100644 index 000000000..01fb3bd5d --- /dev/null +++ b/ui/app/mirrors/create/helpers/common.ts @@ -0,0 +1,66 @@ +import { + FlowConnectionConfigs, + QRepConfig, + QRepSyncMode, + QRepWriteType, +} from '@/grpc_generated/flow'; +import { Peer } from '@/grpc_generated/peers'; + +export interface MirrorSetting { + label: string; + stateHandler: ( + value: string | string[] | Peer | boolean | QRepSyncMode | QRepWriteType, + setter: any + ) => void; + type?: string; + required?: boolean; + tips?: string; + helpfulLink?: string; + default?: string | number; +} + +export const blankCDCSetting: FlowConnectionConfigs = { + source: undefined, + destination: undefined, + flowJobName: '', + tableSchema: undefined, + tableMappings: [], + srcTableIdNameMapping: {}, + tableNameSchemaMapping: {}, + metadataPeer: undefined, + maxBatchSize: 0, + doInitialCopy: false, + publicationName: '', + snapshotNumRowsPerPartition: 500000, + snapshotMaxParallelWorkers: 8, + snapshotNumTablesInParallel: 1, + snapshotSyncMode: 0, + cdcSyncMode: 0, + snapshotStagingPath: '', + cdcStagingPath: '', + softDelete: false, + replicationSlotName: '', + pushBatchSize: 0, + pushParallelism: 0, + resync: false, +}; + +export const blankQRepSetting: QRepConfig = { + flowJobName: '', + sourcePeer: undefined, + destinationPeer: undefined, + destinationTableIdentifier: '', + query: '', + watermarkTable: '', + watermarkColumn: '', + initialCopyOnly: false, + syncMode: 0, + batchSizeInt: 0, + batchDurationSeconds: 0, + 
maxParallelWorkers: 8, + waitBetweenBatchesSeconds: 30, + writeMode: undefined, + stagingPath: '', + numRowsPerPartition: 0, + setupWatermarkTableOnDestination: false, +}; diff --git a/ui/app/mirrors/create/helpers/qrep.ts b/ui/app/mirrors/create/helpers/qrep.ts new file mode 100644 index 000000000..f0f594bd5 --- /dev/null +++ b/ui/app/mirrors/create/helpers/qrep.ts @@ -0,0 +1,173 @@ +import { + QRepConfig, + QRepSyncMode, + QRepWriteMode, + QRepWriteType, +} from '@/grpc_generated/flow'; +import { Peer } from '@/grpc_generated/peers'; +import { MirrorSetting } from './common'; +export const qrepSettings: MirrorSetting[] = [ + { + label: 'Source Peer', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ ...curr, sourcePeer: value as Peer })), + tips: 'The peer from which we will be replicating data. Ensure the prerequisites for this peer are met.', + helpfulLink: + 'https://docs.peerdb.io/usecases/Real-time%20CDC/postgres-to-snowflake#prerequisites', + type: 'select', + required: true, + }, + { + label: 'Destination Peer', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + destinationPeer: value as Peer, + })), + tips: 'The peer to which data will be replicated.', + type: 'select', + required: true, + }, + { + label: 'Table', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + watermarkTable: (value as string) || '', + })), + tips: 'The source table of the replication and the table to which the watermark column belongs.', + required: true, + }, + { + label: 'Watermark Column', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + watermarkColumn: (value as string) || '', + })), + tips: 'Watermark column is used to track the progress of the replication. This column should be a unique column in the query. 
Example: id', + required: true, + }, + { + label: 'Create Destination Table', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + setupWatermarkTableOnDestination: (value as boolean) || false, + })), + tips: 'Specify if you want to create the watermark table on the destination as-is, can be used for some queries.', + type: 'switch', + }, + { + label: 'Destination Table Name', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + destinationTableIdentifier: value as string, + })), + tips: 'Name of the destination. For any destination peer apart from BigQuery, this must be schema-qualified. Example: public.users', + required: true, + }, + { + label: 'Rows Per Partition', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + numRowsPerPartition: parseInt(value as string, 10), + })), + tips: 'PeerDB splits up table data into partitions for increased performance. This setting controls the number of rows per partition.', + type: 'number', + required: true, + }, + { + label: 'Maximum Parallel Workers', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + maxParallelWorkers: parseInt(value as string, 10) || 8, + })), + tips: 'PeerDB spins up parallel threads for each partition. This setting controls the number of partitions to sync in parallel. The default value is 8.', + default: '8', + type: 'number', + }, + { + label: 'Sync Mode', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + syncMode: + (value as QRepSyncMode) || QRepSyncMode.QREP_SYNC_MODE_MULTI_INSERT, + })), + tips: 'Specify whether you want the sync mode to be via SQL or by staging AVRO files. 
The default mode is SQL.', + default: 'SQL', + type: 'select', + }, + + { + label: 'Staging Path', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + stagingPath: (value as string) || '', + })), + tips: `You can specify staging path if you have set the sync mode as AVRO. For Snowflake as destination peer. + If this starts with gs:// then it will be written to GCS. + If this starts with s3:// then it will be written to S3. + If nothing is specified then it will be written to local disk.`, + }, + { + label: 'Write Type', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => { + let currWriteMode = curr.writeMode || { writeType: undefined }; + currWriteMode.writeType = value as QRepWriteType; + return { + ...curr, + writeMode: currWriteMode, + }; + }), + tips: `Specify whether you want the write mode to be via APPEND, UPSERT or OVERWRITE. + Append mode is for insert-only workloads. Upsert mode is append mode but also supports updates. + Overwrite mode overwrites the destination table data every sync.`, + type: 'select', + }, + { + label: 'Upsert Key Columns', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => { + let defaultMode: QRepWriteMode = { + writeType: QRepWriteType.QREP_WRITE_MODE_APPEND, + upsertKeyColumns: [], + }; + let currWriteMode = curr.writeMode || defaultMode; + currWriteMode.upsertKeyColumns = value as string[]; + return { + ...curr, + writeMode: currWriteMode, + }; + }), + tips: `Comma separated string column names. Needed when write mode is set to UPSERT. 
+ These columns need to be unique and are used for updates.`, + }, + { + label: 'Initial Copy Only', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + initialCopyOnly: (value as boolean) || false, + })), + tips: 'Specify if you want query replication to stop at initial load.', + type: 'switch', + }, + { + label: 'Wait Time Between Batches', + stateHandler: (value, setter) => + setter((curr: QRepConfig) => ({ + ...curr, + waitBetweenBatchesSeconds: parseInt(value as string, 10) || 30, + })), + tips: 'Time to wait (in seconds) between getting partitions to process.', + default: '0', + type: 'number', + }, +]; diff --git a/ui/app/mirrors/create/page.tsx b/ui/app/mirrors/create/page.tsx index 75341d491..94c8262a1 100644 --- a/ui/app/mirrors/create/page.tsx +++ b/ui/app/mirrors/create/page.tsx @@ -1,14 +1,69 @@ +'use client'; +import { QRepConfig } from '@/grpc_generated/flow'; +import { Peer } from '@/grpc_generated/peers'; import { Button } from '@/lib/Button'; import { ButtonGroup } from '@/lib/ButtonGroup'; import { Label } from '@/lib/Label'; -import { LayoutMain, RowWithSelect, RowWithTextField } from '@/lib/Layout'; +import { RowWithRadiobutton, RowWithTextField } from '@/lib/Layout'; import { Panel } from '@/lib/Panel'; -import { Select } from '@/lib/Select'; +import { RadioButton, RadioButtonGroup } from '@/lib/RadioButtonGroup'; import { TextField } from '@/lib/TextField'; +import { Divider } from '@tremor/react'; +import Link from 'next/link'; +import { useRouter } from 'next/navigation'; +import { useEffect, useState } from 'react'; +import { CDCConfig, TableMapRow } from '../../dto/MirrorsDTO'; +import CDCConfigForm from './cdc'; +import { handleCreateCDC, handleCreateQRep } from './handlers'; +import { cdcSettings } from './helpers/cdc'; +import { blankCDCSetting, blankQRepSetting } from './helpers/common'; +import { qrepSettings } from './helpers/qrep'; +import QRepConfigForm from './qrep'; +import QRepQuery from './query'; 
export default function CreateMirrors() { + const router = useRouter(); + const [mirrorName, setMirrorName] = useState(''); + const [mirrorType, setMirrorType] = useState< + 'CDC' | 'Query Replication' | 'XMIN' + >('CDC'); + const [formMessage, setFormMessage] = useState<{ ok: boolean; msg: string }>({ + ok: true, + msg: '', + }); + const [loading, setLoading] = useState(false); + const [config, setConfig] = useState(blankCDCSetting); + const [peers, setPeers] = useState([]); + const [rows, setRows] = useState([]); + const [sourceSchema, setSourceSchema] = useState('public'); + const [qrepQuery, setQrepQuery] = useState(''); + + useEffect(() => { + fetch('/api/peers') + .then((res) => res.json()) + .then((res) => { + setPeers(res); + }); + + if (mirrorType === 'Query Replication' || mirrorType === 'XMIN') { + setConfig(blankQRepSetting); + if (mirrorType === 'XMIN') { + setConfig((curr) => { + return { ...curr, setupWatermarkTableOnDestination: true }; + }); + } else + setConfig((curr) => { + return { ...curr, setupWatermarkTableOnDestination: false }; + }); + } else setConfig(blankCDCSetting); + }, [mirrorType]); + + let listMirrorsPage = () => { + router.push('/mirrors'); + }; + return ( - +
- - } - action={} - /> - - Destination - - } - action={ handleChange(val, setting)} + disabled={setToDefault(setting)} + value={ + setToDefault(setting) + ? defaultSyncMode( + props.mirrorConfig.destinationPeer?.type + ) + : undefined + } + > + {(setting.label.includes('Peer') + ? (props.peers ?? []).map((peer) => peer.name) + : setting.label.includes('Sync') + ? ['AVRO', 'Copy with Binary'] + : ['Append', 'Upsert', 'Overwrite'] + ).map((item, id) => { + return ( + + {item.toString()} + + ); + })} + + {setting.tips && ( + + )} +
+ } + /> + ) : ( + + {setting.label} + {setting.required && ( + + + + )} + + } + action={ +
+ ) => + handleChange(e.target.value, setting) + } + /> + {setting.tips && ( + + )} +
+ } + /> + )) + ); + })} + + ); +} diff --git a/ui/app/mirrors/create/query.tsx b/ui/app/mirrors/create/query.tsx new file mode 100644 index 000000000..64d0f893a --- /dev/null +++ b/ui/app/mirrors/create/query.tsx @@ -0,0 +1,26 @@ +import Editor from '@monaco-editor/react'; +import { Dispatch, SetStateAction } from 'react'; +const options = { + readOnly: false, + minimap: { enabled: false }, + fontSize: 14, +}; + +interface QueryProps { + setter: Dispatch>; + query: string; +} +const QRepQuery = (props: QueryProps) => { + return ( + props.setter(value as string)} + /> + ); +}; + +export default QRepQuery; diff --git a/ui/app/mirrors/create/schema.ts b/ui/app/mirrors/create/schema.ts new file mode 100644 index 000000000..d7d03d45f --- /dev/null +++ b/ui/app/mirrors/create/schema.ts @@ -0,0 +1,158 @@ +import * as z from 'zod'; + +export const tableMappingSchema = z + .array( + z.object({ + source: z + .string() + .min(1, 'source table names, if added, must be non-empty'), + destination: z + .string() + .min(1, 'destination table names, if added, must be non-empty'), + partitionKey: z.string().optional(), + }) + ) + .nonempty('At least one table mapping is required'); + +export const cdcSchema = z.object({ + source: z.object( + { + name: z.string().min(1), + type: z.any(), + config: z.any(), + }, + { required_error: 'Source peer is required' } + ), + destination: z.object( + { + name: z.string().min(1), + type: z.any(), + config: z.any(), + }, + { required_error: 'Destination peer is required' } + ), + doInitialCopy: z.boolean().optional(), + publicationName: z + .string({ + invalid_type_error: 'Publication name must be a string', + }) + .max(255, 'Publication name must be less than 255 characters') + .optional(), + replicationSlotName: z + .string({ + invalid_type_error: 'Publication name must be a string', + }) + .max(255, 'Publication name must be less than 255 characters') + .optional(), + snapshotNumRowsPerPartition: z + .number({ + invalid_type_error: 'Snapshow 
rows per partition must be a number', + }) + .int() + .min(1, 'Snapshow rows per partition must be a positive integer') + .optional(), + snapshotMaxParallelWorkers: z + .number({ + invalid_type_error: 'Snapshow max workers must be a number', + }) + .int() + .min(1, 'Snapshow max workers must be a positive integer') + .optional(), + snapshotNumTablesInParallel: z + .number({ + invalid_type_error: 'Snapshow parallel tables must be a number', + }) + .int() + .min(1, 'Snapshow parallel tables must be a positive integer') + .optional(), + snapshotStagingPath: z + .string({ + invalid_type_error: 'Snapshot staging path must be a string', + }) + .max(255, 'Snapshot staging path must be less than 255 characters') + .optional(), + cdcStagingPath: z + .string({ + invalid_type_error: 'CDC staging path must be a string', + }) + .max(255, 'CDC staging path must be less than 255 characters') + .optional(), + softDelete: z.boolean().optional(), +}); + +export const qrepSchema = z.object({ + sourcePeer: z.object( + { + name: z.string().min(1), + type: z.any(), + config: z.any(), + }, + { required_error: 'Source peer is required' } + ), + destinationPeer: z.object( + { + name: z.string().min(1), + type: z.any(), + config: z.any(), + }, + { required_error: 'Destination peer is required' } + ), + initialCopyOnly: z.boolean().optional(), + setupWatermarkTableOnDestination: z.boolean().optional(), + destinationTableIdentifier: z + .string({ + invalid_type_error: 'Destination table name must be a string', + required_error: 'Destination table name is required', + }) + .min(1, 'Destination table name must be non-empty') + .max(255, 'Destination table name must be less than 255 characters'), + watermarkTable: z + .string({ + invalid_type_error: 'Watermark table must be a string', + required_error: 'Watermark table is required', + }) + .min(1, 'Watermark table must be non-empty') + .max(255, 'Watermark table must be less than 255 characters'), + watermarkColumn: z + .string({ + 
invalid_type_error: 'Watermark column must be a string', + required_error: 'Watermark column is required', + }) + .min(1, 'Watermark column must be non-empty') + .max(255, 'Watermark column must be less than 255 characters'), + numRowsPerPartition: z + .number({ + invalid_type_error: 'Rows per partition must be a number', + required_error: 'Rows per partition is required', + }) + .int() + .min(1, 'Rows per partition must be a positive integer'), + maxParallelWorkers: z + .number({ + invalid_type_error: 'max workers must be a number', + }) + .int() + .min(1, 'max workers must be a positive integer') + .optional(), + stagingPath: z + .string({ + invalid_type_error: 'Staging path must be a string', + }) + .max(255, 'Staging path must be less than 255 characters') + .optional(), + writeMode: z.object({ + writeType: z + .number({ required_error: 'Write type is required' }) + .int() + .min(0) + .max(2), + upsert_key_columns: z.array(z.string()).optional(), + }), + waitBetweenBatchesSeconds: z + .number({ + invalid_type_error: 'Batch wait must be a number', + }) + .int() + .min(1, 'Batch wait must be a non-negative integer') + .optional(), +}); diff --git a/ui/app/mirrors/create/tablemapping.tsx b/ui/app/mirrors/create/tablemapping.tsx new file mode 100644 index 000000000..84a9adce7 --- /dev/null +++ b/ui/app/mirrors/create/tablemapping.tsx @@ -0,0 +1,268 @@ +'use client'; +import { RequiredIndicator } from '@/components/RequiredIndicator'; +import { Label } from '@/lib/Label'; +import { RowWithSelect, RowWithTextField } from '@/lib/Layout'; +import { Select, SelectItem } from '@/lib/Select'; +import { Switch } from '@/lib/Switch'; +import { TextField } from '@/lib/TextField'; +import { Dispatch, SetStateAction, useEffect, useState } from 'react'; +import { BarLoader } from 'react-spinners/'; +import { TableMapRow } from '../../dto/MirrorsDTO'; +import ColumnsDisplay from './columns'; +import { fetchSchemas, fetchTables } from './handlers'; +interface TableMappingProps { 
+ sourcePeerName: string; + rows: TableMapRow[]; + setRows: Dispatch>; + schema: string; + setSchema: Dispatch>; +} +const TableMapping = ({ + sourcePeerName, + rows, + setRows, + schema, + setSchema, +}: TableMappingProps) => { + const [allSchemas, setAllSchemas] = useState(); + const [allTables, setAllTables] = useState(); + const [tableColumns, setTableColumns] = useState< + { tableName: string; columns: string[] }[] + >([]); + const [loading, setLoading] = useState(false); + + const handleAddRow = (source: string) => { + setRows([...rows, { source, destination: source, partitionKey: '' }]); + }; + + const handleRemoveRow = (source: string) => { + const newRows = [...rows]; + const index = newRows.findIndex((row) => row.source === source); + newRows.splice(index, 1); + setRows(newRows); + }; + + const handleSwitch = (on: boolean, source: string) => { + if (on) { + handleAddRow(`${schema}.${source}`); + } else { + handleRemoveRow(`${schema}.${source}`); + } + }; + + const updateDestination = (source: string, dest: string) => { + // find the row with source and update the destination + const newRows = [...rows]; + const index = newRows.findIndex((row) => row.source === source); + newRows[index].destination = dest; + return newRows; + }; + + const updatePartitionKey = (source: string, pkey: string) => { + const newRows = [...rows]; + const index = newRows.findIndex((row) => row.source === source); + newRows[index].partitionKey = pkey; + return newRows; + }; + + const getTablesOfSchema = (schemaName: string) => { + fetchTables(sourcePeerName, schemaName, setLoading).then((res) => + setAllTables(res) + ); + }; + + useEffect(() => { + fetchSchemas(sourcePeerName, setLoading).then((res) => setAllSchemas(res)); + setSchema('public'); + getTablesOfSchema('public'); + }, [sourcePeerName]); + + return ( +
+ + Source Schema} + action={ + + } + /> + +
+ {allTables ? ( + allTables.map((sourceTableName, index) => ( +
+
+
+
+ + handleSwitch(state, sourceTableName) + } + /> +
+ {sourceTableName} +
+
+ {rows.find( + (row) => row.source === `${schema}.${sourceTableName}` + )?.destination && ( +
+ + Destination Table Name + {RequiredIndicator(true)} +
+ } + action={ +
+ + row.source === + `${schema}.${sourceTableName}` + )?.destination + } + onChange={( + e: React.ChangeEvent + ) => + updateDestination( + `${schema}.${sourceTableName}`, + e.target.value + ) + } + /> +
+ } + /> + + Partition Key +
+ } + action={ +
+ + ) => + updatePartitionKey( + `${schema}.${sourceTableName}`, + e.target.value + ) + } + /> +
+ } + /> +
+ This is used only if you enable initial load, and + specifies its watermark. +
+
+ )} +
+ +
+
+ )) + ) : ( +
+ +
+ )} + + + ); +}; + +export default TableMapping; diff --git a/ui/app/mirrors/edit/[mirrorId]/cdc.tsx b/ui/app/mirrors/edit/[mirrorId]/cdc.tsx new file mode 100644 index 000000000..a1f515d94 --- /dev/null +++ b/ui/app/mirrors/edit/[mirrorId]/cdc.tsx @@ -0,0 +1,234 @@ +'use client'; + +import { + CDCMirrorStatus, + QRepMirrorStatus, + SnapshotStatus, +} from '@/grpc_generated/route'; +import { Button } from '@/lib/Button'; +import { Checkbox } from '@/lib/Checkbox'; +import { Icon } from '@/lib/Icon'; +import { Label } from '@/lib/Label'; +import { ProgressBar } from '@/lib/ProgressBar'; +import { SearchField } from '@/lib/SearchField'; +import { Table, TableCell, TableRow } from '@/lib/Table'; +import * as Tabs from '@radix-ui/react-tabs'; +import moment, { Duration, Moment } from 'moment'; +import { useQueryState } from 'next-usequerystate'; +import Link from 'next/link'; +import styled from 'styled-components'; +import CDCDetails from './cdcDetails'; + +class TableCloneSummary { + flowJobName: string; + tableName: string; + totalNumPartitions: number; + totalNumRows: number; + completedNumPartitions: number; + completedNumRows: number; + avgTimePerPartition: Duration | null; + cloneStartTime: Moment | null; + + constructor(clone: QRepMirrorStatus) { + this.flowJobName = clone.config?.flowJobName || ''; + this.tableName = clone.config?.watermarkTable || ''; + this.totalNumPartitions = 0; + this.totalNumRows = 0; + this.completedNumPartitions = 0; + this.completedNumRows = 0; + this.avgTimePerPartition = null; + this.cloneStartTime = null; + + this.calculate(clone); + } + + private calculate(clone: QRepMirrorStatus): void { + let totalTime = moment.duration(0); + clone.partitions?.forEach((partition) => { + this.totalNumPartitions++; + this.totalNumRows += partition.numRows; + + if (partition.startTime) { + let st = moment(partition.startTime); + if (!this.cloneStartTime || st.isBefore(this.cloneStartTime)) { + this.cloneStartTime = st; + } + } + + if 
(partition.endTime) { + this.completedNumPartitions++; + this.completedNumRows += partition.numRows; + let st = moment(partition.startTime); + let et = moment(partition.endTime); + let duration = moment.duration(et.diff(st)); + totalTime = totalTime.add(duration); + } + }); + + if (this.completedNumPartitions > 0) { + this.avgTimePerPartition = moment.duration( + totalTime.asMilliseconds() / this.completedNumPartitions + ); + } + } + + getRowProgressPercentage(): number { + if (this.totalNumRows === 0) { + return 0; + } + return (this.completedNumRows / this.totalNumRows) * 100; + } + + getPartitionProgressPercentage(): number { + if (this.totalNumPartitions === 0) { + return 0; + } + return (this.completedNumPartitions / this.totalNumPartitions) * 100; + } +} + +function summarizeTableClone(clone: QRepMirrorStatus): TableCloneSummary { + return new TableCloneSummary(clone); +} + +type SnapshotStatusProps = { + status: SnapshotStatus; +}; +const SnapshotStatusTable = ({ status }: SnapshotStatusProps) => ( + Initial Copy} + toolbar={{ + left: ( + <> + + + + + ), + right: , + }} + header={ + + + + + Table Identifier + Start Time + Progress Partitions + Num Rows Synced + Avg Time Per Partition + + } + > + {status.clones.map(summarizeTableClone).map((clone, index) => ( + + + + + + + + + + + + + {clone.completedNumPartitions} / {clone.totalNumPartitions} + + {clone.completedNumRows} + + + + + ))} +
+); + +const Trigger = styled(({ isActive, ...props }) => ( + +))<{ isActive?: boolean }>` + background-color: ${({ theme, isActive }) => + isActive ? theme.colors.accent.surface.selected : 'white'}; + + font-weight: ${({ isActive }) => (isActive ? 'bold' : 'normal')}; + + &:hover { + color: ${({ theme }) => theme.colors.accent.text.highContrast}; + } +`; + +type CDCMirrorStatusProps = { + cdc: CDCMirrorStatus; + syncStatusChild?: React.ReactNode; +}; +export function CDCMirror({ cdc, syncStatusChild }: CDCMirrorStatusProps) { + const [selectedTab, setSelectedTab] = useQueryState('tab', { + history: 'push', + defaultValue: 'tab1', + }); + + let snapshot = <>; + if (cdc.snapshotStatus) { + snapshot = ; + } + + return ( + + + + Details + + + Sync Status + + + Initial Copy + + + + + + + {syncStatusChild} + + + {snapshot} + + + ); +} diff --git a/ui/app/mirrors/edit/[mirrorId]/cdcDetails.tsx b/ui/app/mirrors/edit/[mirrorId]/cdcDetails.tsx new file mode 100644 index 000000000..bd20ad185 --- /dev/null +++ b/ui/app/mirrors/edit/[mirrorId]/cdcDetails.tsx @@ -0,0 +1,35 @@ +import { FlowConnectionConfigs } from '@/grpc_generated/flow'; + +type CDCDetailsProps = { + config: FlowConnectionConfigs | undefined; +}; + +export default function CDCDetails({ config }: CDCDetailsProps) { + if (!config) { + return
No configuration provided
; + } + + return ( +
+

CDC Details

+
+ + + + + + + + + + + + + + + +
Source{config.source?.name || '-'}
Destination{config.destination?.name || '-'}
Flow Job Name{config.flowJobName}
+
+
+ ); +} diff --git a/ui/app/mirrors/edit/[mirrorId]/page.tsx b/ui/app/mirrors/edit/[mirrorId]/page.tsx index f01ce5908..e2c5e6596 100644 --- a/ui/app/mirrors/edit/[mirrorId]/page.tsx +++ b/ui/app/mirrors/edit/[mirrorId]/page.tsx @@ -1,195 +1,60 @@ -import { Badge } from '@/lib/Badge'; -import { Button } from '@/lib/Button'; -import { ButtonGroup } from '@/lib/ButtonGroup'; -import { TrackerChart } from '@/lib/Chart'; -import { Checkbox } from '@/lib/Checkbox'; +import { MirrorStatusResponse } from '@/grpc_generated/route'; import { Header } from '@/lib/Header'; -import { Icon } from '@/lib/Icon'; -import { Label } from '@/lib/Label'; -import { LayoutMain, Row, RowWithToggleGroup } from '@/lib/Layout'; -import { Panel } from '@/lib/Panel'; -import { SearchField } from '@/lib/SearchField'; -import { Select } from '@/lib/Select'; -import { Table, TableCell, TableRow } from '@/lib/Table'; -import { ToggleGroup, ToggleGroupItem } from '@/lib/Toggle'; -import { Color } from '@tremor/react'; +import { LayoutMain } from '@/lib/Layout'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { redirect } from 'next/navigation'; +import { Suspense } from 'react'; +import { CDCMirror } from './cdc'; +import SyncStatus from './syncStatus'; -const weekdays = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']; +export const dynamic = 'force-dynamic'; -interface Tracker { - color: Color; - tooltip: string; +type EditMirrorProps = { + params: { mirrorId: string }; +}; + +function getMirrorStatusUrl(mirrorId: string) { + let base = GetFlowHttpAddressFromEnv(); + return `${base}/v1/mirrors/${mirrorId}`; +} + +async function getMirrorStatus(mirrorId: string) { + const url = getMirrorStatusUrl(mirrorId); + const resp = await fetch(url); + const json = await resp.json(); + return json; } -const weekData = weekdays.map((weekDay) => ({ - tooltip: weekDay, - color: Math.random() > 0.5 ? 'blue' : 'gray', -})); +function Loading() { + return
Loading...
; +} -const Badges = [ - - - Active - , - - - Paused - , - - - Broken - , - - - Incomplete - , -]; +export default async function EditMirror({ + params: { mirrorId }, +}: EditMirrorProps) { + const mirrorStatus: MirrorStatusResponse = await getMirrorStatus(mirrorId); + if (!mirrorStatus) { + return
No mirror status found!
; + } -const ExampleTable = ({ title }: { title: string }) => ( - {title}} - toolbar={{ - left: ( - <> - - - - - - - ), - right: , - }} - header={ - - - - - - - - - - - - - - - - - - } - > - {Array(8) - .fill(null) - .map((_, index) => ( - - - - - - - - - - - - - - - - - - - - {Badges[index % Badges.length]} - - - - - ))} -
-); + let syncStatusChild = <>; + if (mirrorStatus.cdcStatus) { + syncStatusChild = ; + } else { + redirect(`/mirrors/status/qrep/${mirrorId}`); + } -type EditMirrorProps = { - params: { mirrorId: string }; -}; -export default function EditMirror({ params: { mirrorId } }: EditMirrorProps) { return ( - -
- - - - - } - > - {mirrorId} -
-
- -
- - - - -
-
- - - - {mirrorId} + }> + {mirrorStatus.cdcStatus && ( + -
- Sync history} - action={ - - Month - Week - Day - - } - /> - } - /> -
- - - + )} +
); } diff --git a/ui/app/mirrors/edit/[mirrorId]/syncStatus.tsx b/ui/app/mirrors/edit/[mirrorId]/syncStatus.tsx new file mode 100644 index 000000000..80cb35701 --- /dev/null +++ b/ui/app/mirrors/edit/[mirrorId]/syncStatus.tsx @@ -0,0 +1,33 @@ +import prisma from '@/app/utils/prisma'; +import { SyncStatusTable } from './syncStatusTable'; + +type SyncStatusProps = { + flowJobName: string | undefined; +}; + +export default async function SyncStatus({ flowJobName }: SyncStatusProps) { + if (!flowJobName) { + return
Flow job name not provided!
; + } + + const syncs = await prisma.cdc_batches.findMany({ + where: { + flow_name: flowJobName, + start_time: { + not: undefined, + }, + }, + orderBy: { + start_time: 'desc', + }, + }); + + const rows = syncs.map((sync) => ({ + batchId: sync.id, + startTime: sync.start_time, + endTime: sync.end_time, + numRows: sync.rows_in_batch, + })); + + return ; +} diff --git a/ui/app/mirrors/edit/[mirrorId]/syncStatusTable.tsx b/ui/app/mirrors/edit/[mirrorId]/syncStatusTable.tsx new file mode 100644 index 000000000..20f44ea90 --- /dev/null +++ b/ui/app/mirrors/edit/[mirrorId]/syncStatusTable.tsx @@ -0,0 +1,123 @@ +'use client'; + +import { Button } from '@/lib/Button'; +import { Checkbox } from '@/lib/Checkbox'; +import { Icon } from '@/lib/Icon'; +import { Label } from '@/lib/Label'; +import { ProgressCircle } from '@/lib/ProgressCircle'; +import { SearchField } from '@/lib/SearchField'; +import { Table, TableCell, TableRow } from '@/lib/Table'; +import moment from 'moment'; +import { useState } from 'react'; + +type SyncStatusRow = { + batchId: number; + startTime: Date; + endTime: Date | null; + numRows: number; +}; + +type SyncStatusTableProps = { + rows: SyncStatusRow[]; +}; + +function TimeWithDurationOrRunning({ + startTime, + endTime, +}: { + startTime: Date; + endTime: Date | null; +}) { + if (endTime) { + return ( + + ); + } else { + return ( + + ); + } +} + +const ROWS_PER_PAGE = 10; + +export const SyncStatusTable = ({ rows }: SyncStatusTableProps) => { + const [currentPage, setCurrentPage] = useState(1); + const totalPages = Math.ceil(rows.length / ROWS_PER_PAGE); + + const startRow = (currentPage - 1) * ROWS_PER_PAGE; + const endRow = startRow + ROWS_PER_PAGE; + + const displayedRows = rows.slice(startRow, endRow); + + const handlePrevPage = () => { + if (currentPage > 1) setCurrentPage(currentPage - 1); + }; + + const handleNextPage = () => { + if (currentPage < totalPages) setCurrentPage(currentPage + 1); + }; + + return ( + Initial Copy} + toolbar={{ + left: 
( + <> + + + + + + ), + right: , + }} + header={ + + + + + Batch ID + Start Time + End Time (Duration) + Num Rows Synced + + } + > + {displayedRows.map((row, index) => ( + + + + + + + + + + + + + + {row.numRows} + + ))} +
+ ); +}; diff --git a/ui/app/mirrors/page.tsx b/ui/app/mirrors/page.tsx index 84e572d79..5827ac44e 100644 --- a/ui/app/mirrors/page.tsx +++ b/ui/app/mirrors/page.tsx @@ -1,127 +1,209 @@ -import { Badge } from '@/lib/Badge'; +import { DropDialog } from '@/components/DropDialog'; import { Button } from '@/lib/Button'; -import { Checkbox } from '@/lib/Checkbox'; import { Header } from '@/lib/Header'; import { Icon } from '@/lib/Icon'; import { Label } from '@/lib/Label'; import { LayoutMain } from '@/lib/Layout'; import { Panel } from '@/lib/Panel'; import { SearchField } from '@/lib/SearchField'; -import { Select } from '@/lib/Select'; import { Table, TableCell, TableRow } from '@/lib/Table'; +import moment from 'moment'; import Link from 'next/link'; +import { getTruePeer } from '../api/peers/route'; +import prisma from '../utils/prisma'; -const Badges = [ - - - Active - , - - - Paused - , - - - Broken - , - - - Incomplete - , -]; +export const dynamic = 'force-dynamic'; -const ExampleTable = ({ title }: { title: string }) => ( - {title}} - toolbar={{ - left: ( - <> - - - - - - - ), - right: , - }} - header={ - - - - - - - - - - - - - - - - - - } - > - {Array(8) - .fill(null) - .map((_, index) => ( - - - - - - - - - - - - - - - - - - - - {Badges[index % Badges.length]} - - - - - ))} -
-); +async function CDCFlows() { + const flows = await prisma.flows.findMany({ + include: { + sourcePeer: true, + destinationPeer: true, + }, + }); + + let cdcFlows = flows.filter((flow) => { + return !flow.query_string; + }); + + return ( + <> + +
+ + + + + + + + ), + right: , + }} + header={ + + Name + Source + Destination + Start Time + + + } + > + {cdcFlows.map((flow) => ( + + + + + +
+ +
+
+ +
+ +
+
+ + + + + + +
+ ))} +
+
+ + ); +} + +// query replication flows table like CDC flows table +async function QRepFlows() { + const flows = await prisma.flows.findMany({ + include: { + sourcePeer: true, + destinationPeer: true, + }, + }); + + let qrepFlows = flows.filter((flow) => { + return flow.query_string; + }); -export default function Mirrors() { + return ( + <> + +
+ + + + + + + + ), + right: , + }} + header={ + + Name + Source + Destination + Start Time + + + } + > + {qrepFlows.map((flow) => ( + + + + + +
+ +
+
+ +
+ +
+
+ + + + + + +
+ ))} +
+
+ + ); +} + +export default async function Mirrors() { return ( @@ -137,13 +219,10 @@ export default function Mirrors() { - - - - + - - + + ); diff --git a/ui/app/mirrors/status/qrep/[mirrorId]/page.tsx b/ui/app/mirrors/status/qrep/[mirrorId]/page.tsx new file mode 100644 index 000000000..a3fed7e0e --- /dev/null +++ b/ui/app/mirrors/status/qrep/[mirrorId]/page.tsx @@ -0,0 +1,47 @@ +import prisma from '@/app/utils/prisma'; +import { Header } from '@/lib/Header'; +import { LayoutMain } from '@/lib/Layout'; +import QRepConfigViewer from './qrepConfigViewer'; +import QRepStatusTable, { QRepPartitionStatus } from './qrepStatusTable'; + +export const dynamic = 'force-dynamic'; + +type QRepMirrorStatusProps = { + params: { mirrorId: string }; +}; + +export default async function QRepMirrorStatus({ + params: { mirrorId }, +}: QRepMirrorStatusProps) { + const runs = await prisma.qrep_partitions.findMany({ + where: { + flow_name: mirrorId, + start_time: { + not: null, + }, + }, + orderBy: { + start_time: 'desc', + }, + }); + + const partitions = runs.map((run) => { + let ret: QRepPartitionStatus = { + partitionId: run.partition_uuid, + runUuid: run.run_uuid, + startTime: run.start_time, + endTime: run.end_time, + numRows: run.rows_in_partition, + status: '', + }; + return ret; + }); + + return ( + +
{mirrorId}
+ + +
+ ); +} diff --git a/ui/app/mirrors/status/qrep/[mirrorId]/qrepConfigViewer.tsx b/ui/app/mirrors/status/qrep/[mirrorId]/qrepConfigViewer.tsx new file mode 100644 index 000000000..ba8c85ce9 --- /dev/null +++ b/ui/app/mirrors/status/qrep/[mirrorId]/qrepConfigViewer.tsx @@ -0,0 +1,49 @@ +import prisma from '@/app/utils/prisma'; +import { QRepConfig } from '@/grpc_generated/flow'; +import { Badge } from '@/lib/Badge'; +import { Icon } from '@/lib/Icon'; +import { ProgressCircle } from '@/lib/ProgressCircle'; + +export const dynamic = 'force-dynamic'; + +type QRepConfigViewerProps = { + mirrorId: string; +}; + +export default async function QRepConfigViewer({ + mirrorId, +}: QRepConfigViewerProps) { + const configBuffer = await prisma.qrep_runs.findFirst({ + select: { + config_proto: true, + }, + where: { + flow_name: mirrorId, + config_proto: { + not: null, + }, + }, + }); + + if (!configBuffer?.config_proto) { + return ( +
+ + Waiting for mirror to start... +
+ ); + } + + let qrepConfig = QRepConfig.decode(configBuffer.config_proto); + + return ( +
+ + +
+ {qrepConfig.initialCopyOnly ? 'Initial Load' : 'Continuous Sync'} +
+
+
+ ); +} diff --git a/ui/app/mirrors/status/qrep/[mirrorId]/qrepStatusTable.tsx b/ui/app/mirrors/status/qrep/[mirrorId]/qrepStatusTable.tsx new file mode 100644 index 000000000..e90038d45 --- /dev/null +++ b/ui/app/mirrors/status/qrep/[mirrorId]/qrepStatusTable.tsx @@ -0,0 +1,160 @@ +'use client'; + +import { Button } from '@/lib/Button'; +import { Checkbox } from '@/lib/Checkbox'; +import { Icon } from '@/lib/Icon'; +import { Label } from '@/lib/Label'; +import { ProgressCircle } from '@/lib/ProgressCircle'; +import { SearchField } from '@/lib/SearchField'; +import { Table, TableCell, TableRow } from '@/lib/Table'; +import moment from 'moment'; +import { useState } from 'react'; + +export type QRepPartitionStatus = { + partitionId: string; + runUuid: string; + status: string; + startTime: Date | null; + endTime: Date | null; + numRows: number | null; +}; + +function TimeOrProgressBar({ time }: { time: Date | null }) { + if (time === null) { + return ; + } else { + return ; + } +} + +function RowPerPartition({ + partitionId, + runUuid, + status, + startTime, + endTime, + numRows, +}: QRepPartitionStatus) { + let duration = 'N/A'; + if (startTime && endTime) { + duration = moment + .duration(moment(endTime).diff(moment(startTime))) + .humanize({ ss: 1 }); + } + + return ( + + + + + + + + + + + + + + + + + + + + + + + + ); +} + +type QRepStatusTableProps = { + flowJobName: string; + partitions: QRepPartitionStatus[]; +}; + +export default function QRepStatusTable({ + flowJobName, + partitions, +}: QRepStatusTableProps) { + const ROWS_PER_PAGE = 10; + const [currentPage, setCurrentPage] = useState(1); + const totalPages = Math.ceil(partitions.length / ROWS_PER_PAGE); + + const visiblePartitions = partitions.slice( + (currentPage - 1) * ROWS_PER_PAGE, + currentPage * ROWS_PER_PAGE + ); + + const handleNext = () => { + if (currentPage < totalPages) setCurrentPage(currentPage + 1); + }; + + const handlePrevious = () => { + if (currentPage > 1) setCurrentPage(currentPage - 
1); + }; + + return ( + Progress} + toolbar={{ + left: ( + <> + + + + + +
+ +
+ + ), + right: , + }} + header={ + + + + + Partition UUID + Run UUID + Duration + Start Time + End Time + Num Rows Synced + + } + > + {visiblePartitions.map((partition, index) => ( + + ))} +
+ ); +} diff --git a/ui/app/mirrors/types.ts b/ui/app/mirrors/types.ts new file mode 100644 index 000000000..16054b2fa --- /dev/null +++ b/ui/app/mirrors/types.ts @@ -0,0 +1,7 @@ +import { FlowConnectionConfigs, QRepConfig } from '@/grpc_generated/flow'; +import { Dispatch, SetStateAction } from 'react'; + +export type CDCConfig = FlowConnectionConfigs; +export type MirrorConfig = CDCConfig | QRepConfig; +export type MirrorSetter = Dispatch>; +export type TableMapRow = { source: string; destination: string }; diff --git a/ui/app/peers/[peerName]/datatables.tsx b/ui/app/peers/[peerName]/datatables.tsx new file mode 100644 index 000000000..a9f5d9d46 --- /dev/null +++ b/ui/app/peers/[peerName]/datatables.tsx @@ -0,0 +1,109 @@ +import { CopyButton } from '@/components/CopyButton'; +import { SlotInfo, StatInfo } from '@/grpc_generated/route'; +import { Label } from '@/lib/Label'; +import { Table, TableCell, TableRow } from '@/lib/Table'; +import { DurationDisplay, SlotNameDisplay } from './helpers'; + +export const SlotTable = ({ data }: { data: SlotInfo[] }) => { + return ( +
+ +
+ + Slot Name + Active + Redo LSN + Restart LSN + Lag (In MB) + + } + > + {data.map(({ slotName, active, redoLSN, restartLSN, lagInMb }) => { + return ( + + + + + {active ? 'Yes' : 'No'} + {redoLSN} + {restartLSN} + {lagInMb} + + ); + })} +
+
+
+ ); +}; + +export const StatTable = ({ data }: { data: StatInfo[] }) => { + return ( +
+ +
+ + PID + Duration + Wait Event + Wait Event Type + Query Start Time + Query + + } + > + {data.map((stat) => ( + + {stat.pid} + + + + {stat.waitEvent || 'N/A'} + {stat.waitEventType || 'N/A'} + {stat.queryStart || 'N/A'} + +
+ {stat.query} + +
+
+
+ ))} +
+
+
+ ); +}; diff --git a/ui/app/peers/[peerName]/helpers.tsx b/ui/app/peers/[peerName]/helpers.tsx new file mode 100644 index 000000000..4aaf17832 --- /dev/null +++ b/ui/app/peers/[peerName]/helpers.tsx @@ -0,0 +1,40 @@ +import { Label } from '@/lib/Label'; +import Link from 'next/link'; + +const getFlowName = (slotName: string) => { + if (slotName.startsWith('peerflow_slot_')) { + return slotName.slice(14); + } + return ''; +}; + +export const SlotNameDisplay = ({ slotName }: { slotName: string }) => { + const flowName = getFlowName(slotName); + return flowName.length >= 1 ? ( + + ) : ( + + ); +}; + +export const DurationDisplay = ({ duration }: { duration: number }) => { + if (duration < 0) return 'N/A'; + return duration >= 3600 + ? `${Math.floor(duration / 3600)} hour(s) ${Math.floor( + (duration % 3600) / 60 + )} minutes` + : duration >= 60 + ? `${Math.floor(duration / 60)} minute(s) ${Math.floor( + duration % 60 + )} seconds` + : `${duration.toFixed(2)} seconds`; +}; diff --git a/ui/app/peers/[peerName]/page.tsx b/ui/app/peers/[peerName]/page.tsx new file mode 100644 index 000000000..3fcf4ce36 --- /dev/null +++ b/ui/app/peers/[peerName]/page.tsx @@ -0,0 +1,79 @@ +import ReloadButton from '@/components/ReloadButton'; +import { PeerSlotResponse, PeerStatResponse } from '@/grpc_generated/route'; +import { Label } from '@/lib/Label'; +import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import Link from 'next/link'; +import { SlotTable, StatTable } from './datatables'; +export const dynamic = 'force-dynamic'; + +type DataConfigProps = { + params: { peerName: string }; +}; + +const PeerData = async ({ params: { peerName } }: DataConfigProps) => { + const getSlotData = async () => { + const flowServiceAddr = GetFlowHttpAddressFromEnv(); + + const peerSlots: PeerSlotResponse = await fetch( + `${flowServiceAddr}/v1/peers/slots/${peerName}` + ).then((res) => res.json()); + + return peerSlots.slotData; + }; + + const getStatData = async () => { + const flowServiceAddr 
= GetFlowHttpAddressFromEnv(); + + const peerStats: PeerStatResponse = await fetch( + `${flowServiceAddr}/v1/peers/stats/${peerName}` + ).then((res) => res.json()); + + return peerStats.statData; + }; + + const slots = await getSlotData(); + const stats = await getStatData(); + + return ( +
+
+
{peerName}
+ +
+ {slots && stats ? ( +
+ + +
+ ) : ( +
+ We do not have stats to show for this peer at the moment. Please check + if your PostgreSQL peer is open for connections. Note that peer + replication slot information and stat activity is currently only + supported for PostgreSQL peers. + +
+ )} +
+ ); +}; + +export default PeerData; diff --git a/ui/app/peers/create/[peerType]/handlers.ts b/ui/app/peers/create/[peerType]/handlers.ts new file mode 100644 index 000000000..0d0925552 --- /dev/null +++ b/ui/app/peers/create/[peerType]/handlers.ts @@ -0,0 +1,98 @@ +import { + PeerConfig, + UCreatePeerResponse, + UValidatePeerResponse, +} from '@/app/dto/PeersDTO'; +import { Dispatch, SetStateAction } from 'react'; +import { pgSchema, sfSchema } from './schema'; + +// Frontend form validation +const validateFields = ( + type: string, + config: PeerConfig, + setMessage: Dispatch>, + name?: string +): boolean => { + if (!name) { + setMessage({ ok: false, msg: 'Peer name is required' }); + return false; + } + let validationErr: string | undefined; + switch (type) { + case 'POSTGRES': + const pgConfig = pgSchema.safeParse(config); + if (!pgConfig.success) validationErr = pgConfig.error.issues[0].message; + break; + case 'SNOWFLAKE': + const sfConfig = sfSchema.safeParse(config); + if (!sfConfig.success) validationErr = sfConfig.error.issues[0].message; + break; + default: + validationErr = 'Unsupported peer type ' + type; + } + if (validationErr) { + setMessage({ ok: false, msg: validationErr }); + return false; + } else setMessage({ ok: true, msg: '' }); + return true; +}; + +// API call to validate peer +export const handleValidate = async ( + type: string, + config: PeerConfig, + setMessage: Dispatch>, + setLoading: Dispatch>, + name?: string +) => { + const isValid = validateFields(type, config, setMessage, name); + if (!isValid) return; + setLoading(true); + const valid: UValidatePeerResponse = await fetch('/api/peers/', { + method: 'POST', + body: JSON.stringify({ + name, + type, + config, + mode: 'validate', + }), + }).then((res) => res.json()); + if (!valid.valid) { + setMessage({ ok: false, msg: valid.message }); + setLoading(false); + return; + } + setMessage({ ok: true, msg: 'Peer is valid' }); + setLoading(false); +}; + +// API call to create peer +export 
const handleCreate = async ( + type: string, + config: PeerConfig, + setMessage: Dispatch>, + setLoading: Dispatch>, + route: RouteCallback, + name?: string +) => { + let isValid = validateFields(type, config, setMessage, name); + if (!isValid) return; + setLoading(true); + const createdPeer: UCreatePeerResponse = await fetch('/api/peers/', { + method: 'POST', + body: JSON.stringify({ + name, + type, + config, + mode: 'create', + }), + }).then((res) => res.json()); + if (!createdPeer.created) { + setMessage({ ok: false, msg: createdPeer.message }); + setLoading(false); + return; + } + setMessage({ ok: true, msg: 'Peer created successfully' }); + route(); + setLoading(false); +}; diff --git a/ui/app/peers/create/[peerType]/helpers/common.ts b/ui/app/peers/create/[peerType]/helpers/common.ts new file mode 100644 index 000000000..8aad61812 --- /dev/null +++ b/ui/app/peers/create/[peerType]/helpers/common.ts @@ -0,0 +1,25 @@ +import { PeerConfig } from '@/app/dto/PeersDTO'; +import { PeerSetter } from '@/components/ConfigForm'; +import { blankPostgresSetting } from './pg'; +import { blankSnowflakeSetting } from './sf'; + +export interface PeerSetting { + label: string; + stateHandler: (value: string, setter: PeerSetter) => void; + type?: string; + optional?: boolean; + tips?: string; + helpfulLink?: string; + default?: string | number; +} + +export const getBlankSetting = (dbType: string): PeerConfig => { + switch (dbType) { + case 'POSTGRES': + return blankPostgresSetting; + case 'SNOWFLAKE': + return blankSnowflakeSetting; + default: + return blankPostgresSetting; + } +}; diff --git a/ui/app/peers/create/[peerType]/helpers/pg.ts b/ui/app/peers/create/[peerType]/helpers/pg.ts new file mode 100644 index 000000000..84c464bf7 --- /dev/null +++ b/ui/app/peers/create/[peerType]/helpers/pg.ts @@ -0,0 +1,58 @@ +import { PostgresConfig } from '@/grpc_generated/peers'; +import { PeerSetting } from './common'; + +export const postgresSetting: PeerSetting[] = [ + { + label: 
'Host', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, host: value })), + tips: 'Specifies the IP host name or address on which postgres is to listen for TCP/IP connections from client applications. Ensure that this host has us whitelisted so we can connect to it.', + }, + { + label: 'Port', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, port: parseInt(value, 10) })), + type: 'number', // type for textfield + default: 5432, + tips: 'Specifies the TCP/IP port or local Unix domain socket file extension on which postgres is listening for connections from client applications.', + }, + { + label: 'User', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, user: value })), + tips: 'Specify the user that we should use to connect to this host.', + helpfulLink: 'https://www.postgresql.org/docs/8.0/user-manag.html', + }, + { + label: 'Password', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, password: value })), + type: 'password', + tips: 'Password associated with the user you provided.', + helpfulLink: 'https://www.postgresql.org/docs/current/auth-password.html', + }, + { + label: 'Database', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, database: value })), + tips: 'Specify which database to associate with this peer.', + helpfulLink: + 'https://www.postgresql.org/docs/current/sql-createdatabase.html', + }, + { + label: 'Transaction Snapshot', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, transactionSnapshot: value })), + optional: true, + tips: 'This is optional and only needed if this peer is part of any query replication mirror.', + }, +]; + +export const blankPostgresSetting: PostgresConfig = { + host: '', + port: 5432, + user: '', + password: '', + database: '', + transactionSnapshot: '', +}; diff --git a/ui/app/peers/create/[peerType]/helpers/sf.ts b/ui/app/peers/create/[peerType]/helpers/sf.ts new file mode 100644 index 000000000..44cbde7fe --- /dev/null 
+++ b/ui/app/peers/create/[peerType]/helpers/sf.ts @@ -0,0 +1,95 @@ +import { SnowflakeConfig } from '@/grpc_generated/peers'; +import { PeerSetting } from './common'; + +export const snowflakeSetting: PeerSetting[] = [ + { + label: 'Account ID', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, accountId: value })), + tips: 'This is the unique identifier for your Snowflake account. It has a URL-like format', + helpfulLink: + 'https://docs.snowflake.com/en/user-guide/admin-account-identifier', + }, + { + label: 'Username', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, username: value })), + tips: 'This is the username you use to login to your Snowflake account.', + helpfulLink: + 'https://docs.snowflake.com/en/user-guide/admin-user-management', + }, + { + label: 'Private Key', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, privateKey: value })), + type: 'file', + tips: 'This can be of any file extension. If you are using an encrypted key, you must fill the below password field for decryption.', + helpfulLink: 'https://docs.snowflake.com/en/user-guide/key-pair-auth', + }, + { + label: 'Warehouse', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, warehouse: value })), + tips: 'Warehouses denote a cluster of snowflake resources.', + helpfulLink: 'https://docs.snowflake.com/en/user-guide/warehouses', + }, + { + label: 'Database', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, database: value })), + tips: 'Specify which database to associate with this peer.', + helpfulLink: 'https://docs.snowflake.com/en/sql-reference/snowflake-db', + }, + { + label: 'Role', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, role: value })), + tips: 'You could use a default role, or setup a role with the required permissions.', + helpfulLink: + 'https://docs.snowflake.com/en/user-guide/security-access-control-overview#roles', + }, + { + label: 'Query Timeout', + stateHandler: 
(value, setter) => + setter((curr) => ({ ...curr, queryTimeout: parseInt(value, 10) || 30 })), + optional: true, + tips: 'This is the maximum time in seconds that a query can run before being cancelled. If not specified, the default is 30 seconds', + default: 30, + }, + { + label: 'S3 Integration', + stateHandler: (value, setter) => + setter((curr) => ({ ...curr, s3Integration: value })), + optional: true, + tips: `This is needed only if you plan to run a mirror and you wish to stage AVRO files on S3.`, + helpfulLink: + 'https://docs.snowflake.com/en/user-guide/data-load-s3-config-storage-integration', + }, + { + label: 'Password', + stateHandler: (value, setter) => { + if (!value.length) { + // remove password key from state if empty + setter((curr) => { + delete curr['password']; + return curr; + }); + } else setter((curr) => ({ ...curr, password: value })); + }, + type: 'password', + optional: true, + tips: 'This is needed only if the private key you provided is encrypted.', + helpfulLink: 'https://docs.snowflake.com/en/user-guide/key-pair-auth', + }, +]; + +export const blankSnowflakeSetting: SnowflakeConfig = { + accountId: '', + privateKey: '', + username: '', + warehouse: '', + database: '', + role: '', + queryTimeout: 30, + s3Integration: '', +}; diff --git a/ui/app/peers/create/[peerType]/page.tsx b/ui/app/peers/create/[peerType]/page.tsx new file mode 100644 index 000000000..01af2b0dd --- /dev/null +++ b/ui/app/peers/create/[peerType]/page.tsx @@ -0,0 +1,146 @@ +'use client'; +import { PeerConfig } from '@/app/dto/PeersDTO'; +import { Button } from '@/lib/Button'; +import { ButtonGroup } from '@/lib/ButtonGroup'; +import { Label } from '@/lib/Label'; +import { LayoutMain, RowWithTextField } from '@/lib/Layout'; +import { Panel } from '@/lib/Panel'; +import { TextField } from '@/lib/TextField'; +import { Tooltip } from '@/lib/Tooltip'; +import Link from 'next/link'; +import { useRouter } from 'next/navigation'; +import { useState } from 'react'; +import 
ConfigForm from '../../../../components/ConfigForm'; +import { handleCreate, handleValidate } from './handlers'; +import { PeerSetting, getBlankSetting } from './helpers/common'; +import { postgresSetting } from './helpers/pg'; +import { snowflakeSetting } from './helpers/sf'; + +type CreateConfigProps = { + params: { peerType: string }; +}; + +export default function CreateConfig({ + params: { peerType }, +}: CreateConfigProps) { + const router = useRouter(); + const dbType = peerType; + const blankSetting = getBlankSetting(dbType); + const [name, setName] = useState(''); + const [config, setConfig] = useState(blankSetting); + const [formMessage, setFormMessage] = useState<{ ok: boolean; msg: string }>({ + ok: true, + msg: '', + }); + const [loading, setLoading] = useState(false); + const configComponentMap = (dbType: string) => { + const configForm = (settingList: PeerSetting[]) => ( + + ); + switch (dbType) { + case 'POSTGRES': + return configForm(postgresSetting); + case 'SNOWFLAKE': + return configForm(snowflakeSetting); + default: + return <>; + } + }; + + let listPeersRoute = () => { + router.push('/peers'); + }; + + return ( + + + + + + + Name + { + + + + } + + } + action={ + ) => + setName(e.target.value) + } + /> + } + /> + + {dbType && configComponentMap(dbType)} + + + + + + + + + {loading && ( + + )} + {!loading && formMessage.msg.length > 0 && ( + + )} + + + + ); +} diff --git a/ui/app/peers/create/[peerType]/schema.ts b/ui/app/peers/create/[peerType]/schema.ts new file mode 100644 index 000000000..4134f08f1 --- /dev/null +++ b/ui/app/peers/create/[peerType]/schema.ts @@ -0,0 +1,108 @@ +import * as z from 'zod'; + +export const pgSchema = z.object({ + host: z + .string({ + required_error: 'Host is required', + invalid_type_error: 'Host must be a string', + }) + .nonempty({ message: 'Host cannot be empty' }) + .max(255, 'Host must be less than 255 characters'), + port: z + .number({ + required_error: 'Port is required', + invalid_type_error: 'Port must 
be a number', + }) + .int() + .min(1, 'Port must be a positive integer') + .max(65535, 'Port must be below 65535'), + database: z + .string({ + required_error: 'Database is required', + invalid_type_error: 'Database must be a string', + }) + .min(1, { message: 'Database name should be non-empty' }) + .max(100, 'Database must be less than 100 characters'), + user: z + .string({ + required_error: 'User is required', + invalid_type_error: 'User must be a string', + }) + .min(1, 'User must be non-empty') + .max(64, 'User must be less than 64 characters'), + password: z + .string({ + required_error: 'Password is required', + invalid_type_error: 'Password must be a string', + }) + .min(1, 'Password must be non-empty') + .max(100, 'Password must be less than 100 characters'), + transactionSnapshot: z + .string() + .max(100, 'Transaction snapshot too long (100 char limit)') + .optional(), +}); + +export const sfSchema = z.object({ + accountId: z + .string({ + required_error: 'Account ID is required', + invalid_type_error: 'Account ID must be a string', + }) + .nonempty({ message: 'Account ID must be non-empty' }) + .max(255, 'Account ID must be less than 255 characters'), + privateKey: z + .string({ + required_error: 'Private Key is required', + invalid_type_error: 'Private Key must be a string', + }) + .nonempty({ message: 'Private Key must be non-empty' }), + username: z + .string({ + required_error: 'Username is required', + invalid_type_error: 'Username must be a string', + }) + .nonempty({ message: 'Username must be non-empty' }) + .max(255, 'Username must be less than 255 characters'), + database: z + .string({ + required_error: 'Database is required', + invalid_type_error: 'Database must be a string', + }) + .nonempty({ message: 'Database must be non-empty' }) + .max(255, 'Database must be less than 100 characters'), + warehouse: z + .string({ + required_error: 'Warehouse is required', + invalid_type_error: 'Warehouse must be a string', + }) + .nonempty({ message: 
'Warehouse must be non-empty' }) + .max(255, 'Warehouse must be less than 64 characters'), + role: z + .string({ + invalid_type_error: 'Role must be a string', + }) + .nonempty({ message: 'Role must be non-empty' }) + .max(255, 'Role must be below 255 characters'), + queryTimeout: z + .number({ + invalid_type_error: 'Query timeout must be a number', + }) + .int() + .min(0, 'Query timeout must be a positive integer') + .max(65535, 'Query timeout must be below 65535 seconds') + .optional(), + password: z + .string({ + invalid_type_error: 'Password must be a string', + }) + .max(255, 'Password must be less than 255 characters') + .optional() + .transform((e) => (e === '' ? undefined : e)), + s3Integration: z + .string({ + invalid_type_error: 's3Integration must be a string', + }) + .max(255, 's3Integration must be less than 255 characters') + .optional(), +}); diff --git a/ui/app/peers/create/page.tsx b/ui/app/peers/create/page.tsx new file mode 100644 index 000000000..8297a9c7b --- /dev/null +++ b/ui/app/peers/create/page.tsx @@ -0,0 +1,56 @@ +'use client'; +import SelectSource from '@/components/SelectSource'; +import { Action } from '@/lib/Action'; +import { Button } from '@/lib/Button'; +import { ButtonGroup } from '@/lib/ButtonGroup'; +import { Icon } from '@/lib/Icon'; +import { Label } from '@/lib/Label'; +import { LayoutMain, RowWithSelect } from '@/lib/Layout'; +import { Panel } from '@/lib/Panel'; +import Link from 'next/link'; +import { useState } from 'react'; + +export default function CreatePeer() { + const [peerType, setPeerType] = useState(''); + return ( + + + + + } + href='https://docs.peerdb.io/sql/commands/create-peer' + target='_blank' + > + Learn about peers + + + + + Data source + + } + action={ + + } + /> + + + + + + + + + + + ); +} diff --git a/ui/app/connectors/edit/[connectorId]/page.tsx b/ui/app/peers/edit/[connectorId]/page.tsx similarity index 95% rename from ui/app/connectors/edit/[connectorId]/page.tsx rename to 
ui/app/peers/edit/[connectorId]/page.tsx index d830080c0..f7021b8c0 100644 --- a/ui/app/connectors/edit/[connectorId]/page.tsx +++ b/ui/app/peers/edit/[connectorId]/page.tsx @@ -65,7 +65,10 @@ const ExampleTable = ({ title }: { title: string }) => ( - + - + } > - {connectorId} + {peerId} } bottomRow={ @@ -23,18 +25,11 @@ export default function SidebarComponent() { bottomLabel={} > } as={Link} - href={'/dashboard'} - > - Dashboard - - } > - Connectors + Peers >> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.sourceTableIdentifier = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.destinationTableIdentifier = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.partitionKey = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): TableMapping { + return { + sourceTableIdentifier: isSet(object.sourceTableIdentifier) ? String(object.sourceTableIdentifier) : "", + destinationTableIdentifier: isSet(object.destinationTableIdentifier) + ? String(object.destinationTableIdentifier) + : "", + partitionKey: isSet(object.partitionKey) ? String(object.partitionKey) : "", + }; + }, + + toJSON(message: TableMapping): unknown { + const obj: any = {}; + if (message.sourceTableIdentifier !== "") { + obj.sourceTableIdentifier = message.sourceTableIdentifier; + } + if (message.destinationTableIdentifier !== "") { + obj.destinationTableIdentifier = message.destinationTableIdentifier; + } + if (message.partitionKey !== "") { + obj.partitionKey = message.partitionKey; + } + return obj; + }, + + create, I>>(base?: I): TableMapping { + return TableMapping.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): TableMapping { + const message = createBaseTableMapping(); + message.sourceTableIdentifier = object.sourceTableIdentifier ?? 
""; + message.destinationTableIdentifier = object.destinationTableIdentifier ?? ""; + message.partitionKey = object.partitionKey ?? ""; + return message; + }, +}; + function createBaseFlowConnectionConfigs(): FlowConnectionConfigs { return { source: undefined, destination: undefined, flowJobName: "", tableSchema: undefined, - tableNameMapping: {}, + tableMappings: [], srcTableIdNameMapping: {}, tableNameSchemaMapping: {}, metadataPeer: undefined, @@ -699,6 +805,7 @@ function createBaseFlowConnectionConfigs(): FlowConnectionConfigs { replicationSlotName: "", pushBatchSize: 0, pushParallelism: 0, + resync: false, }; } @@ -716,9 +823,9 @@ export const FlowConnectionConfigs = { if (message.tableSchema !== undefined) { TableSchema.encode(message.tableSchema, writer.uint32(34).fork()).ldelim(); } - Object.entries(message.tableNameMapping).forEach(([key, value]) => { - FlowConnectionConfigs_TableNameMappingEntry.encode({ key: key as any, value }, writer.uint32(42).fork()).ldelim(); - }); + for (const v of message.tableMappings) { + TableMapping.encode(v!, writer.uint32(42).fork()).ldelim(); + } Object.entries(message.srcTableIdNameMapping).forEach(([key, value]) => { FlowConnectionConfigs_SrcTableIdNameMappingEntry.encode({ key: key as any, value }, writer.uint32(50).fork()) .ldelim(); @@ -772,6 +879,9 @@ export const FlowConnectionConfigs = { if (message.pushParallelism !== 0) { writer.uint32(176).int64(message.pushParallelism); } + if (message.resync === true) { + writer.uint32(184).bool(message.resync); + } return writer; }, @@ -815,10 +925,7 @@ export const FlowConnectionConfigs = { break; } - const entry5 = FlowConnectionConfigs_TableNameMappingEntry.decode(reader, reader.uint32()); - if (entry5.value !== undefined) { - message.tableNameMapping[entry5.key] = entry5.value; - } + message.tableMappings.push(TableMapping.decode(reader, reader.uint32())); continue; case 6: if (tag !== 50) { @@ -945,6 +1052,13 @@ export const FlowConnectionConfigs = { 
message.pushParallelism = longToNumber(reader.int64() as Long); continue; + case 23: + if (tag !== 184) { + break; + } + + message.resync = reader.bool(); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -960,12 +1074,9 @@ export const FlowConnectionConfigs = { destination: isSet(object.destination) ? Peer.fromJSON(object.destination) : undefined, flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", tableSchema: isSet(object.tableSchema) ? TableSchema.fromJSON(object.tableSchema) : undefined, - tableNameMapping: isObject(object.tableNameMapping) - ? Object.entries(object.tableNameMapping).reduce<{ [key: string]: string }>((acc, [key, value]) => { - acc[key] = String(value); - return acc; - }, {}) - : {}, + tableMappings: Array.isArray(object?.tableMappings) + ? object.tableMappings.map((e: any) => TableMapping.fromJSON(e)) + : [], srcTableIdNameMapping: isObject(object.srcTableIdNameMapping) ? Object.entries(object.srcTableIdNameMapping).reduce<{ [key: number]: string }>((acc, [key, value]) => { acc[Number(key)] = String(value); @@ -999,6 +1110,7 @@ export const FlowConnectionConfigs = { replicationSlotName: isSet(object.replicationSlotName) ? String(object.replicationSlotName) : "", pushBatchSize: isSet(object.pushBatchSize) ? Number(object.pushBatchSize) : 0, pushParallelism: isSet(object.pushParallelism) ? Number(object.pushParallelism) : 0, + resync: isSet(object.resync) ? 
Boolean(object.resync) : false, }; }, @@ -1016,14 +1128,8 @@ export const FlowConnectionConfigs = { if (message.tableSchema !== undefined) { obj.tableSchema = TableSchema.toJSON(message.tableSchema); } - if (message.tableNameMapping) { - const entries = Object.entries(message.tableNameMapping); - if (entries.length > 0) { - obj.tableNameMapping = {}; - entries.forEach(([k, v]) => { - obj.tableNameMapping[k] = v; - }); - } + if (message.tableMappings?.length) { + obj.tableMappings = message.tableMappings.map((e) => TableMapping.toJSON(e)); } if (message.srcTableIdNameMapping) { const entries = Object.entries(message.srcTableIdNameMapping); @@ -1088,6 +1194,9 @@ export const FlowConnectionConfigs = { if (message.pushParallelism !== 0) { obj.pushParallelism = Math.round(message.pushParallelism); } + if (message.resync === true) { + obj.resync = message.resync; + } return obj; }, @@ -1106,15 +1215,7 @@ export const FlowConnectionConfigs = { message.tableSchema = (object.tableSchema !== undefined && object.tableSchema !== null) ? TableSchema.fromPartial(object.tableSchema) : undefined; - message.tableNameMapping = Object.entries(object.tableNameMapping ?? {}).reduce<{ [key: string]: string }>( - (acc, [key, value]) => { - if (value !== undefined) { - acc[key] = String(value); - } - return acc; - }, - {}, - ); + message.tableMappings = object.tableMappings?.map((e) => TableMapping.fromPartial(e)) || []; message.srcTableIdNameMapping = Object.entries(object.srcTableIdNameMapping ?? {}).reduce< { [key: number]: string } >((acc, [key, value]) => { @@ -1148,81 +1249,7 @@ export const FlowConnectionConfigs = { message.replicationSlotName = object.replicationSlotName ?? ""; message.pushBatchSize = object.pushBatchSize ?? 0; message.pushParallelism = object.pushParallelism ?? 
0; - return message; - }, -}; - -function createBaseFlowConnectionConfigs_TableNameMappingEntry(): FlowConnectionConfigs_TableNameMappingEntry { - return { key: "", value: "" }; -} - -export const FlowConnectionConfigs_TableNameMappingEntry = { - encode(message: FlowConnectionConfigs_TableNameMappingEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== "") { - writer.uint32(18).string(message.value); - } - return writer; - }, - - decode(input: _m0.Reader | Uint8Array, length?: number): FlowConnectionConfigs_TableNameMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseFlowConnectionConfigs_TableNameMappingEntry(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - if (tag !== 10) { - break; - } - - message.key = reader.string(); - continue; - case 2: - if (tag !== 18) { - break; - } - - message.value = reader.string(); - continue; - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skipType(tag & 7); - } - return message; - }, - - fromJSON(object: any): FlowConnectionConfigs_TableNameMappingEntry { - return { key: isSet(object.key) ? String(object.key) : "", value: isSet(object.value) ? String(object.value) : "" }; - }, - - toJSON(message: FlowConnectionConfigs_TableNameMappingEntry): unknown { - const obj: any = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== "") { - obj.value = message.value; - } - return obj; - }, - - create, I>>( - base?: I, - ): FlowConnectionConfigs_TableNameMappingEntry { - return FlowConnectionConfigs_TableNameMappingEntry.fromPartial(base ?? 
({} as any)); - }, - fromPartial, I>>( - object: I, - ): FlowConnectionConfigs_TableNameMappingEntry { - const message = createBaseFlowConnectionConfigs_TableNameMappingEntry(); - message.key = object.key ?? ""; - message.value = object.value ?? ""; + message.resync = object.resync ?? false; return message; }, }; @@ -1388,6 +1415,228 @@ export const FlowConnectionConfigs_TableNameSchemaMappingEntry = { }, }; +function createBaseRenameTableOption(): RenameTableOption { + return { currentName: "", newName: "" }; +} + +export const RenameTableOption = { + encode(message: RenameTableOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.currentName !== "") { + writer.uint32(10).string(message.currentName); + } + if (message.newName !== "") { + writer.uint32(18).string(message.newName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RenameTableOption { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRenameTableOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.currentName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.newName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): RenameTableOption { + return { + currentName: isSet(object.currentName) ? String(object.currentName) : "", + newName: isSet(object.newName) ? 
String(object.newName) : "", + }; + }, + + toJSON(message: RenameTableOption): unknown { + const obj: any = {}; + if (message.currentName !== "") { + obj.currentName = message.currentName; + } + if (message.newName !== "") { + obj.newName = message.newName; + } + return obj; + }, + + create, I>>(base?: I): RenameTableOption { + return RenameTableOption.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): RenameTableOption { + const message = createBaseRenameTableOption(); + message.currentName = object.currentName ?? ""; + message.newName = object.newName ?? ""; + return message; + }, +}; + +function createBaseRenameTablesInput(): RenameTablesInput { + return { flowJobName: "", peer: undefined, renameTableOptions: [] }; +} + +export const RenameTablesInput = { + encode(message: RenameTablesInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.flowJobName !== "") { + writer.uint32(10).string(message.flowJobName); + } + if (message.peer !== undefined) { + Peer.encode(message.peer, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.renameTableOptions) { + RenameTableOption.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RenameTablesInput { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRenameTablesInput(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.flowJobName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.peer = Peer.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.renameTableOptions.push(RenameTableOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): RenameTablesInput { + return { + flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", + peer: isSet(object.peer) ? Peer.fromJSON(object.peer) : undefined, + renameTableOptions: Array.isArray(object?.renameTableOptions) + ? object.renameTableOptions.map((e: any) => RenameTableOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: RenameTablesInput): unknown { + const obj: any = {}; + if (message.flowJobName !== "") { + obj.flowJobName = message.flowJobName; + } + if (message.peer !== undefined) { + obj.peer = Peer.toJSON(message.peer); + } + if (message.renameTableOptions?.length) { + obj.renameTableOptions = message.renameTableOptions.map((e) => RenameTableOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): RenameTablesInput { + return RenameTablesInput.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): RenameTablesInput { + const message = createBaseRenameTablesInput(); + message.flowJobName = object.flowJobName ?? ""; + message.peer = (object.peer !== undefined && object.peer !== null) ? 
Peer.fromPartial(object.peer) : undefined; + message.renameTableOptions = object.renameTableOptions?.map((e) => RenameTableOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseRenameTablesOutput(): RenameTablesOutput { + return { flowJobName: "" }; +} + +export const RenameTablesOutput = { + encode(message: RenameTablesOutput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.flowJobName !== "") { + writer.uint32(10).string(message.flowJobName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RenameTablesOutput { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRenameTablesOutput(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.flowJobName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): RenameTablesOutput { + return { flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "" }; + }, + + toJSON(message: RenameTablesOutput): unknown { + const obj: any = {}; + if (message.flowJobName !== "") { + obj.flowJobName = message.flowJobName; + } + return obj; + }, + + create, I>>(base?: I): RenameTablesOutput { + return RenameTablesOutput.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): RenameTablesOutput { + const message = createBaseRenameTablesOutput(); + message.flowJobName = object.flowJobName ?? 
""; + return message; + }, +}; + function createBaseSyncFlowOptions(): SyncFlowOptions { return { batchSize: 0, relationMessageMapping: {} }; } @@ -3189,7 +3438,7 @@ export const CreateRawTableOutput = { }; function createBaseTableSchema(): TableSchema { - return { tableIdentifier: "", columns: {}, primaryKeyColumn: "", isReplicaIdentityFull: false }; + return { tableIdentifier: "", columns: {}, primaryKeyColumns: [], isReplicaIdentityFull: false }; } export const TableSchema = { @@ -3200,8 +3449,8 @@ export const TableSchema = { Object.entries(message.columns).forEach(([key, value]) => { TableSchema_ColumnsEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).ldelim(); }); - if (message.primaryKeyColumn !== "") { - writer.uint32(26).string(message.primaryKeyColumn); + for (const v of message.primaryKeyColumns) { + writer.uint32(26).string(v!); } if (message.isReplicaIdentityFull === true) { writer.uint32(32).bool(message.isReplicaIdentityFull); @@ -3238,7 +3487,7 @@ export const TableSchema = { break; } - message.primaryKeyColumn = reader.string(); + message.primaryKeyColumns.push(reader.string()); continue; case 4: if (tag !== 32) { @@ -3265,7 +3514,9 @@ export const TableSchema = { return acc; }, {}) : {}, - primaryKeyColumn: isSet(object.primaryKeyColumn) ? String(object.primaryKeyColumn) : "", + primaryKeyColumns: Array.isArray(object?.primaryKeyColumns) + ? object.primaryKeyColumns.map((e: any) => String(e)) + : [], isReplicaIdentityFull: isSet(object.isReplicaIdentityFull) ? 
Boolean(object.isReplicaIdentityFull) : false, }; }, @@ -3284,8 +3535,8 @@ export const TableSchema = { }); } } - if (message.primaryKeyColumn !== "") { - obj.primaryKeyColumn = message.primaryKeyColumn; + if (message.primaryKeyColumns?.length) { + obj.primaryKeyColumns = message.primaryKeyColumns; } if (message.isReplicaIdentityFull === true) { obj.isReplicaIdentityFull = message.isReplicaIdentityFull; @@ -3305,7 +3556,7 @@ export const TableSchema = { } return acc; }, {}); - message.primaryKeyColumn = object.primaryKeyColumn ?? ""; + message.primaryKeyColumns = object.primaryKeyColumns?.map((e) => e) || []; message.isReplicaIdentityFull = object.isReplicaIdentityFull ?? false; return message; }, @@ -4436,79 +4687,8 @@ export const TIDPartitionRange = { }, }; -function createBaseXMINPartitionRange(): XMINPartitionRange { - return { start: 0, end: 0 }; -} - -export const XMINPartitionRange = { - encode(message: XMINPartitionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.start !== 0) { - writer.uint32(8).uint32(message.start); - } - if (message.end !== 0) { - writer.uint32(16).uint32(message.end); - } - return writer; - }, - - decode(input: _m0.Reader | Uint8Array, length?: number): XMINPartitionRange { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseXMINPartitionRange(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - if (tag !== 8) { - break; - } - - message.start = reader.uint32(); - continue; - case 2: - if (tag !== 16) { - break; - } - - message.end = reader.uint32(); - continue; - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skipType(tag & 7); - } - return message; - }, - - fromJSON(object: any): XMINPartitionRange { - return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; - }, - - toJSON(message: XMINPartitionRange): unknown { - const obj: any = {}; - if (message.start !== 0) { - obj.start = Math.round(message.start); - } - if (message.end !== 0) { - obj.end = Math.round(message.end); - } - return obj; - }, - - create, I>>(base?: I): XMINPartitionRange { - return XMINPartitionRange.fromPartial(base ?? ({} as any)); - }, - fromPartial, I>>(object: I): XMINPartitionRange { - const message = createBaseXMINPartitionRange(); - message.start = object.start ?? 0; - message.end = object.end ?? 0; - return message; - }, -}; - function createBasePartitionRange(): PartitionRange { - return { intRange: undefined, timestampRange: undefined, tidRange: undefined, xminRange: undefined }; + return { intRange: undefined, timestampRange: undefined, tidRange: undefined }; } export const PartitionRange = { @@ -4522,9 +4702,6 @@ export const PartitionRange = { if (message.tidRange !== undefined) { TIDPartitionRange.encode(message.tidRange, writer.uint32(26).fork()).ldelim(); } - if (message.xminRange !== undefined) { - XMINPartitionRange.encode(message.xminRange, writer.uint32(34).fork()).ldelim(); - } return writer; }, @@ -4556,13 +4733,6 @@ export const PartitionRange = { message.tidRange = TIDPartitionRange.decode(reader, reader.uint32()); continue; - case 4: - if (tag !== 34) { - break; - } - - message.xminRange = XMINPartitionRange.decode(reader, reader.uint32()); - continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -4579,7 +4749,6 @@ export const PartitionRange = { ? TimestampPartitionRange.fromJSON(object.timestampRange) : undefined, tidRange: isSet(object.tidRange) ? TIDPartitionRange.fromJSON(object.tidRange) : undefined, - xminRange: isSet(object.xminRange) ? 
XMINPartitionRange.fromJSON(object.xminRange) : undefined, }; }, @@ -4594,9 +4763,6 @@ export const PartitionRange = { if (message.tidRange !== undefined) { obj.tidRange = TIDPartitionRange.toJSON(message.tidRange); } - if (message.xminRange !== undefined) { - obj.xminRange = XMINPartitionRange.toJSON(message.xminRange); - } return obj; }, @@ -4614,9 +4780,6 @@ export const PartitionRange = { message.tidRange = (object.tidRange !== undefined && object.tidRange !== null) ? TIDPartitionRange.fromPartial(object.tidRange) : undefined; - message.xminRange = (object.xminRange !== undefined && object.xminRange !== null) - ? XMINPartitionRange.fromPartial(object.xminRange) - : undefined; return message; }, }; @@ -4715,6 +4878,7 @@ function createBaseQRepConfig(): QRepConfig { writeMode: undefined, stagingPath: "", numRowsPerPartition: 0, + setupWatermarkTableOnDestination: false, }; } @@ -4768,6 +4932,9 @@ export const QRepConfig = { if (message.numRowsPerPartition !== 0) { writer.uint32(128).uint32(message.numRowsPerPartition); } + if (message.setupWatermarkTableOnDestination === true) { + writer.uint32(136).bool(message.setupWatermarkTableOnDestination); + } return writer; }, @@ -4890,6 +5057,13 @@ export const QRepConfig = { message.numRowsPerPartition = reader.uint32(); continue; + case 17: + if (tag !== 136) { + break; + } + + message.setupWatermarkTableOnDestination = reader.bool(); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -4919,6 +5093,9 @@ export const QRepConfig = { writeMode: isSet(object.writeMode) ? QRepWriteMode.fromJSON(object.writeMode) : undefined, stagingPath: isSet(object.stagingPath) ? String(object.stagingPath) : "", numRowsPerPartition: isSet(object.numRowsPerPartition) ? Number(object.numRowsPerPartition) : 0, + setupWatermarkTableOnDestination: isSet(object.setupWatermarkTableOnDestination) + ? 
Boolean(object.setupWatermarkTableOnDestination) + : false, }; }, @@ -4972,6 +5149,9 @@ export const QRepConfig = { if (message.numRowsPerPartition !== 0) { obj.numRowsPerPartition = Math.round(message.numRowsPerPartition); } + if (message.setupWatermarkTableOnDestination === true) { + obj.setupWatermarkTableOnDestination = message.setupWatermarkTableOnDestination; + } return obj; }, @@ -5002,6 +5182,7 @@ export const QRepConfig = { : undefined; message.stagingPath = object.stagingPath ?? ""; message.numRowsPerPartition = object.numRowsPerPartition ?? 0; + message.setupWatermarkTableOnDestination = object.setupWatermarkTableOnDestination ?? false; return message; }, }; @@ -5362,7 +5543,7 @@ export const DeltaAddedColumn = { }; function createBaseTableSchemaDelta(): TableSchemaDelta { - return { srcTableName: "", dstTableName: "", addedColumns: [], droppedColumns: [] }; + return { srcTableName: "", dstTableName: "", addedColumns: [] }; } export const TableSchemaDelta = { @@ -5376,9 +5557,6 @@ export const TableSchemaDelta = { for (const v of message.addedColumns) { DeltaAddedColumn.encode(v!, writer.uint32(26).fork()).ldelim(); } - for (const v of message.droppedColumns) { - writer.uint32(34).string(v!); - } return writer; }, @@ -5410,13 +5588,6 @@ export const TableSchemaDelta = { message.addedColumns.push(DeltaAddedColumn.decode(reader, reader.uint32())); continue; - case 4: - if (tag !== 34) { - break; - } - - message.droppedColumns.push(reader.string()); - continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -5433,7 +5604,6 @@ export const TableSchemaDelta = { addedColumns: Array.isArray(object?.addedColumns) ? object.addedColumns.map((e: any) => DeltaAddedColumn.fromJSON(e)) : [], - droppedColumns: Array.isArray(object?.droppedColumns) ? 
object.droppedColumns.map((e: any) => String(e)) : [], }; }, @@ -5448,9 +5618,6 @@ export const TableSchemaDelta = { if (message.addedColumns?.length) { obj.addedColumns = message.addedColumns.map((e) => DeltaAddedColumn.toJSON(e)); } - if (message.droppedColumns?.length) { - obj.droppedColumns = message.droppedColumns; - } return obj; }, @@ -5462,13 +5629,12 @@ export const TableSchemaDelta = { message.srcTableName = object.srcTableName ?? ""; message.dstTableName = object.dstTableName ?? ""; message.addedColumns = object.addedColumns?.map((e) => DeltaAddedColumn.fromPartial(e)) || []; - message.droppedColumns = object.droppedColumns?.map((e) => e) || []; return message; }, }; function createBaseReplayTableSchemaDeltaInput(): ReplayTableSchemaDeltaInput { - return { flowConnectionConfigs: undefined, tableSchemaDelta: undefined }; + return { flowConnectionConfigs: undefined, tableSchemaDeltas: [] }; } export const ReplayTableSchemaDeltaInput = { @@ -5476,8 +5642,8 @@ export const ReplayTableSchemaDeltaInput = { if (message.flowConnectionConfigs !== undefined) { FlowConnectionConfigs.encode(message.flowConnectionConfigs, writer.uint32(10).fork()).ldelim(); } - if (message.tableSchemaDelta !== undefined) { - TableSchemaDelta.encode(message.tableSchemaDelta, writer.uint32(18).fork()).ldelim(); + for (const v of message.tableSchemaDeltas) { + TableSchemaDelta.encode(v!, writer.uint32(18).fork()).ldelim(); } return writer; }, @@ -5501,7 +5667,7 @@ export const ReplayTableSchemaDeltaInput = { break; } - message.tableSchemaDelta = TableSchemaDelta.decode(reader, reader.uint32()); + message.tableSchemaDeltas.push(TableSchemaDelta.decode(reader, reader.uint32())); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -5517,7 +5683,9 @@ export const ReplayTableSchemaDeltaInput = { flowConnectionConfigs: isSet(object.flowConnectionConfigs) ? FlowConnectionConfigs.fromJSON(object.flowConnectionConfigs) : undefined, - tableSchemaDelta: isSet(object.tableSchemaDelta) ? 
TableSchemaDelta.fromJSON(object.tableSchemaDelta) : undefined, + tableSchemaDeltas: Array.isArray(object?.tableSchemaDeltas) + ? object.tableSchemaDeltas.map((e: any) => TableSchemaDelta.fromJSON(e)) + : [], }; }, @@ -5526,8 +5694,8 @@ export const ReplayTableSchemaDeltaInput = { if (message.flowConnectionConfigs !== undefined) { obj.flowConnectionConfigs = FlowConnectionConfigs.toJSON(message.flowConnectionConfigs); } - if (message.tableSchemaDelta !== undefined) { - obj.tableSchemaDelta = TableSchemaDelta.toJSON(message.tableSchemaDelta); + if (message.tableSchemaDeltas?.length) { + obj.tableSchemaDeltas = message.tableSchemaDeltas.map((e) => TableSchemaDelta.toJSON(e)); } return obj; }, @@ -5541,9 +5709,7 @@ export const ReplayTableSchemaDeltaInput = { (object.flowConnectionConfigs !== undefined && object.flowConnectionConfigs !== null) ? FlowConnectionConfigs.fromPartial(object.flowConnectionConfigs) : undefined; - message.tableSchemaDelta = (object.tableSchemaDelta !== undefined && object.tableSchemaDelta !== null) - ? TableSchemaDelta.fromPartial(object.tableSchemaDelta) - : undefined; + message.tableSchemaDeltas = object.tableSchemaDeltas?.map((e) => TableSchemaDelta.fromPartial(e)) || []; return message; }, }; diff --git a/ui/grpc_generated/google/api/annotations.ts b/ui/grpc_generated/google/api/annotations.ts new file mode 100644 index 000000000..c2161053d --- /dev/null +++ b/ui/grpc_generated/google/api/annotations.ts @@ -0,0 +1,3 @@ +/* eslint-disable */ + +export const protobufPackage = "google.api"; diff --git a/ui/grpc_generated/google/api/http.ts b/ui/grpc_generated/google/api/http.ts new file mode 100644 index 000000000..339db6c54 --- /dev/null +++ b/ui/grpc_generated/google/api/http.ts @@ -0,0 +1,745 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. 
It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parameters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * # gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. 
+ * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. + * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | + * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: + * "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: + * "123456")` + * + * ## Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They + * are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL + * query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP + * request body, all + * fields are passed via URL path and URL query parameters. + * + * ### Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. 
The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * ## Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. 
You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * Example: + * + * http: + * rules: + * # Selects a gRPC method and applies HttpRule to it. + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * ## Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. 
However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRule { + /** + * Selects a method to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax + * details. + */ + selector: string; + /** + * Maps to HTTP GET. Used for listing and getting information about + * resources. + */ + get?: + | string + | undefined; + /** Maps to HTTP PUT. Used for replacing a resource. */ + put?: + | string + | undefined; + /** Maps to HTTP POST. Used for creating a resource or performing an action. */ + post?: + | string + | undefined; + /** Maps to HTTP DELETE. Used for deleting a resource. */ + delete?: + | string + | undefined; + /** Maps to HTTP PATCH. Used for updating a resource. */ + patch?: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom?: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP request + * body, or `*` for mapping all request fields not captured by the path + * pattern to the HTTP body, or omitted for not having any HTTP request body. + * + * NOTE: the referred field must be present at the top-level of the request + * message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * response body. When omitted, the entire response message will be used + * as the HTTP response body. + * + * NOTE: The referred field must be present at the top-level of the response + * message type. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. 
Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.rules.push(HttpRule.decode(reader, reader.uint32())); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.fullyDecodeReservedExpansion = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? 
Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules?.length) { + obj.rules = message.rules.map((e) => HttpRule.toJSON(e)); + } + if (message.fullyDecodeReservedExpansion === true) { + obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; + } + return obj; + }, + + create, I>>(base?: I): Http { + return Http.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return 
writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.selector = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.get = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.put = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.post = reader.string(); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.delete = reader.string(); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.patch = reader.string(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.body = reader.string(); + continue; + case 12: + if (tag !== 98) { + break; + } + + message.responseBody = reader.string(); + continue; + case 11: + if (tag !== 90) { + break; + } + + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? 
CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + if (message.selector !== "") { + obj.selector = message.selector; + } + if (message.get !== undefined) { + obj.get = message.get; + } + if (message.put !== undefined) { + obj.put = message.put; + } + if (message.post !== undefined) { + obj.post = message.post; + } + if (message.delete !== undefined) { + obj.delete = message.delete; + } + if (message.patch !== undefined) { + obj.patch = message.patch; + } + if (message.custom !== undefined) { + obj.custom = CustomHttpPattern.toJSON(message.custom); + } + if (message.body !== "") { + obj.body = message.body; + } + if (message.responseBody !== "") { + obj.responseBody = message.responseBody; + } + if (message.additionalBindings?.length) { + obj.additionalBindings = message.additionalBindings.map((e) => HttpRule.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): HttpRule { + return HttpRule.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.kind = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.path = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + if (message.kind !== "") { + obj.kind = message.kind; + } + if (message.path !== "") { + obj.path = message.path; + } + return obj; + }, + + create, I>>(base?: I): CustomHttpPattern { + return CustomHttpPattern.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? 
""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ui/grpc_generated/google/protobuf/descriptor.ts b/ui/grpc_generated/google/protobuf/descriptor.ts new file mode 100644 index 000000000..0ebf9063a --- /dev/null +++ b/ui/grpc_generated/google/protobuf/descriptor.ts @@ -0,0 +1,4831 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. 
+ * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2", "proto3", and "editions". + * + * If `edition` is present, this value must be "editions". + */ + syntax: string; + /** The edition of the proto file, which is an opaque string. */ + edition: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. 
If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. 
+ * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + * + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + */ + mapEntry: boolean; + /** + * Enable the legacy handling of JSON field name conflicts. This lowercases + * and strips underscored from the fields before comparison in proto3 only. + * The new behavior takes `json_name` into account and applies to proto2 as + * well. + * + * This should only be used as a temporary measure against broken builds due + * to the change in behavior for JSON field name conflicts. + * + * TODO(b/261750190) This is legacy behavior we plan to remove once downstream + * teams have had time to migrate. + * + * @deprecated + */ + deprecatedLegacyJsonFieldConflicts: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. + */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. 
However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. + * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + * + * As of May 2022, lazy verifies the contents of the byte stream during + * parsing. An invalid byte stream will cause the overall parsing to fail. + */ + lazy: boolean; + /** + * unverified_lazy does no correctness checks on the byte stream. This should + * only be used where lazy with verification is prohibitive for performance + * reasons. + */ + unverifiedLazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. 
*/ + weak: boolean; + /** + * Indicate that the field value should not be printed out when using debug + * formats, e.g. when the field contains sensitive credentials. + */ + debugRedact: boolean; + retention: FieldOptions_OptionRetention; + target: FieldOptions_OptionTargetType; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. */ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. 
*/ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * If set to RETENTION_SOURCE, the option will be omitted from the binary. + * Note: as of January 2023, support for this is in progress and does not yet + * have an effect (b/264593489). 
+ */ +export enum FieldOptions_OptionRetention { + RETENTION_UNKNOWN = 0, + RETENTION_RUNTIME = 1, + RETENTION_SOURCE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_OptionRetentionFromJSON(object: any): FieldOptions_OptionRetention { + switch (object) { + case 0: + case "RETENTION_UNKNOWN": + return FieldOptions_OptionRetention.RETENTION_UNKNOWN; + case 1: + case "RETENTION_RUNTIME": + return FieldOptions_OptionRetention.RETENTION_RUNTIME; + case 2: + case "RETENTION_SOURCE": + return FieldOptions_OptionRetention.RETENTION_SOURCE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_OptionRetention.UNRECOGNIZED; + } +} + +export function fieldOptions_OptionRetentionToJSON(object: FieldOptions_OptionRetention): string { + switch (object) { + case FieldOptions_OptionRetention.RETENTION_UNKNOWN: + return "RETENTION_UNKNOWN"; + case FieldOptions_OptionRetention.RETENTION_RUNTIME: + return "RETENTION_RUNTIME"; + case FieldOptions_OptionRetention.RETENTION_SOURCE: + return "RETENTION_SOURCE"; + case FieldOptions_OptionRetention.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * This indicates the types of entities that the field may apply to when used + * as an option. If it is unset, then the field may be freely used as an + * option on any kind of entity. Note: as of January 2023, support for this is + * in progress and does not yet have an effect (b/264593489). 
+ */ +export enum FieldOptions_OptionTargetType { + TARGET_TYPE_UNKNOWN = 0, + TARGET_TYPE_FILE = 1, + TARGET_TYPE_EXTENSION_RANGE = 2, + TARGET_TYPE_MESSAGE = 3, + TARGET_TYPE_FIELD = 4, + TARGET_TYPE_ONEOF = 5, + TARGET_TYPE_ENUM = 6, + TARGET_TYPE_ENUM_ENTRY = 7, + TARGET_TYPE_SERVICE = 8, + TARGET_TYPE_METHOD = 9, + UNRECOGNIZED = -1, +} + +export function fieldOptions_OptionTargetTypeFromJSON(object: any): FieldOptions_OptionTargetType { + switch (object) { + case 0: + case "TARGET_TYPE_UNKNOWN": + return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN; + case 1: + case "TARGET_TYPE_FILE": + return FieldOptions_OptionTargetType.TARGET_TYPE_FILE; + case 2: + case "TARGET_TYPE_EXTENSION_RANGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE; + case 3: + case "TARGET_TYPE_MESSAGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE; + case 4: + case "TARGET_TYPE_FIELD": + return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD; + case 5: + case "TARGET_TYPE_ONEOF": + return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF; + case 6: + case "TARGET_TYPE_ENUM": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM; + case 7: + case "TARGET_TYPE_ENUM_ENTRY": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY; + case 8: + case "TARGET_TYPE_SERVICE": + return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE; + case 9: + case "TARGET_TYPE_METHOD": + return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_OptionTargetType.UNRECOGNIZED; + } +} + +export function fieldOptions_OptionTargetTypeToJSON(object: FieldOptions_OptionTargetType): string { + switch (object) { + case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN: + return "TARGET_TYPE_UNKNOWN"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FILE: + return "TARGET_TYPE_FILE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE: + return "TARGET_TYPE_EXTENSION_RANGE"; + case 
FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE: + return "TARGET_TYPE_MESSAGE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD: + return "TARGET_TYPE_FIELD"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF: + return "TARGET_TYPE_ONEOF"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM: + return "TARGET_TYPE_ENUM"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY: + return "TARGET_TYPE_ENUM_ENTRY"; + case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE: + return "TARGET_TYPE_SERVICE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD: + return "TARGET_TYPE_METHOD"; + case FieldOptions_OptionTargetType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** + * Enable the legacy handling of JSON field name conflicts. This lowercases + * and strips underscored from the fields before comparison in proto3 only. + * The new behavior takes `json_name` into account and applies to proto2 as + * well. + * TODO(b/261750190) Remove this legacy behavior once downstream teams have + * had time to migrate. + * + * @deprecated + */ + deprecatedLegacyJsonFieldConflicts: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + * "foo.(bar.baz).moo". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition occurs. + * For example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to moo. + * // + * // Another line attached to moo. + * optional double moo = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to moo or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified object. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; + semantic: GeneratedCodeInfo_Annotation_Semantic; +} + +/** + * Represents the identified object's effect on the element in the original + * .proto file. + */ +export enum GeneratedCodeInfo_Annotation_Semantic { + /** NONE - There is no effect or the effect is indescribable. */ + NONE = 0, + /** SET - The element is set or otherwise mutated. */ + SET = 1, + /** ALIAS - An alias to the element is returned. 
*/ + ALIAS = 2, + UNRECOGNIZED = -1, +} + +export function generatedCodeInfo_Annotation_SemanticFromJSON(object: any): GeneratedCodeInfo_Annotation_Semantic { + switch (object) { + case 0: + case "NONE": + return GeneratedCodeInfo_Annotation_Semantic.NONE; + case 1: + case "SET": + return GeneratedCodeInfo_Annotation_Semantic.SET; + case 2: + case "ALIAS": + return GeneratedCodeInfo_Annotation_Semantic.ALIAS; + case -1: + case "UNRECOGNIZED": + default: + return GeneratedCodeInfo_Annotation_Semantic.UNRECOGNIZED; + } +} + +export function generatedCodeInfo_Annotation_SemanticToJSON(object: GeneratedCodeInfo_Annotation_Semantic): string { + switch (object) { + case GeneratedCodeInfo_Annotation_Semantic.NONE: + return "NONE"; + case GeneratedCodeInfo_Annotation_Semantic.SET: + return "SET"; + case GeneratedCodeInfo_Annotation_Semantic.ALIAS: + return "ALIAS"; + case GeneratedCodeInfo_Annotation_Semantic.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file?.length) { + obj.file = message.file.map((e) => FileDescriptorProto.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FileDescriptorSet { + return FileDescriptorSet.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + edition: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + 
writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + if (message.edition !== "") { + writer.uint32(106).string(message.edition); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.package = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.dependency.push(reader.string()); + continue; + case 10: + if (tag === 80) { + message.publicDependency.push(reader.int32()); + + continue; + } + + if (tag === 82) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + + continue; + } + + break; + case 11: + if (tag === 88) { + message.weakDependency.push(reader.int32()); + + continue; + } + + if (tag === 90) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + + continue; + } + + break; + case 4: + if (tag !== 34) { + break; + } + + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.options = FileOptions.decode(reader, reader.uint32()); + continue; + case 9: + if (tag !== 74) { + break; + } + + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + continue; + case 12: + if (tag !== 98) { + break; + } + + message.syntax = reader.string(); + continue; + case 13: + if (tag !== 106) { + break; + } + + message.edition = reader.string(); + continue; + } + if 
((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? String(object.syntax) : "", + edition: isSet(object.edition) ? 
String(object.edition) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.package !== "") { + obj.package = message.package; + } + if (message.dependency?.length) { + obj.dependency = message.dependency; + } + if (message.publicDependency?.length) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + if (message.weakDependency?.length) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + if (message.messageType?.length) { + obj.messageType = message.messageType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.service?.length) { + obj.service = message.service.map((e) => ServiceDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = FileOptions.toJSON(message.options); + } + if (message.sourceCodeInfo !== undefined) { + obj.sourceCodeInfo = SourceCodeInfo.toJSON(message.sourceCodeInfo); + } + if (message.syntax !== "") { + obj.syntax = message.syntax; + } + if (message.edition !== "") { + obj.edition = message.edition; + } + return obj; + }, + + create, I>>(base?: I): FileDescriptorProto { + return FileDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + message.edition = object.edition ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, 
writer.uint32(42).fork()).ldelim(); + } + for (const v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.options = MessageOptions.decode(reader, reader.uint32()); + continue; + case 9: + if (tag !== 74) { + break; + } + + 
message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + continue; + case 10: + if (tag !== 82) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? 
object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.field?.length) { + obj.field = message.field.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.nestedType?.length) { + obj.nestedType = message.nestedType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.extensionRange?.length) { + obj.extensionRange = message.extensionRange.map((e) => DescriptorProto_ExtensionRange.toJSON(e)); + } + if (message.oneofDecl?.length) { + obj.oneofDecl = message.oneofDecl.map((e) => OneofDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = MessageOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => DescriptorProto_ReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto { + return DescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? 
""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + if (message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.options !== undefined) { + obj.options = ExtensionRangeOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto_ExtensionRange { + return DescriptorProto_ExtensionRange.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? 
ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + if (message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto_ReservedRange { + return DescriptorProto_ReservedRange.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): ExtensionRangeOptions { + return ExtensionRangeOptions.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.number = reader.int32(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.label = reader.int32() as any; + continue; + case 5: + if (tag !== 40) { + break; + } + + message.type = reader.int32() as any; + continue; + case 6: + if (tag !== 50) { + break; + } + + message.typeName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.extendee = reader.string(); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.defaultValue = reader.string(); + continue; + case 9: + if (tag !== 72) { + break; + } + + message.oneofIndex = reader.int32(); + continue; + case 10: + if (tag !== 82) { + break; + } + + message.jsonName = reader.string(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.options = FieldOptions.decode(reader, reader.uint32()); + continue; + case 17: + if (tag !== 136) { + break; + } + + message.proto3Optional = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? 
Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.label !== 1) { + obj.label = fieldDescriptorProto_LabelToJSON(message.label); + } + if (message.type !== 1) { + obj.type = fieldDescriptorProto_TypeToJSON(message.type); + } + if (message.typeName !== "") { + obj.typeName = message.typeName; + } + if (message.extendee !== "") { + obj.extendee = message.extendee; + } + if (message.defaultValue !== "") { + obj.defaultValue = message.defaultValue; + } + if (message.oneofIndex !== 0) { + obj.oneofIndex = Math.round(message.oneofIndex); + } + if (message.jsonName !== "") { + obj.jsonName = message.jsonName; + } + if (message.options !== undefined) { + obj.options = FieldOptions.toJSON(message.options); + } + if (message.proto3Optional === true) { + obj.proto3Optional = message.proto3Optional; + } + return obj; + }, + + create, I>>(base?: I): FieldDescriptorProto { + return FieldDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.options = OneofOptions.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.options !== undefined) { + obj.options = OneofOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): OneofDescriptorProto { + return OneofDescriptorProto.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.options = EnumOptions.decode(reader, reader.uint32()); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? 
object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.value?.length) { + obj.value = message.value.map((e) => EnumValueDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = EnumOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => EnumDescriptorProto_EnumReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + return obj; + }, + + create, I>>(base?: I): EnumDescriptorProto { + return EnumDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) || + []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + if (message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create, I>>( + base?: I, + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.number = reader.int32(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.options = EnumValueOptions.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? 
EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.options !== undefined) { + obj.options = EnumValueOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): EnumValueDescriptorProto { + return EnumValueDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.options = ServiceOptions.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.method?.length) { + obj.method = message.method.map((e) => MethodDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = ServiceOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): ServiceDescriptorProto { + return ServiceDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.inputType = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.outputType = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.options = MethodOptions.decode(reader, reader.uint32()); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.clientStreaming = reader.bool(); + continue; + case 6: + if (tag !== 48) { + break; + } + + message.serverStreaming = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? 
Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.inputType !== "") { + obj.inputType = message.inputType; + } + if (message.outputType !== "") { + obj.outputType = message.outputType; + } + if (message.options !== undefined) { + obj.options = MethodOptions.toJSON(message.options); + } + if (message.clientStreaming === true) { + obj.clientStreaming = message.clientStreaming; + } + if (message.serverStreaming === true) { + obj.serverStreaming = message.serverStreaming; + } + return obj; + }, + + create, I>>(base?: I): MethodDescriptorProto { + return MethodDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? ""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? 
false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if (message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + 
writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.javaPackage = reader.string(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.javaOuterClassname = reader.string(); + continue; + case 10: + if (tag !== 80) { + break; + } + + message.javaMultipleFiles = reader.bool(); + continue; + case 20: + if (tag !== 160) { + break; + } + + message.javaGenerateEqualsAndHash = reader.bool(); + continue; + case 27: + if (tag !== 216) { + break; + } + + message.javaStringCheckUtf8 = reader.bool(); + continue; + case 9: + if (tag !== 72) { + break; + } + + message.optimizeFor = reader.int32() as any; + continue; + case 11: + if (tag !== 90) { + break; + } + + message.goPackage = reader.string(); + continue; + case 16: + if (tag !== 128) { + break; + } + + message.ccGenericServices = reader.bool(); + continue; + case 17: + if (tag !== 136) { + break; + } + + message.javaGenericServices = reader.bool(); + continue; + case 18: + if (tag !== 144) { + break; + } + + message.pyGenericServices = reader.bool(); + continue; + case 42: + if (tag !== 336) { + break; + } + + message.phpGenericServices = reader.bool(); + continue; + case 23: + if (tag !== 184) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 31: + if (tag !== 248) { + break; + } + + message.ccEnableArenas = reader.bool(); + continue; + case 36: + if (tag !== 290) { + break; + } + + message.objcClassPrefix = reader.string(); + continue; + case 37: + if (tag !== 298) { + break; + } + + message.csharpNamespace = reader.string(); + continue; + case 39: + if (tag !== 314) { + break; + } + + message.swiftPrefix = reader.string(); + continue; + case 40: + if (tag !== 322) { + break; + } + + message.phpClassPrefix = reader.string(); + continue; + case 41: + if (tag !== 330) { + break; + } + + message.phpNamespace = 
reader.string(); + continue; + case 44: + if (tag !== 354) { + break; + } + + message.phpMetadataNamespace = reader.string(); + continue; + case 45: + if (tag !== 362) { + break; + } + + message.rubyPackage = reader.string(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? 
String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + if (message.javaPackage !== "") { + obj.javaPackage = message.javaPackage; + } + if (message.javaOuterClassname !== "") { + obj.javaOuterClassname = message.javaOuterClassname; + } + if (message.javaMultipleFiles === true) { + obj.javaMultipleFiles = message.javaMultipleFiles; + } + if (message.javaGenerateEqualsAndHash === true) { + obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + } + if (message.javaStringCheckUtf8 === true) { + obj.javaStringCheckUtf8 = message.javaStringCheckUtf8; + } + if (message.optimizeFor !== 1) { + obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor); + } + if (message.goPackage !== "") { + obj.goPackage = message.goPackage; + } + if (message.ccGenericServices === true) { + obj.ccGenericServices = message.ccGenericServices; + } + if (message.javaGenericServices === true) { + obj.javaGenericServices = message.javaGenericServices; + } + if (message.pyGenericServices === true) { + obj.pyGenericServices = message.pyGenericServices; + } + if (message.phpGenericServices === true) { + obj.phpGenericServices = message.phpGenericServices; + } + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.ccEnableArenas === true) { + obj.ccEnableArenas = message.ccEnableArenas; + } + if 
(message.objcClassPrefix !== "") { + obj.objcClassPrefix = message.objcClassPrefix; + } + if (message.csharpNamespace !== "") { + obj.csharpNamespace = message.csharpNamespace; + } + if (message.swiftPrefix !== "") { + obj.swiftPrefix = message.swiftPrefix; + } + if (message.phpClassPrefix !== "") { + obj.phpClassPrefix = message.phpClassPrefix; + } + if (message.phpNamespace !== "") { + obj.phpNamespace = message.phpNamespace; + } + if (message.phpMetadataNamespace !== "") { + obj.phpMetadataNamespace = message.phpMetadataNamespace; + } + if (message.rubyPackage !== "") { + obj.rubyPackage = message.rubyPackage; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FileOptions { + return FileOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? 
""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? ""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + deprecatedLegacyJsonFieldConflicts: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + if (message.deprecatedLegacyJsonFieldConflicts === true) { + writer.uint32(88).bool(message.deprecatedLegacyJsonFieldConflicts); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.messageSetWireFormat = reader.bool(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.noStandardDescriptorAccessor = reader.bool(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 7: + if (tag !== 56) { + break; + } + + message.mapEntry = reader.bool(); + continue; + case 11: + if (tag !== 88) { + break; + } + + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? Boolean(object.deprecatedLegacyJsonFieldConflicts) + : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + if (message.messageSetWireFormat === true) { + obj.messageSetWireFormat = message.messageSetWireFormat; + } + if (message.noStandardDescriptorAccessor === true) { + obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + } + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.mapEntry === true) { + obj.mapEntry = message.mapEntry; + } + if (message.deprecatedLegacyJsonFieldConflicts === true) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): MessageOptions { + return MessageOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? false; + message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { + ctype: 0, + packed: false, + jstype: 0, + lazy: false, + unverifiedLazy: false, + deprecated: false, + weak: false, + debugRedact: false, + retention: 0, + target: 0, + uninterpretedOption: [], + }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.unverifiedLazy === true) { + writer.uint32(120).bool(message.unverifiedLazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + if (message.debugRedact === true) { + writer.uint32(128).bool(message.debugRedact); + } + if (message.retention !== 0) { + writer.uint32(136).int32(message.retention); + } + if (message.target !== 0) { + writer.uint32(144).int32(message.target); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.ctype = reader.int32() as any; + continue; + case 2: + if (tag !== 16) { + break; + } + + message.packed = reader.bool(); + continue; + case 6: + if (tag !== 48) { + break; + } + + message.jstype = reader.int32() as any; + continue; + case 5: + if (tag !== 40) { + break; + } + + message.lazy = reader.bool(); + continue; + case 15: + if (tag !== 120) { + break; + } + + message.unverifiedLazy = reader.bool(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 10: + if (tag !== 80) { + break; + } + + message.weak = reader.bool(); + continue; + case 16: + if (tag !== 128) { + break; + } + + message.debugRedact = reader.bool(); + continue; + case 17: + if (tag !== 136) { + break; + } + + message.retention = reader.int32() as any; + continue; + case 18: + if (tag !== 144) { + break; + } + + message.target = reader.int32() as any; + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + debugRedact: isSet(object.debugRedact) ? 
Boolean(object.debugRedact) : false, + retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0, + target: isSet(object.target) ? fieldOptions_OptionTargetTypeFromJSON(object.target) : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + if (message.ctype !== 0) { + obj.ctype = fieldOptions_CTypeToJSON(message.ctype); + } + if (message.packed === true) { + obj.packed = message.packed; + } + if (message.jstype !== 0) { + obj.jstype = fieldOptions_JSTypeToJSON(message.jstype); + } + if (message.lazy === true) { + obj.lazy = message.lazy; + } + if (message.unverifiedLazy === true) { + obj.unverifiedLazy = message.unverifiedLazy; + } + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.weak === true) { + obj.weak = message.weak; + } + if (message.debugRedact === true) { + obj.debugRedact = message.debugRedact; + } + if (message.retention !== 0) { + obj.retention = fieldOptions_OptionRetentionToJSON(message.retention); + } + if (message.target !== 0) { + obj.target = fieldOptions_OptionTargetTypeToJSON(message.target); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FieldOptions { + return FieldOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.unverifiedLazy = object.unverifiedLazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? 
false; + message.debugRedact = object.debugRedact ?? false; + message.retention = object.retention ?? 0; + message.target = object.target ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): OneofOptions { + return OneofOptions.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, deprecatedLegacyJsonFieldConflicts: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.deprecatedLegacyJsonFieldConflicts === true) { + writer.uint32(48).bool(message.deprecatedLegacyJsonFieldConflicts); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + if (tag !== 16) { + break; + } + + message.allowAlias = reader.bool(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 6: + if (tag !== 48) { + break; + } + + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? Boolean(object.deprecatedLegacyJsonFieldConflicts) + : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + if (message.allowAlias === true) { + obj.allowAlias = message.allowAlias; + } + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.deprecatedLegacyJsonFieldConflicts === true) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): EnumOptions { + return EnumOptions.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): EnumValueOptions { + return EnumValueOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): ServiceOptions { + return ServiceOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 34: + if (tag !== 272) { + break; + } + + message.idempotencyLevel = reader.int32() as any; + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.idempotencyLevel !== 0) { + obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): MethodOptions { + return MethodOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(0), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + 
writer.uint32(66).string(message.aggregateValue); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + if (tag !== 18) { + break; + } + + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.identifierValue = reader.string(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.positiveIntValue = longToNumber(reader.uint64() as Long); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.negativeIntValue = longToNumber(reader.int64() as Long); + continue; + case 6: + if (tag !== 49) { + break; + } + + message.doubleValue = reader.double(); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.stringValue = reader.bytes(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.aggregateValue = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? 
bytesFromBase64(object.stringValue) : new Uint8Array(0), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name?.length) { + obj.name = message.name.map((e) => UninterpretedOption_NamePart.toJSON(e)); + } + if (message.identifierValue !== "") { + obj.identifierValue = message.identifierValue; + } + if (message.positiveIntValue !== 0) { + obj.positiveIntValue = Math.round(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + obj.negativeIntValue = Math.round(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + obj.doubleValue = message.doubleValue; + } + if (message.stringValue.length !== 0) { + obj.stringValue = base64FromBytes(message.stringValue); + } + if (message.aggregateValue !== "") { + obj.aggregateValue = message.aggregateValue; + } + return obj; + }, + + create, I>>(base?: I): UninterpretedOption { + return UninterpretedOption.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(0); + message.aggregateValue = object.aggregateValue ?? 
""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.namePart = reader.string(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.isExtension = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + if (message.namePart !== "") { + obj.namePart = message.namePart; + } + if (message.isExtension === true) { + obj.isExtension = message.isExtension; + } + return obj; + }, + + create, I>>(base?: I): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? 
""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location?.length) { + obj.location = message.location.map((e) => SourceCodeInfo_Location.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): SourceCodeInfo { + return SourceCodeInfo.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + case 2: + if (tag === 16) { + message.span.push(reader.int32()); + + continue; + } + + if (tag === 18) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + + continue; + } + + break; + case 3: + if (tag !== 26) { + break; + } + + message.leadingComments = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.trailingComments = reader.string(); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.leadingDetachedComments.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.span?.length) { + obj.span = message.span.map((e) => Math.round(e)); + } + if (message.leadingComments !== "") { + obj.leadingComments = message.leadingComments; + } + if (message.trailingComments !== "") { + obj.trailingComments = message.trailingComments; + } + if (message.leadingDetachedComments?.length) { + obj.leadingDetachedComments = message.leadingDetachedComments; + } + return obj; + }, + + create, I>>(base?: I): SourceCodeInfo_Location { + return SourceCodeInfo_Location.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation?.length) { + obj.annotation = message.annotation.map((e) => GeneratedCodeInfo_Annotation.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): GeneratedCodeInfo { + return GeneratedCodeInfo.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0, semantic: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + if (message.semantic !== 0) { + writer.uint32(40).int32(message.semantic); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: 
number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + case 2: + if (tag !== 18) { + break; + } + + message.sourceFile = reader.string(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.begin = reader.int32(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.end = reader.int32(); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.semantic = reader.int32() as any; + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + semantic: isSet(object.semantic) ? 
generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.sourceFile !== "") { + obj.sourceFile = message.sourceFile; + } + if (message.begin !== 0) { + obj.begin = Math.round(message.begin); + } + if (message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.semantic !== 0) { + obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic); + } + return obj; + }, + + create, I>>(base?: I): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 0; + message.semantic = object.semantic ?? 
0; + return message; + }, +}; + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ui/grpc_generated/peers.ts b/ui/grpc_generated/peers.ts index 46db253f2..6d9bb4b30 100644 --- a/ui/grpc_generated/peers.ts +++ b/ui/grpc_generated/peers.ts @@ -12,6 +12,7 @@ export enum DBType { EVENTHUB = 4, S3 = 5, SQLSERVER = 6, + EVENTHUB_GROUP = 7, UNRECOGNIZED = -1, } @@ -38,6 +39,9 @@ export function dBTypeFromJSON(object: any): DBType { case 6: case "SQLSERVER": return DBType.SQLSERVER; + case 7: + case "EVENTHUB_GROUP": + return DBType.EVENTHUB_GROUP; case -1: case "UNRECOGNIZED": default: @@ -61,6 +65,8 @@ export function dBTypeToJSON(object: DBType): string { return "S3"; case DBType.SQLSERVER: return "SQLSERVER"; + case DBType.EVENTHUB_GROUP: + return "EVENTHUB_GROUP"; case DBType.UNRECOGNIZED: default: return "UNRECOGNIZED"; @@ -115,11 +121,37 @@ export interface EventHubConfig { namespace: string; resourceGroup: string; location: string; + metadataDb: + | PostgresConfig + | undefined; + /** if this is empty PeerDB uses `AZURE_SUBSCRIPTION_ID` environment variable. 
*/ + subscriptionId: string; + /** defaults to 3 */ + partitionCount: number; + /** defaults to 7 */ + messageRetentionInDays: number; +} + +export interface EventHubGroupConfig { + /** event hub peer name to event hub config */ + eventhubs: { [key: string]: EventHubConfig }; metadataDb: PostgresConfig | undefined; + unnestColumns: string[]; +} + +export interface EventHubGroupConfig_EventhubsEntry { + key: string; + value: EventHubConfig | undefined; } export interface S3Config { url: string; + accessKeyId?: string | undefined; + secretAccessKey?: string | undefined; + roleArn?: string | undefined; + region?: string | undefined; + endpoint?: string | undefined; + metadataDb: PostgresConfig | undefined; } export interface SqlServerConfig { @@ -140,6 +172,7 @@ export interface Peer { eventhubConfig?: EventHubConfig | undefined; s3Config?: S3Config | undefined; sqlserverConfig?: SqlServerConfig | undefined; + eventhubGroupConfig?: EventHubGroupConfig | undefined; } function createBaseSnowflakeConfig(): SnowflakeConfig { @@ -806,7 +839,15 @@ export const PostgresConfig = { }; function createBaseEventHubConfig(): EventHubConfig { - return { namespace: "", resourceGroup: "", location: "", metadataDb: undefined }; + return { + namespace: "", + resourceGroup: "", + location: "", + metadataDb: undefined, + subscriptionId: "", + partitionCount: 0, + messageRetentionInDays: 0, + }; } export const EventHubConfig = { @@ -823,6 +864,15 @@ export const EventHubConfig = { if (message.metadataDb !== undefined) { PostgresConfig.encode(message.metadataDb, writer.uint32(34).fork()).ldelim(); } + if (message.subscriptionId !== "") { + writer.uint32(42).string(message.subscriptionId); + } + if (message.partitionCount !== 0) { + writer.uint32(48).uint32(message.partitionCount); + } + if (message.messageRetentionInDays !== 0) { + writer.uint32(56).uint32(message.messageRetentionInDays); + } return writer; }, @@ -861,6 +911,27 @@ export const EventHubConfig = { message.metadataDb = 
PostgresConfig.decode(reader, reader.uint32()); continue; + case 5: + if (tag !== 42) { + break; + } + + message.subscriptionId = reader.string(); + continue; + case 6: + if (tag !== 48) { + break; + } + + message.partitionCount = reader.uint32(); + continue; + case 7: + if (tag !== 56) { + break; + } + + message.messageRetentionInDays = reader.uint32(); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -876,6 +947,9 @@ export const EventHubConfig = { resourceGroup: isSet(object.resourceGroup) ? String(object.resourceGroup) : "", location: isSet(object.location) ? String(object.location) : "", metadataDb: isSet(object.metadataDb) ? PostgresConfig.fromJSON(object.metadataDb) : undefined, + subscriptionId: isSet(object.subscriptionId) ? String(object.subscriptionId) : "", + partitionCount: isSet(object.partitionCount) ? Number(object.partitionCount) : 0, + messageRetentionInDays: isSet(object.messageRetentionInDays) ? Number(object.messageRetentionInDays) : 0, }; }, @@ -893,6 +967,15 @@ export const EventHubConfig = { if (message.metadataDb !== undefined) { obj.metadataDb = PostgresConfig.toJSON(message.metadataDb); } + if (message.subscriptionId !== "") { + obj.subscriptionId = message.subscriptionId; + } + if (message.partitionCount !== 0) { + obj.partitionCount = Math.round(message.partitionCount); + } + if (message.messageRetentionInDays !== 0) { + obj.messageRetentionInDays = Math.round(message.messageRetentionInDays); + } return obj; }, @@ -907,12 +990,216 @@ export const EventHubConfig = { message.metadataDb = (object.metadataDb !== undefined && object.metadataDb !== null) ? PostgresConfig.fromPartial(object.metadataDb) : undefined; + message.subscriptionId = object.subscriptionId ?? ""; + message.partitionCount = object.partitionCount ?? 0; + message.messageRetentionInDays = object.messageRetentionInDays ?? 
0; + return message; + }, +}; + +function createBaseEventHubGroupConfig(): EventHubGroupConfig { + return { eventhubs: {}, metadataDb: undefined, unnestColumns: [] }; +} + +export const EventHubGroupConfig = { + encode(message: EventHubGroupConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + Object.entries(message.eventhubs).forEach(([key, value]) => { + EventHubGroupConfig_EventhubsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).ldelim(); + }); + if (message.metadataDb !== undefined) { + PostgresConfig.encode(message.metadataDb, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.unnestColumns) { + writer.uint32(26).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventHubGroupConfig { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventHubGroupConfig(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + const entry1 = EventHubGroupConfig_EventhubsEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.eventhubs[entry1.key] = entry1.value; + } + continue; + case 2: + if (tag !== 18) { + break; + } + + message.metadataDb = PostgresConfig.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.unnestColumns.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventHubGroupConfig { + return { + eventhubs: isObject(object.eventhubs) + ? Object.entries(object.eventhubs).reduce<{ [key: string]: EventHubConfig }>((acc, [key, value]) => { + acc[key] = EventHubConfig.fromJSON(value); + return acc; + }, {}) + : {}, + metadataDb: isSet(object.metadataDb) ? 
PostgresConfig.fromJSON(object.metadataDb) : undefined, + unnestColumns: Array.isArray(object?.unnestColumns) ? object.unnestColumns.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EventHubGroupConfig): unknown { + const obj: any = {}; + if (message.eventhubs) { + const entries = Object.entries(message.eventhubs); + if (entries.length > 0) { + obj.eventhubs = {}; + entries.forEach(([k, v]) => { + obj.eventhubs[k] = EventHubConfig.toJSON(v); + }); + } + } + if (message.metadataDb !== undefined) { + obj.metadataDb = PostgresConfig.toJSON(message.metadataDb); + } + if (message.unnestColumns?.length) { + obj.unnestColumns = message.unnestColumns; + } + return obj; + }, + + create, I>>(base?: I): EventHubGroupConfig { + return EventHubGroupConfig.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EventHubGroupConfig { + const message = createBaseEventHubGroupConfig(); + message.eventhubs = Object.entries(object.eventhubs ?? {}).reduce<{ [key: string]: EventHubConfig }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = EventHubConfig.fromPartial(value); + } + return acc; + }, + {}, + ); + message.metadataDb = (object.metadataDb !== undefined && object.metadataDb !== null) + ? 
PostgresConfig.fromPartial(object.metadataDb) + : undefined; + message.unnestColumns = object.unnestColumns?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEventHubGroupConfig_EventhubsEntry(): EventHubGroupConfig_EventhubsEntry { + return { key: "", value: undefined }; +} + +export const EventHubGroupConfig_EventhubsEntry = { + encode(message: EventHubGroupConfig_EventhubsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + EventHubConfig.encode(message.value, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventHubGroupConfig_EventhubsEntry { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventHubGroupConfig_EventhubsEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value = EventHubConfig.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventHubGroupConfig_EventhubsEntry { + return { + key: isSet(object.key) ? String(object.key) : "", + value: isSet(object.value) ? 
EventHubConfig.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: EventHubGroupConfig_EventhubsEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = EventHubConfig.toJSON(message.value); + } + return obj; + }, + + create, I>>( + base?: I, + ): EventHubGroupConfig_EventhubsEntry { + return EventHubGroupConfig_EventhubsEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): EventHubGroupConfig_EventhubsEntry { + const message = createBaseEventHubGroupConfig_EventhubsEntry(); + message.key = object.key ?? ""; + message.value = (object.value !== undefined && object.value !== null) + ? EventHubConfig.fromPartial(object.value) + : undefined; return message; }, }; function createBaseS3Config(): S3Config { - return { url: "" }; + return { + url: "", + accessKeyId: undefined, + secretAccessKey: undefined, + roleArn: undefined, + region: undefined, + endpoint: undefined, + metadataDb: undefined, + }; } export const S3Config = { @@ -920,6 +1207,24 @@ export const S3Config = { if (message.url !== "") { writer.uint32(10).string(message.url); } + if (message.accessKeyId !== undefined) { + writer.uint32(18).string(message.accessKeyId); + } + if (message.secretAccessKey !== undefined) { + writer.uint32(26).string(message.secretAccessKey); + } + if (message.roleArn !== undefined) { + writer.uint32(34).string(message.roleArn); + } + if (message.region !== undefined) { + writer.uint32(42).string(message.region); + } + if (message.endpoint !== undefined) { + writer.uint32(50).string(message.endpoint); + } + if (message.metadataDb !== undefined) { + PostgresConfig.encode(message.metadataDb, writer.uint32(58).fork()).ldelim(); + } return writer; }, @@ -937,6 +1242,48 @@ export const S3Config = { message.url = reader.string(); continue; + case 2: + if (tag !== 18) { + break; + } + + message.accessKeyId = reader.string(); + continue; + case 3: + if (tag 
!== 26) { + break; + } + + message.secretAccessKey = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.roleArn = reader.string(); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.region = reader.string(); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.endpoint = reader.string(); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.metadataDb = PostgresConfig.decode(reader, reader.uint32()); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -947,7 +1294,15 @@ export const S3Config = { }, fromJSON(object: any): S3Config { - return { url: isSet(object.url) ? String(object.url) : "" }; + return { + url: isSet(object.url) ? String(object.url) : "", + accessKeyId: isSet(object.accessKeyId) ? String(object.accessKeyId) : undefined, + secretAccessKey: isSet(object.secretAccessKey) ? String(object.secretAccessKey) : undefined, + roleArn: isSet(object.roleArn) ? String(object.roleArn) : undefined, + region: isSet(object.region) ? String(object.region) : undefined, + endpoint: isSet(object.endpoint) ? String(object.endpoint) : undefined, + metadataDb: isSet(object.metadataDb) ? 
PostgresConfig.fromJSON(object.metadataDb) : undefined, + }; }, toJSON(message: S3Config): unknown { @@ -955,6 +1310,24 @@ export const S3Config = { if (message.url !== "") { obj.url = message.url; } + if (message.accessKeyId !== undefined) { + obj.accessKeyId = message.accessKeyId; + } + if (message.secretAccessKey !== undefined) { + obj.secretAccessKey = message.secretAccessKey; + } + if (message.roleArn !== undefined) { + obj.roleArn = message.roleArn; + } + if (message.region !== undefined) { + obj.region = message.region; + } + if (message.endpoint !== undefined) { + obj.endpoint = message.endpoint; + } + if (message.metadataDb !== undefined) { + obj.metadataDb = PostgresConfig.toJSON(message.metadataDb); + } return obj; }, @@ -964,6 +1337,14 @@ export const S3Config = { fromPartial, I>>(object: I): S3Config { const message = createBaseS3Config(); message.url = object.url ?? ""; + message.accessKeyId = object.accessKeyId ?? undefined; + message.secretAccessKey = object.secretAccessKey ?? undefined; + message.roleArn = object.roleArn ?? undefined; + message.region = object.region ?? undefined; + message.endpoint = object.endpoint ?? undefined; + message.metadataDb = (object.metadataDb !== undefined && object.metadataDb !== null) + ? 
PostgresConfig.fromPartial(object.metadataDb) + : undefined; return message; }, }; @@ -1098,6 +1479,7 @@ function createBasePeer(): Peer { eventhubConfig: undefined, s3Config: undefined, sqlserverConfig: undefined, + eventhubGroupConfig: undefined, }; } @@ -1130,6 +1512,9 @@ export const Peer = { if (message.sqlserverConfig !== undefined) { SqlServerConfig.encode(message.sqlserverConfig, writer.uint32(74).fork()).ldelim(); } + if (message.eventhubGroupConfig !== undefined) { + EventHubGroupConfig.encode(message.eventhubGroupConfig, writer.uint32(82).fork()).ldelim(); + } return writer; }, @@ -1203,6 +1588,13 @@ export const Peer = { message.sqlserverConfig = SqlServerConfig.decode(reader, reader.uint32()); continue; + case 10: + if (tag !== 82) { + break; + } + + message.eventhubGroupConfig = EventHubGroupConfig.decode(reader, reader.uint32()); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -1223,6 +1615,9 @@ export const Peer = { eventhubConfig: isSet(object.eventhubConfig) ? EventHubConfig.fromJSON(object.eventhubConfig) : undefined, s3Config: isSet(object.s3Config) ? S3Config.fromJSON(object.s3Config) : undefined, sqlserverConfig: isSet(object.sqlserverConfig) ? SqlServerConfig.fromJSON(object.sqlserverConfig) : undefined, + eventhubGroupConfig: isSet(object.eventhubGroupConfig) + ? EventHubGroupConfig.fromJSON(object.eventhubGroupConfig) + : undefined, }; }, @@ -1255,6 +1650,9 @@ export const Peer = { if (message.sqlserverConfig !== undefined) { obj.sqlserverConfig = SqlServerConfig.toJSON(message.sqlserverConfig); } + if (message.eventhubGroupConfig !== undefined) { + obj.eventhubGroupConfig = EventHubGroupConfig.toJSON(message.eventhubGroupConfig); + } return obj; }, @@ -1286,6 +1684,9 @@ export const Peer = { message.sqlserverConfig = (object.sqlserverConfig !== undefined && object.sqlserverConfig !== null) ? 
SqlServerConfig.fromPartial(object.sqlserverConfig) : undefined; + message.eventhubGroupConfig = (object.eventhubGroupConfig !== undefined && object.eventhubGroupConfig !== null) + ? EventHubGroupConfig.fromPartial(object.eventhubGroupConfig) + : undefined; return message; }, }; @@ -1332,6 +1733,10 @@ if (_m0.util.Long !== Long) { _m0.configure(); } +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + function isSet(value: any): boolean { return value !== null && value !== undefined; } diff --git a/ui/grpc_generated/route.ts b/ui/grpc_generated/route.ts index 387a418b0..ec1447205 100644 --- a/ui/grpc_generated/route.ts +++ b/ui/grpc_generated/route.ts @@ -11,14 +11,95 @@ import { ServiceError, UntypedServiceImplementation, } from "@grpc/grpc-js"; +import Long from "long"; import _m0 from "protobufjs/minimal"; import { FlowConnectionConfigs, QRepConfig } from "./flow"; +import { Timestamp } from "./google/protobuf/timestamp"; import { Peer } from "./peers"; export const protobufPackage = "peerdb_route"; +export enum ValidatePeerStatus { + CREATION_UNKNOWN = 0, + VALID = 1, + INVALID = 2, + UNRECOGNIZED = -1, +} + +export function validatePeerStatusFromJSON(object: any): ValidatePeerStatus { + switch (object) { + case 0: + case "CREATION_UNKNOWN": + return ValidatePeerStatus.CREATION_UNKNOWN; + case 1: + case "VALID": + return ValidatePeerStatus.VALID; + case 2: + case "INVALID": + return ValidatePeerStatus.INVALID; + case -1: + case "UNRECOGNIZED": + default: + return ValidatePeerStatus.UNRECOGNIZED; + } +} + +export function validatePeerStatusToJSON(object: ValidatePeerStatus): string { + switch (object) { + case ValidatePeerStatus.CREATION_UNKNOWN: + return "CREATION_UNKNOWN"; + case ValidatePeerStatus.VALID: + return "VALID"; + case ValidatePeerStatus.INVALID: + return "INVALID"; + case ValidatePeerStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum CreatePeerStatus { + VALIDATION_UNKNOWN 
= 0, + CREATED = 1, + FAILED = 2, + UNRECOGNIZED = -1, +} + +export function createPeerStatusFromJSON(object: any): CreatePeerStatus { + switch (object) { + case 0: + case "VALIDATION_UNKNOWN": + return CreatePeerStatus.VALIDATION_UNKNOWN; + case 1: + case "CREATED": + return CreatePeerStatus.CREATED; + case 2: + case "FAILED": + return CreatePeerStatus.FAILED; + case -1: + case "UNRECOGNIZED": + default: + return CreatePeerStatus.UNRECOGNIZED; + } +} + +export function createPeerStatusToJSON(object: CreatePeerStatus): string { + switch (object) { + case CreatePeerStatus.VALIDATION_UNKNOWN: + return "VALIDATION_UNKNOWN"; + case CreatePeerStatus.CREATED: + return "CREATED"; + case CreatePeerStatus.FAILED: + return "FAILED"; + case CreatePeerStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + export interface CreateCDCFlowRequest { connectionConfigs: FlowConnectionConfigs | undefined; + createCatalogEntry: boolean; } export interface CreateCDCFlowResponse { @@ -27,6 +108,7 @@ export interface CreateCDCFlowResponse { export interface CreateQRepFlowRequest { qrepConfig: QRepConfig | undefined; + createCatalogEntry: boolean; } export interface CreateQRepFlowResponse { @@ -45,15 +127,134 @@ export interface ShutdownResponse { errorMessage: string; } -export interface ListPeersRequest { +export interface ValidatePeerRequest { + peer: Peer | undefined; +} + +export interface CreatePeerRequest { + peer: Peer | undefined; +} + +export interface DropPeerRequest { + peerName: string; +} + +export interface DropPeerResponse { + ok: boolean; + errorMessage: string; +} + +export interface ValidatePeerResponse { + status: ValidatePeerStatus; + message: string; +} + +export interface CreatePeerResponse { + status: CreatePeerStatus; + message: string; +} + +export interface MirrorStatusRequest { + flowJobName: string; +} + +export interface PartitionStatus { + partitionId: string; + startTime: Date | undefined; + endTime: Date | undefined; + numRows: number; +} + 
+export interface QRepMirrorStatus { + config: + | QRepConfig + | undefined; + /** + * TODO make note to see if we are still in initial copy + * or if we are in the continuous streaming mode. + */ + partitions: PartitionStatus[]; +} + +export interface CDCSyncStatus { + startLsn: number; + endLsn: number; + numRows: number; + startTime: Date | undefined; + endTime: Date | undefined; +} + +export interface PeerSchemasResponse { + schemas: string[]; +} + +export interface SchemaTablesRequest { + peerName: string; + schemaName: string; +} + +export interface SchemaTablesResponse { + tables: string[]; +} + +export interface TableColumnsRequest { + peerName: string; + schemaName: string; + tableName: string; +} + +export interface TableColumnsResponse { + columns: string[]; +} + +export interface PostgresPeerActivityInfoRequest { + peerName: string; +} + +export interface SlotInfo { + slotName: string; + redoLSN: string; + restartLSN: string; + active: boolean; + lagInMb: number; +} + +export interface StatInfo { + pid: number; + waitEvent: string; + waitEventType: string; + queryStart: string; + query: string; + duration: number; +} + +export interface PeerSlotResponse { + slotData: SlotInfo[]; +} + +export interface PeerStatResponse { + statData: StatInfo[]; +} + +export interface SnapshotStatus { + clones: QRepMirrorStatus[]; +} + +export interface CDCMirrorStatus { + config: FlowConnectionConfigs | undefined; + snapshotStatus: SnapshotStatus | undefined; + cdcSyncs: CDCSyncStatus[]; } -export interface ListPeersResponse { - peers: Peer[]; +export interface MirrorStatusResponse { + flowJobName: string; + qrepStatus?: QRepMirrorStatus | undefined; + cdcStatus?: CDCMirrorStatus | undefined; + errorMessage: string; } function createBaseCreateCDCFlowRequest(): CreateCDCFlowRequest { - return { connectionConfigs: undefined }; + return { connectionConfigs: undefined, createCatalogEntry: false }; } export const CreateCDCFlowRequest = { @@ -61,6 +262,9 @@ export const 
CreateCDCFlowRequest = { if (message.connectionConfigs !== undefined) { FlowConnectionConfigs.encode(message.connectionConfigs, writer.uint32(10).fork()).ldelim(); } + if (message.createCatalogEntry === true) { + writer.uint32(16).bool(message.createCatalogEntry); + } return writer; }, @@ -78,6 +282,13 @@ export const CreateCDCFlowRequest = { message.connectionConfigs = FlowConnectionConfigs.decode(reader, reader.uint32()); continue; + case 2: + if (tag !== 16) { + break; + } + + message.createCatalogEntry = reader.bool(); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -92,6 +303,7 @@ export const CreateCDCFlowRequest = { connectionConfigs: isSet(object.connectionConfigs) ? FlowConnectionConfigs.fromJSON(object.connectionConfigs) : undefined, + createCatalogEntry: isSet(object.createCatalogEntry) ? Boolean(object.createCatalogEntry) : false, }; }, @@ -100,6 +312,9 @@ export const CreateCDCFlowRequest = { if (message.connectionConfigs !== undefined) { obj.connectionConfigs = FlowConnectionConfigs.toJSON(message.connectionConfigs); } + if (message.createCatalogEntry === true) { + obj.createCatalogEntry = message.createCatalogEntry; + } return obj; }, @@ -111,6 +326,7 @@ export const CreateCDCFlowRequest = { message.connectionConfigs = (object.connectionConfigs !== undefined && object.connectionConfigs !== null) ? FlowConnectionConfigs.fromPartial(object.connectionConfigs) : undefined; + message.createCatalogEntry = object.createCatalogEntry ?? 
false; return message; }, }; @@ -173,7 +389,7 @@ export const CreateCDCFlowResponse = { }; function createBaseCreateQRepFlowRequest(): CreateQRepFlowRequest { - return { qrepConfig: undefined }; + return { qrepConfig: undefined, createCatalogEntry: false }; } export const CreateQRepFlowRequest = { @@ -181,6 +397,9 @@ export const CreateQRepFlowRequest = { if (message.qrepConfig !== undefined) { QRepConfig.encode(message.qrepConfig, writer.uint32(10).fork()).ldelim(); } + if (message.createCatalogEntry === true) { + writer.uint32(16).bool(message.createCatalogEntry); + } return writer; }, @@ -198,6 +417,13 @@ export const CreateQRepFlowRequest = { message.qrepConfig = QRepConfig.decode(reader, reader.uint32()); continue; + case 2: + if (tag !== 16) { + break; + } + + message.createCatalogEntry = reader.bool(); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -208,7 +434,10 @@ export const CreateQRepFlowRequest = { }, fromJSON(object: any): CreateQRepFlowRequest { - return { qrepConfig: isSet(object.qrepConfig) ? QRepConfig.fromJSON(object.qrepConfig) : undefined }; + return { + qrepConfig: isSet(object.qrepConfig) ? QRepConfig.fromJSON(object.qrepConfig) : undefined, + createCatalogEntry: isSet(object.createCatalogEntry) ? Boolean(object.createCatalogEntry) : false, + }; }, toJSON(message: CreateQRepFlowRequest): unknown { @@ -216,6 +445,9 @@ export const CreateQRepFlowRequest = { if (message.qrepConfig !== undefined) { obj.qrepConfig = QRepConfig.toJSON(message.qrepConfig); } + if (message.createCatalogEntry === true) { + obj.createCatalogEntry = message.createCatalogEntry; + } return obj; }, @@ -227,6 +459,7 @@ export const CreateQRepFlowRequest = { message.qrepConfig = (object.qrepConfig !== undefined && object.qrepConfig !== null) ? QRepConfig.fromPartial(object.qrepConfig) : undefined; + message.createCatalogEntry = object.createCatalogEntry ?? 
false; return message; }, }; @@ -470,22 +703,32 @@ export const ShutdownResponse = { }, }; -function createBaseListPeersRequest(): ListPeersRequest { - return {}; +function createBaseValidatePeerRequest(): ValidatePeerRequest { + return { peer: undefined }; } -export const ListPeersRequest = { - encode(_: ListPeersRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const ValidatePeerRequest = { + encode(message: ValidatePeerRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.peer !== undefined) { + Peer.encode(message.peer, writer.uint32(10).fork()).ldelim(); + } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ListPeersRequest { + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatePeerRequest { const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseListPeersRequest(); + const message = createBaseValidatePeerRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.peer = Peer.decode(reader, reader.uint32()); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -495,40 +738,44 @@ export const ListPeersRequest = { return message; }, - fromJSON(_: any): ListPeersRequest { - return {}; + fromJSON(object: any): ValidatePeerRequest { + return { peer: isSet(object.peer) ? Peer.fromJSON(object.peer) : undefined }; }, - toJSON(_: ListPeersRequest): unknown { + toJSON(message: ValidatePeerRequest): unknown { const obj: any = {}; + if (message.peer !== undefined) { + obj.peer = Peer.toJSON(message.peer); + } return obj; }, - create, I>>(base?: I): ListPeersRequest { - return ListPeersRequest.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): ValidatePeerRequest { + return ValidatePeerRequest.fromPartial(base ?? 
({} as any)); }, - fromPartial, I>>(_: I): ListPeersRequest { - const message = createBaseListPeersRequest(); + fromPartial, I>>(object: I): ValidatePeerRequest { + const message = createBaseValidatePeerRequest(); + message.peer = (object.peer !== undefined && object.peer !== null) ? Peer.fromPartial(object.peer) : undefined; return message; }, }; -function createBaseListPeersResponse(): ListPeersResponse { - return { peers: [] }; +function createBaseCreatePeerRequest(): CreatePeerRequest { + return { peer: undefined }; } -export const ListPeersResponse = { - encode(message: ListPeersResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - for (const v of message.peers) { - Peer.encode(v!, writer.uint32(10).fork()).ldelim(); +export const CreatePeerRequest = { + encode(message: CreatePeerRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.peer !== undefined) { + Peer.encode(message.peer, writer.uint32(10).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ListPeersResponse { + decode(input: _m0.Reader | Uint8Array, length?: number): CreatePeerRequest { const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseListPeersResponse(); + const message = createBaseCreatePeerRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { @@ -537,7 +784,7 @@ export const ListPeersResponse = { break; } - message.peers.push(Peer.decode(reader, reader.uint32())); + message.peer = Peer.decode(reader, reader.uint32()); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -548,156 +795,2067 @@ export const ListPeersResponse = { return message; }, - fromJSON(object: any): ListPeersResponse { - return { peers: Array.isArray(object?.peers) ? 
object.peers.map((e: any) => Peer.fromJSON(e)) : [] }; + fromJSON(object: any): CreatePeerRequest { + return { peer: isSet(object.peer) ? Peer.fromJSON(object.peer) : undefined }; }, - toJSON(message: ListPeersResponse): unknown { + toJSON(message: CreatePeerRequest): unknown { const obj: any = {}; - if (message.peers?.length) { - obj.peers = message.peers.map((e) => Peer.toJSON(e)); + if (message.peer !== undefined) { + obj.peer = Peer.toJSON(message.peer); } return obj; }, - create, I>>(base?: I): ListPeersResponse { - return ListPeersResponse.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): CreatePeerRequest { + return CreatePeerRequest.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): ListPeersResponse { - const message = createBaseListPeersResponse(); - message.peers = object.peers?.map((e) => Peer.fromPartial(e)) || []; + fromPartial, I>>(object: I): CreatePeerRequest { + const message = createBaseCreatePeerRequest(); + message.peer = (object.peer !== undefined && object.peer !== null) ? 
Peer.fromPartial(object.peer) : undefined; return message; }, }; -export type FlowServiceService = typeof FlowServiceService; -export const FlowServiceService = { - listPeers: { - path: "/peerdb_route.FlowService/ListPeers", - requestStream: false, - responseStream: false, - requestSerialize: (value: ListPeersRequest) => Buffer.from(ListPeersRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => ListPeersRequest.decode(value), - responseSerialize: (value: ListPeersResponse) => Buffer.from(ListPeersResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => ListPeersResponse.decode(value), +function createBaseDropPeerRequest(): DropPeerRequest { + return { peerName: "" }; +} + +export const DropPeerRequest = { + encode(message: DropPeerRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.peerName !== "") { + writer.uint32(10).string(message.peerName); + } + return writer; }, - createCdcFlow: { - path: "/peerdb_route.FlowService/CreateCDCFlow", - requestStream: false, - responseStream: false, - requestSerialize: (value: CreateCDCFlowRequest) => Buffer.from(CreateCDCFlowRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => CreateCDCFlowRequest.decode(value), - responseSerialize: (value: CreateCDCFlowResponse) => Buffer.from(CreateCDCFlowResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => CreateCDCFlowResponse.decode(value), + + decode(input: _m0.Reader | Uint8Array, length?: number): DropPeerRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDropPeerRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.peerName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; }, - createQRepFlow: { - path: "/peerdb_route.FlowService/CreateQRepFlow", - requestStream: false, - responseStream: false, - requestSerialize: (value: CreateQRepFlowRequest) => Buffer.from(CreateQRepFlowRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => CreateQRepFlowRequest.decode(value), - responseSerialize: (value: CreateQRepFlowResponse) => Buffer.from(CreateQRepFlowResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => CreateQRepFlowResponse.decode(value), + + fromJSON(object: any): DropPeerRequest { + return { peerName: isSet(object.peerName) ? String(object.peerName) : "" }; }, - shutdownFlow: { - path: "/peerdb_route.FlowService/ShutdownFlow", - requestStream: false, - responseStream: false, - requestSerialize: (value: ShutdownRequest) => Buffer.from(ShutdownRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => ShutdownRequest.decode(value), - responseSerialize: (value: ShutdownResponse) => Buffer.from(ShutdownResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => ShutdownResponse.decode(value), + + toJSON(message: DropPeerRequest): unknown { + const obj: any = {}; + if (message.peerName !== "") { + obj.peerName = message.peerName; + } + return obj; }, -} as const; -export interface FlowServiceServer extends UntypedServiceImplementation { - listPeers: handleUnaryCall; - createCdcFlow: handleUnaryCall; - createQRepFlow: handleUnaryCall; - shutdownFlow: handleUnaryCall; -} + create, I>>(base?: I): DropPeerRequest { + return DropPeerRequest.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): DropPeerRequest { + const message = createBaseDropPeerRequest(); + message.peerName = object.peerName ?? ""; + return message; + }, +}; -export interface FlowServiceClient extends Client { - listPeers( - request: ListPeersRequest, - callback: (error: ServiceError | null, response: ListPeersResponse) => void, - ): ClientUnaryCall; - listPeers( - request: ListPeersRequest, - metadata: Metadata, - callback: (error: ServiceError | null, response: ListPeersResponse) => void, - ): ClientUnaryCall; - listPeers( - request: ListPeersRequest, - metadata: Metadata, - options: Partial, - callback: (error: ServiceError | null, response: ListPeersResponse) => void, - ): ClientUnaryCall; - createCdcFlow( - request: CreateCDCFlowRequest, - callback: (error: ServiceError | null, response: CreateCDCFlowResponse) => void, - ): ClientUnaryCall; - createCdcFlow( - request: CreateCDCFlowRequest, - metadata: Metadata, - callback: (error: ServiceError | null, response: CreateCDCFlowResponse) => void, - ): ClientUnaryCall; - createCdcFlow( - request: CreateCDCFlowRequest, - metadata: Metadata, - options: Partial, - callback: (error: ServiceError | null, response: CreateCDCFlowResponse) => void, - ): ClientUnaryCall; - createQRepFlow( - request: CreateQRepFlowRequest, - callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, - ): ClientUnaryCall; - createQRepFlow( - request: CreateQRepFlowRequest, - metadata: Metadata, - callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, - ): ClientUnaryCall; - createQRepFlow( - request: CreateQRepFlowRequest, - metadata: Metadata, - options: Partial, - callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, - ): ClientUnaryCall; - shutdownFlow( - request: ShutdownRequest, - callback: (error: ServiceError | null, response: ShutdownResponse) => void, - ): ClientUnaryCall; - shutdownFlow( - request: ShutdownRequest, - metadata: 
Metadata, - callback: (error: ServiceError | null, response: ShutdownResponse) => void, - ): ClientUnaryCall; - shutdownFlow( - request: ShutdownRequest, - metadata: Metadata, - options: Partial, - callback: (error: ServiceError | null, response: ShutdownResponse) => void, - ): ClientUnaryCall; +function createBaseDropPeerResponse(): DropPeerResponse { + return { ok: false, errorMessage: "" }; } -export const FlowServiceClient = makeGenericClientConstructor( - FlowServiceService, - "peerdb_route.FlowService", -) as unknown as { - new (address: string, credentials: ChannelCredentials, options?: Partial): FlowServiceClient; - service: typeof FlowServiceService; -}; +export const DropPeerResponse = { + encode(message: DropPeerResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ok === true) { + writer.uint32(8).bool(message.ok); + } + if (message.errorMessage !== "") { + writer.uint32(18).string(message.errorMessage); + } + return writer; + }, -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + decode(input: _m0.Reader | Uint8Array, length?: number): DropPeerResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDropPeerResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } -export type DeepPartial = T extends Builtin ? T - : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> - : T extends {} ? { [K in keyof T]?: DeepPartial } - : Partial; + message.ok = reader.bool(); + continue; + case 2: + if (tag !== 18) { + break; + } -type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin ? 
P - : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + message.errorMessage = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DropPeerResponse { + return { + ok: isSet(object.ok) ? Boolean(object.ok) : false, + errorMessage: isSet(object.errorMessage) ? String(object.errorMessage) : "", + }; + }, + + toJSON(message: DropPeerResponse): unknown { + const obj: any = {}; + if (message.ok === true) { + obj.ok = message.ok; + } + if (message.errorMessage !== "") { + obj.errorMessage = message.errorMessage; + } + return obj; + }, + + create, I>>(base?: I): DropPeerResponse { + return DropPeerResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): DropPeerResponse { + const message = createBaseDropPeerResponse(); + message.ok = object.ok ?? false; + message.errorMessage = object.errorMessage ?? ""; + return message; + }, +}; + +function createBaseValidatePeerResponse(): ValidatePeerResponse { + return { status: 0, message: "" }; +} + +export const ValidatePeerResponse = { + encode(message: ValidatePeerResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.status !== 0) { + writer.uint32(8).int32(message.status); + } + if (message.message !== "") { + writer.uint32(18).string(message.message); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatePeerResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseValidatePeerResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.status = reader.int32() as any; + continue; + case 2: + if (tag !== 18) { + break; + } + + message.message = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ValidatePeerResponse { + return { + status: isSet(object.status) ? validatePeerStatusFromJSON(object.status) : 0, + message: isSet(object.message) ? String(object.message) : "", + }; + }, + + toJSON(message: ValidatePeerResponse): unknown { + const obj: any = {}; + if (message.status !== 0) { + obj.status = validatePeerStatusToJSON(message.status); + } + if (message.message !== "") { + obj.message = message.message; + } + return obj; + }, + + create, I>>(base?: I): ValidatePeerResponse { + return ValidatePeerResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ValidatePeerResponse { + const message = createBaseValidatePeerResponse(); + message.status = object.status ?? 0; + message.message = object.message ?? ""; + return message; + }, +}; + +function createBaseCreatePeerResponse(): CreatePeerResponse { + return { status: 0, message: "" }; +} + +export const CreatePeerResponse = { + encode(message: CreatePeerResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.status !== 0) { + writer.uint32(8).int32(message.status); + } + if (message.message !== "") { + writer.uint32(18).string(message.message); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CreatePeerResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCreatePeerResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.status = reader.int32() as any; + continue; + case 2: + if (tag !== 18) { + break; + } + + message.message = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): CreatePeerResponse { + return { + status: isSet(object.status) ? createPeerStatusFromJSON(object.status) : 0, + message: isSet(object.message) ? String(object.message) : "", + }; + }, + + toJSON(message: CreatePeerResponse): unknown { + const obj: any = {}; + if (message.status !== 0) { + obj.status = createPeerStatusToJSON(message.status); + } + if (message.message !== "") { + obj.message = message.message; + } + return obj; + }, + + create, I>>(base?: I): CreatePeerResponse { + return CreatePeerResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CreatePeerResponse { + const message = createBaseCreatePeerResponse(); + message.status = object.status ?? 0; + message.message = object.message ?? ""; + return message; + }, +}; + +function createBaseMirrorStatusRequest(): MirrorStatusRequest { + return { flowJobName: "" }; +} + +export const MirrorStatusRequest = { + encode(message: MirrorStatusRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.flowJobName !== "") { + writer.uint32(10).string(message.flowJobName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MirrorStatusRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMirrorStatusRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.flowJobName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MirrorStatusRequest { + return { flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "" }; + }, + + toJSON(message: MirrorStatusRequest): unknown { + const obj: any = {}; + if (message.flowJobName !== "") { + obj.flowJobName = message.flowJobName; + } + return obj; + }, + + create, I>>(base?: I): MirrorStatusRequest { + return MirrorStatusRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MirrorStatusRequest { + const message = createBaseMirrorStatusRequest(); + message.flowJobName = object.flowJobName ?? ""; + return message; + }, +}; + +function createBasePartitionStatus(): PartitionStatus { + return { partitionId: "", startTime: undefined, endTime: undefined, numRows: 0 }; +} + +export const PartitionStatus = { + encode(message: PartitionStatus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.partitionId !== "") { + writer.uint32(10).string(message.partitionId); + } + if (message.startTime !== undefined) { + Timestamp.encode(toTimestamp(message.startTime), writer.uint32(18).fork()).ldelim(); + } + if (message.endTime !== undefined) { + Timestamp.encode(toTimestamp(message.endTime), writer.uint32(26).fork()).ldelim(); + } + if (message.numRows !== 0) { + writer.uint32(32).int32(message.numRows); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PartitionStatus { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePartitionStatus(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.partitionId = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.startTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.endTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.numRows = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): PartitionStatus { + return { + partitionId: isSet(object.partitionId) ? String(object.partitionId) : "", + startTime: isSet(object.startTime) ? fromJsonTimestamp(object.startTime) : undefined, + endTime: isSet(object.endTime) ? fromJsonTimestamp(object.endTime) : undefined, + numRows: isSet(object.numRows) ? Number(object.numRows) : 0, + }; + }, + + toJSON(message: PartitionStatus): unknown { + const obj: any = {}; + if (message.partitionId !== "") { + obj.partitionId = message.partitionId; + } + if (message.startTime !== undefined) { + obj.startTime = message.startTime.toISOString(); + } + if (message.endTime !== undefined) { + obj.endTime = message.endTime.toISOString(); + } + if (message.numRows !== 0) { + obj.numRows = Math.round(message.numRows); + } + return obj; + }, + + create, I>>(base?: I): PartitionStatus { + return PartitionStatus.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): PartitionStatus { + const message = createBasePartitionStatus(); + message.partitionId = object.partitionId ?? ""; + message.startTime = object.startTime ?? undefined; + message.endTime = object.endTime ?? undefined; + message.numRows = object.numRows ?? 
0; + return message; + }, +}; + +function createBaseQRepMirrorStatus(): QRepMirrorStatus { + return { config: undefined, partitions: [] }; +} + +export const QRepMirrorStatus = { + encode(message: QRepMirrorStatus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.config !== undefined) { + QRepConfig.encode(message.config, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.partitions) { + PartitionStatus.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QRepMirrorStatus { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQRepMirrorStatus(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.config = QRepConfig.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.partitions.push(PartitionStatus.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): QRepMirrorStatus { + return { + config: isSet(object.config) ? QRepConfig.fromJSON(object.config) : undefined, + partitions: Array.isArray(object?.partitions) + ? object.partitions.map((e: any) => PartitionStatus.fromJSON(e)) + : [], + }; + }, + + toJSON(message: QRepMirrorStatus): unknown { + const obj: any = {}; + if (message.config !== undefined) { + obj.config = QRepConfig.toJSON(message.config); + } + if (message.partitions?.length) { + obj.partitions = message.partitions.map((e) => PartitionStatus.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): QRepMirrorStatus { + return QRepMirrorStatus.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): QRepMirrorStatus { + const message = createBaseQRepMirrorStatus(); + message.config = (object.config !== undefined && object.config !== null) + ? QRepConfig.fromPartial(object.config) + : undefined; + message.partitions = object.partitions?.map((e) => PartitionStatus.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCDCSyncStatus(): CDCSyncStatus { + return { startLsn: 0, endLsn: 0, numRows: 0, startTime: undefined, endTime: undefined }; +} + +export const CDCSyncStatus = { + encode(message: CDCSyncStatus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.startLsn !== 0) { + writer.uint32(8).int64(message.startLsn); + } + if (message.endLsn !== 0) { + writer.uint32(16).int64(message.endLsn); + } + if (message.numRows !== 0) { + writer.uint32(24).int32(message.numRows); + } + if (message.startTime !== undefined) { + Timestamp.encode(toTimestamp(message.startTime), writer.uint32(34).fork()).ldelim(); + } + if (message.endTime !== undefined) { + Timestamp.encode(toTimestamp(message.endTime), writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CDCSyncStatus { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCDCSyncStatus(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.startLsn = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.endLsn = longToNumber(reader.int64() as Long); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.numRows = reader.int32(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.startTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.endTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): CDCSyncStatus { + return { + startLsn: isSet(object.startLsn) ? Number(object.startLsn) : 0, + endLsn: isSet(object.endLsn) ? Number(object.endLsn) : 0, + numRows: isSet(object.numRows) ? Number(object.numRows) : 0, + startTime: isSet(object.startTime) ? fromJsonTimestamp(object.startTime) : undefined, + endTime: isSet(object.endTime) ? fromJsonTimestamp(object.endTime) : undefined, + }; + }, + + toJSON(message: CDCSyncStatus): unknown { + const obj: any = {}; + if (message.startLsn !== 0) { + obj.startLsn = Math.round(message.startLsn); + } + if (message.endLsn !== 0) { + obj.endLsn = Math.round(message.endLsn); + } + if (message.numRows !== 0) { + obj.numRows = Math.round(message.numRows); + } + if (message.startTime !== undefined) { + obj.startTime = message.startTime.toISOString(); + } + if (message.endTime !== undefined) { + obj.endTime = message.endTime.toISOString(); + } + return obj; + }, + + create, I>>(base?: I): CDCSyncStatus { + return CDCSyncStatus.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): CDCSyncStatus { + const message = createBaseCDCSyncStatus(); + message.startLsn = object.startLsn ?? 0; + message.endLsn = object.endLsn ?? 0; + message.numRows = object.numRows ?? 0; + message.startTime = object.startTime ?? undefined; + message.endTime = object.endTime ?? undefined; + return message; + }, +}; + +function createBasePeerSchemasResponse(): PeerSchemasResponse { + return { schemas: [] }; +} + +export const PeerSchemasResponse = { + encode(message: PeerSchemasResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.schemas) { + writer.uint32(10).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PeerSchemasResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeerSchemasResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.schemas.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): PeerSchemasResponse { + return { schemas: Array.isArray(object?.schemas) ? object.schemas.map((e: any) => String(e)) : [] }; + }, + + toJSON(message: PeerSchemasResponse): unknown { + const obj: any = {}; + if (message.schemas?.length) { + obj.schemas = message.schemas; + } + return obj; + }, + + create, I>>(base?: I): PeerSchemasResponse { + return PeerSchemasResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): PeerSchemasResponse { + const message = createBasePeerSchemasResponse(); + message.schemas = object.schemas?.map((e) => e) || []; + return message; + }, +}; + +function createBaseSchemaTablesRequest(): SchemaTablesRequest { + return { peerName: "", schemaName: "" }; +} + +export const SchemaTablesRequest = { + encode(message: SchemaTablesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.peerName !== "") { + writer.uint32(10).string(message.peerName); + } + if (message.schemaName !== "") { + writer.uint32(18).string(message.schemaName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SchemaTablesRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSchemaTablesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.peerName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.schemaName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SchemaTablesRequest { + return { + peerName: isSet(object.peerName) ? String(object.peerName) : "", + schemaName: isSet(object.schemaName) ? String(object.schemaName) : "", + }; + }, + + toJSON(message: SchemaTablesRequest): unknown { + const obj: any = {}; + if (message.peerName !== "") { + obj.peerName = message.peerName; + } + if (message.schemaName !== "") { + obj.schemaName = message.schemaName; + } + return obj; + }, + + create, I>>(base?: I): SchemaTablesRequest { + return SchemaTablesRequest.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): SchemaTablesRequest { + const message = createBaseSchemaTablesRequest(); + message.peerName = object.peerName ?? ""; + message.schemaName = object.schemaName ?? ""; + return message; + }, +}; + +function createBaseSchemaTablesResponse(): SchemaTablesResponse { + return { tables: [] }; +} + +export const SchemaTablesResponse = { + encode(message: SchemaTablesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.tables) { + writer.uint32(10).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SchemaTablesResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSchemaTablesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.tables.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SchemaTablesResponse { + return { tables: Array.isArray(object?.tables) ? object.tables.map((e: any) => String(e)) : [] }; + }, + + toJSON(message: SchemaTablesResponse): unknown { + const obj: any = {}; + if (message.tables?.length) { + obj.tables = message.tables; + } + return obj; + }, + + create, I>>(base?: I): SchemaTablesResponse { + return SchemaTablesResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): SchemaTablesResponse { + const message = createBaseSchemaTablesResponse(); + message.tables = object.tables?.map((e) => e) || []; + return message; + }, +}; + +function createBaseTableColumnsRequest(): TableColumnsRequest { + return { peerName: "", schemaName: "", tableName: "" }; +} + +export const TableColumnsRequest = { + encode(message: TableColumnsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.peerName !== "") { + writer.uint32(10).string(message.peerName); + } + if (message.schemaName !== "") { + writer.uint32(18).string(message.schemaName); + } + if (message.tableName !== "") { + writer.uint32(26).string(message.tableName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TableColumnsRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTableColumnsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.peerName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.schemaName = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.tableName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): TableColumnsRequest { + return { + peerName: isSet(object.peerName) ? String(object.peerName) : "", + schemaName: isSet(object.schemaName) ? String(object.schemaName) : "", + tableName: isSet(object.tableName) ? 
String(object.tableName) : "", + }; + }, + + toJSON(message: TableColumnsRequest): unknown { + const obj: any = {}; + if (message.peerName !== "") { + obj.peerName = message.peerName; + } + if (message.schemaName !== "") { + obj.schemaName = message.schemaName; + } + if (message.tableName !== "") { + obj.tableName = message.tableName; + } + return obj; + }, + + create, I>>(base?: I): TableColumnsRequest { + return TableColumnsRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): TableColumnsRequest { + const message = createBaseTableColumnsRequest(); + message.peerName = object.peerName ?? ""; + message.schemaName = object.schemaName ?? ""; + message.tableName = object.tableName ?? ""; + return message; + }, +}; + +function createBaseTableColumnsResponse(): TableColumnsResponse { + return { columns: [] }; +} + +export const TableColumnsResponse = { + encode(message: TableColumnsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.columns) { + writer.uint32(10).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TableColumnsResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTableColumnsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.columns.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): TableColumnsResponse { + return { columns: Array.isArray(object?.columns) ? 
object.columns.map((e: any) => String(e)) : [] }; + }, + + toJSON(message: TableColumnsResponse): unknown { + const obj: any = {}; + if (message.columns?.length) { + obj.columns = message.columns; + } + return obj; + }, + + create, I>>(base?: I): TableColumnsResponse { + return TableColumnsResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): TableColumnsResponse { + const message = createBaseTableColumnsResponse(); + message.columns = object.columns?.map((e) => e) || []; + return message; + }, +}; + +function createBasePostgresPeerActivityInfoRequest(): PostgresPeerActivityInfoRequest { + return { peerName: "" }; +} + +export const PostgresPeerActivityInfoRequest = { + encode(message: PostgresPeerActivityInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.peerName !== "") { + writer.uint32(10).string(message.peerName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PostgresPeerActivityInfoRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePostgresPeerActivityInfoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.peerName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): PostgresPeerActivityInfoRequest { + return { peerName: isSet(object.peerName) ? String(object.peerName) : "" }; + }, + + toJSON(message: PostgresPeerActivityInfoRequest): unknown { + const obj: any = {}; + if (message.peerName !== "") { + obj.peerName = message.peerName; + } + return obj; + }, + + create, I>>(base?: I): PostgresPeerActivityInfoRequest { + return PostgresPeerActivityInfoRequest.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>( + object: I, + ): PostgresPeerActivityInfoRequest { + const message = createBasePostgresPeerActivityInfoRequest(); + message.peerName = object.peerName ?? ""; + return message; + }, +}; + +function createBaseSlotInfo(): SlotInfo { + return { slotName: "", redoLSN: "", restartLSN: "", active: false, lagInMb: 0 }; +} + +export const SlotInfo = { + encode(message: SlotInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.slotName !== "") { + writer.uint32(10).string(message.slotName); + } + if (message.redoLSN !== "") { + writer.uint32(18).string(message.redoLSN); + } + if (message.restartLSN !== "") { + writer.uint32(26).string(message.restartLSN); + } + if (message.active === true) { + writer.uint32(32).bool(message.active); + } + if (message.lagInMb !== 0) { + writer.uint32(45).float(message.lagInMb); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SlotInfo { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSlotInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.slotName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.redoLSN = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.restartLSN = reader.string(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.active = reader.bool(); + continue; + case 5: + if (tag !== 45) { + break; + } + + message.lagInMb = reader.float(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SlotInfo { + return { + slotName: isSet(object.slotName) ? String(object.slotName) : "", + redoLSN: isSet(object.redoLSN) ? 
String(object.redoLSN) : "", + restartLSN: isSet(object.restartLSN) ? String(object.restartLSN) : "", + active: isSet(object.active) ? Boolean(object.active) : false, + lagInMb: isSet(object.lagInMb) ? Number(object.lagInMb) : 0, + }; + }, + + toJSON(message: SlotInfo): unknown { + const obj: any = {}; + if (message.slotName !== "") { + obj.slotName = message.slotName; + } + if (message.redoLSN !== "") { + obj.redoLSN = message.redoLSN; + } + if (message.restartLSN !== "") { + obj.restartLSN = message.restartLSN; + } + if (message.active === true) { + obj.active = message.active; + } + if (message.lagInMb !== 0) { + obj.lagInMb = message.lagInMb; + } + return obj; + }, + + create, I>>(base?: I): SlotInfo { + return SlotInfo.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SlotInfo { + const message = createBaseSlotInfo(); + message.slotName = object.slotName ?? ""; + message.redoLSN = object.redoLSN ?? ""; + message.restartLSN = object.restartLSN ?? ""; + message.active = object.active ?? false; + message.lagInMb = object.lagInMb ?? 0; + return message; + }, +}; + +function createBaseStatInfo(): StatInfo { + return { pid: 0, waitEvent: "", waitEventType: "", queryStart: "", query: "", duration: 0 }; +} + +export const StatInfo = { + encode(message: StatInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pid !== 0) { + writer.uint32(8).int64(message.pid); + } + if (message.waitEvent !== "") { + writer.uint32(18).string(message.waitEvent); + } + if (message.waitEventType !== "") { + writer.uint32(26).string(message.waitEventType); + } + if (message.queryStart !== "") { + writer.uint32(34).string(message.queryStart); + } + if (message.query !== "") { + writer.uint32(42).string(message.query); + } + if (message.duration !== 0) { + writer.uint32(53).float(message.duration); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StatInfo { + const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStatInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.pid = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.waitEvent = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.waitEventType = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.queryStart = reader.string(); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.query = reader.string(); + continue; + case 6: + if (tag !== 53) { + break; + } + + message.duration = reader.float(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): StatInfo { + return { + pid: isSet(object.pid) ? Number(object.pid) : 0, + waitEvent: isSet(object.waitEvent) ? String(object.waitEvent) : "", + waitEventType: isSet(object.waitEventType) ? String(object.waitEventType) : "", + queryStart: isSet(object.queryStart) ? String(object.queryStart) : "", + query: isSet(object.query) ? String(object.query) : "", + duration: isSet(object.duration) ? Number(object.duration) : 0, + }; + }, + + toJSON(message: StatInfo): unknown { + const obj: any = {}; + if (message.pid !== 0) { + obj.pid = Math.round(message.pid); + } + if (message.waitEvent !== "") { + obj.waitEvent = message.waitEvent; + } + if (message.waitEventType !== "") { + obj.waitEventType = message.waitEventType; + } + if (message.queryStart !== "") { + obj.queryStart = message.queryStart; + } + if (message.query !== "") { + obj.query = message.query; + } + if (message.duration !== 0) { + obj.duration = message.duration; + } + return obj; + }, + + create, I>>(base?: I): StatInfo { + return StatInfo.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): StatInfo { + const message = createBaseStatInfo(); + message.pid = object.pid ?? 0; + message.waitEvent = object.waitEvent ?? ""; + message.waitEventType = object.waitEventType ?? ""; + message.queryStart = object.queryStart ?? ""; + message.query = object.query ?? ""; + message.duration = object.duration ?? 0; + return message; + }, +}; + +function createBasePeerSlotResponse(): PeerSlotResponse { + return { slotData: [] }; +} + +export const PeerSlotResponse = { + encode(message: PeerSlotResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.slotData) { + SlotInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PeerSlotResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeerSlotResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.slotData.push(SlotInfo.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): PeerSlotResponse { + return { slotData: Array.isArray(object?.slotData) ? object.slotData.map((e: any) => SlotInfo.fromJSON(e)) : [] }; + }, + + toJSON(message: PeerSlotResponse): unknown { + const obj: any = {}; + if (message.slotData?.length) { + obj.slotData = message.slotData.map((e) => SlotInfo.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): PeerSlotResponse { + return PeerSlotResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): PeerSlotResponse { + const message = createBasePeerSlotResponse(); + message.slotData = object.slotData?.map((e) => SlotInfo.fromPartial(e)) || []; + return message; + }, +}; + +function createBasePeerStatResponse(): PeerStatResponse { + return { statData: [] }; +} + +export const PeerStatResponse = { + encode(message: PeerStatResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.statData) { + StatInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PeerStatResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeerStatResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.statData.push(StatInfo.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): PeerStatResponse { + return { statData: Array.isArray(object?.statData) ? object.statData.map((e: any) => StatInfo.fromJSON(e)) : [] }; + }, + + toJSON(message: PeerStatResponse): unknown { + const obj: any = {}; + if (message.statData?.length) { + obj.statData = message.statData.map((e) => StatInfo.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): PeerStatResponse { + return PeerStatResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): PeerStatResponse { + const message = createBasePeerStatResponse(); + message.statData = object.statData?.map((e) => StatInfo.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSnapshotStatus(): SnapshotStatus { + return { clones: [] }; +} + +export const SnapshotStatus = { + encode(message: SnapshotStatus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.clones) { + QRepMirrorStatus.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotStatus { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshotStatus(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.clones.push(QRepMirrorStatus.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SnapshotStatus { + return { clones: Array.isArray(object?.clones) ? object.clones.map((e: any) => QRepMirrorStatus.fromJSON(e)) : [] }; + }, + + toJSON(message: SnapshotStatus): unknown { + const obj: any = {}; + if (message.clones?.length) { + obj.clones = message.clones.map((e) => QRepMirrorStatus.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): SnapshotStatus { + return SnapshotStatus.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): SnapshotStatus { + const message = createBaseSnapshotStatus(); + message.clones = object.clones?.map((e) => QRepMirrorStatus.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCDCMirrorStatus(): CDCMirrorStatus { + return { config: undefined, snapshotStatus: undefined, cdcSyncs: [] }; +} + +export const CDCMirrorStatus = { + encode(message: CDCMirrorStatus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.config !== undefined) { + FlowConnectionConfigs.encode(message.config, writer.uint32(10).fork()).ldelim(); + } + if (message.snapshotStatus !== undefined) { + SnapshotStatus.encode(message.snapshotStatus, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.cdcSyncs) { + CDCSyncStatus.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CDCMirrorStatus { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCDCMirrorStatus(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.config = FlowConnectionConfigs.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.snapshotStatus = SnapshotStatus.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.cdcSyncs.push(CDCSyncStatus.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): CDCMirrorStatus { + return { + config: isSet(object.config) ? FlowConnectionConfigs.fromJSON(object.config) : undefined, + snapshotStatus: isSet(object.snapshotStatus) ? 
SnapshotStatus.fromJSON(object.snapshotStatus) : undefined, + cdcSyncs: Array.isArray(object?.cdcSyncs) ? object.cdcSyncs.map((e: any) => CDCSyncStatus.fromJSON(e)) : [], + }; + }, + + toJSON(message: CDCMirrorStatus): unknown { + const obj: any = {}; + if (message.config !== undefined) { + obj.config = FlowConnectionConfigs.toJSON(message.config); + } + if (message.snapshotStatus !== undefined) { + obj.snapshotStatus = SnapshotStatus.toJSON(message.snapshotStatus); + } + if (message.cdcSyncs?.length) { + obj.cdcSyncs = message.cdcSyncs.map((e) => CDCSyncStatus.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): CDCMirrorStatus { + return CDCMirrorStatus.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CDCMirrorStatus { + const message = createBaseCDCMirrorStatus(); + message.config = (object.config !== undefined && object.config !== null) + ? FlowConnectionConfigs.fromPartial(object.config) + : undefined; + message.snapshotStatus = (object.snapshotStatus !== undefined && object.snapshotStatus !== null) + ? 
SnapshotStatus.fromPartial(object.snapshotStatus) + : undefined; + message.cdcSyncs = object.cdcSyncs?.map((e) => CDCSyncStatus.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMirrorStatusResponse(): MirrorStatusResponse { + return { flowJobName: "", qrepStatus: undefined, cdcStatus: undefined, errorMessage: "" }; +} + +export const MirrorStatusResponse = { + encode(message: MirrorStatusResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.flowJobName !== "") { + writer.uint32(10).string(message.flowJobName); + } + if (message.qrepStatus !== undefined) { + QRepMirrorStatus.encode(message.qrepStatus, writer.uint32(18).fork()).ldelim(); + } + if (message.cdcStatus !== undefined) { + CDCMirrorStatus.encode(message.cdcStatus, writer.uint32(26).fork()).ldelim(); + } + if (message.errorMessage !== "") { + writer.uint32(34).string(message.errorMessage); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MirrorStatusResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMirrorStatusResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.flowJobName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.qrepStatus = QRepMirrorStatus.decode(reader, reader.uint32()); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.cdcStatus = CDCMirrorStatus.decode(reader, reader.uint32()); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.errorMessage = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MirrorStatusResponse { + return { + flowJobName: isSet(object.flowJobName) ? 
String(object.flowJobName) : "", + qrepStatus: isSet(object.qrepStatus) ? QRepMirrorStatus.fromJSON(object.qrepStatus) : undefined, + cdcStatus: isSet(object.cdcStatus) ? CDCMirrorStatus.fromJSON(object.cdcStatus) : undefined, + errorMessage: isSet(object.errorMessage) ? String(object.errorMessage) : "", + }; + }, + + toJSON(message: MirrorStatusResponse): unknown { + const obj: any = {}; + if (message.flowJobName !== "") { + obj.flowJobName = message.flowJobName; + } + if (message.qrepStatus !== undefined) { + obj.qrepStatus = QRepMirrorStatus.toJSON(message.qrepStatus); + } + if (message.cdcStatus !== undefined) { + obj.cdcStatus = CDCMirrorStatus.toJSON(message.cdcStatus); + } + if (message.errorMessage !== "") { + obj.errorMessage = message.errorMessage; + } + return obj; + }, + + create, I>>(base?: I): MirrorStatusResponse { + return MirrorStatusResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MirrorStatusResponse { + const message = createBaseMirrorStatusResponse(); + message.flowJobName = object.flowJobName ?? ""; + message.qrepStatus = (object.qrepStatus !== undefined && object.qrepStatus !== null) + ? QRepMirrorStatus.fromPartial(object.qrepStatus) + : undefined; + message.cdcStatus = (object.cdcStatus !== undefined && object.cdcStatus !== null) + ? CDCMirrorStatus.fromPartial(object.cdcStatus) + : undefined; + message.errorMessage = object.errorMessage ?? 
""; + return message; + }, +}; + +export type FlowServiceService = typeof FlowServiceService; +export const FlowServiceService = { + validatePeer: { + path: "/peerdb_route.FlowService/ValidatePeer", + requestStream: false, + responseStream: false, + requestSerialize: (value: ValidatePeerRequest) => Buffer.from(ValidatePeerRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => ValidatePeerRequest.decode(value), + responseSerialize: (value: ValidatePeerResponse) => Buffer.from(ValidatePeerResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => ValidatePeerResponse.decode(value), + }, + createPeer: { + path: "/peerdb_route.FlowService/CreatePeer", + requestStream: false, + responseStream: false, + requestSerialize: (value: CreatePeerRequest) => Buffer.from(CreatePeerRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => CreatePeerRequest.decode(value), + responseSerialize: (value: CreatePeerResponse) => Buffer.from(CreatePeerResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => CreatePeerResponse.decode(value), + }, + dropPeer: { + path: "/peerdb_route.FlowService/DropPeer", + requestStream: false, + responseStream: false, + requestSerialize: (value: DropPeerRequest) => Buffer.from(DropPeerRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => DropPeerRequest.decode(value), + responseSerialize: (value: DropPeerResponse) => Buffer.from(DropPeerResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => DropPeerResponse.decode(value), + }, + createCdcFlow: { + path: "/peerdb_route.FlowService/CreateCDCFlow", + requestStream: false, + responseStream: false, + requestSerialize: (value: CreateCDCFlowRequest) => Buffer.from(CreateCDCFlowRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => CreateCDCFlowRequest.decode(value), + responseSerialize: (value: CreateCDCFlowResponse) => Buffer.from(CreateCDCFlowResponse.encode(value).finish()), + 
responseDeserialize: (value: Buffer) => CreateCDCFlowResponse.decode(value), + }, + createQRepFlow: { + path: "/peerdb_route.FlowService/CreateQRepFlow", + requestStream: false, + responseStream: false, + requestSerialize: (value: CreateQRepFlowRequest) => Buffer.from(CreateQRepFlowRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => CreateQRepFlowRequest.decode(value), + responseSerialize: (value: CreateQRepFlowResponse) => Buffer.from(CreateQRepFlowResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => CreateQRepFlowResponse.decode(value), + }, + getSchemas: { + path: "/peerdb_route.FlowService/GetSchemas", + requestStream: false, + responseStream: false, + requestSerialize: (value: PostgresPeerActivityInfoRequest) => + Buffer.from(PostgresPeerActivityInfoRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => PostgresPeerActivityInfoRequest.decode(value), + responseSerialize: (value: PeerSchemasResponse) => Buffer.from(PeerSchemasResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => PeerSchemasResponse.decode(value), + }, + getTablesInSchema: { + path: "/peerdb_route.FlowService/GetTablesInSchema", + requestStream: false, + responseStream: false, + requestSerialize: (value: SchemaTablesRequest) => Buffer.from(SchemaTablesRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => SchemaTablesRequest.decode(value), + responseSerialize: (value: SchemaTablesResponse) => Buffer.from(SchemaTablesResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => SchemaTablesResponse.decode(value), + }, + getColumns: { + path: "/peerdb_route.FlowService/GetColumns", + requestStream: false, + responseStream: false, + requestSerialize: (value: TableColumnsRequest) => Buffer.from(TableColumnsRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => TableColumnsRequest.decode(value), + responseSerialize: (value: TableColumnsResponse) => 
Buffer.from(TableColumnsResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => TableColumnsResponse.decode(value), + }, + getSlotInfo: { + path: "/peerdb_route.FlowService/GetSlotInfo", + requestStream: false, + responseStream: false, + requestSerialize: (value: PostgresPeerActivityInfoRequest) => + Buffer.from(PostgresPeerActivityInfoRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => PostgresPeerActivityInfoRequest.decode(value), + responseSerialize: (value: PeerSlotResponse) => Buffer.from(PeerSlotResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => PeerSlotResponse.decode(value), + }, + getStatInfo: { + path: "/peerdb_route.FlowService/GetStatInfo", + requestStream: false, + responseStream: false, + requestSerialize: (value: PostgresPeerActivityInfoRequest) => + Buffer.from(PostgresPeerActivityInfoRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => PostgresPeerActivityInfoRequest.decode(value), + responseSerialize: (value: PeerStatResponse) => Buffer.from(PeerStatResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => PeerStatResponse.decode(value), + }, + shutdownFlow: { + path: "/peerdb_route.FlowService/ShutdownFlow", + requestStream: false, + responseStream: false, + requestSerialize: (value: ShutdownRequest) => Buffer.from(ShutdownRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => ShutdownRequest.decode(value), + responseSerialize: (value: ShutdownResponse) => Buffer.from(ShutdownResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => ShutdownResponse.decode(value), + }, + mirrorStatus: { + path: "/peerdb_route.FlowService/MirrorStatus", + requestStream: false, + responseStream: false, + requestSerialize: (value: MirrorStatusRequest) => Buffer.from(MirrorStatusRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => MirrorStatusRequest.decode(value), + responseSerialize: (value: 
MirrorStatusResponse) => Buffer.from(MirrorStatusResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => MirrorStatusResponse.decode(value), + }, +} as const; + +export interface FlowServiceServer extends UntypedServiceImplementation { + validatePeer: handleUnaryCall; + createPeer: handleUnaryCall; + dropPeer: handleUnaryCall; + createCdcFlow: handleUnaryCall; + createQRepFlow: handleUnaryCall; + getSchemas: handleUnaryCall; + getTablesInSchema: handleUnaryCall; + getColumns: handleUnaryCall; + getSlotInfo: handleUnaryCall; + getStatInfo: handleUnaryCall; + shutdownFlow: handleUnaryCall; + mirrorStatus: handleUnaryCall; +} + +export interface FlowServiceClient extends Client { + validatePeer( + request: ValidatePeerRequest, + callback: (error: ServiceError | null, response: ValidatePeerResponse) => void, + ): ClientUnaryCall; + validatePeer( + request: ValidatePeerRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: ValidatePeerResponse) => void, + ): ClientUnaryCall; + validatePeer( + request: ValidatePeerRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: ValidatePeerResponse) => void, + ): ClientUnaryCall; + createPeer( + request: CreatePeerRequest, + callback: (error: ServiceError | null, response: CreatePeerResponse) => void, + ): ClientUnaryCall; + createPeer( + request: CreatePeerRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: CreatePeerResponse) => void, + ): ClientUnaryCall; + createPeer( + request: CreatePeerRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: CreatePeerResponse) => void, + ): ClientUnaryCall; + dropPeer( + request: DropPeerRequest, + callback: (error: ServiceError | null, response: DropPeerResponse) => void, + ): ClientUnaryCall; + dropPeer( + request: DropPeerRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: DropPeerResponse) 
=> void, + ): ClientUnaryCall; + dropPeer( + request: DropPeerRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: DropPeerResponse) => void, + ): ClientUnaryCall; + createCdcFlow( + request: CreateCDCFlowRequest, + callback: (error: ServiceError | null, response: CreateCDCFlowResponse) => void, + ): ClientUnaryCall; + createCdcFlow( + request: CreateCDCFlowRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: CreateCDCFlowResponse) => void, + ): ClientUnaryCall; + createCdcFlow( + request: CreateCDCFlowRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: CreateCDCFlowResponse) => void, + ): ClientUnaryCall; + createQRepFlow( + request: CreateQRepFlowRequest, + callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, + ): ClientUnaryCall; + createQRepFlow( + request: CreateQRepFlowRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, + ): ClientUnaryCall; + createQRepFlow( + request: CreateQRepFlowRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, + ): ClientUnaryCall; + getSchemas( + request: PostgresPeerActivityInfoRequest, + callback: (error: ServiceError | null, response: PeerSchemasResponse) => void, + ): ClientUnaryCall; + getSchemas( + request: PostgresPeerActivityInfoRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: PeerSchemasResponse) => void, + ): ClientUnaryCall; + getSchemas( + request: PostgresPeerActivityInfoRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: PeerSchemasResponse) => void, + ): ClientUnaryCall; + getTablesInSchema( + request: SchemaTablesRequest, + callback: (error: ServiceError | null, response: SchemaTablesResponse) => void, + ): ClientUnaryCall; + 
getTablesInSchema( + request: SchemaTablesRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: SchemaTablesResponse) => void, + ): ClientUnaryCall; + getTablesInSchema( + request: SchemaTablesRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: SchemaTablesResponse) => void, + ): ClientUnaryCall; + getColumns( + request: TableColumnsRequest, + callback: (error: ServiceError | null, response: TableColumnsResponse) => void, + ): ClientUnaryCall; + getColumns( + request: TableColumnsRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: TableColumnsResponse) => void, + ): ClientUnaryCall; + getColumns( + request: TableColumnsRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: TableColumnsResponse) => void, + ): ClientUnaryCall; + getSlotInfo( + request: PostgresPeerActivityInfoRequest, + callback: (error: ServiceError | null, response: PeerSlotResponse) => void, + ): ClientUnaryCall; + getSlotInfo( + request: PostgresPeerActivityInfoRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: PeerSlotResponse) => void, + ): ClientUnaryCall; + getSlotInfo( + request: PostgresPeerActivityInfoRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: PeerSlotResponse) => void, + ): ClientUnaryCall; + getStatInfo( + request: PostgresPeerActivityInfoRequest, + callback: (error: ServiceError | null, response: PeerStatResponse) => void, + ): ClientUnaryCall; + getStatInfo( + request: PostgresPeerActivityInfoRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: PeerStatResponse) => void, + ): ClientUnaryCall; + getStatInfo( + request: PostgresPeerActivityInfoRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: PeerStatResponse) => void, + ): ClientUnaryCall; + shutdownFlow( + request: 
ShutdownRequest, + callback: (error: ServiceError | null, response: ShutdownResponse) => void, + ): ClientUnaryCall; + shutdownFlow( + request: ShutdownRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: ShutdownResponse) => void, + ): ClientUnaryCall; + shutdownFlow( + request: ShutdownRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: ShutdownResponse) => void, + ): ClientUnaryCall; + mirrorStatus( + request: MirrorStatusRequest, + callback: (error: ServiceError | null, response: MirrorStatusResponse) => void, + ): ClientUnaryCall; + mirrorStatus( + request: MirrorStatusRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: MirrorStatusResponse) => void, + ): ClientUnaryCall; + mirrorStatus( + request: MirrorStatusRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: MirrorStatusResponse) => void, + ): ClientUnaryCall; +} + +export const FlowServiceClient = makeGenericClientConstructor( + FlowServiceService, + "peerdb_route.FlowService", +) as unknown as { + new (address: string, credentials: ChannelCredentials, options?: Partial): FlowServiceClient; + service: typeof FlowServiceService; +}; + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = (t.seconds || 0) * 1_000; + millis += (t.nanos || 0) / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} function isSet(value: any): boolean { return value !== null && value !== undefined; diff --git a/ui/lib/Action/Action.tsx b/ui/lib/Action/Action.tsx index bca91743f..0ddefd1f9 100644 --- a/ui/lib/Action/Action.tsx +++ b/ui/lib/Action/Action.tsx @@ -14,6 +14,7 @@ export function Action({ icon, children, disabled, + ref: _ref, ...actionProps }: ActionProps) { const Icon = renderObjectWith(icon); diff --git a/ui/lib/Avatar/Avatar.tsx b/ui/lib/Avatar/Avatar.tsx index 94265e651..1fbfa5e73 100644 --- a/ui/lib/Avatar/Avatar.tsx +++ b/ui/lib/Avatar/Avatar.tsx @@ -41,7 +41,7 @@ const isAvatarIcon = ( */ export function Avatar({ size, ...baseProps }: AvatarProps) { if (isAvatarImage(baseProps)) { - const { variant: _variant, ...imageProps } = baseProps; + const { variant: _variant, ref: _ref, ...imageProps } = baseProps; return ; } if (isAvatarIcon(baseProps)) { diff --git a/ui/lib/Badge/Badge.styles.ts b/ui/lib/Badge/Badge.styles.ts index 
de564a384..caa2ee443 100644 --- a/ui/lib/Badge/Badge.styles.ts +++ b/ui/lib/Badge/Badge.styles.ts @@ -40,9 +40,16 @@ const singleDigitTypeStyle = css` height: 24px; justify-content: center; `; +const longTextTypeStyle = css` + padding: 10; + width: max-content; + height: 32px; + justify-content: center; +`; const types = { default: defaultTypeStyle, singleDigit: singleDigitTypeStyle, + longText: longTextTypeStyle, }; const baseStyle = css` diff --git a/ui/lib/CodeSnippet/CodeSnippet.tsx b/ui/lib/CodeSnippet/CodeSnippet.tsx index 2bc6c1ca1..9f9d39491 100644 --- a/ui/lib/CodeSnippet/CodeSnippet.tsx +++ b/ui/lib/CodeSnippet/CodeSnippet.tsx @@ -4,7 +4,7 @@ import { BaseTextArea } from './CodeSnippet.styles'; type CodeSnipperProps = ComponentProps<'textarea'>; -export function CodeSnippet({ ...textAreaProps }: CodeSnipperProps) { +export function CodeSnippet({ ref: _ref, ...textAreaProps }: CodeSnipperProps) { return ( ); diff --git a/ui/lib/Icon/DeterminateProgressCircle.tsx b/ui/lib/Icon/DeterminateProgressCircle.tsx index ee6f11499..b74b9e693 100644 --- a/ui/lib/Icon/DeterminateProgressCircle.tsx +++ b/ui/lib/Icon/DeterminateProgressCircle.tsx @@ -19,12 +19,12 @@ export function DeterminateProgressCircle({ cy='12' r='9' stroke='currentColor' - stroke-width='2' + strokeWidth='2' /> ); diff --git a/ui/lib/Media/Media.tsx b/ui/lib/Media/Media.tsx index 653f67fda..9eb4944dc 100644 --- a/ui/lib/Media/Media.tsx +++ b/ui/lib/Media/Media.tsx @@ -11,6 +11,6 @@ type MediaProps = ComponentProps<'img'> & { * * [Figma spec](https://www.figma.com/file/DBMDh1LNNvp9H99N9lZgJ7/PeerDB?type=design&node-id=1-1872&mode=dev) */ -export function Media({ ratio, ...imageProps }: MediaProps) { +export function Media({ ratio, ref: _ref, ...imageProps }: MediaProps) { return ; } diff --git a/ui/lib/ProgressCircle/ProgressCircle.styles.ts b/ui/lib/ProgressCircle/ProgressCircle.styles.ts index 61733dc0a..957ed477e 100644 --- a/ui/lib/ProgressCircle/ProgressCircle.styles.ts +++ 
b/ui/lib/ProgressCircle/ProgressCircle.styles.ts @@ -1,4 +1,8 @@ -import styled, { RuleSet, css, keyframes } from 'styled-components'; +import styled, { + FlattenSimpleInterpolation, + css, + keyframes, +} from 'styled-components'; import { Icon, IconProps } from '../Icon'; const spin = keyframes` @@ -34,7 +38,7 @@ export type ProgressCircleVariant = Extract< const variants = { determinate_progress_circle: spinLinear, intermediate_progress_circle: spin45degIncrements, -} satisfies Record; +} satisfies Record; type BaseIconProps = { $variant: ProgressCircleVariant; diff --git a/ui/lib/Switch/Switch.styles.ts b/ui/lib/Switch/Switch.styles.ts index 968378d55..13fb593dc 100644 --- a/ui/lib/Switch/Switch.styles.ts +++ b/ui/lib/Switch/Switch.styles.ts @@ -6,8 +6,8 @@ export const SwitchRoot = styled(RadixSwitch.Root)` all: unset; - width: 40px; - height: 24px; + width: 30px; + height: 14px; position: relative; background-color: var(--background-color); @@ -41,8 +41,8 @@ export const SwitchRoot = styled(RadixSwitch.Root)` export const SwitchThumb = styled(RadixSwitch.Thumb)` display: block; - width: 20px; - height: 20px; + width: 10px; + height: 10px; border-radius: 50%; background-color: ${({ theme }) => theme.colors.base.surface.normal}; diff --git a/ui/lib/Table/Table.styles.ts b/ui/lib/Table/Table.styles.ts index 8b9375656..09958cbf8 100644 --- a/ui/lib/Table/Table.styles.ts +++ b/ui/lib/Table/Table.styles.ts @@ -19,7 +19,9 @@ export const StyledTable = styled.table` export const StyledTableBody = styled.tbody``; -export const StyledTableHeader = styled.thead``; +export const StyledTableHeader = styled.thead` + text-align: left; +`; export const ToolbarWrapper = styled.div` display: flex; diff --git a/ui/lib/Table/TableCell.styles.ts b/ui/lib/Table/TableCell.styles.ts index da48a6cb0..301a4287e 100644 --- a/ui/lib/Table/TableCell.styles.ts +++ b/ui/lib/Table/TableCell.styles.ts @@ -20,7 +20,5 @@ type BaseTableCellProps = { export const BaseTableCell = styled.td` 
border-collapse: collapse; overflow: hidden; - white-space: nowrap; - text-overflow: ellipsis; ${({ $variant }) => variants[$variant]} `; diff --git a/ui/lib/Thumbnail/Thumbnail.tsx b/ui/lib/Thumbnail/Thumbnail.tsx index 9436f0eb1..ba7e667cd 100644 --- a/ui/lib/Thumbnail/Thumbnail.tsx +++ b/ui/lib/Thumbnail/Thumbnail.tsx @@ -12,6 +12,6 @@ type ThumbnailProps = { * * [Figma spec](https://www.figma.com/file/DBMDh1LNNvp9H99N9lZgJ7/PeerDB?type=design&node-id=1-1532&mode=dev) */ -export function Thumbnail({ size, ...imageProps }: ThumbnailProps) { +export function Thumbnail({ size, ref: _ref, ...imageProps }: ThumbnailProps) { return ; } diff --git a/ui/lib/Toast/Toast.styles.ts b/ui/lib/Toast/Toast.styles.ts index b9e7a8961..22ca966d7 100644 --- a/ui/lib/Toast/Toast.styles.ts +++ b/ui/lib/Toast/Toast.styles.ts @@ -1,5 +1,5 @@ import * as RadixToast from '@radix-ui/react-toast'; -import { keyframes, styled } from 'styled-components'; +import styled, { keyframes } from 'styled-components'; export const ToastAction = styled(RadixToast.Action)` all: unset; diff --git a/ui/lib/utils/cn.ts b/ui/lib/utils/cn.ts new file mode 100644 index 000000000..2819a830d --- /dev/null +++ b/ui/lib/utils/cn.ts @@ -0,0 +1,6 @@ +import { clsx, type ClassValue } from 'clsx'; +import { twMerge } from 'tailwind-merge'; + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)); +} diff --git a/ui/package.json b/ui/package.json index 5d1bdedb7..c06026b57 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,5 +1,5 @@ { - "name": "peerdb-cloud-template", + "name": "peerdb-ui", "version": "0.1.0", "private": true, "scripts": { @@ -15,13 +15,19 @@ }, "dependencies": { "@grpc/grpc-js": "^1.9.2", + "@monaco-editor/react": "^4.6.0", + "@prisma/client": "^5.4.2", "@radix-ui/react-checkbox": "^1.0.4", + "@radix-ui/react-collapsible": "^1.0.3", "@radix-ui/react-dialog": "^1.0.4", "@radix-ui/react-form": "^0.0.3", + "@radix-ui/react-icons": "^1.3.0", + "@radix-ui/react-popover": 
"^1.0.7", "@radix-ui/react-progress": "^1.0.3", "@radix-ui/react-radio-group": "^1.1.3", "@radix-ui/react-select": "^1.2.2", "@radix-ui/react-switch": "^1.0.3", + "@radix-ui/react-tabs": "^1.0.4", "@radix-ui/react-toast": "^1.1.4", "@radix-ui/react-toggle": "^1.0.3", "@radix-ui/react-toggle-group": "^1.0.4", @@ -31,14 +37,21 @@ "@types/react": "18.2.21", "@types/react-dom": "18.2.7", "classnames": "^2.3.2", + "clsx": "^2.0.0", "long": "^5.2.3", + "lucide-react": "^0.287.0", "material-symbols": "0.11.0", - "next": "13.4.16", + "moment": "^2.29.4", + "next": "13.5.5", + "next-usequerystate": "^1.8.4", "prop-types": "^15.8.1", "protobufjs": "^7.2.5", "react": "18.2.0", "react-dom": "18.2.0", - "styled-components": "^6.0.7" + "react-spinners": "^0.13.8", + "styled-components": "^6.0.7", + "swr": "^2.2.4", + "zod": "^3.22.4" }, "devDependencies": { "@storybook/addon-essentials": "^7.3.0", @@ -49,6 +62,7 @@ "@storybook/nextjs": "^7.3.0", "@storybook/react": "^7.3.0", "@storybook/testing-library": "^0.2.0", + "@types/styled-components": "^5.1.28", "autoprefixer": "^10.4.15", "copy-webpack-plugin": "^11.0.0", "eslint": "^8.48.0", @@ -60,9 +74,11 @@ "postcss": "^8.4.28", "prettier": "^3.0.1", "prettier-plugin-organize-imports": "^3.2.3", + "prisma": "^5.4.2", "storybook": "^7.3.0", "string-width": "^6.1.0", "tailwindcss": "^3.3.3", + "tailwindcss-animate": "^1.0.7", "typescript": "^5.2.2", "webpack": "^5.76.0" }, diff --git a/ui/prisma/schema.prisma b/ui/prisma/schema.prisma new file mode 100644 index 000000000..81007f190 --- /dev/null +++ b/ui/prisma/schema.prisma @@ -0,0 +1,146 @@ +generator client { + provider = "prisma-client-js" + previewFeatures = ["multiSchema"] +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + schemas = ["peerdb_stats", "public"] +} + +model flows { + id Int @id @default(autoincrement()) + name String @db.VarChar(255) + source_peer Int + destination_peer Int + description String? 
+ created_at DateTime @default(now()) @db.Timestamp(6) + updated_at DateTime @default(now()) @db.Timestamp(6) + source_table_identifier String? + destination_table_identifier String? + workflow_id String? + flow_status String? + flow_metadata Json? + query_string String? + config_proto Bytes? + destinationPeer peers @relation("flows_destination_peerTopeers", fields: [destination_peer], references: [id], onDelete: NoAction, onUpdate: NoAction) + sourcePeer peers @relation("flows_source_peerTopeers", fields: [source_peer], references: [id], onDelete: NoAction, onUpdate: NoAction) + + @@schema("public") +} + +model peer_connections { + id Int @id @default(autoincrement()) + conn_uuid String? @db.Uuid + peer_name String + query String? + opened_at DateTime @default(now()) @db.Timestamptz(6) + closed_at DateTime? @db.Timestamptz(6) + peers peers @relation(fields: [peer_name], references: [name], onDelete: NoAction, onUpdate: NoAction) + + @@schema("public") +} + +model peers { + id Int @id @default(autoincrement()) + name String @unique + type Int + options Bytes + flows_flows_destination_peerTopeers flows[] @relation("flows_destination_peerTopeers") + flows_flows_source_peerTopeers flows[] @relation("flows_source_peerTopeers") + peer_connections peer_connections[] + + @@schema("public") +} + +model refinery_schema_history { + version Int @id + name String? @db.VarChar(255) + applied_on String? @db.VarChar(255) + checksum String? @db.VarChar(255) + + @@ignore + @@schema("public") +} + +model cdc_batch_table { + flow_name String + batch_id BigInt + destination_table_name String + num_rows BigInt + metadata Json? 
+ id Int @id @default(autoincrement()) + cdc_flows cdc_flows @relation(fields: [flow_name], references: [flow_name], onDelete: Cascade, onUpdate: NoAction, map: "fk_cdc_batch_table_flow_name") + + @@index([flow_name, batch_id], map: "idx_cdc_batch_table_flow_name_batch_id") + @@schema("peerdb_stats") +} + +model cdc_batches { + flow_name String + batch_id BigInt + rows_in_batch Int + batch_start_lsn Decimal @db.Decimal + batch_end_lsn Decimal @db.Decimal + start_time DateTime @db.Timestamp(6) + end_time DateTime? @db.Timestamp(6) + metadata Json? + id Int @id @default(autoincrement()) + cdc_flows cdc_flows @relation(fields: [flow_name], references: [flow_name], onDelete: Cascade, onUpdate: NoAction, map: "fk_cdc_batches_flow_name") + + @@index([batch_id], map: "idx_cdc_batches_batch_id") + @@index([flow_name], map: "idx_cdc_batches_flow_name", type: Hash) + @@index([start_time], map: "idx_cdc_batches_start_time") + @@schema("peerdb_stats") +} + +model cdc_flows { + flow_name String @id + latest_lsn_at_source Decimal @db.Decimal + latest_lsn_at_target Decimal @db.Decimal + metadata Json? + cdc_batch_table cdc_batch_table[] + cdc_batches cdc_batches[] + + @@schema("peerdb_stats") +} + +model qrep_partitions { + flow_name String + run_uuid String + partition_uuid String + partition_start String + partition_end String + rows_in_partition Int? + start_time DateTime? @db.Timestamp(6) + pull_end_time DateTime? @db.Timestamp(6) + end_time DateTime? @db.Timestamp(6) + restart_count Int + metadata Json? 
+ id Int @id @default(autoincrement()) + qrep_runs qrep_runs @relation(fields: [flow_name, run_uuid], references: [flow_name, run_uuid], onDelete: Cascade, onUpdate: NoAction, map: "fk_qrep_partitions_run") + + @@unique([run_uuid, partition_uuid]) + @@index([flow_name, run_uuid], map: "idx_qrep_partitions_flow_name_run_uuid") + @@index([partition_uuid], map: "idx_qrep_partitions_partition_uuid", type: Hash) + @@index([start_time], map: "idx_qrep_partitions_start_time") + @@schema("peerdb_stats") +} + +model qrep_runs { + flow_name String + run_uuid String + start_time DateTime? @db.Timestamp(6) + end_time DateTime? @db.Timestamp(6) + metadata Json? + config_proto Bytes? + id Int @id @default(autoincrement()) + qrep_partitions qrep_partitions[] + + @@unique([flow_name, run_uuid], map: "uq_qrep_runs_flow_run") + @@index([flow_name], map: "idx_qrep_runs_flow_name", type: Hash) + @@index([run_uuid], map: "idx_qrep_runs_run_uuid", type: Hash) + @@index([start_time], map: "idx_qrep_runs_start_time") + @@schema("peerdb_stats") +} diff --git a/ui/rpc/http.ts b/ui/rpc/http.ts new file mode 100644 index 000000000..c0d9b7554 --- /dev/null +++ b/ui/rpc/http.ts @@ -0,0 +1,5 @@ +import 'server-only'; + +export function GetFlowHttpAddressFromEnv() { + return process.env.PEERDB_FLOW_SERVER_HTTP!; +} diff --git a/ui/rpc/promisify.ts b/ui/rpc/promisify.ts deleted file mode 100644 index b561f7c01..000000000 --- a/ui/rpc/promisify.ts +++ /dev/null @@ -1,53 +0,0 @@ -import type { - CallOptions, - Client, - ClientUnaryCall, - Metadata, - ServiceError, -} from '@grpc/grpc-js'; - -type OriginalCall = ( - request: T, - metadata: Metadata, - options: Partial, - callback: (err: ServiceError | null, res?: U) => void -) => ClientUnaryCall; - -type PromisifiedCall = ( - request: T, - metadata?: Metadata, - options?: Partial -) => Promise; - -export type PromisifiedClient = { $: C } & { - [prop in Exclude]: C[prop] extends OriginalCall< - infer T, - infer U - > - ? 
PromisifiedCall - : never; -}; - -export function promisifyClient(client: C) { - return new Proxy(client, { - get: (target, descriptor) => { - const key = descriptor as keyof PromisifiedClient; - - if (key === '$') return target; - - const func = target[key]; - if (typeof func === 'function') - return (...args: unknown[]) => - new Promise((resolve, reject) => - func.call( - target, - ...[ - ...args, - (err: unknown, res: unknown) => - err ? reject(err) : resolve(res), - ] - ) - ); - }, - }) as unknown as PromisifiedClient; -} diff --git a/ui/rpc/rpc.ts b/ui/rpc/rpc.ts deleted file mode 100644 index 1c344ae81..000000000 --- a/ui/rpc/rpc.ts +++ /dev/null @@ -1,12 +0,0 @@ -import 'server-only'; - -import { FlowServiceClient } from '@/grpc_generated/route'; -import { credentials } from '@grpc/grpc-js'; -import { promisifyClient } from './promisify'; - -export function GetFlowServiceClient(address: string) { - console.log(`Connecting to Flow server at ${address}`); - return promisifyClient( - new FlowServiceClient(address, credentials.createInsecure()) - ); -} diff --git a/ui/tailwind.config.ts b/ui/tailwind.config.ts index 08d37404c..49e081636 100644 --- a/ui/tailwind.config.ts +++ b/ui/tailwind.config.ts @@ -101,5 +101,5 @@ module.exports = { /^(fill-(?:slate|gray|zinc|neutral|stone|red|orange|amber|yellow|lime|green|emerald|teal|cyan|sky|blue|indigo|violet|purple|fuchsia|pink|rose)-(?:50|100|200|300|400|500|600|700|800|900|950))$/, }, ], - plugins: [require('@headlessui/tailwindcss')], + plugins: [require('@headlessui/tailwindcss'), require('tailwindcss-animate')], } satisfies Config; diff --git a/ui/yarn.lock b/ui/yarn.lock index 726a031fd..555909847 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -4,17 +4,17 @@ "@aashutoshrathi/word-wrap@^1.2.3": version "1.2.6" - resolved "https://registry.yarnpkg.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf" + resolved 
"https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz" integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA== "@alloc/quick-lru@^5.2.0": version "5.2.0" - resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30" + resolved "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz" integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw== "@ampproject/remapping@^2.2.0": version "2.2.1" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" + resolved "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz" integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== dependencies: "@jridgewell/gen-mapping" "^0.3.0" @@ -22,19 +22,19 @@ "@aw-web-design/x-default-browser@1.4.126": version "1.4.126" - resolved "https://registry.yarnpkg.com/@aw-web-design/x-default-browser/-/x-default-browser-1.4.126.tgz#43e4bd8f0314ed907a8718d7e862a203af79bc16" + resolved "https://registry.npmjs.org/@aw-web-design/x-default-browser/-/x-default-browser-1.4.126.tgz" integrity sha512-Xk1sIhyNC/esHGGVjL/niHLowM0csl/kFO5uawBy4IrWwy0o1G8LGt3jP6nmWGz+USxeeqbihAmp/oVZju6wug== dependencies: default-browser-id "3.0.0" "@babel/cli@^7.21.0": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/cli/-/cli-7.22.15.tgz#22ed82d76745a43caa60a89917bedb7c9b5bd145" - integrity sha512-prtg5f6zCERIaECeTZzd2fMtVjlfjhUcO+fBLQ6DXXdq5FljN+excVitJ2nogsusdf31LeqkjAfXZ7Xq+HmN8g== + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/cli/-/cli-7.23.0.tgz" + integrity sha512-17E1oSkGk2IwNILM4jtfAvgjt+ohmpfBky8aLerUfYZhiPNg7ca+CRCxZn8QDxwNhV/upsc2VHBCqGFIR+iBfA== dependencies: "@jridgewell/trace-mapping" "^0.3.17" commander "^4.0.1" - convert-source-map "^1.1.0" + 
convert-source-map "^2.0.0" fs-readdir-recursive "^1.1.0" glob "^7.2.0" make-dir "^2.1.0" @@ -45,65 +45,65 @@ "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.16.7", "@babel/code-frame@^7.22.13": version "7.22.13" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz" integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== dependencies: "@babel/highlight" "^7.22.13" chalk "^2.4.2" -"@babel/compat-data@^7.20.5", "@babel/compat-data@^7.22.6", "@babel/compat-data@^7.22.9": - version "7.22.9" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.9.tgz#71cdb00a1ce3a329ce4cbec3a44f9fef35669730" - integrity sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ== +"@babel/compat-data@^7.20.5", "@babel/compat-data@^7.22.6", "@babel/compat-data@^7.22.9", "@babel/compat-data@^7.23.2": + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.2.tgz" + integrity sha512-0S9TQMmDHlqAZ2ITT95irXKfxN9bncq8ZCoJhun3nHL/lLUxd2NKBJYoNGWH7S0hz6fRQwWlAWn/ILM0C70KZQ== -"@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.13.16", "@babel/core@^7.21.0", "@babel/core@^7.22.0", "@babel/core@^7.22.9", "@babel/core@^7.7.5": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.15.tgz#15d4fd03f478a459015a4b94cfbb3bd42c48d2f4" - integrity sha512-PtZqMmgRrvj8ruoEOIwVA3yoF91O+Hgw9o7DAUTNBA6Mo2jpu31clx9a7Nz/9JznqetTR6zwfC4L3LAjKQXUwA== +"@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.13.16", "@babel/core@^7.21.0", "@babel/core@^7.22.9", "@babel/core@^7.7.5": + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/core/-/core-7.23.2.tgz" + integrity 
sha512-n7s51eWdaWZ3vGT2tD4T7J6eJs3QoBXydv7vkUM06Bf1cbVD2Kc2UrkzhiQwobfV7NwOnQXYL7UBJ5VPU+RGoQ== dependencies: "@ampproject/remapping" "^2.2.0" "@babel/code-frame" "^7.22.13" - "@babel/generator" "^7.22.15" + "@babel/generator" "^7.23.0" "@babel/helper-compilation-targets" "^7.22.15" - "@babel/helper-module-transforms" "^7.22.15" - "@babel/helpers" "^7.22.15" - "@babel/parser" "^7.22.15" + "@babel/helper-module-transforms" "^7.23.0" + "@babel/helpers" "^7.23.2" + "@babel/parser" "^7.23.0" "@babel/template" "^7.22.15" - "@babel/traverse" "^7.22.15" - "@babel/types" "^7.22.15" - convert-source-map "^1.7.0" + "@babel/traverse" "^7.23.2" + "@babel/types" "^7.23.0" + convert-source-map "^2.0.0" debug "^4.1.0" gensync "^1.0.0-beta.2" json5 "^2.2.3" semver "^6.3.1" -"@babel/generator@^7.12.11", "@babel/generator@^7.22.15", "@babel/generator@^7.22.9": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.15.tgz#1564189c7ec94cb8f77b5e8a90c4d200d21b2339" - integrity sha512-Zu9oWARBqeVOW0dZOjXc3JObrzuqothQ3y/n1kUtrjCoCPLkXUwMvOo/F/TCfoHMbWIFlWwpZtkZVb9ga4U2pA== +"@babel/generator@^7.12.11", "@babel/generator@^7.22.9", "@babel/generator@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz" + integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g== dependencies: - "@babel/types" "^7.22.15" + "@babel/types" "^7.23.0" "@jridgewell/gen-mapping" "^0.3.2" "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" "@babel/helper-annotate-as-pure@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" + resolved "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz" integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== dependencies: 
"@babel/types" "^7.22.5" "@babel/helper-builder-binary-assignment-operator-visitor@^7.22.5": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz#5426b109cf3ad47b91120f8328d8ab1be8b0b956" + resolved "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz" integrity sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw== dependencies: "@babel/types" "^7.22.15" "@babel/helper-compilation-targets@^7.20.7", "@babel/helper-compilation-targets@^7.22.15", "@babel/helper-compilation-targets@^7.22.5", "@babel/helper-compilation-targets@^7.22.6": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz#0698fc44551a26cf29f18d4662d5bf545a6cfc52" + resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz" integrity sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw== dependencies: "@babel/compat-data" "^7.22.9" @@ -114,7 +114,7 @@ "@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.22.11", "@babel/helper-create-class-features-plugin@^7.22.15", "@babel/helper-create-class-features-plugin@^7.22.5": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.15.tgz#97a61b385e57fe458496fad19f8e63b63c867de4" + resolved "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.15.tgz" integrity sha512-jKkwA59IXcvSaiK2UN45kKwSC9o+KuoXsBDvHvU/7BecYIp8GQ2UwrVvFgJASUT+hBnwJx6MhvMCuMzwZZ7jlg== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" @@ -129,17 +129,17 @@ 
"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.22.5": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz#5ee90093914ea09639b01c711db0d6775e558be1" + resolved "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz" integrity sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" regexpu-core "^5.3.1" semver "^6.3.1" -"@babel/helper-define-polyfill-provider@^0.4.2": - version "0.4.2" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.4.2.tgz#82c825cadeeeee7aad237618ebbe8fa1710015d7" - integrity sha512-k0qnnOqHn5dK9pZpfD5XXZ9SojAITdCKRn2Lp6rnDGzIbaP0rHyMPk/4wsSxVBVz4RfN0q6VpXWP2pDGIoQ7hw== +"@babel/helper-define-polyfill-provider@^0.4.3": + version "0.4.3" + resolved "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.4.3.tgz" + integrity sha512-WBrLmuPP47n7PNwsZ57pqam6G/RGo1vw/87b0Blc53tZNGZ4x7YvZ6HgQe2vo1W/FR20OgjeZuGXzudPiXHFug== dependencies: "@babel/helper-compilation-targets" "^7.22.6" "@babel/helper-plugin-utils" "^7.22.5" @@ -147,159 +147,159 @@ lodash.debounce "^4.0.8" resolve "^1.14.2" -"@babel/helper-environment-visitor@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" - integrity sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q== +"@babel/helper-environment-visitor@^7.22.20", "@babel/helper-environment-visitor@^7.22.5": + version "7.22.20" + resolved 
"https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== -"@babel/helper-function-name@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz#ede300828905bb15e582c037162f99d5183af1be" - integrity sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ== +"@babel/helper-function-name@^7.22.5", "@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== dependencies: - "@babel/template" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" "@babel/helper-hoist-variables@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + resolved "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz" integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== dependencies: "@babel/types" "^7.22.5" -"@babel/helper-member-expression-to-functions@^7.22.15", "@babel/helper-member-expression-to-functions@^7.22.5": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.22.15.tgz#b95a144896f6d491ca7863576f820f3628818621" - integrity sha512-qLNsZbgrNh0fDQBCPocSL8guki1hcPvltGDv/NxvUoABwFq7GkKSu1nRXeJkVZc+wJvne2E0RKQz+2SQrz6eAA== +"@babel/helper-member-expression-to-functions@^7.22.15": + version "7.23.0" + resolved 
"https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.23.0.tgz" + integrity sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA== dependencies: - "@babel/types" "^7.22.15" + "@babel/types" "^7.23.0" "@babel/helper-module-imports@^7.18.6", "@babel/helper-module-imports@^7.22.15", "@babel/helper-module-imports@^7.22.5": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" + resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz" integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== dependencies: "@babel/types" "^7.22.15" -"@babel/helper-module-transforms@^7.22.15", "@babel/helper-module-transforms@^7.22.5", "@babel/helper-module-transforms@^7.22.9": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.15.tgz#40ad2f6950f143900e9c1c72363c0b431a606082" - integrity sha512-l1UiX4UyHSFsYt17iQ3Se5pQQZZHa22zyIXURmvkmLCD4t/aU+dvNWHatKac/D9Vm9UES7nvIqHs4jZqKviUmQ== +"@babel/helper-module-transforms@^7.22.5", "@babel/helper-module-transforms@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.23.0.tgz" + integrity sha512-WhDWw1tdrlT0gMgUJSlX0IQvoO1eN279zrAUbVB+KpV2c3Tylz8+GnKOLllCS6Z/iZQEyVYxhZVUdPTqs2YYPw== dependencies: - "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.20" "@babel/helper-module-imports" "^7.22.15" "@babel/helper-simple-access" "^7.22.5" "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/helper-validator-identifier" "^7.22.15" + "@babel/helper-validator-identifier" "^7.22.20" "@babel/helper-optimise-call-expression@^7.22.5": version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" + resolved "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz" integrity sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw== dependencies: "@babel/types" "^7.22.5" "@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" + resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz" integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== -"@babel/helper-remap-async-to-generator@^7.22.5", "@babel/helper-remap-async-to-generator@^7.22.9": - version "7.22.9" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.9.tgz#53a25b7484e722d7efb9c350c75c032d4628de82" - integrity sha512-8WWC4oR4Px+tr+Fp0X3RHDVfINGpF3ad1HIbrc8A77epiR6eMMc6jsgozkzT2uDiOOdoS9cLIQ+XD2XvI2WSmQ== +"@babel/helper-remap-async-to-generator@^7.22.20", "@babel/helper-remap-async-to-generator@^7.22.5": + version "7.22.20" + resolved "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.20.tgz" + integrity sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" - 
"@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-wrap-function" "^7.22.9" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-wrap-function" "^7.22.20" "@babel/helper-replace-supers@^7.22.5", "@babel/helper-replace-supers@^7.22.9": - version "7.22.9" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.22.9.tgz#cbdc27d6d8d18cd22c81ae4293765a5d9afd0779" - integrity sha512-LJIKvvpgPOPUThdYqcX6IXRuIcTkcAub0IaDRGCZH0p5GPUp7PhRU9QVgFcDDd51BaPkk77ZjqFwh6DZTAEmGg== + version "7.22.20" + resolved "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.22.20.tgz" + integrity sha512-qsW0In3dbwQUbK8kejJ4R7IHVGwHJlV6lpG6UA7a9hSa2YEiAib+N1T2kr6PEeUT+Fl7najmSOS6SmAwCHK6Tw== dependencies: - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-member-expression-to-functions" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-member-expression-to-functions" "^7.22.15" "@babel/helper-optimise-call-expression" "^7.22.5" "@babel/helper-simple-access@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" + resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz" integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== dependencies: "@babel/types" "^7.22.5" "@babel/helper-skip-transparent-expression-wrappers@^7.20.0", "@babel/helper-skip-transparent-expression-wrappers@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz#007f15240b5751c537c40e77abb4e89eeaaa8847" + resolved "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz" integrity 
sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q== dependencies: "@babel/types" "^7.22.5" "@babel/helper-split-export-declaration@^7.22.6": version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz" integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== dependencies: "@babel/types" "^7.22.5" "@babel/helper-string-parser@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz" integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== -"@babel/helper-validator-identifier@^7.22.15", "@babel/helper-validator-identifier@^7.22.5": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.15.tgz#601fa28e4cc06786c18912dca138cec73b882044" - integrity sha512-4E/F9IIEi8WR94324mbDUMo074YTheJmd7eZF5vITTeYchqAi6sYXRLHUVsmkdmY4QjfKTcB2jB7dVP3NaBElQ== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== "@babel/helper-validator-option@^7.22.15": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.15.tgz#694c30dfa1d09a6534cdfcafbe56789d36aba040" + resolved 
"https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.15.tgz" integrity sha512-bMn7RmyFjY/mdECUbgn9eoSY4vqvacUnS9i9vGAGttgFWesO6B4CYWA7XlpbWgBt71iv/hfbPlynohStqnu5hA== -"@babel/helper-wrap-function@^7.22.9": - version "7.22.10" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.22.10.tgz#d845e043880ed0b8c18bd194a12005cb16d2f614" - integrity sha512-OnMhjWjuGYtdoO3FmsEFWvBStBAe2QOgwOLsLNDjN+aaiMD8InJk1/O3HSD8lkqTjCgg5YI34Tz15KNNA3p+nQ== +"@babel/helper-wrap-function@^7.22.20": + version "7.22.20" + resolved "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.22.20.tgz" + integrity sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw== dependencies: "@babel/helper-function-name" "^7.22.5" - "@babel/template" "^7.22.5" - "@babel/types" "^7.22.10" + "@babel/template" "^7.22.15" + "@babel/types" "^7.22.19" -"@babel/helpers@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.22.15.tgz#f09c3df31e86e3ea0b7ff7556d85cdebd47ea6f1" - integrity sha512-7pAjK0aSdxOwR+CcYAqgWOGy5dcfvzsTIfFTb2odQqW47MDfv14UaJDY6eng8ylM2EaeKXdxaSWESbkmaQHTmw== +"@babel/helpers@^7.23.2": + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.2.tgz" + integrity sha512-lzchcp8SjTSVe/fPmLwtWVBFC7+Tbn8LGHDVfDp9JGxpAY5opSaEFgt8UQvrnECWOTdji2mOWMz1rOhkHscmGQ== dependencies: "@babel/template" "^7.22.15" - "@babel/traverse" "^7.22.15" - "@babel/types" "^7.22.15" + "@babel/traverse" "^7.23.2" + "@babel/types" "^7.23.0" "@babel/highlight@^7.22.13": - version "7.22.13" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.13.tgz#9cda839e5d3be9ca9e8c26b6dd69e7548f0cbf16" - integrity sha512-C/BaXcnnvBCmHTpz/VGZ8jgtE2aYlW4hxDhseJAWZb7gqGM/qtCK6iZUb0TyKFf7BOUsBH7Q7fkRsDRhg1XklQ== + version "7.22.20" + resolved 
"https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz" + integrity sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg== dependencies: - "@babel/helper-validator-identifier" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/parser@^7.1.0", "@babel/parser@^7.13.16", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.15", "@babel/parser@^7.22.7": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.15.tgz#d34592bfe288a32e741aa0663dbc4829fcd55160" - integrity sha512-RWmQ/sklUN9BvGGpCDgSubhHWfAx24XDTDObup4ffvxaYsptOg2P3KG0j+1eWKLxpkX0j0uHxmpq2Z1SP/VhxA== +"@babel/parser@^7.1.0", "@babel/parser@^7.13.16", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.15", "@babel/parser@^7.22.7", "@babel/parser@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz" + integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.15": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.15.tgz#02dc8a03f613ed5fdc29fb2f728397c78146c962" + resolved "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.15.tgz" integrity sha512-FB9iYlz7rURmRJyXRKEnalYPPdn87H5no108cyuQQyMwlpJ2SJtpIUBI27kdTin956pz+LPypkPVPUTlxOmrsg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.22.15": version "7.22.15" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.22.15.tgz#2aeb91d337d4e1a1e7ce85b76a37f5301781200f" + resolved "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.22.15.tgz" integrity sha512-Hyph9LseGvAeeXzikV88bczhsrLrIZqDPxO+sSmAunMPaGrBGhfMWzCPYTtiW9t+HzSE2wtV8e5cc5P6r1xMDQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -308,14 +308,14 @@ "@babel/plugin-external-helpers@^7.18.6": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-external-helpers/-/plugin-external-helpers-7.22.5.tgz#92b0705b74756123f289388320e0e12c407fdf9a" + resolved "https://registry.npmjs.org/@babel/plugin-external-helpers/-/plugin-external-helpers-7.22.5.tgz" integrity sha512-ngnNEWxmykPk82mH4ajZT0qTztr3Je6hrMuKAslZVM8G1YZTENJSYwrIGtt6KOtznug3exmAtF4so/nPqJuA4A== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-proposal-class-properties@^7.13.0", "@babel/plugin-proposal-class-properties@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz" integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== dependencies: "@babel/helper-create-class-features-plugin" "^7.18.6" @@ -323,7 +323,7 @@ "@babel/plugin-proposal-export-namespace-from@^7.18.9": version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" + resolved 
"https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz" integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== dependencies: "@babel/helper-plugin-utils" "^7.18.9" @@ -331,7 +331,7 @@ "@babel/plugin-proposal-nullish-coalescing-operator@^7.13.8": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz" integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== dependencies: "@babel/helper-plugin-utils" "^7.18.6" @@ -339,7 +339,7 @@ "@babel/plugin-proposal-numeric-separator@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz" integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== dependencies: "@babel/helper-plugin-utils" "^7.18.6" @@ -347,7 +347,7 @@ "@babel/plugin-proposal-object-rest-spread@^7.20.7": version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz#aa662940ef425779c75534a5c41e9d936edc390a" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz" integrity sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg== dependencies: "@babel/compat-data" "^7.20.5" @@ -358,7 +358,7 @@ 
"@babel/plugin-proposal-optional-chaining@^7.13.12": version "7.21.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz#886f5c8978deb7d30f678b2e24346b287234d3ea" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz" integrity sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA== dependencies: "@babel/helper-plugin-utils" "^7.20.2" @@ -367,159 +367,159 @@ "@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": version "7.21.0-placeholder-for-preset-env.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz#7844f9289546efa9febac2de4cfe358a050bd703" + resolved "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz" integrity sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w== "@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz" integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-bigint@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz" 
integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-class-properties@^7.12.13": version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz" integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== dependencies: "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-syntax-class-static-block@^7.14.5": version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz" integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== dependencies: "@babel/helper-plugin-utils" "^7.14.5" "@babel/plugin-syntax-dynamic-import@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz" integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-export-namespace-from@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + resolved 
"https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz" integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== dependencies: "@babel/helper-plugin-utils" "^7.8.3" "@babel/plugin-syntax-flow@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.22.5.tgz#163b820b9e7696ce134df3ee716d9c0c98035859" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.22.5.tgz" integrity sha512-9RdCl0i+q0QExayk2nOS7853w08yLucnnPML6EN9S8fgMPVtdLDCdx/cOQ/i44Lb9UeQX9A35yaqBBOMMZxPxQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-import-assertions@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz#07d252e2aa0bc6125567f742cd58619cb14dce98" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz" integrity sha512-rdV97N7KqsRzeNGoWUOK6yUsWarLjE5Su/Snk9IYPU9CwkWHs4t+rTGOvffTR8XGkJMTAdLfO0xVnXm8wugIJg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-import-attributes@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.22.5.tgz#ab840248d834410b829f569f5262b9e517555ecb" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.22.5.tgz" integrity sha512-KwvoWDeNKPETmozyFE0P2rOLqh39EoQHNjqizrI5B8Vt0ZNS7M56s7dAiAqbYfiAYOuIzIh96z3iR2ktgu3tEg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-import-meta@^7.10.4": version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + resolved 
"https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz" integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-json-strings@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz" integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-jsx@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz#a6b68e84fb76e759fc3b93e901876ffabbe1d918" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz" integrity sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-logical-assignment-operators@^7.10.4": version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": version "7.8.3" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz" integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-numeric-separator@^7.10.4": version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz" integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-object-rest-spread@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-optional-catch-binding@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz" integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== 
dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-optional-chaining@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz" integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-private-property-in-object@^7.14.5": version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz" integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== dependencies: "@babel/helper-plugin-utils" "^7.14.5" "@babel/plugin-syntax-top-level-await@^7.14.5": version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz" integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== dependencies: "@babel/helper-plugin-utils" "^7.14.5" "@babel/plugin-syntax-typescript@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz#aac8d383b062c5072c647a31ef990c1d0af90272" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz" integrity 
sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-unicode-sets-regex@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz#d49a3b3e6b52e5be6740022317580234a6a47357" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz" integrity sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.18.6" @@ -527,24 +527,24 @@ "@babel/plugin-transform-arrow-functions@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.22.5.tgz#e5ba566d0c58a5b2ba2a8b795450641950b71958" + resolved "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.22.5.tgz" integrity sha512-26lTNXoVRdAnsaDXPpvCNUq+OVWEVC6bx7Vvz9rC53F2bagUWW4u4ii2+h8Fejfh7RYqPxn+libeFBBck9muEw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-async-generator-functions@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.22.15.tgz#3b153af4a6b779f340d5b80d3f634f55820aefa3" - integrity sha512-jBm1Es25Y+tVoTi5rfd5t1KLmL8ogLKpXszboWOTTtGFGz2RKnQe2yn7HbZ+kb/B8N0FVSGQo874NSlOU1T4+w== +"@babel/plugin-transform-async-generator-functions@^7.23.2": + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.23.2.tgz" + integrity sha512-BBYVGxbDVHfoeXbOwcagAkOQAm9NxoTdMGfTqghu1GrvadSaw6iW3Je6IcL5PNOw8VwjxqBECXy50/iCQSY/lQ== dependencies: - "@babel/helper-environment-visitor" 
"^7.22.5" + "@babel/helper-environment-visitor" "^7.22.20" "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-remap-async-to-generator" "^7.22.9" + "@babel/helper-remap-async-to-generator" "^7.22.20" "@babel/plugin-syntax-async-generators" "^7.8.4" "@babel/plugin-transform-async-to-generator@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.22.5.tgz#c7a85f44e46f8952f6d27fe57c2ed3cc084c3775" + resolved "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.22.5.tgz" integrity sha512-b1A8D8ZzE/VhNDoV1MSJTnpKkCG5bJo+19R4o4oy03zM7ws8yEMK755j61Dc3EyvdysbqH5BOOTquJ7ZX9C6vQ== dependencies: "@babel/helper-module-imports" "^7.22.5" @@ -553,21 +553,21 @@ "@babel/plugin-transform-block-scoped-functions@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.22.5.tgz#27978075bfaeb9fa586d3cb63a3d30c1de580024" + resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.22.5.tgz" integrity sha512-tdXZ2UdknEKQWKJP1KMNmuF5Lx3MymtMN/pvA+p/VEkhK8jVcQ1fzSy8KM9qRYhAf2/lV33hoMPKI/xaI9sADA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-block-scoping@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.22.15.tgz#494eb82b87b5f8b1d8f6f28ea74078ec0a10a841" - integrity sha512-G1czpdJBZCtngoK1sJgloLiOHUnkb/bLZwqVZD8kXmq0ZnVfTTWUcs9OWtp0mBtYJ+4LQY1fllqBkOIPhXmFmw== +"@babel/plugin-transform-block-scoping@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.23.0.tgz" + integrity 
sha512-cOsrbmIOXmf+5YbL99/S49Y3j46k/T16b9ml8bm9lP6N9US5iQ2yBK7gpui1pg0V/WMcXdkfKbTb7HXq9u+v4g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-class-properties@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.22.5.tgz#97a56e31ad8c9dc06a0b3710ce7803d5a48cca77" + resolved "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.22.5.tgz" integrity sha512-nDkQ0NfkOhPTq8YCLiWNxp1+f9fCobEjCb0n8WdbNUBc4IB5V7P1QnX9IjpSoquKrXF5SKojHleVNs2vGeHCHQ== dependencies: "@babel/helper-create-class-features-plugin" "^7.22.5" @@ -575,7 +575,7 @@ "@babel/plugin-transform-class-static-block@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.22.11.tgz#dc8cc6e498f55692ac6b4b89e56d87cec766c974" + resolved "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.22.11.tgz" integrity sha512-GMM8gGmqI7guS/llMFk1bJDkKfn3v3C4KHK9Yg1ey5qcHcOlKb0QvcMrgzvxo+T03/4szNh5lghY+fEC98Kq9g== dependencies: "@babel/helper-create-class-features-plugin" "^7.22.11" @@ -584,7 +584,7 @@ "@babel/plugin-transform-classes@^7.22.15": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.22.15.tgz#aaf4753aee262a232bbc95451b4bdf9599c65a0b" + resolved "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.22.15.tgz" integrity sha512-VbbC3PGjBdE0wAWDdHM9G8Gm977pnYI0XpqMd6LrKISj8/DJXEsWqgRuTYaNE9Bv0JGhTZUzHDlMk18IpOuoqw== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" @@ -599,22 +599,22 @@ "@babel/plugin-transform-computed-properties@^7.22.5": version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.22.5.tgz#cd1e994bf9f316bd1c2dafcd02063ec261bb3869" + resolved "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.22.5.tgz" integrity sha512-4GHWBgRf0krxPX+AaPtgBAlTgTeZmqDynokHOX7aqqAB4tHs3U2Y02zH6ETFdLZGcg9UQSD1WCmkVrE9ErHeOg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/template" "^7.22.5" -"@babel/plugin-transform-destructuring@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.22.15.tgz#e7404ea5bb3387073b9754be654eecb578324694" - integrity sha512-HzG8sFl1ZVGTme74Nw+X01XsUTqERVQ6/RLHo3XjGRzm7XD6QTtfS3NJotVgCGy8BzkDqRjRBD8dAyJn5TuvSQ== +"@babel/plugin-transform-destructuring@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.23.0.tgz" + integrity sha512-vaMdgNXFkYrB+8lbgniSYWHsgqK5gjaMNcc84bMIOMRLH0L9AqYq3hwMdvnyqj1OPqea8UtjPEuS/DCenah1wg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-dotall-regex@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.22.5.tgz#dbb4f0e45766eb544e193fb00e65a1dd3b2a4165" + resolved "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.22.5.tgz" integrity sha512-5/Yk9QxCQCl+sOIB1WelKnVRxTJDSAIxtJLL2/pqL14ZVlbH0fUQUZa/T5/UnQtBNgghR7mfB8ERBKyKPCi7Vw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" @@ -622,14 +622,14 @@ "@babel/plugin-transform-duplicate-keys@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.22.5.tgz#b6e6428d9416f5f0bba19c70d1e6e7e0b88ab285" + resolved 
"https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.22.5.tgz" integrity sha512-dEnYD+9BBgld5VBXHnF/DbYGp3fqGMsyxKbtD1mDyIA7AkTSpKXFhCVuj/oQVOoALfBs77DudA0BE4d5mcpmqw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-dynamic-import@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.22.11.tgz#2c7722d2a5c01839eaf31518c6ff96d408e447aa" + resolved "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.22.11.tgz" integrity sha512-g/21plo58sfteWjaO0ZNVb+uEOkJNjAaHhbejrnBmu011l/eNDScmkbjCC3l4FKb10ViaGU4aOkFznSu2zRHgA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -637,7 +637,7 @@ "@babel/plugin-transform-exponentiation-operator@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.22.5.tgz#402432ad544a1f9a480da865fda26be653e48f6a" + resolved "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.22.5.tgz" integrity sha512-vIpJFNM/FjZ4rh1myqIya9jXwrwwgFRHPjT3DkUA9ZLHuzox8jiXkOLvwm1H+PQIP3CqfC++WPKeuDi0Sjdj1g== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.22.5" @@ -645,7 +645,7 @@ "@babel/plugin-transform-export-namespace-from@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.22.11.tgz#b3c84c8f19880b6c7440108f8929caf6056db26c" + resolved "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.22.11.tgz" integrity sha512-xa7aad7q7OiT8oNZ1mU7NrISjlSkVdMbNxn9IuLZyL9AJEhs1Apba3I+u5riX1dIkdptP5EKDG5XDPByWxtehw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -653,7 +653,7 
@@ "@babel/plugin-transform-flow-strip-types@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.22.5.tgz#0bb17110c7bf5b35a60754b2f00c58302381dee2" + resolved "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.22.5.tgz" integrity sha512-tujNbZdxdG0/54g/oua8ISToaXTFBf8EnSb5PgQSciIXWOWKX3S4+JR7ZE9ol8FZwf9kxitzkGQ+QWeov/mCiA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -661,14 +661,14 @@ "@babel/plugin-transform-for-of@^7.22.15": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.22.15.tgz#f64b4ccc3a4f131a996388fae7680b472b306b29" + resolved "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.22.15.tgz" integrity sha512-me6VGeHsx30+xh9fbDLLPi0J1HzmeIIyenoOQHuw2D4m2SAU3NrspX5XxJLBpqn5yrLzrlw2Iy3RA//Bx27iOA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-function-name@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.22.5.tgz#935189af68b01898e0d6d99658db6b164205c143" + resolved "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.22.5.tgz" integrity sha512-UIzQNMS0p0HHiQm3oelztj+ECwFnj+ZRV4KnguvlsD2of1whUeM6o7wGNj6oLwcDoAXQ8gEqfgC24D+VdIcevg== dependencies: "@babel/helper-compilation-targets" "^7.22.5" @@ -677,7 +677,7 @@ "@babel/plugin-transform-json-strings@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.22.11.tgz#689a34e1eed1928a40954e37f74509f48af67835" + resolved "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.22.11.tgz" integrity 
sha512-CxT5tCqpA9/jXFlme9xIBCc5RPtdDq3JpkkhgHQqtDdiTnTI0jtZ0QzXhr5DILeYifDPp2wvY2ad+7+hLMW5Pw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -685,14 +685,14 @@ "@babel/plugin-transform-literals@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.22.5.tgz#e9341f4b5a167952576e23db8d435849b1dd7920" + resolved "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.22.5.tgz" integrity sha512-fTLj4D79M+mepcw3dgFBTIDYpbcB9Sm0bpm4ppXPaO+U+PKFFyV9MGRvS0gvGw62sd10kT5lRMKXAADb9pWy8g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-logical-assignment-operators@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.22.11.tgz#24c522a61688bde045b7d9bc3c2597a4d948fc9c" + resolved "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.22.11.tgz" integrity sha512-qQwRTP4+6xFCDV5k7gZBF3C31K34ut0tbEcTKxlX/0KXxm9GLcO14p570aWxFvVzx6QAfPgq7gaeIHXJC8LswQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -700,41 +700,41 @@ "@babel/plugin-transform-member-expression-literals@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.22.5.tgz#4fcc9050eded981a468347dd374539ed3e058def" + resolved "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.22.5.tgz" integrity sha512-RZEdkNtzzYCFl9SE9ATaUMTj2hqMb4StarOJLrZRbqqU4HSBE7UlBw9WBWQiDzrJZJdUWiMTVDI6Gv/8DPvfew== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-amd@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.22.5.tgz#4e045f55dcf98afd00f85691a68fc0780704f526" - integrity sha512-R+PTfLTcYEmb1+kK7FNkhQ1gP4KgjpSO6HfH9+f8/yfp2Nt3ggBjiVpRwmwTlfqZLafYKJACy36yDXlEmI9HjQ== +"@babel/plugin-transform-modules-amd@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.23.0.tgz" + integrity sha512-xWT5gefv2HGSm4QHtgc1sYPbseOyf+FFDo2JbpE25GWl5BqTGO9IMwTYJRoIdjsF85GE+VegHxSCUt5EvoYTAw== dependencies: - "@babel/helper-module-transforms" "^7.22.5" + "@babel/helper-module-transforms" "^7.23.0" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-commonjs@^7.13.8", "@babel/plugin-transform-modules-commonjs@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.22.15.tgz#b11810117ed4ee7691b29bd29fd9f3f98276034f" - integrity sha512-jWL4eh90w0HQOTKP2MoXXUpVxilxsB2Vl4ji69rSjS3EcZ/v4sBmn+A3NpepuJzBhOaEBbR7udonlHHn5DWidg== +"@babel/plugin-transform-modules-commonjs@^7.13.8", "@babel/plugin-transform-modules-commonjs@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.23.0.tgz" + integrity sha512-32Xzss14/UVc7k9g775yMIvkVK8xwKE0DPdP5JTapr3+Z9w4tzeOuLNY6BXDQR6BdnzIlXnCGAzsk/ICHBLVWQ== dependencies: - "@babel/helper-module-transforms" "^7.22.15" + "@babel/helper-module-transforms" "^7.23.0" "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-simple-access" "^7.22.5" -"@babel/plugin-transform-modules-systemjs@^7.22.11": - version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.22.11.tgz#3386be5875d316493b517207e8f1931d93154bb1" - integrity 
sha512-rIqHmHoMEOhI3VkVf5jQ15l539KrwhzqcBO6wdCNWPWc/JWt9ILNYNUssbRpeq0qWns8svuw8LnMNCvWBIJ8wA== +"@babel/plugin-transform-modules-systemjs@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.23.0.tgz" + integrity sha512-qBej6ctXZD2f+DhlOC9yO47yEYgUh5CZNz/aBoH4j/3NOlRfJXJbY7xDQCqQVf9KbrqGzIWER1f23doHGrIHFg== dependencies: "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-module-transforms" "^7.22.9" + "@babel/helper-module-transforms" "^7.23.0" "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" "@babel/plugin-transform-modules-umd@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.22.5.tgz#4694ae40a87b1745e3775b6a7fe96400315d4f98" + resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.22.5.tgz" integrity sha512-+S6kzefN/E1vkSsKx8kmQuqeQsvCKCd1fraCM7zXm4SFoggI099Tr4G8U81+5gtMdUeMQ4ipdQffbKLX0/7dBQ== dependencies: "@babel/helper-module-transforms" "^7.22.5" @@ -742,7 +742,7 @@ "@babel/plugin-transform-named-capturing-groups-regex@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz#67fe18ee8ce02d57c855185e27e3dc959b2e991f" + resolved "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz" integrity sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" @@ -750,14 +750,14 @@ "@babel/plugin-transform-new-target@^7.22.5": version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.22.5.tgz#1b248acea54ce44ea06dfd37247ba089fcf9758d" + resolved "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.22.5.tgz" integrity sha512-AsF7K0Fx/cNKVyk3a+DW0JLo+Ua598/NxMRvxDnkpCIGFh43+h/v2xyhRUYf6oD8gE4QtL83C7zZVghMjHd+iw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-nullish-coalescing-operator@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.22.11.tgz#debef6c8ba795f5ac67cd861a81b744c5d38d9fc" + resolved "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.22.11.tgz" integrity sha512-YZWOw4HxXrotb5xsjMJUDlLgcDXSfO9eCmdl1bgW4+/lAGdkjaEvOnQ4p5WKKdUgSzO39dgPl0pTnfxm0OAXcg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -765,7 +765,7 @@ "@babel/plugin-transform-numeric-separator@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.22.11.tgz#498d77dc45a6c6db74bb829c02a01c1d719cbfbd" + resolved "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.22.11.tgz" integrity sha512-3dzU4QGPsILdJbASKhF/V2TVP+gJya1PsueQCxIPCEcerqF21oEcrob4mzjsp2Py/1nLfF5m+xYNMDpmA8vffg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -773,7 +773,7 @@ "@babel/plugin-transform-object-rest-spread@^7.22.15": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.22.15.tgz#21a95db166be59b91cde48775310c0df6e1da56f" + resolved "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.22.15.tgz" integrity 
sha512-fEB+I1+gAmfAyxZcX1+ZUwLeAuuf8VIg67CTznZE0MqVFumWkh8xWtn58I4dxdVf080wn7gzWoF8vndOViJe9Q== dependencies: "@babel/compat-data" "^7.22.9" @@ -784,7 +784,7 @@ "@babel/plugin-transform-object-super@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.22.5.tgz#794a8d2fcb5d0835af722173c1a9d704f44e218c" + resolved "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.22.5.tgz" integrity sha512-klXqyaT9trSjIUrcsYIfETAzmOEZL3cBYqOYLJxBHfMFFggmXOv+NYSX/Jbs9mzMVESw/WycLFPRx8ba/b2Ipw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -792,16 +792,16 @@ "@babel/plugin-transform-optional-catch-binding@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.22.11.tgz#461cc4f578a127bb055527b3e77404cad38c08e0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.22.11.tgz" integrity sha512-rli0WxesXUeCJnMYhzAglEjLWVDF6ahb45HuprcmQuLidBJFWjNnOzssk2kuc6e33FlLaiZhG/kUIzUMWdBKaQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" -"@babel/plugin-transform-optional-chaining@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.22.15.tgz#d7a5996c2f7ca4ad2ad16dbb74444e5c4385b1ba" - integrity sha512-ngQ2tBhq5vvSJw2Q2Z9i7ealNkpDMU0rGWnHPKqRZO0tzZ5tlaoz4hDvhXioOoaE0X2vfNss1djwg0DXlfu30A== +"@babel/plugin-transform-optional-chaining@^7.22.15", "@babel/plugin-transform-optional-chaining@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.23.0.tgz" + integrity 
sha512-sBBGXbLJjxTzLBF5rFWaikMnOGOk/BmK6vVByIdEggZ7Vn6CvWXZyRkkLFK6WE0IF8jSliyOkUN6SScFgzCM0g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" @@ -809,14 +809,14 @@ "@babel/plugin-transform-parameters@^7.20.7", "@babel/plugin-transform-parameters@^7.22.15": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.22.15.tgz#719ca82a01d177af358df64a514d64c2e3edb114" + resolved "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.22.15.tgz" integrity sha512-hjk7qKIqhyzhhUvRT683TYQOFa/4cQKwQy7ALvTpODswN40MljzNDa0YldevS6tGbxwaEKVn502JmY0dP7qEtQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-private-methods@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.22.5.tgz#21c8af791f76674420a147ae62e9935d790f8722" + resolved "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.22.5.tgz" integrity sha512-PPjh4gyrQnGe97JTalgRGMuU4icsZFnWkzicB/fUtzlKUqvsWBKEpPPfr5a2JiyirZkHxnAqkQMO5Z5B2kK3fA== dependencies: "@babel/helper-create-class-features-plugin" "^7.22.5" @@ -824,7 +824,7 @@ "@babel/plugin-transform-private-property-in-object@^7.22.11": version "7.22.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.22.11.tgz#ad45c4fc440e9cb84c718ed0906d96cf40f9a4e1" + resolved "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.22.11.tgz" integrity sha512-sSCbqZDBKHetvjSwpyWzhuHkmW5RummxJBVbYLkGkaiTOWGxml7SXt0iWa03bzxFIx7wOj3g/ILRd0RcJKBeSQ== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" @@ -834,28 +834,28 @@ "@babel/plugin-transform-property-literals@^7.22.5": 
version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.22.5.tgz#b5ddabd73a4f7f26cd0e20f5db48290b88732766" + resolved "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.22.5.tgz" integrity sha512-TiOArgddK3mK/x1Qwf5hay2pxI6wCZnvQqrFSqbtg1GLl2JcNMitVH/YnqjP+M31pLUeTfzY1HAXFDnUBV30rQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-react-display-name@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.22.5.tgz#3c4326f9fce31c7968d6cb9debcaf32d9e279a2b" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.22.5.tgz" integrity sha512-PVk3WPYudRF5z4GKMEYUrLjPl38fJSKNaEOkFuoprioowGuWN6w2RKznuFNSlJx7pzzXXStPUnNSOEO0jL5EVw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-react-jsx-development@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz#e716b6edbef972a92165cd69d92f1255f7e73e87" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz" integrity sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A== dependencies: "@babel/plugin-transform-react-jsx" "^7.22.5" "@babel/plugin-transform-react-jsx@^7.22.15", "@babel/plugin-transform-react-jsx@^7.22.5": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.15.tgz#7e6266d88705d7c49f11c98db8b9464531289cd6" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.15.tgz" integrity 
sha512-oKckg2eZFa8771O/5vi7XeTvmM6+O9cxZu+kanTU7tD4sin5nO/G8jGJhq8Hvt2Z0kUoEDRayuZLaUlYl8QuGA== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" @@ -866,7 +866,7 @@ "@babel/plugin-transform-react-pure-annotations@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.22.5.tgz#1f58363eef6626d6fa517b95ac66fe94685e32c0" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.22.5.tgz" integrity sha512-gP4k85wx09q+brArVinTXhWiyzLl9UpmGva0+mWyKxk6JZequ05x3eUcIUE+FyttPKJFRRVtAvQaJ6YF9h1ZpA== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" @@ -874,7 +874,7 @@ "@babel/plugin-transform-regenerator@^7.22.10": version "7.22.10" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.22.10.tgz#8ceef3bd7375c4db7652878b0241b2be5d0c3cca" + resolved "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.22.10.tgz" integrity sha512-F28b1mDt8KcT5bUyJc/U9nwzw6cV+UmTeRlXYIl2TNqMMJif0Jeey9/RQ3C4NOd2zp0/TRsDns9ttj2L523rsw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -882,33 +882,33 @@ "@babel/plugin-transform-reserved-words@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.22.5.tgz#832cd35b81c287c4bcd09ce03e22199641f964fb" + resolved "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.22.5.tgz" integrity sha512-DTtGKFRQUDm8svigJzZHzb/2xatPc6TzNvAIJ5GqOKDsGFYgAskjRulbR/vGsPKq3OPqtexnz327qYpP57RFyA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-runtime@^7.22.9": - version "7.22.15" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.22.15.tgz#3a625c4c05a39e932d7d34f5d4895cdd0172fdc9" - integrity sha512-tEVLhk8NRZSmwQ0DJtxxhTrCht1HVo8VaMzYT4w6lwyKBuHsgoioAUA7/6eT2fRfc5/23fuGdlwIxXhRVgWr4g== + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.23.2.tgz" + integrity sha512-XOntj6icgzMS58jPVtQpiuF6ZFWxQiJavISGx5KGjRj+3gqZr8+N6Kx+N9BApWzgS+DOjIZfXXj0ZesenOWDyA== dependencies: "@babel/helper-module-imports" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" - babel-plugin-polyfill-corejs2 "^0.4.5" - babel-plugin-polyfill-corejs3 "^0.8.3" - babel-plugin-polyfill-regenerator "^0.5.2" + babel-plugin-polyfill-corejs2 "^0.4.6" + babel-plugin-polyfill-corejs3 "^0.8.5" + babel-plugin-polyfill-regenerator "^0.5.3" semver "^6.3.1" "@babel/plugin-transform-shorthand-properties@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.22.5.tgz#6e277654be82b5559fc4b9f58088507c24f0c624" + resolved "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.22.5.tgz" integrity sha512-vM4fq9IXHscXVKzDv5itkO1X52SmdFBFcMIBZ2FRn2nqVYqw6dBexUgMvAjHW+KXpPPViD/Yo3GrDEBaRC0QYA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-spread@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.22.5.tgz#6487fd29f229c95e284ba6c98d65eafb893fea6b" + resolved "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.22.5.tgz" integrity sha512-5ZzDQIGyvN4w8+dMmpohL6MBo+l2G7tfC/O2Dg7/hjpgeWvUx8FzfeOKxGog9IimPa4YekaQ9PlDqTLOljkcxg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -916,28 +916,28 @@ "@babel/plugin-transform-sticky-regex@^7.22.5": version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.22.5.tgz#295aba1595bfc8197abd02eae5fc288c0deb26aa" + resolved "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.22.5.tgz" integrity sha512-zf7LuNpHG0iEeiyCNwX4j3gDg1jgt1k3ZdXBKbZSoA3BbGQGvMiSvfbZRR3Dr3aeJe3ooWFZxOOG3IRStYp2Bw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-template-literals@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.22.5.tgz#8f38cf291e5f7a8e60e9f733193f0bcc10909bff" + resolved "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.22.5.tgz" integrity sha512-5ciOehRNf+EyUeewo8NkbQiUs4d6ZxiHo6BcBcnFlgiJfu16q0bQUw9Jvo0b0gBKFG1SMhDSjeKXSYuJLeFSMA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-typeof-symbol@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.22.5.tgz#5e2ba478da4b603af8673ff7c54f75a97b716b34" + resolved "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.22.5.tgz" integrity sha512-bYkI5lMzL4kPii4HHEEChkD0rkc+nvnlR6+o/qdqR6zrm0Sv/nodmyLhlq2DO0YKLUNd2VePmPRjJXSBh9OIdA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-typescript@^7.22.15": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.22.15.tgz#15adef906451d86349eb4b8764865c960eb54127" + resolved "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.22.15.tgz" integrity sha512-1uirS0TnijxvQLnlv5wQBwOX3E1wCFX7ITv+9pBV2wKEk4K+M5tqDaoNXnTH8tjEIYHLO98MwiTWO04Ggz4XuA== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" @@ -947,14 +947,14 
@@ "@babel/plugin-transform-unicode-escapes@^7.22.10": version "7.22.10" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.22.10.tgz#c723f380f40a2b2f57a62df24c9005834c8616d9" + resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.22.10.tgz" integrity sha512-lRfaRKGZCBqDlRU3UIFovdp9c9mEvlylmpod0/OatICsSfuQ9YFthRo1tpTkGsklEefZdqlEFdY4A2dwTb6ohg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-unicode-property-regex@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.22.5.tgz#098898f74d5c1e86660dc112057b2d11227f1c81" + resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.22.5.tgz" integrity sha512-HCCIb+CbJIAE6sXn5CjFQXMwkCClcOfPCzTlilJ8cUatfzwHlWQkbtV0zD338u9dZskwvuOYTuuaMaA8J5EI5A== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" @@ -962,7 +962,7 @@ "@babel/plugin-transform-unicode-regex@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.22.5.tgz#ce7e7bb3ef208c4ff67e02a22816656256d7a183" + resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.22.5.tgz" integrity sha512-028laaOKptN5vHJf9/Arr/HiJekMd41hOEZYvNsrsXqJ7YPYuX2bQxh31fkZzGmq3YqHRJzYFFAVYvKfMPKqyg== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" @@ -970,18 +970,18 @@ "@babel/plugin-transform-unicode-sets-regex@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.22.5.tgz#77788060e511b708ffc7d42fdfbc5b37c3004e91" + resolved 
"https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.22.5.tgz" integrity sha512-lhMfi4FC15j13eKrh3DnYHjpGj6UKQHtNKTbtc1igvAhRy4+kLhV07OpLcsN0VgDEw/MjAvJO4BdMJsHwMhzCg== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" "@babel/preset-env@^7.20.2", "@babel/preset-env@^7.22.9": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.22.15.tgz#142716f8e00bc030dae5b2ac6a46fbd8b3e18ff8" - integrity sha512-tZFHr54GBkHk6hQuVA8w4Fmq+MSPsfvMG0vPnOYyTnJpyfMqybL8/MbNCPRT9zc2KBO2pe4tq15g6Uno4Jpoag== + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.23.2.tgz" + integrity sha512-BW3gsuDD+rvHL2VO2SjAUNTBe5YrjsTiDyqamPDWY723na3/yPQ65X5oQkFVJZ0o50/2d+svm1rkPoJeR1KxVQ== dependencies: - "@babel/compat-data" "^7.22.9" + "@babel/compat-data" "^7.23.2" "@babel/helper-compilation-targets" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-validator-option" "^7.22.15" @@ -1007,15 +1007,15 @@ "@babel/plugin-syntax-top-level-await" "^7.14.5" "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" "@babel/plugin-transform-arrow-functions" "^7.22.5" - "@babel/plugin-transform-async-generator-functions" "^7.22.15" + "@babel/plugin-transform-async-generator-functions" "^7.23.2" "@babel/plugin-transform-async-to-generator" "^7.22.5" "@babel/plugin-transform-block-scoped-functions" "^7.22.5" - "@babel/plugin-transform-block-scoping" "^7.22.15" + "@babel/plugin-transform-block-scoping" "^7.23.0" "@babel/plugin-transform-class-properties" "^7.22.5" "@babel/plugin-transform-class-static-block" "^7.22.11" "@babel/plugin-transform-classes" "^7.22.15" "@babel/plugin-transform-computed-properties" "^7.22.5" - "@babel/plugin-transform-destructuring" "^7.22.15" + "@babel/plugin-transform-destructuring" "^7.23.0" "@babel/plugin-transform-dotall-regex" "^7.22.5" "@babel/plugin-transform-duplicate-keys" 
"^7.22.5" "@babel/plugin-transform-dynamic-import" "^7.22.11" @@ -1027,9 +1027,9 @@ "@babel/plugin-transform-literals" "^7.22.5" "@babel/plugin-transform-logical-assignment-operators" "^7.22.11" "@babel/plugin-transform-member-expression-literals" "^7.22.5" - "@babel/plugin-transform-modules-amd" "^7.22.5" - "@babel/plugin-transform-modules-commonjs" "^7.22.15" - "@babel/plugin-transform-modules-systemjs" "^7.22.11" + "@babel/plugin-transform-modules-amd" "^7.23.0" + "@babel/plugin-transform-modules-commonjs" "^7.23.0" + "@babel/plugin-transform-modules-systemjs" "^7.23.0" "@babel/plugin-transform-modules-umd" "^7.22.5" "@babel/plugin-transform-named-capturing-groups-regex" "^7.22.5" "@babel/plugin-transform-new-target" "^7.22.5" @@ -1038,7 +1038,7 @@ "@babel/plugin-transform-object-rest-spread" "^7.22.15" "@babel/plugin-transform-object-super" "^7.22.5" "@babel/plugin-transform-optional-catch-binding" "^7.22.11" - "@babel/plugin-transform-optional-chaining" "^7.22.15" + "@babel/plugin-transform-optional-chaining" "^7.23.0" "@babel/plugin-transform-parameters" "^7.22.15" "@babel/plugin-transform-private-methods" "^7.22.5" "@babel/plugin-transform-private-property-in-object" "^7.22.11" @@ -1055,16 +1055,16 @@ "@babel/plugin-transform-unicode-regex" "^7.22.5" "@babel/plugin-transform-unicode-sets-regex" "^7.22.5" "@babel/preset-modules" "0.1.6-no-external-plugins" - "@babel/types" "^7.22.15" - babel-plugin-polyfill-corejs2 "^0.4.5" - babel-plugin-polyfill-corejs3 "^0.8.3" - babel-plugin-polyfill-regenerator "^0.5.2" + "@babel/types" "^7.23.0" + babel-plugin-polyfill-corejs2 "^0.4.6" + babel-plugin-polyfill-corejs3 "^0.8.5" + babel-plugin-polyfill-regenerator "^0.5.3" core-js-compat "^3.31.0" semver "^6.3.1" "@babel/preset-flow@^7.13.13", "@babel/preset-flow@^7.22.5": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/preset-flow/-/preset-flow-7.22.15.tgz#30318deb9b3ebd9f5738e96da03a531e0cd3165d" + resolved 
"https://registry.npmjs.org/@babel/preset-flow/-/preset-flow-7.22.15.tgz" integrity sha512-dB5aIMqpkgbTfN5vDdTRPzjqtWiZcRESNR88QYnoPR+bmdYoluOzMX9tQerTv0XzSgZYctPfO1oc0N5zdog1ew== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -1073,7 +1073,7 @@ "@babel/preset-modules@0.1.6-no-external-plugins": version "0.1.6-no-external-plugins" - resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz#ccb88a2c49c817236861fee7826080573b8a923a" + resolved "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz" integrity sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" @@ -1082,7 +1082,7 @@ "@babel/preset-react@^7.18.6", "@babel/preset-react@^7.22.5": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.22.15.tgz#9a776892b648e13cc8ca2edf5ed1264eea6b6afc" + resolved "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.22.15.tgz" integrity sha512-Csy1IJ2uEh/PecCBXXoZGAZBeCATTuePzCSB7dLYWS0vOEj6CNpjxIhW4duWwZodBNueH7QO14WbGn8YyeuN9w== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -1093,19 +1093,19 @@ "@babel/plugin-transform-react-pure-annotations" "^7.22.5" "@babel/preset-typescript@^7.13.0", "@babel/preset-typescript@^7.21.0", "@babel/preset-typescript@^7.22.5": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.22.15.tgz#43db30516fae1d417d748105a0bc95f637239d48" - integrity sha512-HblhNmh6yM+cU4VwbBRpxFhxsTdfS1zsvH9W+gEjD0ARV9+8B4sNfpI6GuhePti84nuvhiwKS539jKPFHskA9A== + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.23.2.tgz" + integrity sha512-u4UJc1XsS1GhIGteM8rnGiIvf9rJpiVgMEeCnwlLA7WJPC+jcXWJAGxYmeqs5hOZD8BbAfnV5ezBOxQbb4OUxA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" 
"@babel/helper-validator-option" "^7.22.15" "@babel/plugin-syntax-jsx" "^7.22.5" - "@babel/plugin-transform-modules-commonjs" "^7.22.15" + "@babel/plugin-transform-modules-commonjs" "^7.23.0" "@babel/plugin-transform-typescript" "^7.22.15" "@babel/register@^7.13.16": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/register/-/register-7.22.15.tgz#c2c294a361d59f5fa7bcc8b97ef7319c32ecaec7" + resolved "https://registry.npmjs.org/@babel/register/-/register-7.22.15.tgz" integrity sha512-V3Q3EqoQdn65RCgTLwauZaTfd1ShhwPmbBv+1dkZV/HpCGMKVyn6oFcRlI7RaKqiDQjX2Qd3AuoEguBgdjIKlg== dependencies: clone-deep "^4.0.1" @@ -1116,90 +1116,90 @@ "@babel/regjsgen@^0.8.0": version "0.8.0" - resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" + resolved "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz" integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== "@babel/runtime@^7.1.2", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.17.8", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.6", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.15.tgz#38f46494ccf6cf020bd4eed7124b425e83e523b8" - integrity sha512-T0O+aa+4w0u06iNmapipJXMV4HoUir03hpx3/YqXXhu9xim3w+dVphjFWl1OH8NbZHw5Lbm9k45drDkgq2VNNA== + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.2.tgz" + integrity sha512-mM8eg4yl5D6i3lu2QKPuPH4FArvJ8KhTofbE7jwMUv9KX5mBvwPAqnV3MlyBNqdp9RyRKP6Yck8TrfYrPvX3bg== dependencies: regenerator-runtime "^0.14.0" "@babel/template@^7.20.7", "@babel/template@^7.22.15", "@babel/template@^7.22.5": version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + 
resolved "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz" integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== dependencies: "@babel/code-frame" "^7.22.13" "@babel/parser" "^7.22.15" "@babel/types" "^7.22.15" -"@babel/traverse@^7.1.6", "@babel/traverse@^7.21.2", "@babel/traverse@^7.22.15", "@babel/traverse@^7.22.8": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.15.tgz#75be4d2d6e216e880e93017f4e2389aeb77ef2d9" - integrity sha512-DdHPwvJY0sEeN4xJU5uRLmZjgMMDIvMPniLuYzUVXj/GGzysPl0/fwt44JBkyUIzGJPV8QgHMcQdQ34XFuKTYQ== +"@babel/traverse@^7.1.6", "@babel/traverse@^7.21.2", "@babel/traverse@^7.22.8", "@babel/traverse@^7.23.2": + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz" + integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw== dependencies: "@babel/code-frame" "^7.22.13" - "@babel/generator" "^7.22.15" - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-function-name" "^7.22.5" + "@babel/generator" "^7.23.0" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" "@babel/helper-hoist-variables" "^7.22.5" "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/parser" "^7.22.15" - "@babel/types" "^7.22.15" + "@babel/parser" "^7.23.0" + "@babel/types" "^7.23.0" debug "^4.1.0" globals "^11.1.0" -"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.20.7", "@babel/types@^7.21.5", "@babel/types@^7.22.10", "@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.4.4": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.15.tgz#266cb21d2c5fd0b3931e7a91b6dd72d2f617d282" - integrity sha512-X+NLXr0N8XXmN5ZsaQdm9U2SSC3UbIYq/doL++sueHOTisgZHoKaQtZxGuV2cUPQHMfjKEfg/g6oy7Hm6SKFtA== +"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.20.7", 
"@babel/types@^7.21.5", "@babel/types@^7.22.15", "@babel/types@^7.22.19", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.4.4": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz" + integrity sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg== dependencies: "@babel/helper-string-parser" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.15" + "@babel/helper-validator-identifier" "^7.22.20" to-fast-properties "^2.0.0" "@base2/pretty-print-object@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@base2/pretty-print-object/-/pretty-print-object-1.0.1.tgz#371ba8be66d556812dc7fb169ebc3c08378f69d4" + resolved "https://registry.npmjs.org/@base2/pretty-print-object/-/pretty-print-object-1.0.1.tgz" integrity sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA== "@bcoe/v8-coverage@^0.2.3": version "0.2.3" - resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + resolved "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== "@colors/colors@1.5.0": version "1.5.0" - resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" + resolved "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz" integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== "@discoveryjs/json-ext@^0.5.3": version "0.5.7" - resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70" + resolved "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz" integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== 
"@emotion/is-prop-valid@^1.2.1": version "1.2.1" - resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.2.1.tgz#23116cf1ed18bfeac910ec6436561ecb1a3885cc" + resolved "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.2.1.tgz" integrity sha512-61Mf7Ufx4aDxx1xlDeOm8aFFigGHE4z+0sKCa+IHCeZKiyP9RLD0Mmx7m8b9/Cf37f7NAvQOOJAbQQGVr5uERw== dependencies: "@emotion/memoize" "^0.8.1" "@emotion/memoize@^0.8.1": version "0.8.1" - resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.8.1.tgz#c1ddb040429c6d21d38cc945fe75c818cfb68e17" + resolved "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.1.tgz" integrity sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA== "@emotion/unitless@^0.8.0": version "0.8.1" - resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.8.1.tgz#182b5a4704ef8ad91bde93f7a860a88fd92c79a3" + resolved "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.8.1.tgz" integrity sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ== "@emotion/use-insertion-effect-with-fallbacks@^1.0.0": version "1.0.1" - resolved "https://registry.yarnpkg.com/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz#08de79f54eb3406f9daaf77c76e35313da963963" + resolved "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz" integrity sha512-jT/qyKZ9rzLErtrjGgdkMBn2OP8wl0G3sQlBb3YPryvKHsjvINUhVaPFfP+fpBcOkmrVOVEEHQFJ7nbj2TH2gw== "@esbuild/android-arm64@0.18.20": @@ -1219,7 +1219,7 @@ "@esbuild/darwin-arm64@0.18.20": version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz#08172cbeccf95fbc383399a7f39cfbddaeb0d7c1" + resolved "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz" integrity 
sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA== "@esbuild/darwin-x64@0.18.20": @@ -1314,19 +1314,19 @@ "@eslint-community/eslint-utils@^4.2.0": version "4.4.0" - resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" + resolved "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz" integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== dependencies: eslint-visitor-keys "^3.3.0" "@eslint-community/regexpp@^4.6.1": - version "4.8.0" - resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.8.0.tgz#11195513186f68d42fbf449f9a7136b2c0c92005" - integrity sha512-JylOEEzDiOryeUnFbQz+oViCXS0KsvR1mvHkoMiu5+UiBvy+RYX7tzlIIIEstF/gVa2tj9AQXk3dgnxv6KxhFg== + version "4.9.1" + resolved "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.9.1.tgz" + integrity sha512-Y27x+MBLjXa+0JWDhykM3+JE+il3kHKAEqabfEWq3SDhZjLYb6/BHL/JKFnH3fe207JaXkyDo685Oc2Glt6ifA== "@eslint/eslintrc@^2.1.2": version "2.1.2" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.2.tgz#c6936b4b328c64496692f76944e755738be62396" + resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz" integrity sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g== dependencies: ajv "^6.12.4" @@ -1339,71 +1339,71 @@ minimatch "^3.1.2" strip-json-comments "^3.1.1" -"@eslint/js@8.48.0": - version "8.48.0" - resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.48.0.tgz#642633964e217905436033a2bd08bf322849b7fb" - integrity sha512-ZSjtmelB7IJfWD2Fvb7+Z+ChTIKWq6kjda95fLcQKNS5aheVHn4IkfgRQE3sIIzTcSLwLcLZUD9UBt+V7+h+Pw== +"@eslint/js@8.51.0": + version "8.51.0" + resolved "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz" + integrity 
sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg== "@fal-works/esbuild-plugin-global-externals@^2.1.2": version "2.1.2" - resolved "https://registry.yarnpkg.com/@fal-works/esbuild-plugin-global-externals/-/esbuild-plugin-global-externals-2.1.2.tgz#c05ed35ad82df8e6ac616c68b92c2282bd083ba4" + resolved "https://registry.npmjs.org/@fal-works/esbuild-plugin-global-externals/-/esbuild-plugin-global-externals-2.1.2.tgz" integrity sha512-cEee/Z+I12mZcFJshKcCqC8tuX5hG3s+d+9nZ3LabqKF1vKdF41B92pJVCBggjAGORAeOzyyDDKrZwIkLffeOQ== -"@floating-ui/core@^1.4.1": - version "1.4.1" - resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.4.1.tgz#0d633f4b76052668afb932492ac452f7ebe97f17" - integrity sha512-jk3WqquEJRlcyu7997NtR5PibI+y5bi+LS3hPmguVClypenMsCY3CBa3LAQnozRCtCrYWSEtAdiskpamuJRFOQ== +"@floating-ui/core@^1.4.2": + version "1.5.0" + resolved "https://registry.npmjs.org/@floating-ui/core/-/core-1.5.0.tgz" + integrity sha512-kK1h4m36DQ0UHGj5Ah4db7R0rHemTqqO0QLvUqi1/mUUp3LuAWbWxdxSIf/XsnH9VS6rRVPLJCncjRzUvyCLXg== dependencies: - "@floating-ui/utils" "^0.1.1" + "@floating-ui/utils" "^0.1.3" "@floating-ui/dom@^1.2.1", "@floating-ui/dom@^1.5.1": - version "1.5.1" - resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.5.1.tgz#88b70defd002fe851f17b4a25efb2d3c04d7a8d7" - integrity sha512-KwvVcPSXg6mQygvA1TjbN/gh///36kKtllIF8SUm0qpFj8+rvYrpvlYdL1JoA71SHpDqgSSdGOSoQ0Mp3uY5aw== + version "1.5.3" + resolved "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.5.3.tgz" + integrity sha512-ClAbQnEqJAKCJOEbbLo5IUlZHkNszqhuxS4fHAVxRPXPya6Ysf2G8KypnYcOTpx6I8xcgF9bbHb6g/2KpbV8qA== dependencies: - "@floating-ui/core" "^1.4.1" - "@floating-ui/utils" "^0.1.1" + "@floating-ui/core" "^1.4.2" + "@floating-ui/utils" "^0.1.3" "@floating-ui/react-dom@^1.3.0": version "1.3.0" - resolved "https://registry.yarnpkg.com/@floating-ui/react-dom/-/react-dom-1.3.0.tgz#4d35d416eb19811c2b0e9271100a6aa18c1579b3" + resolved 
"https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-1.3.0.tgz" integrity sha512-htwHm67Ji5E/pROEAr7f8IKFShuiCKHwUC/UY4vC3I5jiSvGFAYnSYiZO5MlGmads+QqvUkR9ANHEguGrDv72g== dependencies: "@floating-ui/dom" "^1.2.1" "@floating-ui/react-dom@^2.0.0": version "2.0.2" - resolved "https://registry.yarnpkg.com/@floating-ui/react-dom/-/react-dom-2.0.2.tgz#fab244d64db08e6bed7be4b5fcce65315ef44d20" + resolved "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.2.tgz" integrity sha512-5qhlDvjaLmAst/rKb3VdlCinwTF4EYMiVxuuc/HVUjs46W0zgtbMmAZ1UTsDrRTxRmUEzl92mOtWbeeXL26lSQ== dependencies: "@floating-ui/dom" "^1.5.1" "@floating-ui/react@^0.19.1": version "0.19.2" - resolved "https://registry.yarnpkg.com/@floating-ui/react/-/react-0.19.2.tgz#c6e4d2097ed0dca665a7c042ddf9cdecc95e9412" + resolved "https://registry.npmjs.org/@floating-ui/react/-/react-0.19.2.tgz" integrity sha512-JyNk4A0Ezirq8FlXECvRtQOX/iBe5Ize0W/pLkrZjfHW9GUV7Xnq6zm6fyZuQzaHHqEnVizmvlA96e1/CkZv+w== dependencies: "@floating-ui/react-dom" "^1.3.0" aria-hidden "^1.1.3" tabbable "^6.0.1" -"@floating-ui/utils@^0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.1.1.tgz#1a5b1959a528e374e8037c4396c3e825d6cf4a83" - integrity sha512-m0G6wlnhm/AX0H12IOWtK8gASEMffnX08RtKkCgTdHb9JpHKGloI7icFfLg9ZmQeavcvR0PKmzxClyuFPSjKWw== +"@floating-ui/utils@^0.1.3": + version "0.1.6" + resolved "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.1.6.tgz" + integrity sha512-OfX7E2oUDYxtBvsuS4e/jSn4Q9Qb6DzgeYtsAdkPZ47znpoNsMgZw0+tVijiv3uGNR6dgNlty6r9rzIzHjtd/A== "@grpc/grpc-js@^1.9.2": - version "1.9.2" - resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.9.2.tgz#151148f6485eab8fb13fe53042d25f4ffa9c7d09" - integrity sha512-Lf2pUhNTaviEdEaGgjU+29qw3arX7Qd/45q66F3z1EV5hroE6wM9xSHPvjB8EY+b1RmKZgwnLWXQorC6fZ9g5g== + version "1.9.5" + resolved "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.9.5.tgz" + integrity 
sha512-iouYNlPxRAwZ2XboDT+OfRKHuaKHiqjB5VFYZ0NFrHkbEF+AV3muIUY9olQsp8uxU4VvRCMiRk9ftzFDGb61aw== dependencies: "@grpc/proto-loader" "^0.7.8" "@types/node" ">=12.12.47" "@grpc/proto-loader@^0.7.8": - version "0.7.9" - resolved "https://registry.yarnpkg.com/@grpc/proto-loader/-/proto-loader-0.7.9.tgz#3ca68236f1a0d77566dafa53c715eb31d096279a" - integrity sha512-YJsOehVXzgurc+lLAxYnlSMc1p/Gu6VAvnfx0ATi2nzvr0YZcjhmZDeY8SeAKv1M7zE3aEJH0Xo9mK1iZ8GYoQ== + version "0.7.10" + resolved "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.10.tgz" + integrity sha512-CAqDfoaQ8ykFd9zqBDn4k6iWT9loLAlc2ETmDFS9JCD70gDcnA4L3AFEo2iV7KyAtAAHFW9ftq1Fz+Vsgq80RQ== dependencies: lodash.camelcase "^4.3.0" long "^5.0.0" @@ -1412,19 +1412,19 @@ "@headlessui/react@^1.7.14": version "1.7.17" - resolved "https://registry.yarnpkg.com/@headlessui/react/-/react-1.7.17.tgz#a0ec23af21b527c030967245fd99776aa7352bc6" + resolved "https://registry.npmjs.org/@headlessui/react/-/react-1.7.17.tgz" integrity sha512-4am+tzvkqDSSgiwrsEpGWqgGo9dz8qU5M3znCkC4PgkpY4HcCZzEDEvozltGGGHIKl9jbXbZPSH5TWn4sWJdow== dependencies: client-only "^0.0.1" "@headlessui/tailwindcss@^0.1.3": version "0.1.3" - resolved "https://registry.yarnpkg.com/@headlessui/tailwindcss/-/tailwindcss-0.1.3.tgz#a9b8b4c2677a7ef37889708d4401c7871b2e6105" + resolved "https://registry.npmjs.org/@headlessui/tailwindcss/-/tailwindcss-0.1.3.tgz" integrity sha512-3aMdDyYZx9A15euRehpppSyQnb2gIw2s/Uccn2ELIoLQ9oDy0+9oRygNWNjXCD5Dt+w1pxo7C+XoiYvGcqA4Kg== -"@humanwhocodes/config-array@^0.11.10": +"@humanwhocodes/config-array@^0.11.11": version "0.11.11" - resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.11.tgz#88a04c570dbbc7dd943e4712429c3df09bc32844" + resolved "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz" integrity sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA== dependencies: "@humanwhocodes/object-schema" "^1.2.1" @@ 
-1433,17 +1433,17 @@ "@humanwhocodes/module-importer@^1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + resolved "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz" integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== "@humanwhocodes/object-schema@^1.2.1": version "1.2.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + resolved "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz" integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== "@isaacs/cliui@^8.0.2": version "8.0.2" - resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + resolved "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz" integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== dependencies: string-width "^5.1.2" @@ -1455,7 +1455,7 @@ "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + resolved "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz" integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== dependencies: camelcase "^5.3.1" @@ -1466,20 +1466,20 @@ "@istanbuljs/schema@^0.1.2", "@istanbuljs/schema@^0.1.3": version "0.1.3" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + resolved "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz" integrity 
sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== "@jest/schemas@^29.6.3": version "29.6.3" - resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03" + resolved "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz" integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA== dependencies: "@sinclair/typebox" "^0.27.8" "@jest/transform@^29.3.1": - version "29.6.4" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-29.6.4.tgz#a6bc799ef597c5d85b2e65a11fd96b6b239bab5a" - integrity sha512-8thgRSiXUqtr/pPGY/OsyHuMjGyhVnWrFAwoxmIemlBuiMyU1WFs0tXoNxzcr4A4uErs/ABre76SGmrr5ab/AA== + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz" + integrity sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw== dependencies: "@babel/core" "^7.11.6" "@jest/types" "^29.6.3" @@ -1489,9 +1489,9 @@ convert-source-map "^2.0.0" fast-json-stable-stringify "^2.1.0" graceful-fs "^4.2.9" - jest-haste-map "^29.6.4" + jest-haste-map "^29.7.0" jest-regex-util "^29.6.3" - jest-util "^29.6.3" + jest-util "^29.7.0" micromatch "^4.0.4" pirates "^4.0.4" slash "^3.0.0" @@ -1499,7 +1499,7 @@ "@jest/types@^27.5.1": version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" + resolved "https://registry.npmjs.org/@jest/types/-/types-27.5.1.tgz" integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== dependencies: "@types/istanbul-lib-coverage" "^2.0.0" @@ -1510,7 +1510,7 @@ "@jest/types@^29.6.3": version "29.6.3" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59" + resolved "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz" integrity 
sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw== dependencies: "@jest/schemas" "^29.6.3" @@ -1522,7 +1522,7 @@ "@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz" integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== dependencies: "@jridgewell/set-array" "^1.0.1" @@ -1531,17 +1531,17 @@ "@jridgewell/resolve-uri@^3.1.0": version "3.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" + resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz" integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== "@jridgewell/set-array@^1.0.1": version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== "@jridgewell/source-map@^0.3.3": version "0.3.5" - resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.5.tgz#a3bb4d5c6825aab0d281268f47f6ad5853431e91" + resolved "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.5.tgz" integrity sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ== dependencies: "@jridgewell/gen-mapping" "^0.3.0" @@ -1549,12 +1549,12 @@ "@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": version "1.4.15" - resolved 
"https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz" integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== "@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.9": version "0.3.19" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz#f8a3249862f91be48d3127c3cfe992f79b4b8811" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz" integrity sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw== dependencies: "@jridgewell/resolve-uri" "^3.1.0" @@ -1562,91 +1562,105 @@ "@juggle/resize-observer@^3.3.1": version "3.4.0" - resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60" + resolved "https://registry.npmjs.org/@juggle/resize-observer/-/resize-observer-3.4.0.tgz" integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA== "@mdx-js/react@^2.1.5": version "2.3.0" - resolved "https://registry.yarnpkg.com/@mdx-js/react/-/react-2.3.0.tgz#4208bd6d70f0d0831def28ef28c26149b03180b3" + resolved "https://registry.npmjs.org/@mdx-js/react/-/react-2.3.0.tgz" integrity sha512-zQH//gdOmuu7nt2oJR29vFhDv88oGPmVw6BggmrHeMI+xgEkp1B2dX9/bMBSYtK0dyLX/aOmesKS09g222K1/g== dependencies: "@types/mdx" "^2.0.0" "@types/react" ">=16" +"@monaco-editor/loader@^1.4.0": + version "1.4.0" + resolved "https://registry.yarnpkg.com/@monaco-editor/loader/-/loader-1.4.0.tgz#f08227057331ec890fa1e903912a5b711a2ad558" + integrity sha512-00ioBig0x642hytVspPl7DbQyaSWRaolYie/UFNjoTdvoKPzo6xrXLhTk9ixgIKcLH5b5vDOjVNiGyY+uDCUlg== + dependencies: + state-local 
"^1.0.6" + +"@monaco-editor/react@^4.6.0": + version "4.6.0" + resolved "https://registry.yarnpkg.com/@monaco-editor/react/-/react-4.6.0.tgz#bcc68671e358a21c3814566b865a54b191e24119" + integrity sha512-RFkU9/i7cN2bsq/iTkurMWOEErmYcY6JiQI3Jn+WeR/FGISH8JbHERjpS9oRuSOPvDMJI0Z8nJeKkbOs9sBYQw== + dependencies: + "@monaco-editor/loader" "^1.4.0" + "@ndelangen/get-tarball@^3.0.7": version "3.0.9" - resolved "https://registry.yarnpkg.com/@ndelangen/get-tarball/-/get-tarball-3.0.9.tgz#727ff4454e65f34707e742a59e5e6b1f525d8964" + resolved "https://registry.npmjs.org/@ndelangen/get-tarball/-/get-tarball-3.0.9.tgz" integrity sha512-9JKTEik4vq+yGosHYhZ1tiH/3WpUS0Nh0kej4Agndhox8pAdWhEx5knFVRcb/ya9knCRCs1rPxNrSXTDdfVqpA== dependencies: gunzip-maybe "^1.4.2" pump "^3.0.0" tar-fs "^2.1.1" -"@next/env@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/env/-/env-13.4.16.tgz#382b565b35a2a69bd0e6b50f74c7b95f0c4b1097" - integrity sha512-pCU0sJBqdfKP9mwDadxvZd+eLz3fZrTlmmDHY12Hdpl3DD0vy8ou5HWKVfG0zZS6tqhL4wnQqRbspdY5nqa7MA== +"@next/env@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/env/-/env-13.5.5.tgz#c26fb9784fe4eae1279c0f2906d925c2297816e9" + integrity sha512-agvIhYWp+ilbScg81s/sLueZo8CNEYLjNOqhISxheLmD/AQI4/VxV7bV76i/KzxH4iHy/va0YS9z0AOwGnw4Fg== "@next/eslint-plugin-next@13.4.19": version "13.4.19" - resolved "https://registry.yarnpkg.com/@next/eslint-plugin-next/-/eslint-plugin-next-13.4.19.tgz#93d130c37b47fd120f6d111aee36a60611148df1" + resolved "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-13.4.19.tgz" integrity sha512-N/O+zGb6wZQdwu6atMZHbR7T9Np5SUFUjZqCbj0sXm+MwQO35M8TazVB4otm87GkXYs2l6OPwARd3/PUWhZBVQ== dependencies: glob "7.1.7" -"@next/swc-darwin-arm64@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-13.4.16.tgz#ed6a342f95e5f21213fdadbceb65b40ae678cee0" - integrity 
sha512-Rl6i1uUq0ciRa3VfEpw6GnWAJTSKo9oM2OrkGXPsm7rMxdd2FR5NkKc0C9xzFCI4+QtmBviWBdF2m3ur3Nqstw== - -"@next/swc-darwin-x64@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-13.4.16.tgz#36c16066a1a3ef8211e84a6a5d72bef15826b291" - integrity sha512-o1vIKYbZORyDmTrPV1hApt9NLyWrS5vr2p5hhLGpOnkBY1cz6DAXjv8Lgan8t6X87+83F0EUDlu7klN8ieZ06A== - -"@next/swc-linux-arm64-gnu@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.4.16.tgz#a5b5500737f07e3aa7f184014d8df7973420df26" - integrity sha512-JRyAl8lCfyTng4zoOmE6hNI2f1MFUr7JyTYCHl1RxX42H4a5LMwJhDVQ7a9tmDZ/yj+0hpBn+Aan+d6lA3v0UQ== - -"@next/swc-linux-arm64-musl@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.4.16.tgz#381b7662c5b10ed5750dce41dd57841aa0713e77" - integrity sha512-9gqVqNzUMWbUDgDiND18xoUqhwSm2gmksqXgCU0qaOKt6oAjWz8cWYjgpPVD0WICKFylEY/gvPEP1fMZDVFZ/g== - -"@next/swc-linux-x64-gnu@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.4.16.tgz#6e0b0eab1c316506950aeb4a09a5ea5c38edabe7" - integrity sha512-KcQGwchAKmZVPa8i5PLTxvTs1/rcFnSltfpTm803Tr/BtBV3AxCkHLfhtoyVtVzx/kl/oue8oS+DSmbepQKwhw== - -"@next/swc-linux-x64-musl@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.4.16.tgz#36b84e4509168a5cadf9dfd728c239002d4311fe" - integrity sha512-2RbMZNxYnJmW8EPHVBsGZPq5zqWAyBOc/YFxq/jIQ/Yn3RMFZ1dZVCjtIcsiaKmgh7mjA/W0ApbumutHNxRqqQ== - -"@next/swc-win32-arm64-msvc@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.4.16.tgz#52d36f909ccdefa2761617b6d4e9ae65f99880a9" - integrity sha512-thDcGonELN7edUKzjzlHrdoKkm7y8IAdItQpRvvMxNUXa4d9r0ElofhTZj5emR7AiXft17hpen+QAkcWpqG7Jg== - -"@next/swc-win32-ia32-msvc@13.4.16": - version 
"13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.4.16.tgz#a9cb0556d19c33fbb39ac9bef195fd490d6c7673" - integrity sha512-f7SE1Mo4JAchUWl0LQsbtySR9xCa+x55C0taetjUApKtcLR3AgAjASrrP+oE1inmLmw573qRnE1eZN8YJfEBQw== - -"@next/swc-win32-x64-msvc@13.4.16": - version "13.4.16" - resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.4.16.tgz#79a151d94583e03992c80df3d3e7f7686390ddac" - integrity sha512-WamDZm1M/OEM4QLce3lOmD1XdLEl37zYZwlmOLhmF7qYJ2G6oYm9+ejZVv+LakQIsIuXhSpVlOvrxIAHqwRkPQ== +"@next/swc-darwin-arm64@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-13.5.5.tgz#0ab604e2ae39d9ca5d8d7190f6eedc2f63ef89a1" + integrity sha512-FvTdcJdTA7H1FGY8dKPPbf/O0oDC041/znHZwXA7liiGUhgw5hOQ+9z8tWvuz0M5a/SDjY/IRPBAb5FIFogYww== + +"@next/swc-darwin-x64@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-13.5.5.tgz#400fe2a2845998c76c0547b6605877d7ba65aa67" + integrity sha512-mTqNIecaojmyia7appVO2QggBe1Z2fdzxgn6jb3x9qlAk8yY2sy4MAcsj71kC9RlenCqDmr9vtC/ESFf110TPA== + +"@next/swc-linux-arm64-gnu@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.5.5.tgz#b21291e2a5b691ac426331ce6cb0b1c4e8c3e1c7" + integrity sha512-U9e+kNkfvwh/T8yo+xcslvNXgyMzPPX1IbwCwnHHFmX5ckb1Uc3XZSInNjFQEQR5xhJpB5sFdal+IiBIiLYkZA== + +"@next/swc-linux-arm64-musl@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.5.5.tgz#2dc33466edbe8db37f08aefd2ae472db437ac3ca" + integrity sha512-h7b58eIoNCSmKVC5fr167U0HWZ/yGLbkKD9wIller0nGdyl5zfTji0SsPKJvrG8jvKPFt2xOkVBmXlFOtuKynw== + +"@next/swc-linux-x64-gnu@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.5.5.tgz#3239655c97fbc13449841bc44f9b2b86c3c7ef35" + integrity 
sha512-6U4y21T1J6FfcpM9uqzBJicxycpB5gJKLyQ3g6KOfBzT8H1sMwfHTRrvHKB09GIn1BCRy5YJHrA1G26DzqR46w== + +"@next/swc-linux-x64-musl@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.5.5.tgz#60c66bc1b2c3d1f3993dd2fa55da1ff7c8247901" + integrity sha512-OuqWSAQHJQM2EsapPFTSU/FLQ0wKm7UeRNatiR/jLeCe1V02aB9xmzuWYo2Neaxxag4rss3S8fj+lvMLzwDaFA== + +"@next/swc-win32-arm64-msvc@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.5.5.tgz#521b3073bac7d4b89df520a61b7ae71510a0bc5c" + integrity sha512-+yLrOZIIZDY4uGn9bLOc0wTgs+M8RuOUFSUK3BhmcLav9e+tcAj0jyBHD4aXv2qWhppUeuYMsyBo1I58/eE6Dg== + +"@next/swc-win32-ia32-msvc@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.5.5.tgz#a21e29a57bd0534c646ba3e6d73191064f396b96" + integrity sha512-SyMxXyJtf9ScMH0Dh87THJMXNFvfkRAk841xyW9SeOX3KxM1buXX3hN7vof4kMGk0Yg996OGsX+7C9ueS8ugsw== + +"@next/swc-win32-x64-msvc@13.5.5": + version "13.5.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.5.5.tgz#7a442ee669dd6b0e5c774a86cd98457cd945c867" + integrity sha512-n5KVf2Ok0BbLwofAaHiiKf+BQCj1M8WmTujiER4/qzYAVngnsNSjqEWvJ03raeN9eURqxDO+yL5VRoDrR33H9A== "@nicolo-ribaudo/chokidar-2@2.1.8-no-fsevents.3": version "2.1.8-no-fsevents.3" - resolved "https://registry.yarnpkg.com/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz#323d72dd25103d0c4fbdce89dadf574a787b1f9b" + resolved "https://registry.npmjs.org/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz" integrity sha512-s88O1aVtXftvp5bCPB7WnmXc5IwOZZ7YPuwNPt+GtOOXpPvad1LfbmjYv+qII7zP6RU2QGnqve27dnLycEnyEQ== "@nodelib/fs.scandir@2.1.5": version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + resolved 
"https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== dependencies: "@nodelib/fs.stat" "2.0.5" @@ -1654,12 +1668,12 @@ "@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== "@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== dependencies: "@nodelib/fs.scandir" "2.1.5" @@ -1667,12 +1681,12 @@ "@pkgjs/parseargs@^0.11.0": version "0.11.0" - resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + resolved "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz" integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== "@pmmmwh/react-refresh-webpack-plugin@^0.5.5": version "0.5.11" - resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.11.tgz#7c2268cedaa0644d677e8c4f377bc8fb304f714a" + resolved "https://registry.npmjs.org/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.11.tgz" integrity sha512-7j/6vdTym0+qZ6u4XbSAxrWBGYSdCfTzySkj7WAFgDLmSyWlOrWvpyzxlFh5jtw9dn0oL/jtW+06XfFiisN3JQ== dependencies: ansi-html-community "^0.0.8" @@ -1685,29 +1699,46 @@ schema-utils "^3.0.0" source-map "^0.7.3" +"@prisma/client@^5.4.2": + 
version "5.4.2" + resolved "https://registry.yarnpkg.com/@prisma/client/-/client-5.4.2.tgz#786f9c1d8f06d955933004ac638d14da4bf14025" + integrity sha512-2xsPaz4EaMKj1WS9iW6MlPhmbqtBsXAOeVttSePp8vTFTtvzh2hZbDgswwBdSCgPzmmwF+tLB259QzggvCmJqA== + dependencies: + "@prisma/engines-version" "5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574" + +"@prisma/engines-version@5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574": + version "5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574" + resolved "https://registry.yarnpkg.com/@prisma/engines-version/-/engines-version-5.4.1-2.ac9d7041ed77bcc8a8dbd2ab6616b39013829574.tgz#ff14f2926890edee47e8f1d08df7b4f392ee34bf" + integrity sha512-wvupDL4AA1vf4TQNANg7kR7y98ITqPsk6aacfBxZKtrJKRIsWjURHkZCGcQliHdqCiW/hGreO6d6ZuSv9MhdAA== + +"@prisma/engines@5.4.2": + version "5.4.2" + resolved "https://registry.yarnpkg.com/@prisma/engines/-/engines-5.4.2.tgz#ba2b7faeb227c76e423e88f962afe6a031319f3f" + integrity sha512-fqeucJ3LH0e1eyFdT0zRx+oETLancu5+n4lhiYECyEz6H2RDskPJHJYHkVc0LhkU4Uv7fuEnppKU3nVKNzMh8g== + "@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": version "1.1.2" - resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf" + resolved "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz" integrity sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ== "@protobufjs/base64@^1.1.2": version "1.1.2" - resolved "https://registry.yarnpkg.com/@protobufjs/base64/-/base64-1.1.2.tgz#4c85730e59b9a1f1f349047dbf24296034bb2735" + resolved "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz" integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg== "@protobufjs/codegen@^2.0.4": version "2.0.4" - resolved "https://registry.yarnpkg.com/@protobufjs/codegen/-/codegen-2.0.4.tgz#7ef37f0d010fb028ad1ad59722e506d9262815cb" + resolved 
"https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz" integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg== "@protobufjs/eventemitter@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz#355cbc98bafad5978f9ed095f397621f1d066b70" + resolved "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz" integrity sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q== "@protobufjs/fetch@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/fetch/-/fetch-1.1.0.tgz#ba99fb598614af65700c1619ff06d454b0d84c45" + resolved "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz" integrity sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ== dependencies: "@protobufjs/aspromise" "^1.1.1" @@ -1715,46 +1746,46 @@ "@protobufjs/float@^1.0.2": version "1.0.2" - resolved "https://registry.yarnpkg.com/@protobufjs/float/-/float-1.0.2.tgz#5e9e1abdcb73fc0a7cb8b291df78c8cbd97b87d1" + resolved "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz" integrity sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ== "@protobufjs/inquire@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/inquire/-/inquire-1.1.0.tgz#ff200e3e7cf2429e2dcafc1140828e8cc638f089" + resolved "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz" integrity sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q== "@protobufjs/path@^1.1.2": version "1.1.2" - resolved "https://registry.yarnpkg.com/@protobufjs/path/-/path-1.1.2.tgz#6cc2b20c5c9ad6ad0dccfd21ca7673d8d7fbf68d" + resolved "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz" integrity 
sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA== "@protobufjs/pool@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/pool/-/pool-1.1.0.tgz#09fd15f2d6d3abfa9b65bc366506d6ad7846ff54" + resolved "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz" integrity sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw== "@protobufjs/utf8@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" + resolved "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== "@radix-ui/number@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/number/-/number-1.0.1.tgz#644161a3557f46ed38a042acf4a770e826021674" + resolved "https://registry.npmjs.org/@radix-ui/number/-/number-1.0.1.tgz" integrity sha512-T5gIdVO2mmPW3NNhjNgEP3cqMXjXL9UbO0BzWcXfvdBs+BohbQxvd/K5hSVKmn9/lbTdsQVKbUcP5WLCwvUbBg== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/primitive@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/primitive/-/primitive-1.0.1.tgz#e46f9958b35d10e9f6dc71c497305c22e3e55dbd" + resolved "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.1.tgz" integrity sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-arrow@1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-arrow/-/react-arrow-1.0.3.tgz#c24f7968996ed934d57fe6cde5d6ec7266e1d25d" + resolved "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.0.3.tgz" integrity sha512-wSP+pHsB/jQRaL6voubsQ/ZlrGBHHrOjmBnr19hxYgtS0WvAFwZhK2WP/YY5yF9uKECCEEDGxuLxq1NBK51wFA== dependencies: "@babel/runtime" "^7.13.10" @@ -1762,7 +1793,7 @@ 
"@radix-ui/react-checkbox@^1.0.4": version "1.0.4" - resolved "https://registry.yarnpkg.com/@radix-ui/react-checkbox/-/react-checkbox-1.0.4.tgz#98f22c38d5010dd6df4c5744cac74087e3275f4b" + resolved "https://registry.npmjs.org/@radix-ui/react-checkbox/-/react-checkbox-1.0.4.tgz" integrity sha512-CBuGQa52aAYnADZVt/KBQzXrwx6TqnlwtcIPGtVt5JkkzQwMOLJjPukimhfKEr4GQNd43C+djUh5Ikopj8pSLg== dependencies: "@babel/runtime" "^7.13.10" @@ -1775,9 +1806,24 @@ "@radix-ui/react-use-previous" "1.0.1" "@radix-ui/react-use-size" "1.0.1" +"@radix-ui/react-collapsible@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-collapsible/-/react-collapsible-1.0.3.tgz#df0e22e7a025439f13f62d4e4a9e92c4a0df5b81" + integrity sha512-UBmVDkmR6IvDsloHVN+3rtx4Mi5TFvylYXpluuv0f37dtaz3H99bp8No0LGXRigVpl3UAT4l9j6bIchh42S/Gg== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-id" "1.0.1" + "@radix-ui/react-presence" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-controllable-state" "1.0.1" + "@radix-ui/react-use-layout-effect" "1.0.1" + "@radix-ui/react-collection@1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-collection/-/react-collection-1.0.3.tgz#9595a66e09026187524a36c6e7e9c7d286469159" + resolved "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.0.3.tgz" integrity sha512-3SzW+0PW7yBBoQlT8wNcGtaxaD0XSu0uLUFgrtHY08Acx05TaHaOmVLR73c0j/cqpDy53KBMO7s0dx2wmOIDIA== dependencies: "@babel/runtime" "^7.13.10" @@ -1788,32 +1834,32 @@ "@radix-ui/react-compose-refs@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz#7ed868b66946aa6030e580b1ffca386dd4d21989" + resolved "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz" integrity 
sha512-fDSBgd44FKHa1FRMU59qBMPFcl2PZE+2nmqunj+BWFyYYjnhIDWL2ItDs3rrbJDQOtzt5nIebLCQc4QRfz6LJw== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-context@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-context/-/react-context-1.0.1.tgz#fe46e67c96b240de59187dcb7a1a50ce3e2ec00c" + resolved "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.1.tgz" integrity sha512-ebbrdFoYTcuZ0v4wG5tedGnp9tzcV8awzsxYph7gXUyvnNLuTIcCk1q17JEbnVhXAKG9oX3KtchwiMIAYp9NLg== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-dialog@^1.0.4": - version "1.0.4" - resolved "https://registry.yarnpkg.com/@radix-ui/react-dialog/-/react-dialog-1.0.4.tgz#06bce6c16bb93eb36d7a8589e665a20f4c1c52c1" - integrity sha512-hJtRy/jPULGQZceSAP2Re6/4NpKo8im6V8P2hUqZsdFiSL8l35kYsw3qbRI6Ay5mQd2+wlLqje770eq+RJ3yZg== + version "1.0.5" + resolved "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.5.tgz" + integrity sha512-GjWJX/AUpB703eEBanuBnIWdIXg6NvJFCXcNlSZk4xdszCdhrJgBoUd1cGk67vFO+WdA2pfI/plOpqz/5GUP6Q== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/primitive" "1.0.1" "@radix-ui/react-compose-refs" "1.0.1" "@radix-ui/react-context" "1.0.1" - "@radix-ui/react-dismissable-layer" "1.0.4" + "@radix-ui/react-dismissable-layer" "1.0.5" "@radix-ui/react-focus-guards" "1.0.1" - "@radix-ui/react-focus-scope" "1.0.3" + "@radix-ui/react-focus-scope" "1.0.4" "@radix-ui/react-id" "1.0.1" - "@radix-ui/react-portal" "1.0.3" + "@radix-ui/react-portal" "1.0.4" "@radix-ui/react-presence" "1.0.1" "@radix-ui/react-primitive" "1.0.3" "@radix-ui/react-slot" "1.0.2" @@ -1823,14 +1869,14 @@ "@radix-ui/react-direction@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-direction/-/react-direction-1.0.1.tgz#9cb61bf2ccf568f3421422d182637b7f47596c9b" + resolved "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.0.1.tgz" integrity 
sha512-RXcvnXgyvYvBEOhCBuddKecVkoMiI10Jcm5cTI7abJRAHYfFxeu+FBQs/DvdxSYucxR5mna0dNsL6QFlds5TMA== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-dismissable-layer@1.0.4": version "1.0.4" - resolved "https://registry.yarnpkg.com/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz#883a48f5f938fa679427aa17fcba70c5494c6978" + resolved "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz" integrity sha512-7UpBa/RKMoHJYjie1gkF1DlK8l1fdU/VKDpoS3rCCo8YBJR294GwcEHyxHw72yvphJ7ld0AXEcSLAzY2F/WyCg== dependencies: "@babel/runtime" "^7.13.10" @@ -1840,16 +1886,28 @@ "@radix-ui/react-use-callback-ref" "1.0.1" "@radix-ui/react-use-escape-keydown" "1.0.3" +"@radix-ui/react-dismissable-layer@1.0.5": + version "1.0.5" + resolved "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz" + integrity sha512-aJeDjQhywg9LBu2t/At58hCvr7pEm0o2Ke1x33B+MhjNmmZ17sy4KImo0KPLgsnc/zN7GPdce8Cnn0SWvwZO7g== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-callback-ref" "1.0.1" + "@radix-ui/react-use-escape-keydown" "1.0.3" + "@radix-ui/react-focus-guards@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.1.tgz#1ea7e32092216b946397866199d892f71f7f98ad" + resolved "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.1.tgz" integrity sha512-Rect2dWbQ8waGzhMavsIbmSVCgYxkXLxxR3ZvCX79JOglzdEy4JXMb98lq4hPxUbLr77nP0UOGf4rcMU+s1pUA== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-focus-scope@1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.3.tgz#9c2e8d4ed1189a1d419ee61edd5c1828726472f9" + resolved "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.3.tgz" integrity 
sha512-upXdPfqI4islj2CslyfUBNlaJCPybbqRHAi1KER7Isel9Q2AtSJ0zRBZv8mWQiFXD2nyAJ4BhC3yXgZ6kMBSrQ== dependencies: "@babel/runtime" "^7.13.10" @@ -1857,9 +1915,19 @@ "@radix-ui/react-primitive" "1.0.3" "@radix-ui/react-use-callback-ref" "1.0.1" +"@radix-ui/react-focus-scope@1.0.4": + version "1.0.4" + resolved "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.4.tgz" + integrity sha512-sL04Mgvf+FmyvZeYfNu1EPAaaxD+aw7cYeIB9L9Fvq8+urhltTRaEo5ysKOpHuKPclsZcSUMKlN05x4u+CINpA== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-callback-ref" "1.0.1" + "@radix-ui/react-form@^0.0.3": version "0.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-form/-/react-form-0.0.3.tgz#328e7163e723ccc748459d66a2d685d7b4f85d5a" + resolved "https://registry.npmjs.org/@radix-ui/react-form/-/react-form-0.0.3.tgz" integrity sha512-kgE+Z/haV6fxE5WqIXj05KkaXa3OkZASoTDy25yX2EIp/x0c54rOH/vFr5nOZTg7n7T1z8bSyXmiVIFP9bbhPQ== dependencies: "@babel/runtime" "^7.13.10" @@ -1870,9 +1938,14 @@ "@radix-ui/react-label" "2.0.2" "@radix-ui/react-primitive" "1.0.3" +"@radix-ui/react-icons@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@radix-ui/react-icons/-/react-icons-1.3.0.tgz#c61af8f323d87682c5ca76b856d60c2312dbcb69" + integrity sha512-jQxj/0LKgp+j9BiTXz3O3sgs26RNet2iLWmsPyRz2SIcR4q/4SbazXfnYwbAr+vLYKSfc7qxzyGQA1HLlYiuNw== + "@radix-ui/react-id@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-id/-/react-id-1.0.1.tgz#73cdc181f650e4df24f0b6a5b7aa426b912c88c0" + resolved "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.0.1.tgz" integrity sha512-tI7sT/kqYp8p96yGWY1OAnLHrqDgzHefRBKQ2YAkBS5ja7QLcZ9Z/uY7bEjPUatf8RomoXM8/1sMj1IJaE5UzQ== dependencies: "@babel/runtime" "^7.13.10" @@ -1880,15 +1953,37 @@ "@radix-ui/react-label@2.0.2": version "2.0.2" - resolved 
"https://registry.yarnpkg.com/@radix-ui/react-label/-/react-label-2.0.2.tgz#9c72f1d334aac996fdc27b48a8bdddd82108fb6d" + resolved "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.0.2.tgz" integrity sha512-N5ehvlM7qoTLx7nWPodsPYPgMzA5WM8zZChQg8nyFJKnDO5WHdba1vv5/H6IO5LtJMfD2Q3wh1qHFGNtK0w3bQ== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-primitive" "1.0.3" +"@radix-ui/react-popover@^1.0.7": + version "1.0.7" + resolved "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.7.tgz" + integrity sha512-shtvVnlsxT6faMnK/a7n0wptwBD23xc1Z5mdrtKLwVEfsEMXodS0r5s0/g5P0hX//EKYZS2sxUjqfzlg52ZSnQ== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-dismissable-layer" "1.0.5" + "@radix-ui/react-focus-guards" "1.0.1" + "@radix-ui/react-focus-scope" "1.0.4" + "@radix-ui/react-id" "1.0.1" + "@radix-ui/react-popper" "1.1.3" + "@radix-ui/react-portal" "1.0.4" + "@radix-ui/react-presence" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-slot" "1.0.2" + "@radix-ui/react-use-controllable-state" "1.0.1" + aria-hidden "^1.1.1" + react-remove-scroll "2.5.5" + "@radix-ui/react-popper@1.1.2": version "1.1.2" - resolved "https://registry.yarnpkg.com/@radix-ui/react-popper/-/react-popper-1.1.2.tgz#4c0b96fcd188dc1f334e02dba2d538973ad842e9" + resolved "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.2.tgz" integrity sha512-1CnGGfFi/bbqtJZZ0P/NQY20xdG3E0LALJaLUEoKwPLwl6PPPfbeiCqMVQnhoFRAxjJj4RpBRJzDmUgsex2tSg== dependencies: "@babel/runtime" "^7.13.10" @@ -1903,17 +1998,42 @@ "@radix-ui/react-use-size" "1.0.1" "@radix-ui/rect" "1.0.1" +"@radix-ui/react-popper@1.1.3": + version "1.1.3" + resolved "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.3.tgz" + integrity sha512-cKpopj/5RHZWjrbF2846jBNacjQVwkP068DfmgrNJXpvVWrOvlAmE9xSiy5OqeE+Gi8D9fP+oDhUnPqNMY8/5w== + dependencies: + 
"@babel/runtime" "^7.13.10" + "@floating-ui/react-dom" "^2.0.0" + "@radix-ui/react-arrow" "1.0.3" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-callback-ref" "1.0.1" + "@radix-ui/react-use-layout-effect" "1.0.1" + "@radix-ui/react-use-rect" "1.0.1" + "@radix-ui/react-use-size" "1.0.1" + "@radix-ui/rect" "1.0.1" + "@radix-ui/react-portal@1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-portal/-/react-portal-1.0.3.tgz#ffb961244c8ed1b46f039e6c215a6c4d9989bda1" + resolved "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.3.tgz" integrity sha512-xLYZeHrWoPmA5mEKEfZZevoVRK/Q43GfzRXkWV6qawIWWK8t6ifIiLQdd7rmQ4Vk1bmI21XhqF9BN3jWf+phpA== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-primitive" "1.0.3" +"@radix-ui/react-portal@1.0.4": + version "1.0.4" + resolved "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.4.tgz" + integrity sha512-Qki+C/EuGUVCQTOTD5vzJzJuMUlewbzuKyUy+/iHM2uwGiru9gZeBJtHAPKAEkB5KWGi9mP/CHKcY0wt1aW45Q== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-presence@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-presence/-/react-presence-1.0.1.tgz#491990ba913b8e2a5db1b06b203cb24b5cdef9ba" + resolved "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.0.1.tgz" integrity sha512-UXLW4UAbIY5ZjcvzjfRFo5gxva8QirC9hF7wRE4U5gz+TP0DbRk+//qyuAQ1McDxBt1xNMBTaciFGvEmJvAZCg== dependencies: "@babel/runtime" "^7.13.10" @@ -1922,7 +2042,7 @@ "@radix-ui/react-primitive@1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-primitive/-/react-primitive-1.0.3.tgz#d49ea0f3f0b2fe3ab1cb5667eb03e8b843b914d0" + resolved "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-1.0.3.tgz" integrity 
sha512-yi58uVyoAcK/Nq1inRY56ZSjKypBNKTa/1mcL8qdl6oJeEaDbOldlzrGn7P6Q3Id5d+SYNGc5AJgc4vGhjs5+g== dependencies: "@babel/runtime" "^7.13.10" @@ -1930,7 +2050,7 @@ "@radix-ui/react-progress@^1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-progress/-/react-progress-1.0.3.tgz#8380272fdc64f15cbf263a294dea70a7d5d9b4fa" + resolved "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.0.3.tgz" integrity sha512-5G6Om/tYSxjSeEdrb1VfKkfZfn/1IlPWd731h2RfPuSbIfNUgfqAwbKfJCg/PP6nuUCTrYzalwHSpSinoWoCag== dependencies: "@babel/runtime" "^7.13.10" @@ -1939,7 +2059,7 @@ "@radix-ui/react-radio-group@^1.1.3": version "1.1.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-radio-group/-/react-radio-group-1.1.3.tgz#3197f5dcce143bcbf961471bf89320735c0212d3" + resolved "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.1.3.tgz" integrity sha512-x+yELayyefNeKeTx4fjK6j99Fs6c4qKm3aY38G3swQVTN6xMpsrbigC0uHs2L//g8q4qR7qOcww8430jJmi2ag== dependencies: "@babel/runtime" "^7.13.10" @@ -1956,7 +2076,7 @@ "@radix-ui/react-roving-focus@1.0.4": version "1.0.4" - resolved "https://registry.yarnpkg.com/@radix-ui/react-roving-focus/-/react-roving-focus-1.0.4.tgz#e90c4a6a5f6ac09d3b8c1f5b5e81aab2f0db1974" + resolved "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.0.4.tgz" integrity sha512-2mUg5Mgcu001VkGy+FfzZyzbmuUWzgWkj3rvv4yu+mLw03+mTzbxZHvfcGyFp2b8EkQeMkpRQ5FiA2Vr2O6TeQ== dependencies: "@babel/runtime" "^7.13.10" @@ -1972,7 +2092,7 @@ "@radix-ui/react-select@^1.2.2": version "1.2.2" - resolved "https://registry.yarnpkg.com/@radix-ui/react-select/-/react-select-1.2.2.tgz#caa981fa0d672cf3c1b2a5240135524e69b32181" + resolved "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-1.2.2.tgz" integrity sha512-zI7McXr8fNaSrUY9mZe4x/HC0jTLY9fWNhO1oLWYMQGDXuV4UCivIGTxwioSzO0ZCYX9iSLyWmAh/1TOmX3Cnw== dependencies: "@babel/runtime" "^7.13.10" @@ -2000,7 +2120,7 @@ 
"@radix-ui/react-separator@1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-separator/-/react-separator-1.0.3.tgz#be5a931a543d5726336b112f465f58585c04c8aa" + resolved "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.0.3.tgz" integrity sha512-itYmTy/kokS21aiV5+Z56MZB54KrhPgn6eHDKkFeOLR34HMN2s8PaN47qZZAGnvupcjxHaFZnW4pQEh0BvvVuw== dependencies: "@babel/runtime" "^7.13.10" @@ -2008,7 +2128,7 @@ "@radix-ui/react-slot@1.0.2": version "1.0.2" - resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.0.2.tgz#a9ff4423eade67f501ffb32ec22064bc9d3099ab" + resolved "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.2.tgz" integrity sha512-YeTpuq4deV+6DusvVUW4ivBgnkHwECUu0BiN43L5UCDFgdhsRUWAghhTF5MbvNTPzmiFOx90asDSUjWuCNapwg== dependencies: "@babel/runtime" "^7.13.10" @@ -2016,7 +2136,7 @@ "@radix-ui/react-switch@^1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-switch/-/react-switch-1.0.3.tgz#6119f16656a9eafb4424c600fdb36efa5ec5837e" + resolved "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.0.3.tgz" integrity sha512-mxm87F88HyHztsI7N+ZUmEoARGkC22YVW5CaC+Byc+HRpuvCrOBPTAnXgf+tZ/7i0Sg/eOePGdMhUKhPaQEqow== dependencies: "@babel/runtime" "^7.13.10" @@ -2028,18 +2148,33 @@ "@radix-ui/react-use-previous" "1.0.1" "@radix-ui/react-use-size" "1.0.1" +"@radix-ui/react-tabs@^1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@radix-ui/react-tabs/-/react-tabs-1.0.4.tgz#993608eec55a5d1deddd446fa9978d2bc1053da2" + integrity sha512-egZfYY/+wRNCflXNHx+dePvnz9FbmssDTJBtgRfDY7e8SE5oIo3Py2eCB1ckAbh1Q7cQ/6yJZThJ++sgbxibog== + dependencies: + "@babel/runtime" "^7.13.10" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-direction" "1.0.1" + "@radix-ui/react-id" "1.0.1" + "@radix-ui/react-presence" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-roving-focus" "1.0.4" + 
"@radix-ui/react-use-controllable-state" "1.0.1" + "@radix-ui/react-toast@^1.1.4": - version "1.1.4" - resolved "https://registry.yarnpkg.com/@radix-ui/react-toast/-/react-toast-1.1.4.tgz#9a7fc2d71700886f3292f7699c905f1e01be59e1" - integrity sha512-wf+fc8DOywrpRK3jlPlWVe+ELYGHdKDaaARJZNuUTWyWYq7+ANCFLp4rTjZ/mcGkJJQ/vZ949Zis9xxEpfq9OA== + version "1.1.5" + resolved "https://registry.npmjs.org/@radix-ui/react-toast/-/react-toast-1.1.5.tgz" + integrity sha512-fRLn227WHIBRSzuRzGJ8W+5YALxofH23y0MlPLddaIpLpCDqdE0NZlS2NRQDRiptfxDeeCjgFIpexB1/zkxDlw== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/primitive" "1.0.1" "@radix-ui/react-collection" "1.0.3" "@radix-ui/react-compose-refs" "1.0.1" "@radix-ui/react-context" "1.0.1" - "@radix-ui/react-dismissable-layer" "1.0.4" - "@radix-ui/react-portal" "1.0.3" + "@radix-ui/react-dismissable-layer" "1.0.5" + "@radix-ui/react-portal" "1.0.4" "@radix-ui/react-presence" "1.0.1" "@radix-ui/react-primitive" "1.0.3" "@radix-ui/react-use-callback-ref" "1.0.1" @@ -2049,7 +2184,7 @@ "@radix-ui/react-toggle-group@1.0.4", "@radix-ui/react-toggle-group@^1.0.4": version "1.0.4" - resolved "https://registry.yarnpkg.com/@radix-ui/react-toggle-group/-/react-toggle-group-1.0.4.tgz#f5b5c8c477831b013bec3580c55e20a68179d6ec" + resolved "https://registry.npmjs.org/@radix-ui/react-toggle-group/-/react-toggle-group-1.0.4.tgz" integrity sha512-Uaj/M/cMyiyT9Bx6fOZO0SAG4Cls0GptBWiBmBxofmDbNVnYYoyRWj/2M/6VCi/7qcXFWnHhRUfdfZFvvkuu8A== dependencies: "@babel/runtime" "^7.13.10" @@ -2063,7 +2198,7 @@ "@radix-ui/react-toggle@1.0.3", "@radix-ui/react-toggle@^1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-toggle/-/react-toggle-1.0.3.tgz#aecb2945630d1dc5c512997556c57aba894e539e" + resolved "https://registry.npmjs.org/@radix-ui/react-toggle/-/react-toggle-1.0.3.tgz" integrity sha512-Pkqg3+Bc98ftZGsl60CLANXQBBQ4W3mTFS9EJvNxKMZ7magklKV69/id1mlAlOFDDfHvlCms0fx8fA4CMKDJHg== dependencies: "@babel/runtime" "^7.13.10" @@ -2073,7 
+2208,7 @@ "@radix-ui/react-toolbar@^1.0.4": version "1.0.4" - resolved "https://registry.yarnpkg.com/@radix-ui/react-toolbar/-/react-toolbar-1.0.4.tgz#3211a105567fa016e89921b5b514877f833de559" + resolved "https://registry.npmjs.org/@radix-ui/react-toolbar/-/react-toolbar-1.0.4.tgz" integrity sha512-tBgmM/O7a07xbaEkYJWYTXkIdU/1pW4/KZORR43toC/4XWyBCURK0ei9kMUdp+gTPPKBgYLxXmRSH1EVcIDp8Q== dependencies: "@babel/runtime" "^7.13.10" @@ -2086,18 +2221,18 @@ "@radix-ui/react-toggle-group" "1.0.4" "@radix-ui/react-tooltip@^1.0.6": - version "1.0.6" - resolved "https://registry.yarnpkg.com/@radix-ui/react-tooltip/-/react-tooltip-1.0.6.tgz#87a7786cd9f2b4de957ac645afae1575339c58b0" - integrity sha512-DmNFOiwEc2UDigsYj6clJENma58OelxD24O4IODoZ+3sQc3Zb+L8w1EP+y9laTuKCLAysPw4fD6/v0j4KNV8rg== + version "1.0.7" + resolved "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.0.7.tgz" + integrity sha512-lPh5iKNFVQ/jav/j6ZrWq3blfDJ0OH9R6FlNUHPMqdLuQ9vwDgFsRxvl8b7Asuy5c8xmoojHUxKHQSOAvMHxyw== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/primitive" "1.0.1" "@radix-ui/react-compose-refs" "1.0.1" "@radix-ui/react-context" "1.0.1" - "@radix-ui/react-dismissable-layer" "1.0.4" + "@radix-ui/react-dismissable-layer" "1.0.5" "@radix-ui/react-id" "1.0.1" - "@radix-ui/react-popper" "1.1.2" - "@radix-ui/react-portal" "1.0.3" + "@radix-ui/react-popper" "1.1.3" + "@radix-ui/react-portal" "1.0.4" "@radix-ui/react-presence" "1.0.1" "@radix-ui/react-primitive" "1.0.3" "@radix-ui/react-slot" "1.0.2" @@ -2106,14 +2241,14 @@ "@radix-ui/react-use-callback-ref@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.1.tgz#f4bb1f27f2023c984e6534317ebc411fc181107a" + resolved "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.1.tgz" integrity sha512-D94LjX4Sp0xJFVaoQOd3OO9k7tpBYNOXdVhkltUbGv2Qb9OXdrg/CpsjlZv7ia14Sylv398LswWBVVu5nqKzAQ== dependencies: "@babel/runtime" 
"^7.13.10" "@radix-ui/react-use-controllable-state@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.1.tgz#ecd2ced34e6330caf89a82854aa2f77e07440286" + resolved "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.1.tgz" integrity sha512-Svl5GY5FQeN758fWKrjM6Qb7asvXeiZltlT4U2gVfl8Gx5UAv2sMR0LWo8yhsIZh2oQ0eFdZ59aoOOMV7b47VA== dependencies: "@babel/runtime" "^7.13.10" @@ -2121,7 +2256,7 @@ "@radix-ui/react-use-escape-keydown@1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.3.tgz#217b840c250541609c66f67ed7bab2b733620755" + resolved "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.3.tgz" integrity sha512-vyL82j40hcFicA+M4Ex7hVkB9vHgSse1ZWomAqV2Je3RleKGO5iM8KMOEtfoSB0PnIelMd2lATjTGMYqN5ylTg== dependencies: "@babel/runtime" "^7.13.10" @@ -2129,21 +2264,21 @@ "@radix-ui/react-use-layout-effect@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.1.tgz#be8c7bc809b0c8934acf6657b577daf948a75399" + resolved "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.1.tgz" integrity sha512-v/5RegiJWYdoCvMnITBkNNx6bCj20fiaJnWtRkU18yITptraXjffz5Qbn05uOiQnOvi+dbkznkoaMltz1GnszQ== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-use-previous@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-previous/-/react-use-previous-1.0.1.tgz#b595c087b07317a4f143696c6a01de43b0d0ec66" + resolved "https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.0.1.tgz" integrity sha512-cV5La9DPwiQ7S0gf/0qiD6YgNqM5Fk97Kdrlc5yBcrF3jyEZQwm7vYFqMo4IfeHgJXsRaMvLABFtd0OVEmZhDw== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-use-rect@1.0.1": version "1.0.1" - 
resolved "https://registry.yarnpkg.com/@radix-ui/react-use-rect/-/react-use-rect-1.0.1.tgz#fde50b3bb9fd08f4a1cd204572e5943c244fcec2" + resolved "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.0.1.tgz" integrity sha512-Cq5DLuSiuYVKNU8orzJMbl15TXilTnJKUCltMVQg53BQOF1/C5toAaGrowkgksdBQ9H+SRL23g0HDmg9tvmxXw== dependencies: "@babel/runtime" "^7.13.10" @@ -2151,7 +2286,7 @@ "@radix-ui/react-use-size@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-size/-/react-use-size-1.0.1.tgz#1c5f5fea940a7d7ade77694bb98116fb49f870b2" + resolved "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.0.1.tgz" integrity sha512-ibay+VqrgcaI6veAojjofPATwledXiSmX+C0KrBk/xgpX9rBzPV3OsfwlhQdUOFbh+LKQorLYT+xTXW9V8yd0g== dependencies: "@babel/runtime" "^7.13.10" @@ -2159,7 +2294,7 @@ "@radix-ui/react-visually-hidden@1.0.3": version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.0.3.tgz#51aed9dd0fe5abcad7dee2a234ad36106a6984ac" + resolved "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.0.3.tgz" integrity sha512-D4w41yN5YRKtu464TLnByKzMDG/JlMPHtfZgQAu9v6mNakUqGUI9vUrfQKz8NK41VMm/xbZbh76NUTVtIYqOMA== dependencies: "@babel/runtime" "^7.13.10" @@ -2167,34 +2302,34 @@ "@radix-ui/rect@1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/rect/-/rect-1.0.1.tgz#bf8e7d947671996da2e30f4904ece343bc4a883f" + resolved "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.0.1.tgz" integrity sha512-fyrgCaedtvMg9NK3en0pnOYJdtfwxUcNolezkNPUsoX57X8oQk+NkqcvzHXD2uKNij6GXmWU9NDru2IWjrO4BQ== dependencies: "@babel/runtime" "^7.13.10" "@rushstack/eslint-patch@^1.1.3": - version "1.3.3" - resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.3.3.tgz#16ab6c727d8c2020a5b6e4a176a243ecd88d8d69" - integrity sha512-0xd7qez0AQ+MbHatZTlI1gu5vkG8r7MYRUJAHPAHJBmGLs16zpkrpAVLvjQKQOqaXPDUBwOiJzNc00znHSCVBw== + 
version "1.5.1" + resolved "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.5.1.tgz" + integrity sha512-6i/8UoL0P5y4leBIGzvkZdS85RDMG9y1ihZzmTZQ5LdHUYmZ7pKFoj8X0236s3lusPs1Fa5HTQUpwI+UfTcmeA== "@sinclair/typebox@^0.27.8": version "0.27.8" - resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" + resolved "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz" integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== -"@storybook/addon-actions@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-actions/-/addon-actions-7.4.0.tgz#709988f46422b85b3672d2e6f90bf623af59faa9" - integrity sha512-0lHLLUlrGE7CBFrfmAXrBKu7fUIsiQlnNekuE3cIAjSgVR481bJEzYHUUoMATqpPC4GGErBdP1CZxVDDwWV8jA== +"@storybook/addon-actions@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-7.4.6.tgz" + integrity sha512-SsqZr3js5NinKPnC8AeNI7Ij+Q6fIl9tRdRmSulEgjksjOg7E5S1/Wsn5Bb2CCgj7MaX6VxGyC7s3XskQtDiIQ== dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/theming" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/theming" "7.4.6" + "@storybook/types" "7.4.6" dequal "^2.0.2" lodash "^4.17.21" polished "^4.2.2" @@ -2204,160 +2339,160 @@ ts-dedent "^2.0.0" uuid "^9.0.0" -"@storybook/addon-backgrounds@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-backgrounds/-/addon-backgrounds-7.4.0.tgz#7d3048329b8ef73145a2e9b435b7b35004a65f86" - integrity 
sha512-cEO/Tp/eRE+5bf1FGN4wKLqLDBv3EYp9enJyXV7B3cFdciqtoE7VJPZuFZkzjJN1rRcOKSZp8g5agsx+x9uNGQ== +"@storybook/addon-backgrounds@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-7.4.6.tgz" + integrity sha512-+LHTZB/ZYMAzkyD5ZxSriBsqmsrvIaW/Nnd/BeuXGbkrVKKqM0qAKiFZAfjc2WchA1piVNy0/1Rsf+kuYCEiJw== dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/theming" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/theming" "7.4.6" + "@storybook/types" "7.4.6" memoizerific "^1.11.3" ts-dedent "^2.0.0" -"@storybook/addon-controls@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-controls/-/addon-controls-7.4.0.tgz#b212d60fd74d69f6b63c53e4d52ab6c77ee51247" - integrity sha512-tYDfqpTR+c9y4kElmr3aWNHPot6kYd+nruYb697LpkCdy4lFErqSo0mhvPyZfMZp2KEajfp6YJAurhQWbvbj/A== - dependencies: - "@storybook/blocks" "7.4.0" - "@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/core-common" "7.4.0" - "@storybook/core-events" "7.4.0" - "@storybook/manager-api" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/theming" "7.4.0" - "@storybook/types" "7.4.0" +"@storybook/addon-controls@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-7.4.6.tgz" + integrity sha512-4lq3sycEUIsK8SUWDYc60QgF4vV9FZZ3lDr6M7j2W9bOnvGw49d2fbdlnq+bX1ZprZZ9VgglQpBAorQB3BXZRw== + dependencies: + "@storybook/blocks" "7.4.6" + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-common" "7.4.6" + 
"@storybook/core-events" "7.4.6" + "@storybook/manager-api" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/theming" "7.4.6" + "@storybook/types" "7.4.6" lodash "^4.17.21" ts-dedent "^2.0.0" -"@storybook/addon-docs@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-docs/-/addon-docs-7.4.0.tgz#e07233c264eaec149a0fcca0e27c586d4e80b403" - integrity sha512-LJE92LUeVTgi8W4tLBEbSvCqF54snmBfTFCr46vhCFov2CE2VBgEvIX1XT3dfUgYUOtPu3RXR2C89fYgU6VYZw== +"@storybook/addon-docs@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-7.4.6.tgz" + integrity sha512-dLaub+XWFq4hChw+xfuF9yYg0Txp77FUawKoAigccfjWXx+OOhRV3XTuAcknpXkYq94GWynHgUFXosXT9kbDNA== dependencies: "@jest/transform" "^29.3.1" "@mdx-js/react" "^2.1.5" - "@storybook/blocks" "7.4.0" - "@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/csf-plugin" "7.4.0" - "@storybook/csf-tools" "7.4.0" + "@storybook/blocks" "7.4.6" + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/csf-plugin" "7.4.6" + "@storybook/csf-tools" "7.4.6" "@storybook/global" "^5.0.0" "@storybook/mdx2-csf" "^1.0.0" - "@storybook/node-logger" "7.4.0" - "@storybook/postinstall" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/react-dom-shim" "7.4.0" - "@storybook/theming" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/node-logger" "7.4.6" + "@storybook/postinstall" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/react-dom-shim" "7.4.6" + "@storybook/theming" "7.4.6" + "@storybook/types" "7.4.6" fs-extra "^11.1.0" remark-external-links "^8.0.0" remark-slug "^6.0.0" ts-dedent "^2.0.0" "@storybook/addon-essentials@^7.3.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-essentials/-/addon-essentials-7.4.0.tgz#b5d19c60233e5bd5e1a29b76b51059c889f18d52" - integrity 
sha512-nZmNM9AKw2JXxnYUXyFKLeUF/cL7Z9E1WTeZyOFTDtU2aITRt8+LvaepwjchtPqu2B0GcQxLB5FRDdhy0I19nw== - dependencies: - "@storybook/addon-actions" "7.4.0" - "@storybook/addon-backgrounds" "7.4.0" - "@storybook/addon-controls" "7.4.0" - "@storybook/addon-docs" "7.4.0" - "@storybook/addon-highlight" "7.4.0" - "@storybook/addon-measure" "7.4.0" - "@storybook/addon-outline" "7.4.0" - "@storybook/addon-toolbars" "7.4.0" - "@storybook/addon-viewport" "7.4.0" - "@storybook/core-common" "7.4.0" - "@storybook/manager-api" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/preview-api" "7.4.0" + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-7.4.6.tgz" + integrity sha512-dWodufrt71TK7ELkeIvVae/x4PzECUlbOm57Iqqt4yQCyR291CgvI4PjeB8un2HbpcXCGZ+N/Oj3YkytvzBi4A== + dependencies: + "@storybook/addon-actions" "7.4.6" + "@storybook/addon-backgrounds" "7.4.6" + "@storybook/addon-controls" "7.4.6" + "@storybook/addon-docs" "7.4.6" + "@storybook/addon-highlight" "7.4.6" + "@storybook/addon-measure" "7.4.6" + "@storybook/addon-outline" "7.4.6" + "@storybook/addon-toolbars" "7.4.6" + "@storybook/addon-viewport" "7.4.6" + "@storybook/core-common" "7.4.6" + "@storybook/manager-api" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/preview-api" "7.4.6" ts-dedent "^2.0.0" -"@storybook/addon-highlight@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-highlight/-/addon-highlight-7.4.0.tgz#ea33826a7f610f5e76cfa59ff22283e01cfd76cd" - integrity sha512-kpYSb3oXI9t/1+aRJhToDZ0/1W4mu+SzTBfv9Bl2d/DogEkFzgJricoy5LtvS5EpcXUmKO1FJsw/DCm9buSL2g== +"@storybook/addon-highlight@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-highlight/-/addon-highlight-7.4.6.tgz" + integrity sha512-zCufxxD2KS5VwczxfkcBxe1oR/juTTn2H1Qm8kYvWCJQx3UxzX0+G9cwafbpV7eivqaufLweEwROkH+0KjAtkQ== dependencies: - "@storybook/core-events" "7.4.0" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" 
- "@storybook/preview-api" "7.4.0" + "@storybook/preview-api" "7.4.6" "@storybook/addon-interactions@^7.3.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-interactions/-/addon-interactions-7.4.0.tgz#d0c15303999ac1e8f33705146e9a0a6db6df339c" - integrity sha512-nEWP+Ib0Y/ShXfpCm40FBTbBy1/MT8XxTEAhcNN+3ZJ07Vhhkrb8GMlWHTKQv2PyghEVBYEoPFHhElUJQOe00g== - dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/core-common" "7.4.0" - "@storybook/core-events" "7.4.0" + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-interactions/-/addon-interactions-7.4.6.tgz" + integrity sha512-zVZYrEPZPhNrXBuPqM7HbQvr6jwsje1sbCYj3wnp83U5wjciuqrngqHIlaSZ30zOWSfRVyzbyqL+JQZKA58BNA== + dependencies: + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-common" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/instrumenter" "7.4.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/theming" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/instrumenter" "7.4.6" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/theming" "7.4.6" + "@storybook/types" "7.4.6" jest-mock "^27.0.6" polished "^4.2.2" ts-dedent "^2.2.0" "@storybook/addon-links@^7.3.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-links/-/addon-links-7.4.0.tgz#f10ba388143d0de75150a27e94241d5fb4dfba7e" - integrity sha512-lFj8fiokWKk3jx5YUQ4anQo1uCNDMP1y6nJ/92Y85vnOd1vJr3w4GlLy8eOWMABRE33AKLI5Yp6wcpWZDe7hhQ== + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-links/-/addon-links-7.4.6.tgz" + integrity sha512-BPygElZKX+CPI9Se6GJNk1dYc5oxuhA+vHigO1tBqhiM6VkHyFP3cvezJNQvpNYhkUnu3cxnZXb3UJnlRbPY3g== dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/core-events" "7.4.6" 
"@storybook/csf" "^0.1.0" "@storybook/global" "^5.0.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/router" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/router" "7.4.6" + "@storybook/types" "7.4.6" prop-types "^15.7.2" ts-dedent "^2.0.0" -"@storybook/addon-measure@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-measure/-/addon-measure-7.4.0.tgz#61bc0d0af5af8c22e81b70e1690b2f58262944cd" - integrity sha512-8YjBqm6jPOBgkRn9YnJkLN0+ghgJiukdHOa0VB3qhiT+oww4ZOZ7mc2aQRwXQoFb05UbVVG9UNxE7lhyTyaG2w== +"@storybook/addon-measure@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-7.4.6.tgz" + integrity sha512-nCymMLaHnxv8TE3yEM1A9Tulb1NuRXRNmtsdHTkjv7P1aWCxZo8A/GZaottKe/GLT8jSRjZ+dnpYWrbAhw6wTQ== dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/types" "7.4.6" tiny-invariant "^1.3.1" -"@storybook/addon-outline@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-outline/-/addon-outline-7.4.0.tgz#63fef45815f209a3ad7ac2b3765f0734093af668" - integrity sha512-CCAWFC3bfkmYPzFjOemfH/kjpqJOHt+SdJgBKmwujDy+zum0DHlUL/7rd+U32cEpezCA8bapd0hlWn59C4agHQ== +"@storybook/addon-outline@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-7.4.6.tgz" + integrity sha512-errNUblRVDLpuEaHQPr/nsrnsUkD2ARmXawkRvizgDWLIDMDJYjTON3MUCaVx3x+hlZ3I6X//G5TVcma8tCc8A== dependencies: - "@storybook/client-logger" "7.4.0" - 
"@storybook/components" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/types" "7.4.6" ts-dedent "^2.0.0" "@storybook/addon-styling@^1.3.7": version "1.3.7" - resolved "https://registry.yarnpkg.com/@storybook/addon-styling/-/addon-styling-1.3.7.tgz#a440fafcbf2899fac28c3c778bec9f3c6fc02dd9" + resolved "https://registry.npmjs.org/@storybook/addon-styling/-/addon-styling-1.3.7.tgz" integrity sha512-JSBZMOrSw/3rlq5YoEI7Qyq703KSNP0Jd+gxTWu3/tP6245mpjn2dXnR8FvqVxCi+FG4lt2kQyPzgsuwEw1SSA== dependencies: "@babel/template" "^7.20.7" @@ -2379,56 +2514,65 @@ sass-loader "^13.2.2" style-loader "^3.3.2" -"@storybook/addon-toolbars@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-toolbars/-/addon-toolbars-7.4.0.tgz#db1a3bc1d6e6aa0142b62aaf8c44d5a9f82fd6b7" - integrity sha512-00PDLchlQXI3ZClQHU0YQBfikAAxHOhVNv2QKW54yFKmxPl+P2c/VIeir9LcPhA04smKrJTD1u+Nszd66A9xAA== - dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/theming" "7.4.0" - -"@storybook/addon-viewport@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/addon-viewport/-/addon-viewport-7.4.0.tgz#a9bc167b822d31491cec6aad21cc0a420f1ae5b7" - integrity sha512-Bfoilf9eJV/C7tR8XHDxz3h8JlZ+iggoESp2Tc0bW9tlRvz+PsCqeyHhF/IgHY+gLnPal2PkK/PIM+ruO45HXA== - dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/core-events" "7.4.0" +"@storybook/addon-toolbars@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-7.4.6.tgz" + integrity 
sha512-L9m2FBcKeteGq7qIYsMJr0LEfiH7Wdrv5IDcldZTn68eZUJTh1p4GdJZcOmzX1P5IFRr76hpu03iWsNlWQjpbQ== + dependencies: + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/theming" "7.4.6" + +"@storybook/addon-viewport@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-7.4.6.tgz" + integrity sha512-INDtk54j7bi7NgxMfd2ATmbA0J7nAd6X8itMkLIyPuPJtx8bYHPDORyemDOd0AojgmAdTOAyUtDYdI/PFeo4Cw== + dependencies: + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/theming" "7.4.0" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/theming" "7.4.6" memoizerific "^1.11.3" prop-types "^15.7.2" +"@storybook/addons@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/addons/-/addons-7.4.6.tgz" + integrity sha512-c+4awrtwNlJayFdgLkEXa5H2Gj+KNlxuN+Z5oDAdZBLqXI8g0gn7eYO2F/eCSIDWdd/+zcU2uq57XPFKc8veHQ== + dependencies: + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/types" "7.4.6" + "@storybook/api@^7.0.12": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/api/-/api-7.4.0.tgz#8cbe7edfdd4d23ab7aaa375869e0b93fdec9ccf5" - integrity sha512-L6CT3YCuUov9VHZUSA0euNuPB1Kczd2qVDgBzw8CFjQLei06ELoGCWFPS1X1HtY7d9BC+UjhU16uA4pjt833yw== - dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/manager-api" "7.4.0" - -"@storybook/blocks@7.4.0", "@storybook/blocks@^7.3.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/blocks/-/blocks-7.4.0.tgz#6a9240e2b58bac99a998c559d719be7ff4e19dcc" - integrity sha512-YQznNjJm+l32fCfPxrZso9+MbcyG0pWZSpx3RKI1+pxDMsAs4mbXsIw4//jKfjoDP/6/Cz/FJcSx8LT7i4BJ2w== - dependencies: - "@storybook/channels" "7.4.0" - 
"@storybook/client-logger" "7.4.0" - "@storybook/components" "7.4.0" - "@storybook/core-events" "7.4.0" + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/api/-/api-7.4.6.tgz" + integrity sha512-mnkHs2WI3/7vEUk+Bo1ZlQDp5vJDtoFSCFP5iae3YyVBbnjiI6oYlMZ14KgeizFULk3VaDv6/BdiynG1RkdO4Q== + dependencies: + "@storybook/client-logger" "7.4.6" + "@storybook/manager-api" "7.4.6" + +"@storybook/blocks@7.4.6", "@storybook/blocks@^7.3.0": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/blocks/-/blocks-7.4.6.tgz" + integrity sha512-HxBSAeOiTZW2jbHQlo1upRWFgoMsaAyKijUFf5MwwMNIesXCuuTGZDJ3xTABwAVLK2qC9Ektfbo0CZCiPVuDRQ== + dependencies: + "@storybook/channels" "7.4.6" + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/csf" "^0.1.0" - "@storybook/docs-tools" "7.4.0" + "@storybook/docs-tools" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/manager-api" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/theming" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/manager-api" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/theming" "7.4.6" + "@storybook/types" "7.4.6" "@types/lodash" "^4.14.167" color-convert "^2.0.1" dequal "^2.0.2" @@ -2442,15 +2586,15 @@ ts-dedent "^2.0.0" util-deprecate "^1.0.2" -"@storybook/builder-manager@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/builder-manager/-/builder-manager-7.4.0.tgz#80cf72ea83f88e16d585c5bdb40d563874c7d8ca" - integrity sha512-4fuxVzBIBbZh2aVBizSOU5EJ8b74IhR6x2TAZjifZZf5Gdxgfgio8sAyrrd/C78vrFOFhFEgmQhMqZRuCLHxvQ== +"@storybook/builder-manager@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/builder-manager/-/builder-manager-7.4.6.tgz" + integrity sha512-zylZCD2rmyLOOFBFmUgtJg6UNUKmRNgXiig1XApzS2TkIbTZP827DsVEUl0ey/lskCe0uArkrEBR6ICba8p/Rw== dependencies: "@fal-works/esbuild-plugin-global-externals" "^2.1.2" - "@storybook/core-common" "7.4.0" - 
"@storybook/manager" "7.4.0" - "@storybook/node-logger" "7.4.0" + "@storybook/core-common" "7.4.6" + "@storybook/manager" "7.4.6" + "@storybook/node-logger" "7.4.6" "@types/ejs" "^3.1.1" "@types/find-cache-dir" "^3.2.1" "@yarnpkg/esbuild-plugin-pnp" "^3.0.0-rc.10" @@ -2464,20 +2608,28 @@ process "^0.11.10" util "^0.12.4" -"@storybook/builder-webpack5@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/builder-webpack5/-/builder-webpack5-7.4.0.tgz#c9a4ee5a6424dd70f5b35f057de24afd268a5fd3" - integrity sha512-CYeXppqGACzDUpLCFvWvwD7IjN7VNi7+nwQ1uRNgW2NgBMOIldZe+gcTXcc0BuHyIitU5/vvquYM0qjis05LYw== - dependencies: - "@babel/core" "^7.22.0" - "@storybook/channels" "7.4.0" - "@storybook/client-logger" "7.4.0" - "@storybook/core-common" "7.4.0" - "@storybook/core-events" "7.4.0" - "@storybook/core-webpack" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/preview" "7.4.0" - "@storybook/preview-api" "7.4.0" +"@storybook/builder-webpack5@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-7.4.6.tgz" + integrity sha512-j7AyDPlUuO2GiH6riB8iGbT7blQpyVGB+rMHXPSm7v6/U7IITbNzxFwe+sSMLoFr8K1e2VXpgqQ9p3rHFey+nw== + dependencies: + "@babel/core" "^7.22.9" + "@storybook/addons" "7.4.6" + "@storybook/channels" "7.4.6" + "@storybook/client-api" "7.4.6" + "@storybook/client-logger" "7.4.6" + "@storybook/components" "7.4.6" + "@storybook/core-common" "7.4.6" + "@storybook/core-events" "7.4.6" + "@storybook/core-webpack" "7.4.6" + "@storybook/global" "^5.0.0" + "@storybook/manager-api" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/preview" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/router" "7.4.6" + "@storybook/store" "7.4.6" + "@storybook/theming" "7.4.6" "@swc/core" "^1.3.49" "@types/node" "^16.0.0" "@types/semver" "^7.3.4" @@ -2506,34 +2658,35 @@ webpack-hot-middleware "^2.25.1" webpack-virtual-modules "^0.5.0" -"@storybook/channels@7.4.0": - version "7.4.0" - resolved 
"https://registry.yarnpkg.com/@storybook/channels/-/channels-7.4.0.tgz#4ab69fce09c0fe7299f1595628b3de10b0fdcd8f" - integrity sha512-/1CU0s3npFumzVHLGeubSyPs21O3jNqtSppOjSB9iDTyV2GtQrjh5ntVwebfKpCkUSitx3x7TkCb9dylpEZ8+w== +"@storybook/channels@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/channels/-/channels-7.4.6.tgz" + integrity sha512-yPv/sfo2c18fM3fvG0i1xse63vG8l33Al/OU0k/dtovltPu001/HVa1QgBgsb/QrEfZtvGjGhmtdVeYb39fv3A== dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" qs "^6.10.0" telejson "^7.2.0" tiny-invariant "^1.3.1" -"@storybook/cli@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/cli/-/cli-7.4.0.tgz#a50f435d55e3056547c983c0bfacb2eed63cd692" - integrity sha512-yn27cn3LzhTqpEVX6CzUz13KTJ3jPLA2eM4bO1t7SYUqpDlzw3lET9DIcYIaUAIiL+0r2Js3jW2BsyN/5KmO5w== +"@storybook/cli@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/cli/-/cli-7.4.6.tgz" + integrity sha512-rRwaH8pOL+FHz/pJMEkNpMH2xvZvWsrl7obBYw26NQiHmiVSAkfHJicndSN1mwc+p5w+9iXthrgzbLtSAOSvkA== dependencies: "@babel/core" "^7.22.9" "@babel/preset-env" "^7.22.9" "@babel/types" "^7.22.5" "@ndelangen/get-tarball" "^3.0.7" - "@storybook/codemod" "7.4.0" - "@storybook/core-common" "7.4.0" - "@storybook/core-server" "7.4.0" - "@storybook/csf-tools" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/telemetry" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/codemod" "7.4.6" + "@storybook/core-common" "7.4.6" + "@storybook/core-events" "7.4.6" + "@storybook/core-server" "7.4.6" + "@storybook/csf-tools" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/telemetry" "7.4.6" + "@storybook/types" "7.4.6" "@types/semver" "^7.3.4" "@yarnpkg/fslib" "2.10.3" "@yarnpkg/libzip" "2.3.0" @@ -2564,25 +2717,33 @@ ts-dedent "^2.0.0" util-deprecate "^1.0.2" -"@storybook/client-logger@7.4.0": - version 
"7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/client-logger/-/client-logger-7.4.0.tgz#f90aa5ee29d540074f6e4890bae71836ac87273c" - integrity sha512-4pBnf7+df1wXEVcF1civqxbrtccGGHQkfWQkJo49s53RXvF7SRTcif6XTx0V3cQV0v7I1C5mmLm0LNlmjPRP1Q== +"@storybook/client-api@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/client-api/-/client-api-7.4.6.tgz" + integrity sha512-O8yA/xEzPW9Oe3s5VJAFor2d2KwXHjUZ1gvou3o14zu/TJLgXwol0qBBr+YLRO2rcNNJ51pAIGwAT5bgmpUaeg== + dependencies: + "@storybook/client-logger" "7.4.6" + "@storybook/preview-api" "7.4.6" + +"@storybook/client-logger@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/client-logger/-/client-logger-7.4.6.tgz" + integrity sha512-XDw31ZziU//86PKuMRnmc+L/G0VopaGKENQOGEpvAXCU9IZASwGKlKAtcyosjrpi+ZiUXlMgUXCpXM7x3b1Ehw== dependencies: "@storybook/global" "^5.0.0" -"@storybook/codemod@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/codemod/-/codemod-7.4.0.tgz#c23ef80253b5a5998c83e49e74bd6ff62683d27a" - integrity sha512-XqNhv5bec+L7TJ5tXdsMalmJazwaFMVVxoNlnb0f9zKhovAEF2F6hl6+Pnd2avRomH9+1q7EM+GwrTCAvzAfzg== +"@storybook/codemod@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/codemod/-/codemod-7.4.6.tgz" + integrity sha512-lxmwEpwksCaAq96APN2YlooSDfKjJ1vKzN5Ni2EqQzf2TEXl7XQjLacHd7OOaII1kfsy+D5gNG4N5wBo7Ub30g== dependencies: "@babel/core" "^7.22.9" "@babel/preset-env" "^7.22.9" "@babel/types" "^7.22.5" "@storybook/csf" "^0.1.0" - "@storybook/csf-tools" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/csf-tools" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/types" "7.4.6" "@types/cross-spawn" "^6.0.2" cross-spawn "^7.0.3" globby "^11.0.2" @@ -2591,37 +2752,38 @@ prettier "^2.8.0" recast "^0.23.1" -"@storybook/components@7.4.0", "@storybook/components@^7.0.12": - version "7.4.0" - resolved 
"https://registry.yarnpkg.com/@storybook/components/-/components-7.4.0.tgz#0cc83ff89dd9cdcde3eaeeb7b3fbcf2036ba6fb8" - integrity sha512-GGnQrI4NXwri/PqNjhO1vNv4tC7RBjY87ce9WHBq1ueat3kBakdqV97NzScoldXarkkKK6grBqmhw9jE5PfzhQ== +"@storybook/components@7.4.6", "@storybook/components@^7.0.12": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/components/-/components-7.4.6.tgz" + integrity sha512-nIRBhewAgrJJVafyCzuaLx1l+YOfvvD5dOZ0JxZsxJsefOdw1jFpUqUZ5fIpQ2moyvrR0mAUFw378rBfMdHz5Q== dependencies: "@radix-ui/react-select" "^1.2.2" "@radix-ui/react-toolbar" "^1.0.4" - "@storybook/client-logger" "7.4.0" + "@storybook/client-logger" "7.4.6" "@storybook/csf" "^0.1.0" "@storybook/global" "^5.0.0" - "@storybook/theming" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/theming" "7.4.6" + "@storybook/types" "7.4.6" memoizerific "^1.11.3" use-resize-observer "^9.1.0" util-deprecate "^1.0.2" -"@storybook/core-client@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/core-client/-/core-client-7.4.0.tgz#b2b683ebc44d0dfaa7a886f7bb1a5fc74a3d0965" - integrity sha512-AhysJS2HnydB8Jc+BMVzK5VLHa1liJjxroNsd+ZTgGUhD7R8wvozrswQgY4MLFtcaLwN/wDWlK2YavSBqmc94Q== +"@storybook/core-client@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/core-client/-/core-client-7.4.6.tgz" + integrity sha512-tfgxAHeCvMcs6DsVgtb4hQSDaCHeAPJOsoyhb47eDQfk4OmxzriM0qWucJV5DePSMi+KutX/rN2u0JxfOuN68g== dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/preview-api" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/preview-api" "7.4.6" -"@storybook/core-common@7.4.0", "@storybook/core-common@^7.0.12": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/core-common/-/core-common-7.4.0.tgz#da71afd79a12cfb5565351f184f6797214a5da79" - integrity sha512-QKrBL46ZFdfTjlZE3f7b59Q5+frOHWIJ64sC9BZ2PHkZkGjFeYRDdJJ6EHLYBb+nToynl33dYN1GQz+hQn2vww== +"@storybook/core-common@7.4.6", "@storybook/core-common@^7.0.12": + 
version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/core-common/-/core-common-7.4.6.tgz" + integrity sha512-05MJFmOM86qvTLtgDskokIFz9txe0Lbhq4L3by1FtF0GwgH+p+W6I94KI7c6ANER+kVZkXQZhiRzwBFnVTW+Cg== dependencies: - "@storybook/node-logger" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/core-events" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/types" "7.4.6" "@types/find-cache-dir" "^3.2.1" "@types/node" "^16.0.0" "@types/node-fetch" "^2.6.4" @@ -2643,33 +2805,33 @@ resolve-from "^5.0.0" ts-dedent "^2.0.0" -"@storybook/core-events@7.4.0", "@storybook/core-events@^7.0.12": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/core-events/-/core-events-7.4.0.tgz#0d50d254d65a678065d5906ac1dcab64396f2f6a" - integrity sha512-JavEo4dw7TQdF5pSKjk4RtqLgsG2R/eWRI8vZ3ANKa0ploGAnQR/eMTfSxf6TUH3ElBWLJhi+lvUCkKXPQD+dw== +"@storybook/core-events@7.4.6", "@storybook/core-events@^7.0.12": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/core-events/-/core-events-7.4.6.tgz" + integrity sha512-r5vrE+32lwrJh1NGFr1a0mWjvxo7q8FXYShylcwRWpacmL5NTtLkrXOoJSeGvJ4yKNYkvxQFtOPId4lzDxa32w== dependencies: ts-dedent "^2.0.0" -"@storybook/core-server@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/core-server/-/core-server-7.4.0.tgz#9e624789ff30d9538ac014b038c48fac0ebb7272" - integrity sha512-AcbfXatHVx1by4R2CiPIMgjQlOL3sUbVarkhmgUcL0AWT0zC0SCQWUZdo22en+jZhAraazgXyLGNCVP7A+6Tqg== +"@storybook/core-server@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/core-server/-/core-server-7.4.6.tgz" + integrity sha512-jqmRTGCJ1W0WReImivkisPVaLFT5sjtLnFoAk0feHp6QS5j7EYOPN7CYzliyQmARWTLUEXOVaFf3VD6nJZQhJQ== dependencies: "@aw-web-design/x-default-browser" "1.4.126" "@discoveryjs/json-ext" "^0.5.3" - "@storybook/builder-manager" "7.4.0" - "@storybook/channels" "7.4.0" - "@storybook/core-common" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/builder-manager" "7.4.6" + 
"@storybook/channels" "7.4.6" + "@storybook/core-common" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/csf" "^0.1.0" - "@storybook/csf-tools" "7.4.0" + "@storybook/csf-tools" "7.4.6" "@storybook/docs-mdx" "^0.1.0" "@storybook/global" "^5.0.0" - "@storybook/manager" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/telemetry" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/manager" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/telemetry" "7.4.6" + "@storybook/types" "7.4.6" "@types/detect-port" "^1.3.0" "@types/node" "^16.0.0" "@types/pretty-hrtime" "^1.0.0" @@ -2689,7 +2851,6 @@ prompts "^2.4.0" read-pkg-up "^7.0.1" semver "^7.3.7" - serve-favicon "^2.5.0" telejson "^7.2.0" tiny-invariant "^1.3.1" ts-dedent "^2.0.0" @@ -2698,100 +2859,100 @@ watchpack "^2.2.0" ws "^8.2.3" -"@storybook/core-webpack@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/core-webpack/-/core-webpack-7.4.0.tgz#0ff348a1590e9b8d425e9aec1ed850e3cfa3e75c" - integrity sha512-1zxzJjRbkcjl++OjYBVTDi0V/yO22Kz3ciPASTvXwrg0fXTXgxwxhJBmgOI4r17oY0kOWnJ1RDsmd95NLGAbGw== +"@storybook/core-webpack@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/core-webpack/-/core-webpack-7.4.6.tgz" + integrity sha512-EqQDmd+vKAWOAjoe539LsfP8WvQG9V9i1priMA53u1FOEged8o0NBtRiRy2+JDdUSiGUdpe/X5+V/TyyQw/KWw== dependencies: - "@storybook/core-common" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/core-common" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/types" "7.4.6" "@types/node" "^16.0.0" ts-dedent "^2.0.0" -"@storybook/csf-plugin@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/csf-plugin/-/csf-plugin-7.4.0.tgz#f25ebb30affbc9b4dd61b1fdb12c4a4257a275dc" - integrity sha512-X1L3l/dpz2UYjCEQlFLkW7w1A13pmzDZpJ0lotkV79PALlakMXBeoX3I2E0VMjJATV8wC9RSj56COBAs6HsPeg== +"@storybook/csf-plugin@7.4.6": + 
version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/csf-plugin/-/csf-plugin-7.4.6.tgz" + integrity sha512-yi7Qa4NSqKOyiJTWCxlB0ih2ijXq6oY5qZKW6MuMMBP14xJNRGLbH5KabpfXgN2T7YECcOWG1uWaGj2veJb1KA== dependencies: - "@storybook/csf-tools" "7.4.0" + "@storybook/csf-tools" "7.4.6" unplugin "^1.3.1" -"@storybook/csf-tools@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/csf-tools/-/csf-tools-7.4.0.tgz#db5c97ee603da9a68511192d701534e356f9e592" - integrity sha512-bKyOmWPyvT50Neq2wCRr2PmVGLVVm6pOw8WL5t5jueD8sRRzo9QdfhEkqmuSyqdsBdt3SiJKL5oA6dqY5Vl9ww== +"@storybook/csf-tools@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/csf-tools/-/csf-tools-7.4.6.tgz" + integrity sha512-ocKpcIUtTBy6hlLY34RUFQyX403cWpB2gGfqvkHbpGe2BQj7EyV0zpWnjsfVxvw+M9OWlCdxHWDOPUgXM33ELw== dependencies: "@babel/generator" "^7.22.9" "@babel/parser" "^7.22.7" "@babel/traverse" "^7.22.8" "@babel/types" "^7.22.5" "@storybook/csf" "^0.1.0" - "@storybook/types" "7.4.0" + "@storybook/types" "7.4.6" fs-extra "^11.1.0" recast "^0.23.1" ts-dedent "^2.0.0" "@storybook/csf@^0.0.1": version "0.0.1" - resolved "https://registry.yarnpkg.com/@storybook/csf/-/csf-0.0.1.tgz#95901507dc02f0bc6f9ac8ee1983e2fc5bb98ce6" + resolved "https://registry.npmjs.org/@storybook/csf/-/csf-0.0.1.tgz" integrity sha512-USTLkZze5gkel8MYCujSRBVIrUQ3YPBrLOx7GNk/0wttvVtlzWXAq9eLbQ4p/NicGxP+3T7KPEMVV//g+yubpw== dependencies: lodash "^4.17.15" "@storybook/csf@^0.1.0": version "0.1.1" - resolved "https://registry.yarnpkg.com/@storybook/csf/-/csf-0.1.1.tgz#abccc8c3e49aed0a6a7e87beb0d1c262b1921c06" + resolved "https://registry.npmjs.org/@storybook/csf/-/csf-0.1.1.tgz" integrity sha512-4hE3AlNVxR60Wc5KSC68ASYzUobjPqtSKyhV6G+ge0FIXU55N5nTY7dXGRZHQGDBPq+XqchMkIdlkHPRs8nTHg== dependencies: type-fest "^2.19.0" "@storybook/docs-mdx@^0.1.0": version "0.1.0" - resolved "https://registry.yarnpkg.com/@storybook/docs-mdx/-/docs-mdx-0.1.0.tgz#33ba0e39d1461caf048b57db354b2cc410705316" + 
resolved "https://registry.npmjs.org/@storybook/docs-mdx/-/docs-mdx-0.1.0.tgz" integrity sha512-JDaBR9lwVY4eSH5W8EGHrhODjygPd6QImRbwjAuJNEnY0Vw4ie3bPkeGfnacB3OBW6u/agqPv2aRlR46JcAQLg== -"@storybook/docs-tools@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/docs-tools/-/docs-tools-7.4.0.tgz#d9109c9c8ec4e90bb24d1acfcc16834a252618eb" - integrity sha512-DzXmt4JorAOePoS+sjQznf8jLPI9D5mdB1eSXjfvmGBQyyehKTZv5+TXuxYvT3iPN4rW4OPrIrQCSIrbULFdwA== +"@storybook/docs-tools@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/docs-tools/-/docs-tools-7.4.6.tgz" + integrity sha512-nZj1L/8WwKWWJ41FW4MaKGajZUtrhnr9UwflRCkQJaWhAKmDfOb5M5TqI93uCOULpFPOm5wpoMBz2IHInQ2Lrg== dependencies: - "@storybook/core-common" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/core-common" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/types" "7.4.6" "@types/doctrine" "^0.0.3" doctrine "^3.0.0" lodash "^4.17.21" "@storybook/global@^5.0.0": version "5.0.0" - resolved "https://registry.yarnpkg.com/@storybook/global/-/global-5.0.0.tgz#b793d34b94f572c1d7d9e0f44fac4e0dbc9572ed" + resolved "https://registry.npmjs.org/@storybook/global/-/global-5.0.0.tgz" integrity sha512-FcOqPAXACP0I3oJ/ws6/rrPT9WGhu915Cg8D02a9YxLo0DE9zI+a9A5gRGvmQ09fiWPukqI8ZAEoQEdWUKMQdQ== -"@storybook/instrumenter@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/instrumenter/-/instrumenter-7.4.0.tgz#197335f25a45ecdc2c5f458bff1c2481d7ffe08c" - integrity sha512-jZKxLK0lGKxY8LEul6GP7s+PDlNuXT4JU6MnPY9+SVSo23lP0pAOxo/ojV8WTLf48tcoyL3ztSfbYhxnaJvBfw== +"@storybook/instrumenter@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/instrumenter/-/instrumenter-7.4.6.tgz" + integrity sha512-K5atRoVFCl6HEgkSxIbwygpzgE/iROc7BrtJ3z3a7E70sanFr6Jxt6Egu6fz2QkL3ef4EWpXMnle2vhEfG29pA== dependencies: - "@storybook/channels" "7.4.0" - "@storybook/client-logger" "7.4.0" - "@storybook/core-events" "7.4.0" + 
"@storybook/channels" "7.4.6" + "@storybook/client-logger" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/preview-api" "7.4.0" + "@storybook/preview-api" "7.4.6" -"@storybook/manager-api@7.4.0", "@storybook/manager-api@^7.0.12": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/manager-api/-/manager-api-7.4.0.tgz#aee0153df1583459b7e1e64e1d8c46fb49a584c8" - integrity sha512-sBfkkt0eZGTozeKrbzMtWLEOQrgqdk24OUJlkc2IDaucR1CBNjoCMjNeYg7cLDw0rXE8W3W3AdWtJnfsUbLMAQ== +"@storybook/manager-api@7.4.6", "@storybook/manager-api@^7.0.12": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/manager-api/-/manager-api-7.4.6.tgz" + integrity sha512-inrm3DIbCp8wjXSN/wK6e6i2ysQ/IEmtC7IN0OJ7vdrp+USCooPT448SQTUmVctUGCFmOU3fxXByq8g77oIi7w== dependencies: - "@storybook/channels" "7.4.0" - "@storybook/client-logger" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/channels" "7.4.6" + "@storybook/client-logger" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/csf" "^0.1.0" "@storybook/global" "^5.0.0" - "@storybook/router" "7.4.0" - "@storybook/theming" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/router" "7.4.6" + "@storybook/theming" "7.4.6" + "@storybook/types" "7.4.6" dequal "^2.0.2" lodash "^4.17.21" memoizerific "^1.11.3" @@ -2800,20 +2961,20 @@ telejson "^7.2.0" ts-dedent "^2.0.0" -"@storybook/manager@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/manager/-/manager-7.4.0.tgz#21a825c9145f56ca6c38d3e9d3546b311a6db14e" - integrity sha512-uOSdPBEBKg8WORUZ5HKHb4KnKcTyA5j5Q8MWy/NBaRd22JR3fQkZiKuHer9WJIOQTU+fb6KDmzhZbCTKg5Euog== +"@storybook/manager@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/manager/-/manager-7.4.6.tgz" + integrity sha512-kA1hUDxpn1i2SO9OinvLvVXDeL4xgJkModp+pbE8IXv4NJWReNq1ecMeQCzPLS3Sil2gnrullQ9uYXsnZ9bxxA== "@storybook/mdx2-csf@^1.0.0": version "1.1.0" - resolved 
"https://registry.yarnpkg.com/@storybook/mdx2-csf/-/mdx2-csf-1.1.0.tgz#97f6df04d0bf616991cc1005a073ac004a7281e5" + resolved "https://registry.npmjs.org/@storybook/mdx2-csf/-/mdx2-csf-1.1.0.tgz" integrity sha512-TXJJd5RAKakWx4BtpwvSNdgTDkKM6RkXU8GK34S/LhidQ5Pjz3wcnqb0TxEkfhK/ztbP8nKHqXFwLfa2CYkvQw== "@storybook/nextjs@^7.3.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/nextjs/-/nextjs-7.4.0.tgz#6456c0bdca4e53ade56791fc432dd2524f16ecfe" - integrity sha512-nGmer4Hu1/XX3+XyxfAkQ9d16Qsj467aLc7MTNQ2uFyYAksCqT3bvznooUOcD/X5NfoyL2YA78OczGdt1HFFpQ== + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/nextjs/-/nextjs-7.4.6.tgz" + integrity sha512-9Xr0gKEXihxefn0ihDHOxlGyuPdiD4AsrhduNP63brbBaZmOEJZm1b/V9WulCFc8tE/Xu4o5ThUphhB8GFtdzw== dependencies: "@babel/core" "^7.22.9" "@babel/plugin-proposal-class-properties" "^7.18.6" @@ -2828,13 +2989,13 @@ "@babel/preset-react" "^7.22.5" "@babel/preset-typescript" "^7.22.5" "@babel/runtime" "^7.22.6" - "@storybook/addon-actions" "7.4.0" - "@storybook/builder-webpack5" "7.4.0" - "@storybook/core-common" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/preset-react-webpack" "7.4.0" - "@storybook/preview-api" "7.4.0" - "@storybook/react" "7.4.0" + "@storybook/addon-actions" "7.4.6" + "@storybook/builder-webpack5" "7.4.6" + "@storybook/core-common" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/preset-react-webpack" "7.4.6" + "@storybook/preview-api" "7.4.6" + "@storybook/react" "7.4.6" "@types/node" "^16.0.0" css-loader "^6.7.3" find-up "^5.0.0" @@ -2854,28 +3015,28 @@ tsconfig-paths "^4.0.0" tsconfig-paths-webpack-plugin "^4.0.1" -"@storybook/node-logger@7.4.0", "@storybook/node-logger@^7.0.12": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/node-logger/-/node-logger-7.4.0.tgz#808ed8a63e3bc2f97a2d276b4e8ddaa72b79deb0" - integrity sha512-tWSWkYyAvp6SxjIBaTklg29avzv/3Lv4c0dOG2o5tz79PyZkq9v6sQtwLLoI8EJA9Mo8Z08vaJp8NZyDQ9RCuA== 
+"@storybook/node-logger@7.4.6", "@storybook/node-logger@^7.0.12": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-7.4.6.tgz" + integrity sha512-djZb310Q27GviDug1XBv0jOEDLCiwr4hhDE0aifCEKZpfNCi/EaP31nbWimFzZwxu4hE/YAPWExzScruR1zw9Q== -"@storybook/postinstall@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/postinstall/-/postinstall-7.4.0.tgz#81f3bef31b566e26d616f9c3ce567f07ff143cc7" - integrity sha512-ZVBZggqkuj7ysfuHSCd/J7ovWV06zY9uWf+VU+Zw7ZeojDT8QHFrCurPsN7D9679j9vRU1/kSzqvAiStALS33g== +"@storybook/postinstall@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/postinstall/-/postinstall-7.4.6.tgz" + integrity sha512-TqI5BucPAGRWrkh55BYiG2/gHLFtC0In4cuu0GsUzB/1jc4i51npLRorCwhmT7r7YliGl5F7JaP0Bni/qHN3Lg== -"@storybook/preset-react-webpack@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/preset-react-webpack/-/preset-react-webpack-7.4.0.tgz#5d8c1a776fe46ab732a165129da57b89483e1e6b" - integrity sha512-9iZ9lvhRUYtxXmJMqR7txNyatrHryqo6FSKzfpUzmcCySn3d7mu9I6LEPxEir43TkPnBio3W4EsbvtIhjJ5ekA== +"@storybook/preset-react-webpack@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/preset-react-webpack/-/preset-react-webpack-7.4.6.tgz" + integrity sha512-FfJvlk3bJfg66t06YLiyu+1o/DZN3uNfFP37zv5cJux7TpdmJRV/4m9LKQPJOvcnWBQYem8xX8k5cRS29vdW5g== dependencies: "@babel/preset-flow" "^7.22.5" "@babel/preset-react" "^7.22.5" "@pmmmwh/react-refresh-webpack-plugin" "^0.5.5" - "@storybook/core-webpack" "7.4.0" - "@storybook/docs-tools" "7.4.0" - "@storybook/node-logger" "7.4.0" - "@storybook/react" "7.4.0" + "@storybook/core-webpack" "7.4.6" + "@storybook/docs-tools" "7.4.6" + "@storybook/node-logger" "7.4.6" + "@storybook/react" "7.4.6" "@storybook/react-docgen-typescript-plugin" "1.0.6--canary.9.0c3f3b7.0" "@types/node" "^16.0.0" "@types/semver" "^7.3.4" @@ -2886,17 +3047,17 @@ semver "^7.3.7" webpack "5" 
-"@storybook/preview-api@7.4.0", "@storybook/preview-api@^7.0.12": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/preview-api/-/preview-api-7.4.0.tgz#46818910545735bef43965651eef380a6f481f4b" - integrity sha512-ndXO0Nx+eE7ktVE4EqHpQZ0guX7yYBdruDdJ7B739C0+OoPWsJN7jAzUqq0NXaBcYrdaU5gTy+KnWJUt8R+OyA== +"@storybook/preview-api@7.4.6", "@storybook/preview-api@^7.0.12": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/preview-api/-/preview-api-7.4.6.tgz" + integrity sha512-byUS/Opt3ytWD4cWz3sNEKw5Yks8MkQgRN+GDSyIomaEAQkLAM0rchPC0MYjwCeUSecV7IIQweNX5RbV4a34BA== dependencies: - "@storybook/channels" "7.4.0" - "@storybook/client-logger" "7.4.0" - "@storybook/core-events" "7.4.0" + "@storybook/channels" "7.4.6" + "@storybook/client-logger" "7.4.6" + "@storybook/core-events" "7.4.6" "@storybook/csf" "^0.1.0" "@storybook/global" "^5.0.0" - "@storybook/types" "7.4.0" + "@storybook/types" "7.4.6" "@types/qs" "^6.9.5" dequal "^2.0.2" lodash "^4.17.21" @@ -2906,14 +3067,14 @@ ts-dedent "^2.0.0" util-deprecate "^1.0.2" -"@storybook/preview@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/preview/-/preview-7.4.0.tgz#a58756ac9b12ea21f203032eca47991946257b53" - integrity sha512-R4LMTvUrVAbcUetRbAXpY3frkwD0eysqHrByiR73040+ngzDwtZOBAy0JfO3jw3WrWv2dn3kWlao5aEwVc9Exw== +"@storybook/preview@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/preview/-/preview-7.4.6.tgz" + integrity sha512-2RPXusJ4CTDrIipIKKvbotD7fP0+8VzoFjImunflIrzN9rni+2rq5eMjqlXAaB+77w064zIR4uDUzI9fxsMDeQ== "@storybook/react-docgen-typescript-plugin@1.0.6--canary.9.0c3f3b7.0": version "1.0.6--canary.9.0c3f3b7.0" - resolved "https://registry.yarnpkg.com/@storybook/react-docgen-typescript-plugin/-/react-docgen-typescript-plugin-1.0.6--canary.9.0c3f3b7.0.tgz#7f10f3c641f32e4513a8b6ffb5036933e7059534" + resolved 
"https://registry.npmjs.org/@storybook/react-docgen-typescript-plugin/-/react-docgen-typescript-plugin-1.0.6--canary.9.0c3f3b7.0.tgz" integrity sha512-KUqXC3oa9JuQ0kZJLBhVdS4lOneKTOopnNBK4tUAgoxWQ3u/IjzdueZjFr7gyBrXMoU6duutk3RQR9u8ZpYJ4Q== dependencies: debug "^4.1.1" @@ -2924,23 +3085,23 @@ react-docgen-typescript "^2.2.2" tslib "^2.0.0" -"@storybook/react-dom-shim@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/react-dom-shim/-/react-dom-shim-7.4.0.tgz#12f137f00f2a209cb49a4084475dd93f23e0678a" - integrity sha512-TLpb8a2hnWJoRLqoXpMADh82BFfRZll6JI2Waf1FjnvJ4SF9eS0zBbxybrjW3lFAHWy2XJi+rwcK8FiPj0iBoQ== +"@storybook/react-dom-shim@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/react-dom-shim/-/react-dom-shim-7.4.6.tgz" + integrity sha512-DSq8l9FDocUF1ooVI+TF83pddj1LynE/Hv0/y8XZhc3IgJ/HkuOQuUmfz29ezgfAi9gFYUR8raTIBi3/xdoRmw== -"@storybook/react@7.4.0", "@storybook/react@^7.3.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/react/-/react-7.4.0.tgz#18d29aa49f0b784b46613d26a243caf473177403" - integrity sha512-QWsFw/twsNkcWI6brW06sugQQ5dV+fJm4IrEeI28cA4cBHK9G9HKOwCHoXDUWikzZx48XYMpNfs/WyIkuGmEqg== +"@storybook/react@7.4.6", "@storybook/react@^7.3.0": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/react/-/react-7.4.6.tgz" + integrity sha512-w0dVo64baFFPTGpUOWFqkKsu6pQincoymegSNgqaBd5DxEyMDRiRoTWSJHMKE9BwgE8SyWhRkP1ak1mkccSOhQ== dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/core-client" "7.4.0" - "@storybook/docs-tools" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/core-client" "7.4.6" + "@storybook/docs-tools" "7.4.6" "@storybook/global" "^5.0.0" - "@storybook/preview-api" "7.4.0" - "@storybook/react-dom-shim" "7.4.0" - "@storybook/types" "7.4.0" + "@storybook/preview-api" "7.4.6" + "@storybook/react-dom-shim" "7.4.6" + "@storybook/types" "7.4.6" "@types/escodegen" "^0.0.6" "@types/estree" "^0.0.51" "@types/node" "^16.0.0" @@ -2956,23 
+3117,31 @@ type-fest "~2.19" util-deprecate "^1.0.2" -"@storybook/router@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/router/-/router-7.4.0.tgz#627f824bfd9cc4653ee84581fc09373ab1463336" - integrity sha512-IATdtFL5C3ryjNQSwaQfrmiOZiVFoVNMevMoBGDC++g0laSW40TGiNK6fUjUDBKuOgbuDt4Svfbl29k21GefEg== +"@storybook/router@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/router/-/router-7.4.6.tgz" + integrity sha512-Vl1esrHkcHxDKqc+HY7+6JQpBPW3zYvGk0cQ2rxVMhWdLZTAz1hss9DqzN9tFnPyfn0a1Q77EpMySkUrvWKKNQ== dependencies: - "@storybook/client-logger" "7.4.0" + "@storybook/client-logger" "7.4.6" memoizerific "^1.11.3" qs "^6.10.0" -"@storybook/telemetry@7.4.0": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/telemetry/-/telemetry-7.4.0.tgz#04e47a2d9decf7671273130a9af9d231a8c3d2e8" - integrity sha512-oxCB3kIbpiDWuXEtQhk/j6t1/h0KKWAuvxmcwGPxwhEvj/uNtoM+f1qhoDID9waxNo4AccU9Px+1ZJQ+2ejcDg== +"@storybook/store@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/store/-/store-7.4.6.tgz" + integrity sha512-tlm9rQ+djkYjEyCuEjaUv+c+jVgwnMEF9mZxnOoA6zrzU2g0S/1oE9/MdVLByGbH67U0NuuP0FcvsWLhAOQzjQ== dependencies: - "@storybook/client-logger" "7.4.0" - "@storybook/core-common" "7.4.0" - "@storybook/csf-tools" "7.4.0" + "@storybook/client-logger" "7.4.6" + "@storybook/preview-api" "7.4.6" + +"@storybook/telemetry@7.4.6": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/telemetry/-/telemetry-7.4.6.tgz" + integrity sha512-c8p/C1NIH8EMBviZkBCx8MMDk6rrITJ+b29DEp5MaWSRlklIVyhGiC4RPIRv6sxJwlD41PnqWVFtfu2j2eXLdQ== + dependencies: + "@storybook/client-logger" "7.4.6" + "@storybook/core-common" "7.4.6" + "@storybook/csf-tools" "7.4.6" chalk "^4.1.0" detect-package-manager "^2.0.1" fetch-retry "^5.0.2" @@ -2980,119 +3149,124 @@ read-pkg-up "^7.0.1" "@storybook/testing-library@^0.2.0": - version "0.2.0" - resolved 
"https://registry.yarnpkg.com/@storybook/testing-library/-/testing-library-0.2.0.tgz#09202b90ea5bd67b503dbb1a0b1f3ab3eb005d04" - integrity sha512-Ff6jNnrsosmDshgCf0Eb5Cz7IA34p/1Ps5N3Kp3598kfXpBSccSkQQvVFUXC3kIHw/isIXWPqntZuKqnWUz7Gw== + version "0.2.2" + resolved "https://registry.npmjs.org/@storybook/testing-library/-/testing-library-0.2.2.tgz" + integrity sha512-L8sXFJUHmrlyU2BsWWZGuAjv39Jl1uAqUHdxmN42JY15M4+XCMjGlArdCCjDe1wpTSW6USYISA9axjZojgtvnw== dependencies: "@testing-library/dom" "^9.0.0" - "@testing-library/user-event" "^14.0.0" + "@testing-library/user-event" "^14.4.0" ts-dedent "^2.2.0" -"@storybook/theming@7.4.0", "@storybook/theming@^7.0.12": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/theming/-/theming-7.4.0.tgz#f5d9f8f55c41e08c0f50b57d9fb0e159ed595274" - integrity sha512-eLjEf6G3cqlegfutF/iUrec9LrUjKDj7K4ZhGdACWrf7bQcODs99EK62e9/d8GNKr4b+QMSEuM6XNGaqdPnuzQ== +"@storybook/theming@7.4.6", "@storybook/theming@^7.0.12": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/theming/-/theming-7.4.6.tgz" + integrity sha512-HW77iJ9ptCMqhoBOYFjRQw7VBap+38fkJGHP5KylEJCyYCgIAm2dEcQmtWpMVYFssSGcb6djfbtAMhYU4TL4Iw== dependencies: "@emotion/use-insertion-effect-with-fallbacks" "^1.0.0" - "@storybook/client-logger" "7.4.0" + "@storybook/client-logger" "7.4.6" "@storybook/global" "^5.0.0" memoizerific "^1.11.3" -"@storybook/types@7.4.0", "@storybook/types@^7.0.12": - version "7.4.0" - resolved "https://registry.yarnpkg.com/@storybook/types/-/types-7.4.0.tgz#71ce550d4d469f6aaf9777fc7432db9fb67f53f9" - integrity sha512-XyzYkmeklywxvElPrIWLczi/PWtEdgTL6ToT3++FVxptsC2LZKS3Ue+sBcQ9xRZhkRemw4HQHwed5EW3dO8yUg== +"@storybook/types@7.4.6", "@storybook/types@^7.0.12": + version "7.4.6" + resolved "https://registry.npmjs.org/@storybook/types/-/types-7.4.6.tgz" + integrity sha512-6QLXtMVsFZFpzPkdGWsu/iuc8na9dnS67AMOBKm5qCLPwtUJOYkwhMdFRSSeJthLRpzV7JLAL8Kwvl7MFP3QSw== dependencies: - "@storybook/channels" "7.4.0" + "@storybook/channels" 
"7.4.6" "@types/babel__core" "^7.0.0" "@types/express" "^4.7.0" - "@types/react" "^16.14.34" file-system-cache "2.3.0" -"@swc/core-darwin-arm64@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.82.tgz#bbf9874747b51053d8a59ea26c3e235c326f24a3" - integrity sha512-JfsyDW34gVKD3uE0OUpUqYvAD3yseEaicnFP6pB292THtLJb0IKBBnK50vV/RzEJtc1bR3g1kNfxo2PeurZTrA== - -"@swc/core-darwin-x64@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.3.82.tgz#145cdde16678e0d793620035783e5b413a16ac43" - integrity sha512-ogQWgNMq7qTpITjcP3dnzkFNj7bh6SwMr859GvtOTrE75H7L7jDWxESfH4f8foB/LGxBKiDNmxKhitCuAsZK4A== - -"@swc/core-linux-arm-gnueabihf@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.82.tgz#0c2f32c5793f2ac8e8ccf416aec84d016c30ef7b" - integrity sha512-7TMXG1lXlNhD0kUiEqs+YlGV4irAdBa2quuy+XI3oJf2fBK6dQfEq4xBy65B3khrorzQS3O0oDGQ+cmdpHExHA== - -"@swc/core-linux-arm64-gnu@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.82.tgz#2313d4901fa0ebdd2a0f189909073e1e8a07f1d6" - integrity sha512-26JkOujbzcItPAmIbD5vHJxQVy5ihcSu3YHTKwope1h28sApZdtE7S3e2G3gsZRTIdsCQkXUtAQeqHxGWWR3pw== - -"@swc/core-linux-arm64-musl@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.82.tgz#6e96cf6e52e647fecf27511d766bea90e96f8a2f" - integrity sha512-8Izj9tuuMpoc3cqiPBRtwqpO1BZ/+sfZVsEhLxrbOFlcSb8LnKyMle1g3JMMUwI4EU75RGVIzZMn8A6GOKdJbA== - -"@swc/core-linux-x64-gnu@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.82.tgz#6275c10d7c8c0768550bc7934c9dd8cde4881d92" - integrity sha512-0GSrIBScQwTaPv46T2qB7XnDYxndRCpwH4HMjh6FN+I+lfPUhTSJKW8AonqrqT1TbpFIgvzQs7EnTsD7AnSCow== - -"@swc/core-linux-x64-musl@1.3.82": - 
version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.82.tgz#edb98c30bd0de42bf1a63469937630d942c71988" - integrity sha512-KJUnaaepDKNzrEbwz4jv0iC3/t9x0NSoe06fnkAlhh2+NFKWKKJhVCOBTrpds8n7eylBDIXUlK34XQafjVMUdg== - -"@swc/core-win32-arm64-msvc@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.82.tgz#0a8e9b361aac37d01f684c8a3d3e94e5f8c3b14f" - integrity sha512-TR3MHKhDYIyGyFcyl2d/p1ftceXcubAhX5wRSOdtOyr5+K/v3jbyCCqN7bbqO5o43wQVCwwR/drHleYyDZvg8Q== - -"@swc/core-win32-ia32-msvc@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.82.tgz#096854ff764282766271f1354ee1214358a8bf01" - integrity sha512-ZX4HzVVt6hs84YUg70UvyBJnBOIspmQQM0iXSzBvOikk3zRoN7BnDwQH4GScvevCEBuou60+i4I6d5kHLOfh8Q== - -"@swc/core-win32-x64-msvc@1.3.82": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.82.tgz#1181070bff4a13a7fcc7f1020eef1571f8c1257a" - integrity sha512-4mJMnex21kbQoaHeAmHnVwQN9/XAfPszJ6n9HI7SVH+aAHnbBIR0M59/b50/CJMjTj5niUGk7EwQ3nhVNOG32g== +"@swc/core-darwin-arm64@1.3.93": + version "1.3.93" + resolved "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.93.tgz" + integrity sha512-gEKgk7FVIgltnIfDO6GntyuQBBlAYg5imHpRgLxB1zSI27ijVVkksc6QwISzFZAhKYaBWIsFSVeL9AYSziAF7A== + +"@swc/core-darwin-x64@1.3.93": + version "1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.3.93.tgz#18409c6effdf508ddf1ebccfa77d35aaa6cd72f0" + integrity sha512-ZQPxm/fXdDQtn3yrYSL/gFfA8OfZ5jTi33yFQq6vcg/Y8talpZ+MgdSlYM0FkLrZdMTYYTNFiuBQuuvkA+av+Q== + +"@swc/core-linux-arm-gnueabihf@1.3.93": + version "1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.93.tgz#23a97bc94a8b2f23fb6cc4bc9d8936899e5eeff5" + integrity 
sha512-OYFMMI2yV+aNe3wMgYhODxHdqUB/jrK0SEMHHS44GZpk8MuBXEF+Mcz4qjkY5Q1EH7KVQqXb/gVWwdgTHpjM2A== + +"@swc/core-linux-arm64-gnu@1.3.93": + version "1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.93.tgz#7a17406a7cf76a959a617626d5ee2634ae9afa26" + integrity sha512-BT4dT78odKnJMNiq5HdjBsv29CiIdcCcImAPxeFqAeFw1LL6gh9nzI8E96oWc+0lVT5lfhoesCk4Qm7J6bty8w== + +"@swc/core-linux-arm64-musl@1.3.93": + version "1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.93.tgz#a30be7780090afefd3b8706398418cbe1d23db49" + integrity sha512-yH5fWEl1bktouC0mhh0Chuxp7HEO4uCtS/ly1Vmf18gs6wZ8DOOkgAEVv2dNKIryy+Na++ljx4Ym7C8tSJTrLw== + +"@swc/core-linux-x64-gnu@1.3.93": + version "1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.93.tgz#41e903fd82e059952d16051b442cbe65ee5b8cb3" + integrity sha512-OFUdx64qvrGJhXKEyxosHxgoUVgba2ztYh7BnMiU5hP8lbI8G13W40J0SN3CmFQwPP30+3oEbW7LWzhKEaYjlg== + +"@swc/core-linux-x64-musl@1.3.93": + version "1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.93.tgz#0866807545c44eac9b3254b374310ad5e1c573f9" + integrity sha512-4B8lSRwEq1XYm6xhxHhvHmKAS7pUp1Q7E33NQ2TlmFhfKvCOh86qvThcjAOo57x8DRwmpvEVrqvpXtYagMN6Ig== + +"@swc/core-win32-arm64-msvc@1.3.93": + version "1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.93.tgz#c72411dea2fd4f62a832f71a6e15424d849e7610" + integrity sha512-BHShlxtkven8ZjjvZ5QR6sC5fZCJ9bMujEkiha6W4cBUTY7ce7qGFyHmQd+iPC85d9kD/0cCiX/Xez8u0BhO7w== + +"@swc/core-win32-ia32-msvc@1.3.93": + version "1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.93.tgz#05c2b031b976af4ef81f5073ee114254678a5d5d" + integrity sha512-nEwNWnz4JzYAK6asVvb92yeylfxMYih7eMQOnT7ZVlZN5ba9WF29xJ6kcQKs9HRH6MvWhz9+wRgv3FcjlU6HYA== + +"@swc/core-win32-x64-msvc@1.3.93": + version 
"1.3.93" + resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.93.tgz#f8748b3fd1879f13084b1b0814edf328c662935c" + integrity sha512-jibQ0zUr4kwJaQVwgmH+svS04bYTPnPw/ZkNInzxS+wFAtzINBYcU8s2PMWbDb2NGYiRSEeoSGyAvS9H+24JFA== "@swc/core@^1.3.49": - version "1.3.82" - resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.3.82.tgz#8f6c53db3c23a1769b6c5085fbcb3b1df9548a40" - integrity sha512-jpC1a18HMH67018Ij2jh+hT7JBFu7ZKcQVfrZ8K6JuEY+kjXmbea07P9MbQUZbAe0FB+xi3CqEVCP73MebodJQ== + version "1.3.93" + resolved "https://registry.npmjs.org/@swc/core/-/core-1.3.93.tgz" + integrity sha512-690GRr1wUGmGYZHk7fUduX/JUwViMF2o74mnZYIWEcJaCcd9MQfkhsxPBtjeg6tF+h266/Cf3RPYhsFBzzxXcA== dependencies: - "@swc/types" "^0.1.4" + "@swc/counter" "^0.1.1" + "@swc/types" "^0.1.5" optionalDependencies: - "@swc/core-darwin-arm64" "1.3.82" - "@swc/core-darwin-x64" "1.3.82" - "@swc/core-linux-arm-gnueabihf" "1.3.82" - "@swc/core-linux-arm64-gnu" "1.3.82" - "@swc/core-linux-arm64-musl" "1.3.82" - "@swc/core-linux-x64-gnu" "1.3.82" - "@swc/core-linux-x64-musl" "1.3.82" - "@swc/core-win32-arm64-msvc" "1.3.82" - "@swc/core-win32-ia32-msvc" "1.3.82" - "@swc/core-win32-x64-msvc" "1.3.82" - -"@swc/helpers@0.5.1": - version "0.5.1" - resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.1.tgz#e9031491aa3f26bfcc974a67f48bd456c8a5357a" - integrity sha512-sJ902EfIzn1Fa+qYmjdQqh8tPsoxyBz+8yBKC2HKUxyezKJFwPGOn7pv4WY6QuQW//ySQi5lJjA/ZT9sNWWNTg== + "@swc/core-darwin-arm64" "1.3.93" + "@swc/core-darwin-x64" "1.3.93" + "@swc/core-linux-arm-gnueabihf" "1.3.93" + "@swc/core-linux-arm64-gnu" "1.3.93" + "@swc/core-linux-arm64-musl" "1.3.93" + "@swc/core-linux-x64-gnu" "1.3.93" + "@swc/core-linux-x64-musl" "1.3.93" + "@swc/core-win32-arm64-msvc" "1.3.93" + "@swc/core-win32-ia32-msvc" "1.3.93" + "@swc/core-win32-x64-msvc" "1.3.93" + +"@swc/counter@^0.1.1": + version "0.1.2" + resolved "https://registry.npmjs.org/@swc/counter/-/counter-0.1.2.tgz" + integrity 
sha512-9F4ys4C74eSTEUNndnER3VJ15oru2NumfQxS8geE+f3eB5xvfxpWyqE5XlVnxb/R14uoXi6SLbBwwiDSkv+XEw== + +"@swc/helpers@0.5.2": + version "0.5.2" + resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.2.tgz#85ea0c76450b61ad7d10a37050289eded783c27d" + integrity sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw== dependencies: tslib "^2.4.0" -"@swc/types@^0.1.4": - version "0.1.4" - resolved "https://registry.yarnpkg.com/@swc/types/-/types-0.1.4.tgz#8d647e111dc97a8e2881bf71c2ee2d011698ff10" - integrity sha512-z/G02d+59gyyUb7KYhKi9jOhicek6QD2oMaotUyG+lUkybpXoV49dY9bj7Ah5Q+y7knK2jU67UTX9FyfGzaxQg== +"@swc/types@^0.1.5": + version "0.1.5" + resolved "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz" + integrity sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw== "@testing-library/dom@^9.0.0": - version "9.3.1" - resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-9.3.1.tgz#8094f560e9389fb973fe957af41bf766937a9ee9" - integrity sha512-0DGPd9AR3+iDTjGoMpxIkAsUihHZ3Ai6CneU6bRRrffXMgzCdlNk43jTrD2/5LT6CBb3MWTP8v510JzYtahD2w== + version "9.3.3" + resolved "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.3.tgz" + integrity sha512-fB0R+fa3AUqbLHWyxXa2kGVtf1Fe1ZZFr0Zp6AIbIAzXb2mKbEXl+PCQNUOaq5lbTab5tfctfXRNsWXxa2f7Aw== dependencies: "@babel/code-frame" "^7.10.4" "@babel/runtime" "^7.12.5" @@ -3103,15 +3277,15 @@ lz-string "^1.5.0" pretty-format "^27.0.2" -"@testing-library/user-event@^14.0.0": - version "14.4.3" - resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-14.4.3.tgz#af975e367743fa91989cd666666aec31a8f50591" - integrity sha512-kCUc5MEwaEMakkO5x7aoD+DLi02ehmEM2QCGWvNqAS1dV/fAvORWEjnjsEIvml59M7Y5kCkWN6fCCyPOe8OL6Q== +"@testing-library/user-event@^14.4.0": + version "14.5.1" + resolved "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.5.1.tgz" + integrity 
sha512-UCcUKrUYGj7ClomOo2SpNVvx4/fkd/2BbIHDCle8A0ax+P3bU7yJwDBDrS6ZwdTMARWTGODX1hEsCcO+7beJjg== "@tremor/react@^3.6.6": - version "3.7.2" - resolved "https://registry.yarnpkg.com/@tremor/react/-/react-3.7.2.tgz#96136af0a65bf88dd395fb80610a42dbebbf0cb6" - integrity sha512-WEFsXL9uot5bL/T2YCtkj7PeTC1XVgevWaD2Ql8h/9xKTpA6YCwEaj/jQqtz83b/8GAYx3ZsI8choKqPNGGb9Q== + version "3.9.2" + resolved "https://registry.npmjs.org/@tremor/react/-/react-3.9.2.tgz" + integrity sha512-JecMNPW3oPOqM+0essg6mYu4mef11uDl8qCWGvX4DVwQluS0H/DKonHIEOzLxMNuDUYSzztkwyObGEntVYO0Lg== dependencies: "@floating-ui/react" "^0.19.1" "@headlessui/react" "^1.7.14" @@ -3123,14 +3297,14 @@ tailwind-merge "^1.9.1" "@types/aria-query@^5.0.1": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-5.0.1.tgz#3286741fb8f1e1580ac28784add4c7a1d49bdfbc" - integrity sha512-XTIieEY+gvJ39ChLcB4If5zHtPxt3Syj5rgZR+e1ctpmK8NjPf0zFqsz4JpLJT0xla9GFDKjy8Cpu331nrmE1Q== + version "5.0.2" + resolved "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.2.tgz" + integrity sha512-PHKZuMN+K5qgKIWhBodXzQslTo5P+K/6LqeKXS6O/4liIDdZqaX5RXrCK++LAw+y/nptN48YmUMFiQHRSWYwtQ== "@types/babel__core@^7.0.0": - version "7.20.1" - resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.1.tgz#916ecea274b0c776fec721e333e55762d3a9614b" - integrity sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw== + version "7.20.2" + resolved "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.2.tgz" + integrity sha512-pNpr1T1xLUc2l3xJKuPtsEky3ybxN3m4fJkknfIpTCTfIZCDW57oAg+EfCgIIp2rvCe0Wn++/FfodDS4YXxBwA== dependencies: "@babel/parser" "^7.20.7" "@babel/types" "^7.20.7" @@ -3139,155 +3313,155 @@ "@types/babel__traverse" "*" "@types/babel__generator@*": - version "7.6.4" - resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" - integrity 
sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + version "7.6.5" + resolved "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.5.tgz" + integrity sha512-h9yIuWbJKdOPLJTbmSpPzkF67e659PbQDba7ifWm5BJ8xTv+sDmS7rFmywkWOvXedGTivCdeGSIIX8WLcRTz8w== dependencies: "@babel/types" "^7.0.0" "@types/babel__template@*": - version "7.4.1" - resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" - integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + version "7.4.2" + resolved "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.2.tgz" + integrity sha512-/AVzPICMhMOMYoSx9MoKpGDKdBRsIXMNByh1PXSZoa+v6ZoLa8xxtsT/uLQ/NJm0XVAWl/BvId4MlDeXJaeIZQ== dependencies: "@babel/parser" "^7.1.0" "@babel/types" "^7.0.0" "@types/babel__traverse@*": - version "7.20.1" - resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.20.1.tgz#dd6f1d2411ae677dcb2db008c962598be31d6acf" - integrity sha512-MitHFXnhtgwsGZWtT68URpOvLN4EREih1u3QtQiN4VdAxWKRVvGCSvw/Qth0M0Qq3pJpnGOu5JaM/ydK7OGbqg== + version "7.20.2" + resolved "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.2.tgz" + integrity sha512-ojlGK1Hsfce93J0+kn3H5R73elidKUaZonirN33GSmgTUMpzI/MIFfSpF3haANe3G1bEBS9/9/QEqwTzwqFsKw== dependencies: "@babel/types" "^7.20.7" "@types/body-parser@*": - version "1.19.2" - resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" - integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + version "1.19.3" + resolved "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.3.tgz" + integrity sha512-oyl4jvAfTGX9Bt6Or4H9ni1Z447/tQuxnZsytsCaExKlmJiU8sFgnIBRzJUpKwB5eWn9HuBYlUlVA74q/yN0eQ== dependencies: "@types/connect" "*" 
"@types/node" "*" "@types/connect@*": version "3.4.36" - resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.36.tgz#e511558c15a39cb29bd5357eebb57bd1459cd1ab" + resolved "https://registry.npmjs.org/@types/connect/-/connect-3.4.36.tgz" integrity sha512-P63Zd/JUGq+PdrM1lv0Wv5SBYeA2+CORvbrXbngriYY0jzLUWfQMQQxOhjONEz/wlHOAxOdY7CY65rgQdTjq2w== dependencies: "@types/node" "*" "@types/cross-spawn@^6.0.2": version "6.0.3" - resolved "https://registry.yarnpkg.com/@types/cross-spawn/-/cross-spawn-6.0.3.tgz#c743cb2608f55860ee9776d8c99135d6032c763c" + resolved "https://registry.npmjs.org/@types/cross-spawn/-/cross-spawn-6.0.3.tgz" integrity sha512-BDAkU7WHHRHnvBf5z89lcvACsvkz/n7Tv+HyD/uW76O29HoH1Tk/W6iQrepaZVbisvlEek4ygwT8IW7ow9XLAA== dependencies: "@types/node" "*" "@types/d3-array@^3.0.3": - version "3.0.7" - resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.0.7.tgz#b128a0c0b0d9481d3281df47de0955730db384a1" - integrity sha512-4/Q0FckQ8TBjsB0VdGFemJOG8BLXUB2KKlL0VmZ+eOYeOnTb/wDRQqYWpBmQ6IlvWkXwkYiot+n9Px2aTJ7zGQ== + version "3.0.8" + resolved "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.0.8.tgz" + integrity sha512-2xAVyAUgaXHX9fubjcCbGAUOqYfRJN1em1EKR2HfzWBpObZhwfnZKvofTN4TplMqJdFQao61I+NVSai/vnBvDQ== "@types/d3-color@*": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-3.1.0.tgz#6594da178ded6c7c3842f3cc0ac84b156f12f2d4" - integrity sha512-HKuicPHJuvPgCD+np6Se9MQvS6OCbJmOjGvylzMJRlDwUXjKTTXs6Pwgk79O09Vj/ho3u1ofXnhFOaEWWPrlwA== + version "3.1.1" + resolved "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.1.tgz" + integrity sha512-CSAVrHAtM9wfuLJ2tpvvwCU/F22sm7rMHNN+yh9D6O6hyAms3+O0cgMpC1pm6UEUMOntuZC8bMt74PteiDUdCg== "@types/d3-ease@^3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/d3-ease/-/d3-ease-3.0.0.tgz#c29926f8b596f9dadaeca062a32a45365681eae0" + resolved "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.0.tgz" integrity 
sha512-aMo4eaAOijJjA6uU+GIeW018dvy9+oH5Y2VPPzjjfxevvGQ/oRDs+tfYC9b50Q4BygRR8yE2QCLsrT0WtAVseA== "@types/d3-interpolate@^3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-3.0.1.tgz#e7d17fa4a5830ad56fe22ce3b4fac8541a9572dc" - integrity sha512-jx5leotSeac3jr0RePOH1KdR9rISG91QIE4Q2PYTu4OymLTZfA3SrnURSLzKH48HmXVUru50b8nje4E79oQSQw== + version "3.0.2" + resolved "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.2.tgz" + integrity sha512-zAbCj9lTqW9J9PlF4FwnvEjXZUy75NQqPm7DMHZXuxCFTpuTrdK2NMYGQekf4hlasL78fCYOLu4EE3/tXElwow== dependencies: "@types/d3-color" "*" "@types/d3-path@*": version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-3.0.0.tgz#939e3a784ae4f80b1fde8098b91af1776ff1312b" + resolved "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.0.0.tgz" integrity sha512-0g/A+mZXgFkQxN3HniRDbXMN79K3CdTpLsevj+PXiTcb2hVyvkZUBg37StmgCQkaD84cUJ4uaDAWq7UJOQy2Tg== "@types/d3-scale@^4.0.2": - version "4.0.4" - resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.4.tgz#3c5e2263eea5a3670cd91043b9f4d150a94c43f1" - integrity sha512-eq1ZeTj0yr72L8MQk6N6heP603ubnywSDRfNpi5enouR112HzGLS6RIvExCzZTraFF4HdzNpJMwA/zGiMoHUUw== + version "4.0.5" + resolved "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.5.tgz" + integrity sha512-w/C++3W394MHzcLKO2kdsIn5KKNTOqeQVzyPSGPLzQbkPw/jpeaGtSRlakcKevGgGsjJxGsbqS0fPrVFDbHrDA== dependencies: "@types/d3-time" "*" "@types/d3-shape@^3.1.0": - version "3.1.2" - resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-3.1.2.tgz#a3d421d8b0bc0c6c67cb3f4b4471ddc133cb0117" - integrity sha512-NN4CXr3qeOUNyK5WasVUV8NCSAx/CRVcwcb0BuuS1PiTqwIm6ABi1SyasLZ/vsVCFDArF+W4QiGzSry1eKYQ7w== + version "3.1.3" + resolved "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.3.tgz" + integrity sha512-cHMdIq+rhF5IVwAV7t61pcEXfEHsEsrbBUPkFGBwTXuxtTAkBBrnrNA8++6OWm3jwVsXoZYQM8NEekg6CPJ3zw== dependencies: "@types/d3-path" "*" 
"@types/d3-time@*", "@types/d3-time@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.0.tgz#e1ac0f3e9e195135361fa1a1d62f795d87e6e819" - integrity sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg== + version "3.0.1" + resolved "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.1.tgz" + integrity sha512-5j/AnefKAhCw4HpITmLDTPlf4vhi8o/dES+zbegfPb7LaGfNyqkLxBR6E+4yvTAgnJLmhe80EXFMzUs38fw4oA== "@types/d3-timer@^3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/d3-timer/-/d3-timer-3.0.0.tgz#e2505f1c21ec08bda8915238e397fb71d2fc54ce" + resolved "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.0.tgz" integrity sha512-HNB/9GHqu7Fo8AQiugyJbv6ZxYz58wef0esl4Mv828w1ZKpAshw/uFWVDUcIB9KKFeFKoxS3cHY07FFgtTRZ1g== "@types/detect-port@^1.3.0": version "1.3.3" - resolved "https://registry.yarnpkg.com/@types/detect-port/-/detect-port-1.3.3.tgz#124c5d4c283f48a21f80826bcf39433b3e64aa81" + resolved "https://registry.npmjs.org/@types/detect-port/-/detect-port-1.3.3.tgz" integrity sha512-bV/jQlAJ/nPY3XqSatkGpu+nGzou+uSwrH1cROhn+jBFg47yaNH+blW4C7p9KhopC7QxCv/6M86s37k8dMk0Yg== "@types/doctrine@^0.0.3": version "0.0.3" - resolved "https://registry.yarnpkg.com/@types/doctrine/-/doctrine-0.0.3.tgz#e892d293c92c9c1d3f9af72c15a554fbc7e0895a" + resolved "https://registry.npmjs.org/@types/doctrine/-/doctrine-0.0.3.tgz" integrity sha512-w5jZ0ee+HaPOaX25X2/2oGR/7rgAQSYII7X7pp0m9KgBfMP7uKfMfTvcpl5Dj+eDBbpxKGiqE+flqDr6XTd2RA== "@types/ejs@^3.1.1": - version "3.1.2" - resolved "https://registry.yarnpkg.com/@types/ejs/-/ejs-3.1.2.tgz#75d277b030bc11b3be38c807e10071f45ebc78d9" - integrity sha512-ZmiaE3wglXVWBM9fyVC17aGPkLo/UgaOjEiI2FXQfyczrCefORPxIe+2dVmnmk3zkVIbizjrlQzmPGhSYGXG5g== + version "3.1.3" + resolved "https://registry.npmjs.org/@types/ejs/-/ejs-3.1.3.tgz" + integrity sha512-mv5T/JI/bu+pbfz1o+TLl1NF0NIBbjS0Vl6Ppz1YY9DkXfzZT0lelXpfS5i3ZS3U/p90it7uERQpBvLYoK8e4A== 
"@types/emscripten@^1.39.6": - version "1.39.7" - resolved "https://registry.yarnpkg.com/@types/emscripten/-/emscripten-1.39.7.tgz#3025183ea56e12bf4d096aadc48ce74ca051233d" - integrity sha512-tLqYV94vuqDrXh515F/FOGtBcRMTPGvVV1LzLbtYDcQmmhtpf/gLYf+hikBbQk8MzOHNz37wpFfJbYAuSn8HqA== + version "1.39.8" + resolved "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.39.8.tgz" + integrity sha512-Rk0HKcMXFUuqT32k1kXHZWgxiMvsyYsmlnjp0rLKa0MMoqXLE3T9dogDBTRfuc3SAsXu97KD3k4SKR1lHqd57w== "@types/escodegen@^0.0.6": version "0.0.6" - resolved "https://registry.yarnpkg.com/@types/escodegen/-/escodegen-0.0.6.tgz#5230a9ce796e042cda6f086dbf19f22ea330659c" + resolved "https://registry.npmjs.org/@types/escodegen/-/escodegen-0.0.6.tgz" integrity sha512-AjwI4MvWx3HAOaZqYsjKWyEObT9lcVV0Y0V8nXo6cXzN8ZiMxVhf6F3d/UNvXVGKrEzL/Dluc5p+y9GkzlTWig== "@types/eslint-scope@^3.7.3": - version "3.7.4" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" - integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== + version "3.7.5" + resolved "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.5.tgz" + integrity sha512-JNvhIEyxVW6EoMIFIvj93ZOywYFatlpu9deeH6eSx6PE3WHYvHaQtmHmQeNw7aA81bYGBPPQqdtBm6b1SsQMmA== dependencies: "@types/eslint" "*" "@types/estree" "*" "@types/eslint@*": - version "8.44.2" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.2.tgz#0d21c505f98a89b8dd4d37fa162b09da6089199a" - integrity sha512-sdPRb9K6iL5XZOmBubg8yiFp5yS/JdUDQsq5e6h95km91MCYMuvp7mh1fjPEYUhvHepKpZOjnEaMBR4PxjWDzg== + version "8.44.4" + resolved "https://registry.npmjs.org/@types/eslint/-/eslint-8.44.4.tgz" + integrity sha512-lOzjyfY/D9QR4hY9oblZ76B90MYTB3RrQ4z2vBIJKj9ROCRqdkYl2gSUx1x1a4IWPjKJZLL4Aw1Zfay7eMnmnA== dependencies: "@types/estree" "*" "@types/json-schema" "*" "@types/estree@*", "@types/estree@^1.0.0": - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" - integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== + version "1.0.2" + resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.2.tgz" + integrity sha512-VeiPZ9MMwXjO32/Xu7+OwflfmeoRwkE/qzndw42gGtgJwZopBnzy2gD//NN1+go1mADzkDcqf/KnFRSjTJ8xJA== "@types/estree@^0.0.51": version "0.0.51" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" + resolved "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz" integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== "@types/express-serve-static-core@^4.17.33": - version "4.17.36" - resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.36.tgz#baa9022119bdc05a4adfe740ffc97b5f9360e545" - integrity sha512-zbivROJ0ZqLAtMzgzIUC4oNqDG9iF0lSsAqpOD9kbs5xcIM3dTiyuHvBc7R8MtWBp3AAWGaovJa+wzWPjLYW7Q== + version "4.17.37" + resolved "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.37.tgz" + integrity sha512-ZohaCYTgGFcOP7u6aJOhY9uIZQgZ2vxC2yWoArY+FeDXlqeH66ZVBjgvg+RLVAS/DWNq4Ap9ZXu1+SUQiiWYMg== dependencies: "@types/node" "*" "@types/qs" "*" @@ -3295,9 +3469,9 @@ "@types/send" "*" "@types/express@^4.7.0": - version "4.17.17" - resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.17.tgz#01d5437f6ef9cfa8668e616e13c2f2ac9a491ae4" - integrity sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q== + version "4.17.19" + resolved "https://registry.npmjs.org/@types/express/-/express-4.17.19.tgz" + integrity sha512-UtOfBtzN9OvpZPPbnnYunfjM7XCI4jyk1NvnFhTVz5krYAnW4o5DCoIekvms+8ApqhB4+9wSge1kBijdfTSmfg== dependencies: "@types/body-parser" "*" "@types/express-serve-static-core" "^4.17.33" @@ -3306,92 +3480,102 @@ 
"@types/find-cache-dir@^3.2.1": version "3.2.1" - resolved "https://registry.yarnpkg.com/@types/find-cache-dir/-/find-cache-dir-3.2.1.tgz#7b959a4b9643a1e6a1a5fe49032693cc36773501" + resolved "https://registry.npmjs.org/@types/find-cache-dir/-/find-cache-dir-3.2.1.tgz" integrity sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw== "@types/graceful-fs@^4.1.3": - version "4.1.6" - resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.6.tgz#e14b2576a1c25026b7f02ede1de3b84c3a1efeae" - integrity sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw== + version "4.1.7" + resolved "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.7.tgz" + integrity sha512-MhzcwU8aUygZroVwL2jeYk6JisJrPl/oov/gsgGCue9mkgl9wjGbzReYQClxiUgFDnib9FuHqTndccKeZKxTRw== dependencies: "@types/node" "*" +"@types/hoist-non-react-statics@*": + version "3.3.3" + resolved "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.3.tgz" + integrity sha512-Wny3a2UXn5FEA1l7gc6BbpoV5mD1XijZqgkp4TRgDCDL5r3B5ieOFGUX5h3n78Tr1MEG7BfvoM8qeztdvNU0fw== + dependencies: + "@types/react" "*" + hoist-non-react-statics "^3.3.0" + "@types/html-minifier-terser@^6.0.0": version "6.1.0" - resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + resolved "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== "@types/http-errors@*": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" - integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== + version "2.0.2" + resolved 
"https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.2.tgz" + integrity sha512-lPG6KlZs88gef6aD85z3HNkztpj7w2R7HmR3gygjfXCQmsLloWNARFkMuzKiiY8FGdh1XDpgBdrSf4aKDiA7Kg== "@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": version "2.0.4" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + resolved "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz" integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== "@types/istanbul-lib-report@*": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" - integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + version "3.0.1" + resolved "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz" + integrity sha512-gPQuzaPR5h/djlAv2apEG1HVOyj1IUs7GpfMZixU0/0KXT3pm64ylHuMUI1/Akh+sq/iikxg6Z2j+fcMDXaaTQ== dependencies: "@types/istanbul-lib-coverage" "*" "@types/istanbul-reports@^3.0.0": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" - integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + version "3.0.2" + resolved "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.2.tgz" + integrity sha512-kv43F9eb3Lhj+lr/Hn6OcLCs/sSM8bt+fIaP11rCYngfV6NVjzWXJ17owQtDQTL9tQ8WSLUrGsSJ6rJz0F1w1A== dependencies: "@types/istanbul-lib-report" "*" "@types/json-schema@*", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": - version "7.0.12" - resolved 
"https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" - integrity sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA== + version "7.0.13" + resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz" + integrity sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ== "@types/json5@^0.0.29": version "0.0.29" - resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz" integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== "@types/lodash@^4.14.167": - version "4.14.197" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.197.tgz#e95c5ddcc814ec3e84c891910a01e0c8a378c54b" - integrity sha512-BMVOiWs0uNxHVlHBgzTIqJYmj+PgCo4euloGF+5m4okL3rEYzM2EEv78mw8zWSMM57dM7kVIgJ2QDvwHSoCI5g== + version "4.14.199" + resolved "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.199.tgz" + integrity sha512-Vrjz5N5Ia4SEzWWgIVwnHNEnb1UE1XMkvY5DGXrAeOGE9imk0hgTHh5GyDjLDJi9OTCn9oo9dXH1uToK1VRfrg== "@types/mdx@^2.0.0": - version "2.0.7" - resolved "https://registry.yarnpkg.com/@types/mdx/-/mdx-2.0.7.tgz#c7482e995673e01b83f8e96df83b3843ea76401f" - integrity sha512-BG4tyr+4amr3WsSEmHn/fXPqaCba/AYZ7dsaQTiavihQunHSIxk+uAtqsjvicNpyHN6cm+B9RVrUOtW9VzIKHw== + version "2.0.8" + resolved "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.8.tgz" + integrity sha512-r7/zWe+f9x+zjXqGxf821qz++ld8tp6Z4jUS6qmPZUXH6tfh4riXOhAqb12tWGWAevCFtMt1goLWkQMqIJKpsA== "@types/mime-types@^2.1.0": - version "2.1.1" - resolved "https://registry.yarnpkg.com/@types/mime-types/-/mime-types-2.1.1.tgz#d9ba43490fa3a3df958759adf69396c3532cf2c1" - integrity sha512-vXOTGVSLR2jMw440moWTC7H19iUyLtP3Z1YTj7cSsubOICinjMxFeb/V57v9QdyyPGbbWolUFSSmSiRSn94tFw== + version "2.1.2" + 
resolved "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.2.tgz" + integrity sha512-q9QGHMGCiBJCHEvd4ZLdasdqXv570agPsUW0CeIm/B8DzhxsYMerD0l3IlI+EQ1A2RWHY2mmM9x1YIuuWxisCg== "@types/mime@*": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/mime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" - integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== + version "3.0.2" + resolved "https://registry.npmjs.org/@types/mime/-/mime-3.0.2.tgz" + integrity sha512-Wj+fqpTLtTbG7c0tH47dkahefpLKEbB+xAZuLq7b4/IDHPl/n6VoXcyUQ2bypFlbSwvCr0y+bD4euTTqTJsPxQ== "@types/mime@^1": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" - integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== + version "1.3.3" + resolved "https://registry.npmjs.org/@types/mime/-/mime-1.3.3.tgz" + integrity sha512-Ys+/St+2VF4+xuY6+kDIXGxbNRO0mesVg0bbxEfB97Od1Vjpjx9KD1qxs64Gcb3CWPirk9Xe+PT4YiiHQ9T+eg== "@types/node-fetch@^2.6.4": - version "2.6.4" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.4.tgz#1bc3a26de814f6bf466b25aeb1473fa1afe6a660" - integrity sha512-1ZX9fcN4Rvkvgv4E6PAY5WXUFWFcRWxZa3EW83UjycOB9ljJCedb2CupIP4RZMEwF/M3eTcCihbBRgwtGbg5Rg== + version "2.6.6" + resolved "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.6.tgz" + integrity sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw== dependencies: "@types/node" "*" - form-data "^3.0.0" + form-data "^4.0.0" "@types/node@*", "@types/node@>=12.12.47", "@types/node@>=13.7.0": - version "20.5.9" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.5.9.tgz#a70ec9d8fa0180a314c3ede0e20ea56ff71aed9a" - integrity sha512-PcGNd//40kHAS3sTlzKB9C9XL4K0sTup8nbG5lC14kzEteTNuAFh9u5nA0o5TWnSG2r/JNPRXFVcHJIIeRlmqQ== + version "20.8.5" + resolved 
"https://registry.npmjs.org/@types/node/-/node-20.8.5.tgz" + integrity sha512-SPlobFgbidfIeOYlzXiEjSYeIJiOCthv+9tSQVpvk4PAdIIc+2SmjNVzWXk9t0Y7dl73Zdf+OgXKHX9XtkqUpw== + dependencies: + undici-types "~5.25.1" "@types/node@20.5.8": version "20.5.8" @@ -3399,161 +3583,170 @@ integrity sha512-eajsR9aeljqNhK028VG0Wuw+OaY5LLxYmxeoXynIoE6jannr9/Ucd1LL0hSSoafk5LTYG+FfqsyGt81Q6Zkybw== "@types/node@^16.0.0": - version "16.18.48" - resolved "https://registry.yarnpkg.com/@types/node/-/node-16.18.48.tgz#3bc872236cdb31cb51024d8875d655e25db489a4" - integrity sha512-mlaecDKQ7rIZrYD7iiKNdzFb6e/qD5I9U1rAhq+Fd+DWvYVs+G2kv74UFHmSOlg5+i/vF3XxuR522V4u8BqO+Q== + version "16.18.58" + resolved "https://registry.npmjs.org/@types/node/-/node-16.18.58.tgz" + integrity sha512-YGncyA25/MaVtQkjWW9r0EFBukZ+JulsLcVZBlGUfIb96OBMjkoRWwQo5IEWJ8Fj06Go3GHw+bjYDitv6BaGsA== "@types/normalize-package-data@^2.4.0": - version "2.4.1" - resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" - integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw== + version "2.4.2" + resolved "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.2.tgz" + integrity sha512-lqa4UEhhv/2sjjIQgjX8B+RBjj47eo0mzGasklVJ78UKGQY1r0VpB9XHDaZZO9qzEFDdy4MrXLuEaSmPrPSe/A== "@types/parse-json@^4.0.0": version "4.0.0" - resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + resolved "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz" integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== "@types/pretty-hrtime@^1.0.0": version "1.0.1" - resolved "https://registry.yarnpkg.com/@types/pretty-hrtime/-/pretty-hrtime-1.0.1.tgz#72a26101dc567b0d68fd956cf42314556e42d601" + resolved 
"https://registry.npmjs.org/@types/pretty-hrtime/-/pretty-hrtime-1.0.1.tgz" integrity sha512-VjID5MJb1eGKthz2qUerWT8+R4b9N+CHvGCzg9fn4kWZgaF9AhdYikQio3R7wV8YY1NsQKPaCwKz1Yff+aHNUQ== "@types/prop-types@*": - version "15.7.5" - resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" - integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + version "15.7.8" + resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.8.tgz" + integrity sha512-kMpQpfZKSCBqltAJwskgePRaYRFukDkm1oItcAbC3gNELR20XIBcN9VRgg4+m8DKsTfkWeA4m4Imp4DDuWy7FQ== "@types/qs@*", "@types/qs@^6.9.5": version "6.9.8" - resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.8.tgz#f2a7de3c107b89b441e071d5472e6b726b4adf45" + resolved "https://registry.npmjs.org/@types/qs/-/qs-6.9.8.tgz" integrity sha512-u95svzDlTysU5xecFNTgfFG5RUWu1A9P0VzgpcIiGZA9iraHOdSzcxMxQ55DyeRaGCSxQi7LxXDI4rzq/MYfdg== "@types/range-parser@*": - version "1.2.4" - resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" - integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + version "1.2.5" + resolved "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.5.tgz" + integrity sha512-xrO9OoVPqFuYyR/loIHjnbvvyRZREYKLjxV4+dY6v3FQR3stQ9ZxIGkaclF7YhI9hfjpuTbu14hZEy94qKLtOA== "@types/react-dom@18.2.7": version "18.2.7" - resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.2.7.tgz#67222a08c0a6ae0a0da33c3532348277c70abb63" + resolved "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.7.tgz" integrity sha512-GRaAEriuT4zp9N4p1i8BDBYmEyfo+xQ3yHjJU4eiK5NDa1RmUZG+unZABUTK4/Ox/M+GaHwb6Ow8rUITrtjszA== dependencies: "@types/react" "*" -"@types/react@*", "@types/react@18.2.21", "@types/react@>=16": +"@types/react@*", "@types/react@18.2.21": version "18.2.21" - 
resolved "https://registry.yarnpkg.com/@types/react/-/react-18.2.21.tgz#774c37fd01b522d0b91aed04811b58e4e0514ed9" + resolved "https://registry.npmjs.org/@types/react/-/react-18.2.21.tgz" integrity sha512-neFKG/sBAwGxHgXiIxnbm3/AAVQ/cMRS93hvBpg8xYRbeQSPVABp9U2bRnPf0iI4+Ucdv3plSxKK+3CW2ENJxA== dependencies: "@types/prop-types" "*" "@types/scheduler" "*" csstype "^3.0.2" -"@types/react@^16.14.34": - version "16.14.46" - resolved "https://registry.yarnpkg.com/@types/react/-/react-16.14.46.tgz#42ac91aece416176e6b6127cd9ec9e381ea67e16" - integrity sha512-Am4pyXMrr6cWWw/TN3oqHtEZl0j+G6Up/O8m65+xF/3ZaUgkv1GAtTPWw4yNRmH0HJXmur6xKCKoMo3rBGynuw== +"@types/react@>=16": + version "18.2.28" + resolved "https://registry.npmjs.org/@types/react/-/react-18.2.28.tgz" + integrity sha512-ad4aa/RaaJS3hyGz0BGegdnSRXQBkd1CCYDCdNjBPg90UUpLgo+WlJqb9fMYUxtehmzF3PJaTWqRZjko6BRzBg== dependencies: "@types/prop-types" "*" "@types/scheduler" "*" csstype "^3.0.2" "@types/scheduler@*": - version "0.16.3" - resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.3.tgz#cef09e3ec9af1d63d2a6cc5b383a737e24e6dcf5" - integrity sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ== + version "0.16.4" + resolved "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.4.tgz" + integrity sha512-2L9ifAGl7wmXwP4v3pN4p2FLhD0O1qsJpvKmNin5VA8+UvNVb447UDaAEV6UdrkA+m/Xs58U1RFps44x6TFsVQ== "@types/semver@^7.3.12", "@types/semver@^7.3.4": - version "7.5.1" - resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.1.tgz#0480eeb7221eb9bc398ad7432c9d7e14b1a5a367" - integrity sha512-cJRQXpObxfNKkFAZbJl2yjWtJCqELQIdShsogr1d2MilP8dKD9TE/nEKHkJgUNHdGKCQaf9HbIynuV2csLGVLg== + version "7.5.3" + resolved "https://registry.npmjs.org/@types/semver/-/semver-7.5.3.tgz" + integrity sha512-OxepLK9EuNEIPxWNME+C6WwbRAOOI2o2BaQEGzz5Lu2e4Z5eDnEo+/aVEDMIXywoJitJ7xWd641wrGLZdtwRyw== "@types/send@*": - version "0.17.1" - resolved 
"https://registry.yarnpkg.com/@types/send/-/send-0.17.1.tgz#ed4932b8a2a805f1fe362a70f4e62d0ac994e301" - integrity sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q== + version "0.17.2" + resolved "https://registry.npmjs.org/@types/send/-/send-0.17.2.tgz" + integrity sha512-aAG6yRf6r0wQ29bkS+x97BIs64ZLxeE/ARwyS6wrldMm3C1MdKwCcnnEwMC1slI8wuxJOpiUH9MioC0A0i+GJw== dependencies: "@types/mime" "^1" "@types/node" "*" "@types/serve-static@*": - version "1.15.2" - resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.2.tgz#3e5419ecd1e40e7405d34093f10befb43f63381a" - integrity sha512-J2LqtvFYCzaj8pVYKw8klQXrLLk7TBZmQ4ShlcdkELFKGwGMfevMLneMMRkMgZxotOD9wg497LpC7O8PcvAmfw== + version "1.15.3" + resolved "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.3.tgz" + integrity sha512-yVRvFsEMrv7s0lGhzrggJjNOSmZCdgCjw9xWrPr/kNNLp6FaDfMC1KaYl3TSJ0c58bECwNBMoQrZJ8hA8E1eFg== dependencies: "@types/http-errors" "*" "@types/mime" "*" "@types/node" "*" +"@types/styled-components@^5.1.28": + version "5.1.28" + resolved "https://registry.npmjs.org/@types/styled-components/-/styled-components-5.1.28.tgz" + integrity sha512-nu0VKNybkjvUqJAXWtRqKd7j3iRUl8GbYSTvZNuIBJcw/HUp1Y4QUXNLlj7gcnRV/t784JnHAlvRnSnE3nPbJA== + dependencies: + "@types/hoist-non-react-statics" "*" + "@types/react" "*" + csstype "^3.0.2" + "@types/stylis@^4.0.2": - version "4.2.0" - resolved "https://registry.yarnpkg.com/@types/stylis/-/stylis-4.2.0.tgz#199a3f473f0c3a6f6e4e1b17cdbc967f274bdc6b" - integrity sha512-n4sx2bqL0mW1tvDf/loQ+aMX7GQD3lc3fkCMC55VFNDu/vBOabO+LTIeXKM14xK0ppk5TUGcWRjiSpIlUpghKw== + version "4.2.1" + resolved "https://registry.npmjs.org/@types/stylis/-/stylis-4.2.1.tgz" + integrity sha512-OSaMrXUKxVigGlKRrET39V2xdhzlztQ9Aqumn1WbCBKHOi9ry7jKSd7rkyj0GzmWaU960Rd+LpOFpLfx5bMQAg== "@types/unist@^2.0.0": version "2.0.8" - resolved 
"https://registry.yarnpkg.com/@types/unist/-/unist-2.0.8.tgz#bb197b9639aa1a04cf464a617fe800cccd92ad5c" + resolved "https://registry.npmjs.org/@types/unist/-/unist-2.0.8.tgz" integrity sha512-d0XxK3YTObnWVp6rZuev3c49+j4Lo8g4L1ZRm9z5L0xpoZycUPshHgczK5gsUMaZOstjVYYi09p5gYvUtfChYw== "@types/yargs-parser@*": - version "21.0.0" - resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" - integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + version "21.0.1" + resolved "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.1.tgz" + integrity sha512-axdPBuLuEJt0c4yI5OZssC19K2Mq1uKdrfZBzuxLvaztgqUtFYZUNw7lETExPYJR9jdEoIg4mb7RQKRQzOkeGQ== "@types/yargs@^16.0.0": - version "16.0.5" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.5.tgz#12cc86393985735a283e387936398c2f9e5f88e3" - integrity sha512-AxO/ADJOBFJScHbWhq2xAhlWP24rY4aCEG/NFaMvbT3X2MgRsLjhjQwsn0Zi5zn0LG9jUhCCZMeX9Dkuw6k+vQ== + version "16.0.6" + resolved "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.6.tgz" + integrity sha512-oTP7/Q13GSPrgcwEwdlnkoZSQ1Hg9THe644qq8PG6hhJzjZ3qj1JjEFPIwWV/IXVs5XGIVqtkNOS9kh63WIJ+A== dependencies: "@types/yargs-parser" "*" "@types/yargs@^17.0.8": - version "17.0.24" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" - integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== + version "17.0.28" + resolved "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.28.tgz" + integrity sha512-N3e3fkS86hNhtk6BEnc0rj3zcehaxx8QWhCROJkqpl5Zaoi7nAic3jH8q94jVD3zu5LGk+PUB6KAiDmimYOEQw== dependencies: "@types/yargs-parser" "*" "@typescript-eslint/parser@^5.4.2 || ^6.0.0": - version "6.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.6.0.tgz#fe323a7b4eafb6d5ea82b96216561810394a739e" - integrity 
sha512-setq5aJgUwtzGrhW177/i+DMLqBaJbdwGj2CPIVFFLE0NCliy5ujIdLHd2D1ysmlmsjdL2GWW+hR85neEfc12w== - dependencies: - "@typescript-eslint/scope-manager" "6.6.0" - "@typescript-eslint/types" "6.6.0" - "@typescript-eslint/typescript-estree" "6.6.0" - "@typescript-eslint/visitor-keys" "6.6.0" + version "6.7.5" + resolved "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.5.tgz" + integrity sha512-bIZVSGx2UME/lmhLcjdVc7ePBwn7CLqKarUBL4me1C5feOd663liTGjMBGVcGr+BhnSLeP4SgwdvNnnkbIdkCw== + dependencies: + "@typescript-eslint/scope-manager" "6.7.5" + "@typescript-eslint/types" "6.7.5" + "@typescript-eslint/typescript-estree" "6.7.5" + "@typescript-eslint/visitor-keys" "6.7.5" debug "^4.3.4" "@typescript-eslint/scope-manager@5.62.0": version "5.62.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz#d9457ccc6a0b8d6b37d0eb252a23022478c5460c" + resolved "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz" integrity sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w== dependencies: "@typescript-eslint/types" "5.62.0" "@typescript-eslint/visitor-keys" "5.62.0" -"@typescript-eslint/scope-manager@6.6.0": - version "6.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.6.0.tgz#57105d4419d6de971f7d2c30a2ff4ac40003f61a" - integrity sha512-pT08u5W/GT4KjPUmEtc2kSYvrH8x89cVzkA0Sy2aaOUIw6YxOIjA8ilwLr/1fLjOedX1QAuBpG9XggWqIIfERw== +"@typescript-eslint/scope-manager@6.7.5": + version "6.7.5" + resolved "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.5.tgz" + integrity sha512-GAlk3eQIwWOJeb9F7MKQ6Jbah/vx1zETSDw8likab/eFcqkjSD7BI75SDAeC5N2L0MmConMoPvTsmkrg71+B1A== dependencies: - "@typescript-eslint/types" "6.6.0" - "@typescript-eslint/visitor-keys" "6.6.0" + "@typescript-eslint/types" "6.7.5" + "@typescript-eslint/visitor-keys" "6.7.5" "@typescript-eslint/types@5.62.0": 
version "5.62.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.62.0.tgz#258607e60effa309f067608931c3df6fed41fd2f" + resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz" integrity sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ== -"@typescript-eslint/types@6.6.0": - version "6.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.6.0.tgz#95e7ea650a2b28bc5af5ea8907114a48f54618c2" - integrity sha512-CB6QpJQ6BAHlJXdwUmiaXDBmTqIE2bzGTDLADgvqtHWuhfNP3rAOK7kAgRMAET5rDRr9Utt+qAzRBdu3AhR3sg== +"@typescript-eslint/types@6.7.5": + version "6.7.5" + resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.5.tgz" + integrity sha512-WboQBlOXtdj1tDFPyIthpKrUb+kZf2VroLZhxKa/VlwLlLyqv/PwUNgL30BlTVZV1Wu4Asu2mMYPqarSO4L5ZQ== "@typescript-eslint/typescript-estree@5.62.0": version "5.62.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz#7d17794b77fabcac615d6a48fb143330d962eb9b" + resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz" integrity sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA== dependencies: "@typescript-eslint/types" "5.62.0" @@ -3564,13 +3757,13 @@ semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/typescript-estree@6.6.0": - version "6.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.6.0.tgz#373c420d2e12c28220f4a83352280a04823a91b7" - integrity sha512-hMcTQ6Al8MP2E6JKBAaSxSVw5bDhdmbCEhGW/V8QXkb9oNsFkA4SBuOMYVPxD3jbtQ4R/vSODBsr76R6fP3tbA== +"@typescript-eslint/typescript-estree@6.7.5": + version "6.7.5" + resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.5.tgz" + integrity 
sha512-NhJiJ4KdtwBIxrKl0BqG1Ur+uw7FiOnOThcYx9DpOGJ/Abc9z2xNzLeirCG02Ig3vkvrc2qFLmYSSsaITbKjlg== dependencies: - "@typescript-eslint/types" "6.6.0" - "@typescript-eslint/visitor-keys" "6.6.0" + "@typescript-eslint/types" "6.7.5" + "@typescript-eslint/visitor-keys" "6.7.5" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" @@ -3579,7 +3772,7 @@ "@typescript-eslint/utils@^5.45.0": version "5.62.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.62.0.tgz#141e809c71636e4a75daa39faed2fb5f4b10df86" + resolved "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.62.0.tgz" integrity sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ== dependencies: "@eslint-community/eslint-utils" "^4.2.0" @@ -3593,23 +3786,23 @@ "@typescript-eslint/visitor-keys@5.62.0": version "5.62.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz#2174011917ce582875954ffe2f6912d5931e353e" + resolved "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz" integrity sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw== dependencies: "@typescript-eslint/types" "5.62.0" eslint-visitor-keys "^3.3.0" -"@typescript-eslint/visitor-keys@6.6.0": - version "6.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.6.0.tgz#1109088b4346c8b2446f3845db526374d9a3bafc" - integrity sha512-L61uJT26cMOfFQ+lMZKoJNbAEckLe539VhTxiGHrWl5XSKQgA0RTBZJW2HFPy5T0ZvPVSD93QsrTKDkfNwJGyQ== +"@typescript-eslint/visitor-keys@6.7.5": + version "6.7.5" + resolved "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.5.tgz" + integrity sha512-3MaWdDZtLlsexZzDSdQWsFQ9l9nL8B80Z4fImSpyllFC/KLqWQRdEcB+gGGO+N3Q2uL40EsG66wZLsohPxNXvg== dependencies: - "@typescript-eslint/types" "6.6.0" + "@typescript-eslint/types" "6.7.5" eslint-visitor-keys "^3.4.1" "@webassemblyjs/ast@1.11.6", 
"@webassemblyjs/ast@^1.11.5": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" + resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz" integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== dependencies: "@webassemblyjs/helper-numbers" "1.11.6" @@ -3617,22 +3810,22 @@ "@webassemblyjs/floating-point-hex-parser@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + resolved "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz" integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== "@webassemblyjs/helper-api-error@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz" integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== "@webassemblyjs/helper-buffer@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz" integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== "@webassemblyjs/helper-numbers@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz" integrity 
sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: "@webassemblyjs/floating-point-hex-parser" "1.11.6" @@ -3641,12 +3834,12 @@ "@webassemblyjs/helper-wasm-bytecode@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz" integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== "@webassemblyjs/helper-wasm-section@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz" integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -3656,26 +3849,26 @@ "@webassemblyjs/ieee754@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + resolved "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz" integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + resolved "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz" integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.11.6": version "1.11.6" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + resolved "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz" integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== "@webassemblyjs/wasm-edit@^1.11.5": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz" integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -3689,7 +3882,7 @@ "@webassemblyjs/wasm-gen@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz" integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -3700,7 +3893,7 @@ "@webassemblyjs/wasm-opt@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz" integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -3710,7 +3903,7 @@ "@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz" integrity 
sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -3722,7 +3915,7 @@ "@webassemblyjs/wast-printer@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" + resolved "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz" integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -3730,24 +3923,24 @@ "@xtuc/ieee754@^1.2.0": version "1.2.0" - resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + resolved "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz" integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" - resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + resolved "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== "@yarnpkg/esbuild-plugin-pnp@^3.0.0-rc.10": version "3.0.0-rc.15" - resolved "https://registry.yarnpkg.com/@yarnpkg/esbuild-plugin-pnp/-/esbuild-plugin-pnp-3.0.0-rc.15.tgz#4e40e7d2eb28825c9a35ab9d04c363931d7c0e67" + resolved "https://registry.npmjs.org/@yarnpkg/esbuild-plugin-pnp/-/esbuild-plugin-pnp-3.0.0-rc.15.tgz" integrity sha512-kYzDJO5CA9sy+on/s2aIW0411AklfCi8Ck/4QDivOqsMKpStZA2SsR+X27VTggGwpStWaLrjJcDcdDMowtG8MA== dependencies: tslib "^2.4.0" "@yarnpkg/fslib@2.10.3": version "2.10.3" - resolved "https://registry.yarnpkg.com/@yarnpkg/fslib/-/fslib-2.10.3.tgz#a8c9893df5d183cf6362680b9f1c6d7504dd5717" + resolved "https://registry.npmjs.org/@yarnpkg/fslib/-/fslib-2.10.3.tgz" integrity 
sha512-41H+Ga78xT9sHvWLlFOZLIhtU6mTGZ20pZ29EiZa97vnxdohJD2AF42rCoAoWfqUz486xY6fhjMH+DYEM9r14A== dependencies: "@yarnpkg/libzip" "^2.3.0" @@ -3755,7 +3948,7 @@ "@yarnpkg/libzip@2.3.0", "@yarnpkg/libzip@^2.3.0": version "2.3.0" - resolved "https://registry.yarnpkg.com/@yarnpkg/libzip/-/libzip-2.3.0.tgz#fe1e762e47669f6e2c960fc118436608d834e3be" + resolved "https://registry.npmjs.org/@yarnpkg/libzip/-/libzip-2.3.0.tgz" integrity sha512-6xm38yGVIa6mKm/DUCF2zFFJhERh/QWp1ufm4cNUvxsONBmfPg8uZ9pZBdOmF6qFGr/HlT6ABBkCSx/dlEtvWg== dependencies: "@types/emscripten" "^1.39.6" @@ -3763,14 +3956,14 @@ abort-controller@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" + resolved "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== dependencies: event-target-shim "^5.0.0" accepts@~1.3.5, accepts@~1.3.8: version "1.3.8" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + resolved "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz" integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== dependencies: mime-types "~2.1.34" @@ -3778,37 +3971,37 @@ accepts@~1.3.5, accepts@~1.3.8: acorn-import-assertions@^1.9.0: version "1.9.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" + resolved "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz" integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== acorn-jsx@^5.3.1, acorn-jsx@^5.3.2: version "5.3.2" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + resolved 
"https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== acorn-walk@^7.2.0: version "7.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz" integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== acorn@^7.4.1: version "7.4.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + resolved "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== -acorn@^8.7.1, acorn@^8.8.2, acorn@^8.9.0: +acorn@^8.10.0, acorn@^8.7.1, acorn@^8.8.2, acorn@^8.9.0: version "8.10.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.10.0.tgz#8be5b3907a67221a81ab23c7889c4c5526b62ec5" + resolved "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz" integrity sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw== address@^1.0.1: version "1.2.2" - resolved "https://registry.yarnpkg.com/address/-/address-1.2.2.tgz#2b5248dac5485a6390532c6a517fda2e3faac89e" + resolved "https://registry.npmjs.org/address/-/address-1.2.2.tgz" integrity sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA== adjust-sourcemap-loader@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" + resolved "https://registry.npmjs.org/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz" integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== dependencies: loader-utils "^2.0.0" @@ -3816,19 +4009,19 @@ 
adjust-sourcemap-loader@^4.0.0: agent-base@5: version "5.1.1" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-5.1.1.tgz#e8fb3f242959db44d63be665db7a8e739537a32c" + resolved "https://registry.npmjs.org/agent-base/-/agent-base-5.1.1.tgz" integrity sha512-TMeqbNl2fMW0nMjTEPOwe3J/PRFP4vqeoNuQMG0HlMrtm5QxKqdvAkZ1pRBQ/ulIyDD5Yq0nJ7YbdD8ey0TO3g== -agent-base@6: - version "6.0.2" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" - integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== +agent-base@^7.0.2: + version "7.1.0" + resolved "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz" + integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg== dependencies: - debug "4" + debug "^4.3.4" aggregate-error@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz" integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== dependencies: clean-stack "^2.0.0" @@ -3836,26 +4029,26 @@ aggregate-error@^3.0.0: ajv-formats@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + resolved "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz" integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== dependencies: ajv "^8.0.0" ajv-keywords@^3.5.2: version "3.5.2" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" integrity 
sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== ajv-keywords@^5.1.0: version "5.1.0" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz" integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== dependencies: fast-deep-equal "^3.1.3" ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== dependencies: fast-deep-equal "^3.1.1" @@ -3865,7 +4058,7 @@ ajv@^6.12.4, ajv@^6.12.5: ajv@^8.0.0, ajv@^8.9.0: version "8.12.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" + resolved "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz" integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== dependencies: fast-deep-equal "^3.1.1" @@ -3875,51 +4068,51 @@ ajv@^8.0.0, ajv@^8.9.0: ansi-html-community@0.0.8, ansi-html-community@^0.0.8: version "0.0.8" - resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + resolved "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz" integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== ansi-regex@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" integrity 
sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== ansi-regex@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz" integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== ansi-styles@^3.2.1: version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: color-convert "^2.0.1" ansi-styles@^5.0.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== ansi-styles@^6.1.0: version "6.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz" integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== any-promise@^1.0.0: version "1.3.0" - resolved 
"https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + resolved "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz" integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== anymatch@^3.0.3, anymatch@~3.1.2: version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz" integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" @@ -3927,50 +4120,50 @@ anymatch@^3.0.3, anymatch@~3.1.2: app-root-dir@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/app-root-dir/-/app-root-dir-1.0.2.tgz#38187ec2dea7577fff033ffcb12172692ff6e118" + resolved "https://registry.npmjs.org/app-root-dir/-/app-root-dir-1.0.2.tgz" integrity sha512-jlpIfsOoNoafl92Sz//64uQHGSyMrD2vYG5d8o2a4qGvyNCvXur7bzIsWtAC/6flI2RYAp3kv8rsfBtaLm7w0g== arg@^5.0.2: version "5.0.2" - resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + resolved "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz" integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== argparse@^1.0.7: version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" argparse@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" integrity 
sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== aria-hidden@^1.1.1, aria-hidden@^1.1.3: version "1.2.3" - resolved "https://registry.yarnpkg.com/aria-hidden/-/aria-hidden-1.2.3.tgz#14aeb7fb692bbb72d69bebfa47279c1fd725e954" + resolved "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.3.tgz" integrity sha512-xcLxITLe2HYa1cnYnwCjkOO1PqUHQpozB8x9AR0OgWN2woOBi5kSDVxKfd0b7sb1hw5qFeJhXm9H1nu3xSfLeQ== dependencies: tslib "^2.0.0" aria-query@5.1.3: version "5.1.3" - resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.1.3.tgz#19db27cd101152773631396f7a95a3b58c22c35e" + resolved "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz" integrity sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ== dependencies: deep-equal "^2.0.5" aria-query@^5.1.3: version "5.3.0" - resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.3.0.tgz#650c569e41ad90b51b3d7df5e5eed1c7549c103e" + resolved "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz" integrity sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A== dependencies: dequal "^2.0.3" array-buffer-byte-length@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" + resolved "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz" integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A== dependencies: call-bind "^1.0.2" @@ -3978,12 +4171,12 @@ array-buffer-byte-length@^1.0.0: array-flatten@1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" integrity 
sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== array-includes@^3.1.6: version "3.1.7" - resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.7.tgz#8cd2e01b26f7a3086cbc87271593fe921c62abda" + resolved "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz" integrity sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ== dependencies: call-bind "^1.0.2" @@ -3994,24 +4187,24 @@ array-includes@^3.1.6: array-union@^1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" + resolved "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz" integrity sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng== dependencies: array-uniq "^1.0.1" array-union@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== array-uniq@^1.0.1: version "1.0.3" - resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" + resolved "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz" integrity sha512-MNha4BWQ6JbwhFhj03YK552f7cb3AzoE8SzeljgChvL1dl3IcvggXVz1DilzySZkCja+CXuZbdW7yATchWn8/Q== array.prototype.findlastindex@^1.2.2: version "1.2.3" - resolved "https://registry.yarnpkg.com/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz#b37598438f97b579166940814e2c0493a4f50207" + resolved "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz" integrity sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA== 
dependencies: call-bind "^1.0.2" @@ -4021,51 +4214,52 @@ array.prototype.findlastindex@^1.2.2: get-intrinsic "^1.2.1" array.prototype.flat@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz#ffc6576a7ca3efc2f46a143b9d1dda9b4b3cf5e2" - integrity sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA== + version "1.3.2" + resolved "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz" + integrity sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA== dependencies: call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.4" + define-properties "^1.2.0" + es-abstract "^1.22.1" es-shim-unscopables "^1.0.0" array.prototype.flatmap@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz#1aae7903c2100433cb8261cd4ed310aab5c4a183" - integrity sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ== + version "1.3.2" + resolved "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz" + integrity sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ== dependencies: call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.4" + define-properties "^1.2.0" + es-abstract "^1.22.1" es-shim-unscopables "^1.0.0" array.prototype.tosorted@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz#ccf44738aa2b5ac56578ffda97c03fd3e23dd532" - integrity sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ== + version "1.1.2" + resolved "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.2.tgz" + integrity 
sha512-HuQCHOlk1Weat5jzStICBCd83NxiIMwqDg/dHEsoefabn/hJRj5pVdWcPUSpRrwhwxZOsQassMpgN/xRYFBMIg== dependencies: call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.4" + define-properties "^1.2.0" + es-abstract "^1.22.1" es-shim-unscopables "^1.0.0" - get-intrinsic "^1.1.3" + get-intrinsic "^1.2.1" -arraybuffer.prototype.slice@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.1.tgz#9b5ea3868a6eebc30273da577eb888381c0044bb" - integrity sha512-09x0ZWFEjj4WD8PDbykUwo3t9arLn8NIzmmYEJFpYekOAQjpkGSyrQhNoRTcwwcFRu+ycWF78QZ63oWTqSjBcw== +arraybuffer.prototype.slice@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz" + integrity sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw== dependencies: array-buffer-byte-length "^1.0.0" call-bind "^1.0.2" define-properties "^1.2.0" + es-abstract "^1.22.1" get-intrinsic "^1.2.1" is-array-buffer "^3.0.2" is-shared-array-buffer "^1.0.2" asn1.js@^5.2.0: version "5.4.1" - resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07" + resolved "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz" integrity sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA== dependencies: bn.js "^4.0.0" @@ -4074,100 +4268,101 @@ asn1.js@^5.2.0: safer-buffer "^2.1.0" assert@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/assert/-/assert-2.0.0.tgz#95fc1c616d48713510680f2eaf2d10dd22e02d32" - integrity sha512-se5Cd+js9dXJnu6Ag2JFc00t+HmHOen+8Q+L7O9zI0PqQXr20uk2J0XQqMxZEeo5U50o8Nvmmx7dZrl+Ufr35A== + version "2.1.0" + resolved "https://registry.npmjs.org/assert/-/assert-2.1.0.tgz" + integrity sha512-eLHpSK/Y4nhMJ07gDaAzoX/XAKS8PSaojml3M0DM4JpV1LAi5JOJ/p6H/XWrl8L+DzVEvVCW1z3vWAaB9oTsQw== dependencies: - es6-object-assign "^1.1.0" - 
is-nan "^1.2.1" - object-is "^1.0.1" - util "^0.12.0" + call-bind "^1.0.2" + is-nan "^1.3.2" + object-is "^1.1.5" + object.assign "^4.1.4" + util "^0.12.5" ast-types-flow@^0.0.7: version "0.0.7" - resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + resolved "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz" integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag== ast-types@0.15.2: version "0.15.2" - resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.15.2.tgz#39ae4809393c4b16df751ee563411423e85fb49d" + resolved "https://registry.npmjs.org/ast-types/-/ast-types-0.15.2.tgz" integrity sha512-c27loCv9QkZinsa5ProX751khO9DJl/AcB5c2KNtA6NRvHKS0PgLfcftz72KVq504vB0Gku5s2kUZzDBvQWvHg== dependencies: tslib "^2.0.1" ast-types@^0.14.2: version "0.14.2" - resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.14.2.tgz#600b882df8583e3cd4f2df5fa20fa83759d4bdfd" + resolved "https://registry.npmjs.org/ast-types/-/ast-types-0.14.2.tgz" integrity sha512-O0yuUDnZeQDL+ncNGlJ78BiO4jnYI3bvMsD5prT0/nsgijG/LpNBIr63gTjVTNsiGkgQhiyCShTgxt8oXOrklA== dependencies: tslib "^2.0.1" ast-types@^0.16.1: version "0.16.1" - resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.16.1.tgz#7a9da1617c9081bc121faafe91711b4c8bb81da2" + resolved "https://registry.npmjs.org/ast-types/-/ast-types-0.16.1.tgz" integrity sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg== dependencies: tslib "^2.0.1" async-limiter@~1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" + resolved "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz" integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== async@^3.2.3, async@^3.2.4: version "3.2.4" - resolved 
"https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + resolved "https://registry.npmjs.org/async/-/async-3.2.4.tgz" integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== asynciterator.prototype@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz#8c5df0514936cdd133604dfcc9d3fb93f09b2b62" + resolved "https://registry.npmjs.org/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz" integrity sha512-wwHYEIS0Q80f5mosx3L/dfG5t5rjEa9Ft51GTaNt862EnpyGHpgz2RkZvLPp1oF5TnAiTohkEKVEu8pQPJI7Vg== dependencies: has-symbols "^1.0.3" asynckit@^0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== autoprefixer@^10.4.15: - version "10.4.15" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.15.tgz#a1230f4aeb3636b89120b34a1f513e2f6834d530" - integrity sha512-KCuPB8ZCIqFdA4HwKXsvz7j6gvSDNhDP7WnUjBleRkKjPdvCmHFuQ77ocavI8FT6NdvlBnE2UFr2H4Mycn8Vew== + version "10.4.16" + resolved "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz" + integrity sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ== dependencies: browserslist "^4.21.10" - caniuse-lite "^1.0.30001520" - fraction.js "^4.2.0" + caniuse-lite "^1.0.30001538" + fraction.js "^4.3.6" normalize-range "^0.1.2" picocolors "^1.0.0" postcss-value-parser "^4.2.0" available-typed-arrays@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" + resolved 
"https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz" integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== axe-core@^4.6.2: - version "4.7.2" - resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" - integrity sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g== + version "4.8.2" + resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.8.2.tgz" + integrity sha512-/dlp0fxyM3R8YW7MFzaHWXrf4zzbr0vaYb23VBFCl83R7nWNPg/yaQw2Dc8jzCMmDVLhSdzH8MjrsuIUuvX+6g== axobject-query@^3.1.1: version "3.2.1" - resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-3.2.1.tgz#39c378a6e3b06ca679f29138151e45b2b32da62a" + resolved "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz" integrity sha512-jsyHu61e6N4Vbz/v18DHwWYKK0bSWLqn47eeDSKPB7m8tqMHF9YJ+mhIk2lVteyZrY8tnSj/jHOv4YiTCuCJgg== dependencies: dequal "^2.0.3" babel-core@^7.0.0-bridge.0: version "7.0.0-bridge.0" - resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-7.0.0-bridge.0.tgz#95a492ddd90f9b4e9a4a1da14eb335b87b634ece" + resolved "https://registry.npmjs.org/babel-core/-/babel-core-7.0.0-bridge.0.tgz" integrity sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg== babel-loader@^9.0.0: version "9.1.3" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-9.1.3.tgz#3d0e01b4e69760cc694ee306fe16d358aa1c6f9a" + resolved "https://registry.npmjs.org/babel-loader/-/babel-loader-9.1.3.tgz" integrity sha512-xG3ST4DglodGf8qSwv0MdeWLhrDsw/32QMdTO5T1ZIp9gQur0HkCyFs7Awskr10JKXFXwpAhiCuYX5oGXnRGbw== dependencies: find-cache-dir "^4.0.0" @@ -4175,12 +4370,12 @@ babel-loader@^9.0.0: babel-plugin-add-react-displayname@^0.0.5: version "0.0.5" - resolved 
"https://registry.yarnpkg.com/babel-plugin-add-react-displayname/-/babel-plugin-add-react-displayname-0.0.5.tgz#339d4cddb7b65fd62d1df9db9fe04de134122bd5" + resolved "https://registry.npmjs.org/babel-plugin-add-react-displayname/-/babel-plugin-add-react-displayname-0.0.5.tgz" integrity sha512-LY3+Y0XVDYcShHHorshrDbt4KFWL4bSeniCtl4SYZbask+Syngk1uMPCeN9+nSiZo6zX5s0RTq/J9Pnaaf/KHw== babel-plugin-istanbul@^6.1.1: version "6.1.1" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + resolved "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz" integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" @@ -4191,36 +4386,36 @@ babel-plugin-istanbul@^6.1.1: babel-plugin-named-exports-order@^0.0.2: version "0.0.2" - resolved "https://registry.yarnpkg.com/babel-plugin-named-exports-order/-/babel-plugin-named-exports-order-0.0.2.tgz#ae14909521cf9606094a2048239d69847540cb09" + resolved "https://registry.npmjs.org/babel-plugin-named-exports-order/-/babel-plugin-named-exports-order-0.0.2.tgz" integrity sha512-OgOYHOLoRK+/mvXU9imKHlG6GkPLYrUCvFXG/CM93R/aNNO8pOOF4aS+S8CCHMDQoNSeiOYEZb/G6RwL95Jktw== -babel-plugin-polyfill-corejs2@^0.4.5: - version "0.4.5" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.5.tgz#8097b4cb4af5b64a1d11332b6fb72ef5e64a054c" - integrity sha512-19hwUH5FKl49JEsvyTcoHakh6BE0wgXLLptIyKZ3PijHc/Ci521wygORCUCCred+E/twuqRyAkE02BAWPmsHOg== +babel-plugin-polyfill-corejs2@^0.4.6: + version "0.4.6" + resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.6.tgz" + integrity sha512-jhHiWVZIlnPbEUKSSNb9YoWcQGdlTLq7z1GHL4AjFxaoOUMuuEVJ+Y4pAaQUGOGk93YsVCKPbqbfw3m0SM6H8Q== dependencies: "@babel/compat-data" "^7.22.6" - 
"@babel/helper-define-polyfill-provider" "^0.4.2" + "@babel/helper-define-polyfill-provider" "^0.4.3" semver "^6.3.1" -babel-plugin-polyfill-corejs3@^0.8.3: - version "0.8.3" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.8.3.tgz#b4f719d0ad9bb8e0c23e3e630c0c8ec6dd7a1c52" - integrity sha512-z41XaniZL26WLrvjy7soabMXrfPWARN25PZoriDEiLMxAp50AUW3t35BGQUMg5xK3UrpVTtagIDklxYa+MhiNA== +babel-plugin-polyfill-corejs3@^0.8.5: + version "0.8.5" + resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.8.5.tgz" + integrity sha512-Q6CdATeAvbScWPNLB8lzSO7fgUVBkQt6zLgNlfyeCr/EQaEQR+bWiBYYPYAFyE528BMjRhL+1QBMOI4jc/c5TA== dependencies: - "@babel/helper-define-polyfill-provider" "^0.4.2" - core-js-compat "^3.31.0" + "@babel/helper-define-polyfill-provider" "^0.4.3" + core-js-compat "^3.32.2" -babel-plugin-polyfill-regenerator@^0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.5.2.tgz#80d0f3e1098c080c8b5a65f41e9427af692dc326" - integrity sha512-tAlOptU0Xj34V1Y2PNTL4Y0FOJMDB6bZmoW39FeCQIhigGLkqu3Fj6uiXpxIf6Ij274ENdYx64y6Au+ZKlb1IA== +babel-plugin-polyfill-regenerator@^0.5.3: + version "0.5.3" + resolved "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.5.3.tgz" + integrity sha512-8sHeDOmXC8csczMrYEOf0UTNa4yE2SxV5JGeT/LP1n0OYVDUUFPxG9vdk2AlDlIit4t+Kf0xCtpgXPBwnn/9pw== dependencies: - "@babel/helper-define-polyfill-provider" "^0.4.2" + "@babel/helper-define-polyfill-provider" "^0.4.3" babel-plugin-react-docgen@^4.2.1: version "4.2.1" - resolved "https://registry.yarnpkg.com/babel-plugin-react-docgen/-/babel-plugin-react-docgen-4.2.1.tgz#7cc8e2f94e8dc057a06e953162f0810e4e72257b" + resolved "https://registry.npmjs.org/babel-plugin-react-docgen/-/babel-plugin-react-docgen-4.2.1.tgz" integrity 
sha512-UQ0NmGHj/HAqi5Bew8WvNfCk8wSsmdgNd8ZdMjBCICtyCJCq9LiqgqvjCYe570/Wg7AQArSq1VQ60Dd/CHN7mQ== dependencies: ast-types "^0.14.2" @@ -4229,39 +4424,39 @@ babel-plugin-react-docgen@^4.2.1: balanced-match@^1.0.0: version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== base64-js@^1.3.1: version "1.5.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== better-opn@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/better-opn/-/better-opn-3.0.2.tgz#f96f35deaaf8f34144a4102651babcf00d1d8817" + resolved "https://registry.npmjs.org/better-opn/-/better-opn-3.0.2.tgz" integrity sha512-aVNobHnJqLiUelTaHat9DZ1qM2w0C0Eym4LPI/3JxOnSokGVdsl1T1kN7TFvsEAD8G47A6VKQ0TVHqbBnYMJlQ== dependencies: open "^8.0.4" big-integer@^1.6.44: version "1.6.51" - resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" + resolved "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz" integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg== big.js@^5.2.2: version "5.2.2" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + resolved "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^2.0.0: version "2.2.0" - resolved 
"https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== bl@^4.0.3, bl@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + resolved "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz" integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== dependencies: buffer "^5.5.0" @@ -4270,17 +4465,17 @@ bl@^4.0.3, bl@^4.1.0: bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9: version "4.12.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" + resolved "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== bn.js@^5.0.0, bn.js@^5.1.1: version "5.2.1" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" + resolved "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz" integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== body-parser@1.20.1: version "1.20.1" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" + resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz" integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== dependencies: bytes "3.1.2" @@ -4298,19 +4493,19 @@ body-parser@1.20.1: boolbase@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" integrity 
sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== bplist-parser@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/bplist-parser/-/bplist-parser-0.2.0.tgz#43a9d183e5bf9d545200ceac3e712f79ebbe8d0e" + resolved "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz" integrity sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw== dependencies: big-integer "^1.6.44" brace-expansion@^1.1.7: version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" @@ -4318,31 +4513,31 @@ brace-expansion@^1.1.7: brace-expansion@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz" integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== dependencies: balanced-match "^1.0.0" braces@^3.0.2, braces@~3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" + resolved "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz" integrity 
sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w== browser-assert@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/browser-assert/-/browser-assert-1.2.1.tgz#9aaa5a2a8c74685c2ae05bfe46efd606f068c200" + resolved "https://registry.npmjs.org/browser-assert/-/browser-assert-1.2.1.tgz" integrity sha512-nfulgvOR6S4gt9UKCeGJOuSGBPGiFT6oQ/2UBnvTY/5aQ1PnksW72fhZkM30DzoRRv2WpwZf1vHHEr3mtuXIWQ== browserify-aes@^1.0.0, browserify-aes@^1.0.4: version "1.2.0" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" + resolved "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz" integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== dependencies: buffer-xor "^1.0.3" @@ -4354,7 +4549,7 @@ browserify-aes@^1.0.0, browserify-aes@^1.0.4: browserify-cipher@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" + resolved "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz" integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== dependencies: browserify-aes "^1.0.4" @@ -4363,7 +4558,7 @@ browserify-cipher@^1.0.0: browserify-des@^1.0.0: version "1.0.2" - resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" + resolved "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz" integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== dependencies: cipher-base "^1.0.1" @@ -4373,7 +4568,7 @@ browserify-des@^1.0.0: browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: version "4.1.0" - resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.1.0.tgz#b2fd06b5b75ae297f7ce2dc651f918f5be158c8d" + 
resolved "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz" integrity sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog== dependencies: bn.js "^5.0.0" @@ -4381,7 +4576,7 @@ browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: browserify-sign@^4.0.0: version "4.2.1" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.1.tgz#eaf4add46dd54be3bb3b36c0cf15abbeba7956c3" + resolved "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz" integrity sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg== dependencies: bn.js "^5.1.1" @@ -4396,53 +4591,53 @@ browserify-sign@^4.0.0: browserify-zlib@^0.1.4: version "0.1.4" - resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.1.4.tgz#bb35f8a519f600e0fa6b8485241c979d0141fb2d" + resolved "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.1.4.tgz" integrity sha512-19OEpq7vWgsH6WkvkBJQDFvJS1uPcbFOQ4v9CU839dO+ZZXUZO6XpE6hNCqvlIIj+4fZvRiJ6DsAQ382GwiyTQ== dependencies: pako "~0.2.0" browserify-zlib@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" + resolved "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz" integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== dependencies: pako "~1.0.5" -browserslist@^4.14.5, browserslist@^4.21.10, browserslist@^4.21.9: - version "4.21.10" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.10.tgz#dbbac576628c13d3b2231332cb2ec5a46e015bb0" - integrity sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ== +browserslist@^4.14.5, browserslist@^4.21.10, browserslist@^4.21.9, browserslist@^4.22.1: + version "4.22.1" + resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.22.1.tgz" + 
integrity sha512-FEVc202+2iuClEhZhrWy6ZiAcRLvNMyYcxZ8raemul1DYVOVdFsbqckWLdsixQZCpJlwe77Z3UTalE7jsjnKfQ== dependencies: - caniuse-lite "^1.0.30001517" - electron-to-chromium "^1.4.477" + caniuse-lite "^1.0.30001541" + electron-to-chromium "^1.4.535" node-releases "^2.0.13" - update-browserslist-db "^1.0.11" + update-browserslist-db "^1.0.13" bser@2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + resolved "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz" integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== dependencies: node-int64 "^0.4.0" buffer-crc32@~0.2.3: version "0.2.13" - resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" + resolved "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz" integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ== buffer-from@^1.0.0: version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== buffer-xor@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" + resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz" integrity sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ== buffer@^5.5.0: version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + resolved "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz" integrity 
sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== dependencies: base64-js "^1.3.1" @@ -4450,7 +4645,7 @@ buffer@^5.5.0: buffer@^6.0.3: version "6.0.3" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" + resolved "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz" integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== dependencies: base64-js "^1.3.1" @@ -4458,29 +4653,29 @@ buffer@^6.0.3: builtin-status-codes@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" + resolved "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz" integrity sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ== busboy@1.6.0: version "1.6.0" - resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + resolved "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz" integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== dependencies: streamsearch "^1.1.0" bytes@3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + resolved "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz" integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== bytes@3.1.2: version "3.1.2" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + resolved "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz" integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== c8@^7.6.0: version "7.14.0" - resolved 
"https://registry.yarnpkg.com/c8/-/c8-7.14.0.tgz#f368184c73b125a80565e9ab2396ff0be4d732f3" + resolved "https://registry.npmjs.org/c8/-/c8-7.14.0.tgz" integrity sha512-i04rtkkcNcCf7zsQcSv/T9EbUn4RXQ6mropeMcjFOsQXQ0iGLAr/xT6TImQg4+U9hmNpN9XdvPkjUL1IzbgxJw== dependencies: "@bcoe/v8-coverage" "^0.2.3" @@ -4498,7 +4693,7 @@ c8@^7.6.0: call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz" integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== dependencies: function-bind "^1.1.1" @@ -4506,12 +4701,12 @@ call-bind@^1.0.0, call-bind@^1.0.2: callsites@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== camel-case@^4.1.2: version "4.1.2" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + resolved "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz" integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== dependencies: pascal-case "^3.1.2" @@ -4519,32 +4714,32 @@ camel-case@^4.1.2: camelcase-css@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + resolved "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz" integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== camelcase@^5.3.1: version "5.3.1" - resolved 
"https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== camelize@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/camelize/-/camelize-1.0.1.tgz#89b7e16884056331a35d6b5ad064332c91daa6c3" + resolved "https://registry.npmjs.org/camelize/-/camelize-1.0.1.tgz" integrity sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ== -caniuse-lite@^1.0.30001406, caniuse-lite@^1.0.30001517, caniuse-lite@^1.0.30001520: - version "1.0.30001527" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001527.tgz#813826554828245ccee776c850566dce12bdeaba" - integrity sha512-YkJi7RwPgWtXVSgK4lG9AHH57nSzvvOp9MesgXmw4Q7n0C3H04L0foHqfxcmSAm5AcWb8dW9AYj2tR7/5GnddQ== +caniuse-lite@^1.0.30001406, caniuse-lite@^1.0.30001538, caniuse-lite@^1.0.30001541: + version "1.0.30001547" + resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001547.tgz" + integrity sha512-W7CrtIModMAxobGhz8iXmDfuJiiKg1WADMO/9x7/CLNin5cpSbuBjooyoIUVB5eyCc36QuTVlkVa1iB2S5+/eA== case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" - resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" + resolved "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz" integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== chalk@^2.4.2: version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" integrity 
sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" @@ -4553,7 +4748,7 @@ chalk@^2.4.2: chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== dependencies: ansi-styles "^4.1.0" @@ -4561,7 +4756,7 @@ chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: chokidar@^3.4.0, chokidar@^3.5.3: version "3.5.3" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== dependencies: anymatch "~3.1.2" @@ -4576,27 +4771,27 @@ chokidar@^3.4.0, chokidar@^3.5.3: chownr@^1.1.1: version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + resolved "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chownr@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" + resolved "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz" integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== chrome-trace-event@^1.0.2: version "1.0.3" - resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + resolved "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz" integrity 
sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== ci-info@^3.2.0: - version "3.8.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" - integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== + version "3.9.0" + resolved "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz" + integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ== cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: version "1.0.4" - resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" + resolved "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz" integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== dependencies: inherits "^2.0.1" @@ -4604,36 +4799,36 @@ cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: classnames@^2.2.5, classnames@^2.3.2: version "2.3.2" - resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.2.tgz#351d813bf0137fcc6a76a16b88208d2560a0d924" + resolved "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz" integrity sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw== clean-css@^5.2.2: version "5.3.2" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.2.tgz#70ecc7d4d4114921f5d298349ff86a31a9975224" + resolved "https://registry.npmjs.org/clean-css/-/clean-css-5.3.2.tgz" integrity sha512-JVJbM+f3d3Q704rF4bqQ5UUyTtuJ0JRKNbTKVEeujCCBoMdkEi+V+e8oktO9qGQNSvHrFTM6JZRXrUvGR1czww== dependencies: source-map "~0.6.0" clean-stack@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz" integrity 
sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== cli-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + resolved "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz" integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== dependencies: restore-cursor "^3.1.0" cli-spinners@^2.5.0: - version "2.9.0" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.0.tgz#5881d0ad96381e117bbe07ad91f2008fe6ffd8db" - integrity sha512-4/aL9X3Wh0yiMQlE+eeRhWP6vclO3QRtw1JHKIT0FFUs5FjpFmESqtMvYZ0+lbzBw900b95mS0hohy+qn2VK/g== + version "2.9.1" + resolved "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.1.tgz" + integrity sha512-jHgecW0pxkonBJdrKsqxgRX9AcG+u/5k0Q7WPDfi8AogLAdwxEkyYYNWwZ5GvVFoFx2uiY1eNcSK00fh+1+FyQ== cli-table3@^0.6.1: version "0.6.3" - resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.3.tgz#61ab765aac156b52f222954ffc607a6f01dbeeb2" + resolved "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz" integrity sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg== dependencies: string-width "^4.2.0" @@ -4642,12 +4837,12 @@ cli-table3@^0.6.1: client-only@0.0.1, client-only@^0.0.1: version "0.0.1" - resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1" + resolved "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz" integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA== cliui@^7.0.2: version "7.0.4" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + resolved "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz" integrity 
sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== dependencies: string-width "^4.2.0" @@ -4656,7 +4851,7 @@ cliui@^7.0.2: cliui@^8.0.1: version "8.0.1" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + resolved "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz" integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== dependencies: string-width "^4.2.0" @@ -4665,7 +4860,7 @@ cliui@^8.0.1: clone-deep@^4.0.1: version "4.0.1" - resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + resolved "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz" integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== dependencies: is-plain-object "^2.0.4" @@ -4674,90 +4869,95 @@ clone-deep@^4.0.1: clone@^1.0.2: version "1.0.4" - resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + resolved "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz" integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== +clsx@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.0.0.tgz#12658f3fd98fafe62075595a5c30e43d18f3d00b" + integrity sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q== + color-convert@^1.9.0: version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-convert@^2.0.1: version "2.0.1" - resolved 
"https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== dependencies: color-name "~1.1.4" color-name@1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== color-name@~1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -colorette@^2.0.10, colorette@^2.0.19: +colorette@^2.0.10, colorette@^2.0.20: version "2.0.20" - resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" + resolved "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== combined-stream@^1.0.8: version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-11.0.0.tgz#43e19c25dbedc8256203538e8d7e9346877a6f67" - integrity 
sha512-9HMlXtt/BNoYr8ooyjjNRdIilOTkVJXB+GhxMTtOKwk0R4j4lS4NpjuqmRxroBfnfTSHQIHQB7wryHhXarNjmQ== + version "11.1.0" + resolved "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz" + integrity sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ== commander@^2.19.0, commander@^2.20.0: version "2.20.3" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@^4.0.0, commander@^4.0.1: version "4.1.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" + resolved "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz" integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== commander@^6.2.1: version "6.2.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" + resolved "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz" integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== commander@^8.3.0: version "8.3.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + resolved "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz" integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== common-path-prefix@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + resolved "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz" integrity 
sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== commondir@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + resolved "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz" integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== compressible@~2.0.16: version "2.0.18" - resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + resolved "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz" integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== dependencies: mime-db ">= 1.43.0 < 2" compression@^1.7.4: version "1.7.4" - resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + resolved "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz" integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" @@ -4770,12 +4970,12 @@ compression@^1.7.4: concat-map@0.0.1: version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== concat-stream@^1.6.2: version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" + resolved "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" @@ -4785,56 +4985,56 @@ concat-stream@^1.6.2: 
console-browserify@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" + resolved "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== constants-browserify@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" + resolved "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz" integrity sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ== content-disposition@0.5.4: version "0.5.4" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz" integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== dependencies: safe-buffer "5.2.1" content-type@~1.0.4: version "1.0.5" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + resolved "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz" integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== -convert-source-map@^1.1.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: +convert-source-map@^1.7.0: version "1.9.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== convert-source-map@^2.0.0: version 
"2.0.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz" integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== cookie-signature@1.0.6: version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== cookie@0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + resolved "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz" integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== copy-anything@^2.0.1: version "2.0.6" - resolved "https://registry.yarnpkg.com/copy-anything/-/copy-anything-2.0.6.tgz#092454ea9584a7b7ad5573062b2a87f5900fc480" + resolved "https://registry.npmjs.org/copy-anything/-/copy-anything-2.0.6.tgz" integrity sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw== dependencies: is-what "^3.14.1" copy-webpack-plugin@^11.0.0: version "11.0.0" - resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz#96d4dbdb5f73d02dd72d0528d1958721ab72e04a" + resolved "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz" integrity sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ== dependencies: fast-glob "^3.2.11" @@ -4844,26 +5044,26 @@ copy-webpack-plugin@^11.0.0: schema-utils "^4.0.0" serialize-javascript "^6.0.0" -core-js-compat@^3.31.0: - version "3.32.1" - resolved 
"https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.32.1.tgz#55f9a7d297c0761a8eb1d31b593e0f5b6ffae964" - integrity sha512-GSvKDv4wE0bPnQtjklV101juQ85g6H3rm5PDP20mqlS5j0kXF3pP97YvAu5hl+uFHqMictp3b2VxOHljWMAtuA== +core-js-compat@^3.31.0, core-js-compat@^3.32.2: + version "3.33.0" + resolved "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.33.0.tgz" + integrity sha512-0w4LcLXsVEuNkIqwjjf9rjCoPhK8uqA4tMRh4Ge26vfLtUutshn+aRJU21I9LCJlh2QQHfisNToLjw1XEJLTWw== dependencies: - browserslist "^4.21.10" + browserslist "^4.22.1" core-js-pure@^3.23.3: - version "3.32.1" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.32.1.tgz#5775b88f9062885f67b6d7edce59984e89d276f3" - integrity sha512-f52QZwkFVDPf7UEQZGHKx6NYxsxmVGJe5DIvbzOdRMJlmT6yv0KDjR8rmy3ngr/t5wU54c7Sp/qIJH0ppbhVpQ== + version "3.33.0" + resolved "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.33.0.tgz" + integrity sha512-FKSIDtJnds/YFIEaZ4HszRX7hkxGpNKM7FC9aJ9WLJbSd3lD4vOltFuVIBLR8asSx9frkTSqL0dw90SKQxgKrg== core-util-is@~1.0.0: version "1.0.3" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== cosmiconfig@^7.0.1: version "7.1.0" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6" + resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz" integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== dependencies: "@types/parse-json" "^4.0.0" @@ -4873,9 +5073,9 @@ cosmiconfig@^7.0.1: yaml "^1.10.0" cosmiconfig@^8.2.0: - version "8.3.4" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.3.4.tgz#ee1356e7f24e248a6bb34ec5d438c3dcebeb410c" - integrity 
sha512-SF+2P8+o/PTV05rgsAjDzL4OFdVXAulSfC/L19VaeVT7+tpOOSscCt2QLxDZ+CLxF2WOiq6y1K5asvs8qUJT/Q== + version "8.3.6" + resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz" + integrity sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA== dependencies: import-fresh "^3.3.0" js-yaml "^4.1.0" @@ -4884,7 +5084,7 @@ cosmiconfig@^8.2.0: create-ecdh@^4.0.0: version "4.0.4" - resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e" + resolved "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz" integrity sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A== dependencies: bn.js "^4.1.0" @@ -4892,7 +5092,7 @@ create-ecdh@^4.0.0: create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" + resolved "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz" integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== dependencies: cipher-base "^1.0.1" @@ -4903,7 +5103,7 @@ create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: version "1.1.7" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" + resolved "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz" integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== dependencies: cipher-base "^1.0.3" @@ -4915,7 +5115,7 @@ create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + resolved 
"https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== dependencies: path-key "^3.1.0" @@ -4924,7 +5124,7 @@ cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: crypto-browserify@^3.12.0: version "3.12.0" - resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" + resolved "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz" integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== dependencies: browserify-cipher "^1.0.0" @@ -4941,17 +5141,17 @@ crypto-browserify@^3.12.0: crypto-random-string@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + resolved "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz" integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== css-color-keywords@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/css-color-keywords/-/css-color-keywords-1.0.0.tgz#fea2616dc676b2962686b3af8dbdbe180b244e05" + resolved "https://registry.npmjs.org/css-color-keywords/-/css-color-keywords-1.0.0.tgz" integrity sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg== css-loader@^6.7.1, css-loader@^6.7.3: version "6.8.1" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.8.1.tgz#0f8f52699f60f5e679eab4ec0fcd68b8e8a50a88" + resolved "https://registry.npmjs.org/css-loader/-/css-loader-6.8.1.tgz" integrity sha512-xDAXtEVGlD0gJ07iclwWVkLoZOpEvAWaSyf6W18S2pOC//K8+qUDIx8IIT3D+HjnmkJPQeesOPv5aiUaJsCM2g== dependencies: icss-utils "^5.1.0" @@ -4965,7 +5165,7 @@ css-loader@^6.7.1, css-loader@^6.7.3: css-select@^4.1.3: version "4.3.0" - resolved 
"https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + resolved "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz" integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== dependencies: boolbase "^1.0.0" @@ -4976,7 +5176,7 @@ css-select@^4.1.3: css-to-react-native@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/css-to-react-native/-/css-to-react-native-3.2.0.tgz#cdd8099f71024e149e4f6fe17a7d46ecd55f1e32" + resolved "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.2.0.tgz" integrity sha512-e8RKaLXMOFii+02mOlqwjbD00KSEKqblnpO9e++1aXS1fPQOpS1YoqdVHBqPjHNoxeF2mimzVqawm2KCbEdtHQ== dependencies: camelize "^1.0.0" @@ -4985,61 +5185,61 @@ css-to-react-native@^3.2.0: css-unit-converter@^1.1.1: version "1.1.2" - resolved "https://registry.yarnpkg.com/css-unit-converter/-/css-unit-converter-1.1.2.tgz#4c77f5a1954e6dbff60695ecb214e3270436ab21" + resolved "https://registry.npmjs.org/css-unit-converter/-/css-unit-converter-1.1.2.tgz" integrity sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA== css-what@^6.0.1: version "6.1.0" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + resolved "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz" integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== cssesc@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + resolved "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== csstype@^3.0.2, csstype@^3.1.2: version "3.1.2" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.2.tgz#1d4bf9d572f11c14031f0436e1c10bc1f571f50b" + resolved 
"https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz" integrity sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ== "d3-array@2 - 3", "d3-array@2.10.0 - 3", d3-array@^3.1.6: version "3.2.4" - resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5" + resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz" integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== dependencies: internmap "1 - 2" "d3-color@1 - 3": version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2" + resolved "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz" integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA== d3-ease@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-3.0.1.tgz#9658ac38a2140d59d346160f1f6c30fda0bd12f4" + resolved "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz" integrity sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w== "d3-format@1 - 3": version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641" + resolved "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz" integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== "d3-interpolate@1.2.0 - 3", d3-interpolate@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d" + resolved "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz" integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g== dependencies: d3-color "1 - 3" d3-path@^3.1.0: version "3.1.0" - resolved 
"https://registry.yarnpkg.com/d3-path/-/d3-path-3.1.0.tgz#22df939032fb5a71ae8b1800d61ddb7851c42526" + resolved "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz" integrity sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ== d3-scale@^4.0.2: version "4.0.2" - resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396" + resolved "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz" integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ== dependencies: d3-array "2.10.0 - 3" @@ -5050,76 +5250,76 @@ d3-scale@^4.0.2: d3-shape@^3.1.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-3.2.0.tgz#a1a839cbd9ba45f28674c69d7f855bcf91dfc6a5" + resolved "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz" integrity sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA== dependencies: d3-path "^3.1.0" "d3-time-format@2 - 4": version "4.1.0" - resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a" + resolved "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz" integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== dependencies: d3-time "1 - 3" "d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.1.0.tgz#9310db56e992e3c0175e1ef385e545e48a9bb5c7" + resolved "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz" integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q== dependencies: d3-array "2 - 3" d3-timer@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-3.0.1.tgz#6284d2a2708285b1abb7e201eda4380af35e63b0" + resolved 
"https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz" integrity sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA== damerau-levenshtein@^1.0.8: version "1.0.8" - resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + resolved "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz" integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== date-fns@^2.28.0: version "2.30.0" - resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.30.0.tgz#f367e644839ff57894ec6ac480de40cae4b0f4d0" + resolved "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz" integrity sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw== dependencies: "@babel/runtime" "^7.21.0" debug@2.6.9, debug@^2.6.9: version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" debug@^3.2.6, debug@^3.2.7: version "3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== dependencies: ms "^2.1.1" 
decimal.js-light@^2.4.1: version "2.5.1" - resolved "https://registry.yarnpkg.com/decimal.js-light/-/decimal.js-light-2.5.1.tgz#134fd32508f19e208f4fb2f8dac0d2626a867934" + resolved "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz" integrity sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg== dedent@^0.7.0: version "0.7.0" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + resolved "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz" integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== deep-equal@^2.0.5: version "2.2.2" - resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.2.2.tgz#9b2635da569a13ba8e1cc159c2f744071b115daa" + resolved "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.2.tgz" integrity sha512-xjVyBf0w5vH0I42jdAZzOKVldmPgSulmiyPRywoyq7HXC9qdgo17kxJE+rdnif5Tz6+pIrpJI8dCpMNLIGkUiA== dependencies: array-buffer-byte-length "^1.0.0" @@ -5143,17 +5343,17 @@ deep-equal@^2.0.5: deep-is@^0.1.3: version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== deepmerge@^4.2.2: version "4.3.1" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + resolved "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz" integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== default-browser-id@3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/default-browser-id/-/default-browser-id-3.0.0.tgz#bee7bbbef1f4e75d31f98f4d3f1556a14cea790c" + resolved 
"https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz" integrity sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA== dependencies: bplist-parser "^0.2.0" @@ -5161,32 +5361,42 @@ default-browser-id@3.0.0: defaults@^1.0.3: version "1.0.4" - resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + resolved "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz" integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== dependencies: clone "^1.0.2" +define-data-property@^1.0.1: + version "1.1.1" + resolved "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz" + integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== + dependencies: + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" + define-lazy-prop@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + resolved "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz" integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== -define-properties@^1.1.3, define-properties@^1.1.4, define-properties@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" - integrity sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA== +define-properties@^1.1.3, define-properties@^1.1.4, define-properties@^1.2.0, define-properties@^1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz" + integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== dependencies: 
+ define-data-property "^1.0.1" has-property-descriptors "^1.0.0" object-keys "^1.1.1" defu@^6.1.2: version "6.1.2" - resolved "https://registry.yarnpkg.com/defu/-/defu-6.1.2.tgz#1217cba167410a1765ba93893c6dbac9ed9d9e5c" + resolved "https://registry.npmjs.org/defu/-/defu-6.1.2.tgz" integrity sha512-+uO4+qr7msjNNWKYPHqN/3+Dx3NFkmIzayk2L1MyZQlvgZb/J1A0fo410dpKrN2SnqFjt8n4JL8fDJE0wIgjFQ== del@^6.0.0: version "6.1.1" - resolved "https://registry.yarnpkg.com/del/-/del-6.1.1.tgz#3b70314f1ec0aa325c6b14eb36b95786671edb7a" + resolved "https://registry.npmjs.org/del/-/del-6.1.1.tgz" integrity sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg== dependencies: globby "^11.0.1" @@ -5200,22 +5410,22 @@ del@^6.0.0: delayed-stream@~1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== depd@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== dequal@^2.0.2, dequal@^2.0.3: version "2.0.3" - resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" + resolved "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz" integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== des.js@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.1.0.tgz#1d37f5766f3bbff4ee9638e871a8768c173b81da" + resolved "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz" integrity 
sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg== dependencies: inherits "^2.0.1" @@ -5223,29 +5433,29 @@ des.js@^1.0.0: destroy@1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== detect-indent@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.1.0.tgz#592485ebbbf6b3b1ab2be175c8393d04ca0d57e6" + resolved "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz" integrity sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA== detect-node-es@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/detect-node-es/-/detect-node-es-1.1.0.tgz#163acdf643330caa0b4cd7c21e7ee7755d6fa493" + resolved "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz" integrity sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ== detect-package-manager@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/detect-package-manager/-/detect-package-manager-2.0.1.tgz#6b182e3ae5e1826752bfef1de9a7b828cffa50d8" + resolved "https://registry.npmjs.org/detect-package-manager/-/detect-package-manager-2.0.1.tgz" integrity sha512-j/lJHyoLlWi6G1LDdLgvUtz60Zo5GEj+sVYtTVXnYLDPuzgC3llMxonXym9zIwhhUII8vjdw0LXxavpLqTbl1A== dependencies: execa "^5.1.1" detect-port@^1.3.0: version "1.5.1" - resolved "https://registry.yarnpkg.com/detect-port/-/detect-port-1.5.1.tgz#451ca9b6eaf20451acb0799b8ab40dff7718727b" + resolved "https://registry.npmjs.org/detect-port/-/detect-port-1.5.1.tgz" integrity sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ== dependencies: address "^1.0.1" @@ -5253,12 +5463,12 @@ 
detect-port@^1.3.0: didyoumean@^1.2.2: version "1.2.2" - resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + resolved "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz" integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== diffie-hellman@^5.0.0: version "5.0.3" - resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" + resolved "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz" integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== dependencies: bn.js "^4.1.0" @@ -5267,52 +5477,52 @@ diffie-hellman@^5.0.0: dir-glob@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== dependencies: path-type "^4.0.0" dlv@^1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + resolved "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz" integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== doctrine@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + resolved "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz" integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== dependencies: esutils "^2.0.2" doctrine@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + resolved 
"https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz" integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== dependencies: esutils "^2.0.2" dom-accessibility-api@^0.5.9: version "0.5.16" - resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz#5a7429e6066eb3664d911e33fb0e45de8eb08453" + resolved "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz" integrity sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg== dom-converter@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" + resolved "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz" integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-helpers@^3.4.0: version "3.4.0" - resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-3.4.0.tgz#e9b369700f959f62ecde5a6babde4bccd9169af8" + resolved "https://registry.npmjs.org/dom-helpers/-/dom-helpers-3.4.0.tgz" integrity sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA== dependencies: "@babel/runtime" "^7.1.2" dom-helpers@^5.0.1: version "5.2.1" - resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.1.tgz#d9400536b2bf8225ad98fe052e029451ac40e902" + resolved "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz" integrity sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA== dependencies: "@babel/runtime" "^7.8.7" @@ -5320,7 +5530,7 @@ dom-helpers@^5.0.1: dom-serializer@^1.0.1: version "1.4.1" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + resolved 
"https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz" integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== dependencies: domelementtype "^2.0.1" @@ -5329,24 +5539,24 @@ dom-serializer@^1.0.1: domain-browser@^4.22.0: version "4.22.0" - resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-4.22.0.tgz#6ddd34220ec281f9a65d3386d267ddd35c491f9f" + resolved "https://registry.npmjs.org/domain-browser/-/domain-browser-4.22.0.tgz" integrity sha512-IGBwjF7tNk3cwypFNH/7bfzBcgSCbaMOD3GsaY1AU/JRrnHnYgEM0+9kQt52iZxjNsjBtJYtao146V+f8jFZNw== domelementtype@^2.0.1, domelementtype@^2.2.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + resolved "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz" integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: version "4.3.1" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + resolved "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz" integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== dependencies: domelementtype "^2.2.0" domutils@^2.5.2, domutils@^2.8.0: version "2.8.0" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + resolved "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz" integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== dependencies: dom-serializer "^1.0.1" @@ -5355,7 +5565,7 @@ domutils@^2.5.2, domutils@^2.8.0: dot-case@^3.0.4: version "3.0.4" - resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + resolved 
"https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz" integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== dependencies: no-case "^3.0.4" @@ -5363,17 +5573,17 @@ dot-case@^3.0.4: dotenv-expand@^10.0.0: version "10.0.0" - resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-10.0.0.tgz#12605d00fb0af6d0a592e6558585784032e4ef37" + resolved "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-10.0.0.tgz" integrity sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A== dotenv@^16.0.0: version "16.3.1" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" + resolved "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz" integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== duplexify@^3.5.0, duplexify@^3.6.0: version "3.7.1" - resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" + resolved "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" @@ -5383,29 +5593,29 @@ duplexify@^3.5.0, duplexify@^3.6.0: eastasianwidth@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + resolved "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz" integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== ee-first@1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + resolved "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" integrity 
sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== ejs@^3.1.8: version "3.1.9" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.9.tgz#03c9e8777fe12686a9effcef22303ca3d8eeb361" + resolved "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz" integrity sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ== dependencies: jake "^10.8.5" -electron-to-chromium@^1.4.477: - version "1.4.508" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.508.tgz#5641ff2f5ba11df4bd960fe6a2f9f70aa8b9af96" - integrity sha512-FFa8QKjQK/A5QuFr2167myhMesGrhlOBD+3cYNxO9/S4XzHEXesyTD/1/xF644gC8buFPz3ca6G1LOQD0tZrrg== +electron-to-chromium@^1.4.535: + version "1.4.553" + resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.553.tgz" + integrity sha512-HiRdtyKS2+VhiXvjhMvvxiMC33FJJqTA5EB2YHgFZW6v7HkK4Q9Ahv2V7O2ZPgAjw+MyCJVMQvigj13H8t+wvA== elliptic@^6.5.3: version "6.5.4" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" + resolved "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz" integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== dependencies: bn.js "^4.11.9" @@ -5418,44 +5628,44 @@ elliptic@^6.5.3: email-addresses@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/email-addresses/-/email-addresses-5.0.0.tgz#7ae9e7f58eef7d5e3e2c2c2d3ea49b78dc854fa6" + resolved "https://registry.npmjs.org/email-addresses/-/email-addresses-5.0.0.tgz" integrity sha512-4OIPYlA6JXqtVn8zpHpGiI7vE6EQOAg16aGnDMIAlZVinnoZ8208tW1hAbjWydgN/4PLTT9q+O1K6AH/vALJGw== emoji-regex@^10.2.1: version "10.2.1" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.2.1.tgz#a41c330d957191efd3d9dfe6e1e8e1e9ab048b3f" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.2.1.tgz" integrity 
sha512-97g6QgOk8zlDRdgq1WxwgTMgEWGVAQvB5Fdpgc1MkNy56la5SKP9GsMXKDOdqwn90/41a8yPwIGk1Y6WVbeMQA== emoji-regex@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== emoji-regex@^9.2.2: version "9.2.2" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz" integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== emojis-list@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + resolved "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz" integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== encodeurl@~1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1: version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + resolved "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" endent@^2.0.1: version "2.1.0" - resolved "https://registry.yarnpkg.com/endent/-/endent-2.1.0.tgz#5aaba698fb569e5e18e69e1ff7a28ff35373cd88" + 
resolved "https://registry.npmjs.org/endent/-/endent-2.1.0.tgz" integrity sha512-r8VyPX7XL8U01Xgnb1CjZ3XV+z90cXIJ9JPE/R9SEC9vpw2P6CfsRPJmp20DppC5N7ZAMCmjYkJIa744Iyg96w== dependencies: dedent "^0.7.0" @@ -5464,7 +5674,7 @@ endent@^2.0.1: enhanced-resolve@^5.12.0, enhanced-resolve@^5.15.0, enhanced-resolve@^5.7.0: version "5.15.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" + resolved "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz" integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg== dependencies: graceful-fs "^4.2.4" @@ -5472,47 +5682,47 @@ enhanced-resolve@^5.12.0, enhanced-resolve@^5.15.0, enhanced-resolve@^5.7.0: entities@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" + resolved "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== envinfo@^7.7.3: version "7.10.0" - resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.10.0.tgz#55146e3909cc5fe63c22da63fb15b05aeac35b13" + resolved "https://registry.npmjs.org/envinfo/-/envinfo-7.10.0.tgz" integrity sha512-ZtUjZO6l5mwTHvc1L9+1q5p/R3wTopcfqMW8r5t8SJSKqeVI/LtajORwRFEKpEFuekjD0VBjwu1HMxL4UalIRw== errno@^0.1.1: version "0.1.8" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" + resolved "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz" integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== dependencies: prr "~1.0.1" error-ex@^1.3.1: version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" integrity 
sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" error-stack-parser@^2.0.6: version "2.1.4" - resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286" + resolved "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.1.4.tgz" integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ== dependencies: stackframe "^1.3.4" -es-abstract@^1.20.4, es-abstract@^1.22.1: - version "1.22.1" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.22.1.tgz#8b4e5fc5cefd7f1660f0f8e1a52900dfbc9d9ccc" - integrity sha512-ioRRcXMO6OFyRpyzV3kE1IIBd4WG5/kltnzdxSCqoP8CMGs/Li+M1uF5o7lOkZVFjDs+NLesthnF66Pg/0q0Lw== +es-abstract@^1.22.1: + version "1.22.2" + resolved "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.2.tgz" + integrity sha512-YoxfFcDmhjOgWPWsV13+2RNjq1F6UQnfs+8TftwNqtzlmFzEXvlUwdrNrYeaizfjQzRMxkZ6ElWMOJIFKdVqwA== dependencies: array-buffer-byte-length "^1.0.0" - arraybuffer.prototype.slice "^1.0.1" + arraybuffer.prototype.slice "^1.0.2" available-typed-arrays "^1.0.5" call-bind "^1.0.2" es-set-tostringtag "^2.0.1" es-to-primitive "^1.2.1" - function.prototype.name "^1.1.5" + function.prototype.name "^1.1.6" get-intrinsic "^1.2.1" get-symbol-description "^1.0.0" globalthis "^1.0.3" @@ -5528,27 +5738,27 @@ es-abstract@^1.20.4, es-abstract@^1.22.1: is-regex "^1.1.4" is-shared-array-buffer "^1.0.2" is-string "^1.0.7" - is-typed-array "^1.1.10" + is-typed-array "^1.1.12" is-weakref "^1.0.2" object-inspect "^1.12.3" object-keys "^1.1.1" object.assign "^4.1.4" - regexp.prototype.flags "^1.5.0" - safe-array-concat "^1.0.0" + regexp.prototype.flags "^1.5.1" + safe-array-concat "^1.0.1" safe-regex-test "^1.0.0" - string.prototype.trim "^1.2.7" - string.prototype.trimend "^1.0.6" - string.prototype.trimstart "^1.0.6" + string.prototype.trim "^1.2.8" + 
string.prototype.trimend "^1.0.7" + string.prototype.trimstart "^1.0.7" typed-array-buffer "^1.0.0" typed-array-byte-length "^1.0.0" typed-array-byte-offset "^1.0.0" typed-array-length "^1.0.4" unbox-primitive "^1.0.2" - which-typed-array "^1.1.10" + which-typed-array "^1.1.11" es-get-iterator@^1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.3.tgz#3ef87523c5d464d41084b2c3c9c214f1199763d6" + resolved "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz" integrity sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw== dependencies: call-bind "^1.0.2" @@ -5562,13 +5772,13 @@ es-get-iterator@^1.1.3: stop-iteration-iterator "^1.0.0" es-iterator-helpers@^1.0.12: - version "1.0.14" - resolved "https://registry.yarnpkg.com/es-iterator-helpers/-/es-iterator-helpers-1.0.14.tgz#19cd7903697d97e21198f3293b55e8985791c365" - integrity sha512-JgtVnwiuoRuzLvqelrvN3Xu7H9bu2ap/kQ2CrM62iidP8SKuD99rWU3CJy++s7IVL2qb/AjXPGR/E7i9ngd/Cw== + version "1.0.15" + resolved "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.15.tgz" + integrity sha512-GhoY8uYqd6iwUl2kgjTm4CZAf6oo5mHK7BPqx3rKgx893YSsy0LGHV6gfqqQvZt/8xM8xeOnfXBCfqclMKkJ5g== dependencies: asynciterator.prototype "^1.0.0" call-bind "^1.0.2" - define-properties "^1.2.0" + define-properties "^1.2.1" es-abstract "^1.22.1" es-set-tostringtag "^2.0.1" function-bind "^1.1.1" @@ -5578,17 +5788,17 @@ es-iterator-helpers@^1.0.12: has-proto "^1.0.1" has-symbols "^1.0.3" internal-slot "^1.0.5" - iterator.prototype "^1.1.0" - safe-array-concat "^1.0.0" + iterator.prototype "^1.1.2" + safe-array-concat "^1.0.1" es-module-lexer@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.3.0.tgz#6be9c9e0b4543a60cd166ff6f8b4e9dae0b0c16f" - integrity sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA== + version "1.3.1" + resolved 
"https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.3.1.tgz" + integrity sha512-JUFAyicQV9mXc3YRxPnDlrfBKpqt6hUYzz9/boprUJHs4e4KVr3XwOF70doO6gwXUor6EWZJAyWAfKki84t20Q== es-set-tostringtag@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" + resolved "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz" integrity sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg== dependencies: get-intrinsic "^1.1.3" @@ -5597,40 +5807,35 @@ es-set-tostringtag@^2.0.1: es-shim-unscopables@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + resolved "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz" integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== dependencies: has "^1.0.3" es-to-primitive@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + resolved "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" is-symbol "^1.0.2" -es6-object-assign@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/es6-object-assign/-/es6-object-assign-1.1.0.tgz#c2c3582656247c39ea107cb1e6652b6f9f24523c" - integrity sha512-MEl9uirslVwqQU369iHNWZXsI8yaZYGg/D65aOgZkeyFJwHYSxilf7rQzXKI7DdDuBPrBXbfk3sl9hJhmd5AUw== - esbuild-plugin-alias@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/esbuild-plugin-alias/-/esbuild-plugin-alias-0.2.1.tgz#45a86cb941e20e7c2bc68a2bea53562172494fcb" + resolved 
"https://registry.npmjs.org/esbuild-plugin-alias/-/esbuild-plugin-alias-0.2.1.tgz" integrity sha512-jyfL/pwPqaFXyKnj8lP8iLk6Z0m099uXR45aSN8Av1XD4vhvQutxxPzgA2bTcAwQpa1zCXDcWOlhFgyP3GKqhQ== esbuild-register@^3.4.0: - version "3.4.2" - resolved "https://registry.yarnpkg.com/esbuild-register/-/esbuild-register-3.4.2.tgz#1e39ee0a77e8f320a9790e68c64c3559620b9175" - integrity sha512-kG/XyTDyz6+YDuyfB9ZoSIOOmgyFCH+xPRtsCa8W85HLRV5Csp+o3jWVbOSHgSLfyLc5DmP+KFDNwty4mEjC+Q== + version "3.5.0" + resolved "https://registry.npmjs.org/esbuild-register/-/esbuild-register-3.5.0.tgz" + integrity sha512-+4G/XmakeBAsvJuDugJvtyF1x+XJT4FMocynNpxrvEBViirpfUn2PgNpCHedfWhF4WokNsO/OvMKrmJOIJsI5A== dependencies: debug "^4.3.4" esbuild@^0.18.0: version "0.18.20" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.18.20.tgz#4709f5a34801b43b799ab7d6d82f7284a9b7a7a6" + resolved "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz" integrity sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA== optionalDependencies: "@esbuild/android-arm" "0.18.20" @@ -5658,27 +5863,27 @@ esbuild@^0.18.0: escalade@^3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" 
+ resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== escape-string-regexp@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== escodegen@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.1.0.tgz#ba93bbb7a43986d29d6041f99f5262da773e2e17" + resolved "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz" integrity sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w== dependencies: esprima "^4.0.1" @@ -5689,7 +5894,7 @@ escodegen@^2.1.0: eslint-config-next@13.4.19: version "13.4.19" - resolved "https://registry.yarnpkg.com/eslint-config-next/-/eslint-config-next-13.4.19.tgz#f46be9d4bd9e52755f846338456132217081d7f8" + resolved "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-13.4.19.tgz" integrity sha512-WE8367sqMnjhWHvR5OivmfwENRQ1ixfNE9hZwQqNCsd+iM3KnuMc1V8Pt6ytgjxjf23D+xbesADv9x3xaKfT3g== dependencies: "@next/eslint-plugin-next" "13.4.19" @@ -5704,12 +5909,12 @@ eslint-config-next@13.4.19: eslint-config-prettier@^9.0.0: version "9.0.0" - resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-9.0.0.tgz#eb25485946dd0c66cd216a46232dc05451518d1f" + resolved "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.0.0.tgz" integrity sha512-IcJsTkJae2S35pRsRAwoCE+925rJJStOdkKnLVgtE+tEpqU0EVVM7OqrwxqgptKdX29NUwC82I5pXsGFIgSevw== eslint-import-resolver-node@^0.3.6, eslint-import-resolver-node@^0.3.7: version "0.3.9" - resolved 
"https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz#d4eaac52b8a2e7c3cd1903eb00f7e053356118ac" + resolved "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz" integrity sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g== dependencies: debug "^3.2.7" @@ -5717,9 +5922,9 @@ eslint-import-resolver-node@^0.3.6, eslint-import-resolver-node@^0.3.7: resolve "^1.22.4" eslint-import-resolver-typescript@^3.5.2: - version "3.6.0" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.0.tgz#36f93e1eb65a635e688e16cae4bead54552e3bbd" - integrity sha512-QTHR9ddNnn35RTxlaEnx2gCxqFlF2SEN0SE2d17SqwyM7YOSI2GHWRYp5BiRkObTUNYPupC/3Fq2a0PpT+EKpg== + version "3.6.1" + resolved "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.1.tgz" + integrity sha512-xgdptdoi5W3niYeuQxKmzVDTATvLYqhpwmykwsh7f6HIOStGWEIL9iqZgQDF9u9OEzrRwR8no5q2VT+bjAujTg== dependencies: debug "^4.3.4" enhanced-resolve "^5.12.0" @@ -5731,14 +5936,14 @@ eslint-import-resolver-typescript@^3.5.2: eslint-module-utils@^2.7.4, eslint-module-utils@^2.8.0: version "2.8.0" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz#e439fee65fc33f6bba630ff621efc38ec0375c49" + resolved "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz" integrity sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw== dependencies: debug "^3.2.7" eslint-plugin-import@^2.26.0: version "2.28.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz#63b8b5b3c409bfc75ebaf8fb206b07ab435482c4" + resolved "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz" integrity 
sha512-9I9hFlITvOV55alzoKBI+K9q74kv0iKMeY6av5+umsNwayt59fz692daGyjR+oStBQgx6nwR9rXldDev3Clw+A== dependencies: array-includes "^3.1.6" @@ -5761,7 +5966,7 @@ eslint-plugin-import@^2.26.0: eslint-plugin-jsx-a11y@^6.5.1: version "6.7.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.7.1.tgz#fca5e02d115f48c9a597a6894d5bcec2f7a76976" + resolved "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.7.1.tgz" integrity sha512-63Bog4iIethyo8smBklORknVjB0T2dwB8Mr/hIC+fBS0uyHdYYpzM/Ed+YC8VxTjlXHEWFOdmgwcDn1U2L9VCA== dependencies: "@babel/runtime" "^7.20.7" @@ -5783,12 +5988,12 @@ eslint-plugin-jsx-a11y@^6.5.1: "eslint-plugin-react-hooks@^4.5.0 || 5.0.0-canary-7118f5dd7-20230705": version "4.6.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" + resolved "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz" integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== eslint-plugin-react@^7.31.7: version "7.33.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz#69ee09443ffc583927eafe86ffebb470ee737608" + resolved "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz" integrity sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw== dependencies: array-includes "^3.1.6" @@ -5809,9 +6014,9 @@ eslint-plugin-react@^7.31.7: string.prototype.matchall "^4.0.8" eslint-plugin-storybook@^0.6.13: - version "0.6.13" - resolved "https://registry.yarnpkg.com/eslint-plugin-storybook/-/eslint-plugin-storybook-0.6.13.tgz#897a9f6a9bb88c63b02f05850f30c28a9848a3f7" - integrity sha512-smd+CS0WH1jBqUEJ3znGS7DU4ayBE9z6lkQAK2yrSUv1+rq8BT/tiI5C/rKE7rmiqiAfojtNYZRhzo5HrulccQ== + version "0.6.15" + resolved 
"https://registry.npmjs.org/eslint-plugin-storybook/-/eslint-plugin-storybook-0.6.15.tgz" + integrity sha512-lAGqVAJGob47Griu29KXYowI4G7KwMoJDOkEip8ujikuDLxU+oWJ1l0WL6F2oDO4QiyUFXvtDkEkISMOPzo+7w== dependencies: "@storybook/csf" "^0.0.1" "@typescript-eslint/utils" "^5.45.0" @@ -5820,7 +6025,7 @@ eslint-plugin-storybook@^0.6.13: eslint-scope@5.1.1, eslint-scope@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz" integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== dependencies: esrecurse "^4.3.0" @@ -5828,7 +6033,7 @@ eslint-scope@5.1.1, eslint-scope@^5.1.1: eslint-scope@^7.2.2: version "7.2.2" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" + resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz" integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== dependencies: esrecurse "^4.3.0" @@ -5836,19 +6041,19 @@ eslint-scope@^7.2.2: eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: version "3.4.3" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" + resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz" integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== eslint@^8.48.0: - version "8.48.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.48.0.tgz#bf9998ba520063907ba7bfe4c480dc8be03c2155" - integrity sha512-sb6DLeIuRXxeM1YljSe1KEx9/YYeZFQWcV8Rq9HfigmdDEugjLEVEa1ozDjL6YDjBpQHPJxJzze+alxi4T3OLg== + version "8.51.0" + resolved "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz" + integrity 
sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA== dependencies: "@eslint-community/eslint-utils" "^4.2.0" "@eslint-community/regexpp" "^4.6.1" "@eslint/eslintrc" "^2.1.2" - "@eslint/js" "8.48.0" - "@humanwhocodes/config-array" "^0.11.10" + "@eslint/js" "8.51.0" + "@humanwhocodes/config-array" "^0.11.11" "@humanwhocodes/module-importer" "^1.0.1" "@nodelib/fs.walk" "^1.2.8" ajv "^6.12.4" @@ -5884,7 +6089,7 @@ eslint@^8.48.0: espree@^9.6.0, espree@^9.6.1: version "9.6.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" + resolved "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz" integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== dependencies: acorn "^8.9.0" @@ -5893,36 +6098,36 @@ espree@^9.6.0, espree@^9.6.1: esprima@^4.0.0, esprima@^4.0.1, esprima@~4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esquery@^1.4.2: version "1.5.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" + resolved "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz" integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== dependencies: estraverse "^5.1.0" esrecurse@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz" integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== dependencies: estraverse "^5.2.0" estraverse@^4.1.1: version "4.3.0" - resolved 
"https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== estree-to-babel@^3.1.0: version "3.2.1" - resolved "https://registry.yarnpkg.com/estree-to-babel/-/estree-to-babel-3.2.1.tgz#82e78315275c3ca74475fdc8ac1a5103c8a75bf5" + resolved "https://registry.npmjs.org/estree-to-babel/-/estree-to-babel-3.2.1.tgz" integrity sha512-YNF+mZ/Wu2FU/gvmzuWtYc8rloubL7wfXCTgouFrnjGVXPA/EeYYA7pupXWrb3Iv1cTBeSSxxJIbK23l4MRNqg== dependencies: "@babel/traverse" "^7.1.6" @@ -5931,32 +6136,32 @@ estree-to-babel@^3.1.0: esutils@^2.0.2: version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + resolved "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== event-target-shim@^5.0.0: version "5.0.1" - resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" + resolved "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" integrity 
sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== eventemitter3@^4.0.1: version "4.0.7" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.2.0, events@^3.3.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" + resolved "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz" integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== dependencies: md5.js "^1.3.4" @@ -5964,7 +6169,7 @@ evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: execa@^5.0.0, execa@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + resolved "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz" integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== dependencies: cross-spawn "^7.0.3" @@ -5979,7 +6184,7 @@ execa@^5.0.0, execa@^5.1.1: express@^4.17.3: version "4.18.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" + resolved "https://registry.npmjs.org/express/-/express-4.18.2.tgz" integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== dependencies: accepts 
"~1.3.8" @@ -6016,12 +6221,12 @@ express@^4.17.3: extend@^3.0.0: version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + resolved "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extract-zip@^1.6.6: version "1.7.0" - resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.7.0.tgz#556cc3ae9df7f452c493a0cfb51cc30277940927" + resolved "https://registry.npmjs.org/extract-zip/-/extract-zip-1.7.0.tgz" integrity sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA== dependencies: concat-stream "^1.6.2" @@ -6031,17 +6236,17 @@ extract-zip@^1.6.6: fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== fast-equals@^5.0.0: version "5.0.1" - resolved "https://registry.yarnpkg.com/fast-equals/-/fast-equals-5.0.1.tgz#a4eefe3c5d1c0d021aeed0bc10ba5e0c12ee405d" + resolved "https://registry.npmjs.org/fast-equals/-/fast-equals-5.0.1.tgz" integrity sha512-WF1Wi8PwwSY7/6Kx0vKXtw8RwuSGoM1bvDaJbu7MxDlR1vovZjIAKrnzyrThgAjm6JDTu0fVgWXDlMGspodfoQ== fast-glob@^3.2.11, fast-glob@^3.2.12, fast-glob@^3.2.9, fast-glob@^3.3.0, fast-glob@^3.3.1: version "3.3.1" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4" + resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.1.tgz" integrity sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg== dependencies: "@nodelib/fs.stat" "^2.0.2" @@ -6052,55 +6257,55 @@ fast-glob@^3.2.11, 
fast-glob@^3.2.12, fast-glob@^3.2.9, fast-glob@^3.3.0, fast-g fast-json-parse@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/fast-json-parse/-/fast-json-parse-1.0.3.tgz#43e5c61ee4efa9265633046b770fb682a7577c4d" + resolved "https://registry.npmjs.org/fast-json-parse/-/fast-json-parse-1.0.3.tgz" integrity sha512-FRWsaZRWEJ1ESVNbDWmsAlqDk96gPQezzLghafp5J4GUKjbCz3OkAHuZs5TuPEtkbVQERysLp9xv6c24fBm8Aw== fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== fast-levenshtein@^2.0.6: version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== fastq@^1.6.0: version "1.15.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" + resolved "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz" integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== dependencies: reusify "^1.0.4" fb-watchman@^2.0.0: version "2.0.2" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + resolved "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz" integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== dependencies: bser "2.1.1" fd-slicer@~1.1.0: version "1.1.0" - resolved 
"https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" + resolved "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz" integrity sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g== dependencies: pend "~1.2.0" fetch-retry@^5.0.2: version "5.0.6" - resolved "https://registry.yarnpkg.com/fetch-retry/-/fetch-retry-5.0.6.tgz#17d0bc90423405b7a88b74355bf364acd2a7fa56" + resolved "https://registry.npmjs.org/fetch-retry/-/fetch-retry-5.0.6.tgz" integrity sha512-3yurQZ2hD9VISAhJJP9bpYFNQrHHBXE2JxxjY5aLEcDi46RmAzJE2OC9FAde0yis5ElW0jTTzs0zfg/Cca4XqQ== file-entry-cache@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + resolved "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz" integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== dependencies: flat-cache "^3.0.4" file-system-cache@2.3.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/file-system-cache/-/file-system-cache-2.3.0.tgz#201feaf4c8cd97b9d0d608e96861bb6005f46fe6" + resolved "https://registry.npmjs.org/file-system-cache/-/file-system-cache-2.3.0.tgz" integrity sha512-l4DMNdsIPsVnKrgEXbJwDJsA5mB8rGwHYERMgqQx/xAUtChPJMre1bXBzDEqqVbWv9AIbFezXMxeEkZDSrXUOQ== dependencies: fs-extra "11.1.1" @@ -6108,19 +6313,19 @@ file-system-cache@2.3.0: filelist@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" + resolved "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz" integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== dependencies: minimatch "^5.0.1" filename-reserved-regex@^2.0.0: version "2.0.0" - resolved 
"https://registry.yarnpkg.com/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz#abf73dfab735d045440abfea2d91f389ebbfa229" + resolved "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz" integrity sha512-lc1bnsSr4L4Bdif8Xb/qrtokGbq5zlsms/CYH8PP+WtCkGNF65DPiQY8vG3SakEdRn8Dlnm+gW/qWKKjS5sZzQ== filenamify@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/filenamify/-/filenamify-4.3.0.tgz#62391cb58f02b09971c9d4f9d63b3cf9aba03106" + resolved "https://registry.npmjs.org/filenamify/-/filenamify-4.3.0.tgz" integrity sha512-hcFKyUG57yWGAzu1CMt/dPzYZuv+jAJUT85bL8mrXvNe6hWj6yEHEc4EdcgiA6Z3oi1/9wXJdZPXF2dZNgwgOg== dependencies: filename-reserved-regex "^2.0.0" @@ -6129,19 +6334,19 @@ filenamify@^4.3.0: fill-range@^7.0.1: version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" filter-obj@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-2.0.2.tgz#fff662368e505d69826abb113f0f6a98f56e9d5f" + resolved "https://registry.npmjs.org/filter-obj/-/filter-obj-2.0.2.tgz" integrity sha512-lO3ttPjHZRfjMcxWKb1j1eDhTFsu4meeR3lnMcnBFhk6RuLhvEiuALu2TlfL310ph4lCYYwgF/ElIjdP739tdg== finalhandler@1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz" integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== dependencies: debug "2.6.9" @@ -6154,7 +6359,7 @@ finalhandler@1.2.0: find-cache-dir@^2.0.0: version "2.1.0" - resolved 
"https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" + resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz" integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== dependencies: commondir "^1.0.1" @@ -6163,7 +6368,7 @@ find-cache-dir@^2.0.0: find-cache-dir@^3.0.0, find-cache-dir@^3.3.1: version "3.3.2" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz" integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== dependencies: commondir "^1.0.1" @@ -6172,7 +6377,7 @@ find-cache-dir@^3.0.0, find-cache-dir@^3.3.1: find-cache-dir@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-4.0.0.tgz#a30ee0448f81a3990708f6453633c733e2f6eec2" + resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-4.0.0.tgz" integrity sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg== dependencies: common-path-prefix "^3.0.0" @@ -6180,14 +6385,14 @@ find-cache-dir@^4.0.0: find-up@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + resolved "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz" integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== dependencies: 
locate-path "^5.0.0" @@ -6195,7 +6400,7 @@ find-up@^4.0.0, find-up@^4.1.0: find-up@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== dependencies: locate-path "^6.0.0" @@ -6203,41 +6408,41 @@ find-up@^5.0.0: find-up@^6.3.0: version "6.3.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-6.3.0.tgz#2abab3d3280b2dc7ac10199ef324c4e002c8c790" + resolved "https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz" integrity sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw== dependencies: locate-path "^7.1.0" path-exists "^5.0.0" flat-cache@^3.0.4: - version "3.1.0" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.1.0.tgz#0e54ab4a1a60fe87e2946b6b00657f1c99e1af3f" - integrity sha512-OHx4Qwrrt0E4jEIcI5/Xb+f+QmJYNj2rrK8wiIdQOIrB9WrrJL8cjZvXdXuBTkkEwEqLycb5BeZDV1o2i9bTew== + version "3.1.1" + resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.1.1.tgz" + integrity sha512-/qM2b3LUIaIgviBQovTLvijfyOQXPtSRnRK26ksj2J7rzPIecePUIpJsZ4T02Qg+xiAEKIs5K8dsHEd+VaKa/Q== dependencies: - flatted "^3.2.7" + flatted "^3.2.9" keyv "^4.5.3" rimraf "^3.0.2" -flatted@^3.2.7: - version "3.2.7" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" - integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== +flatted@^3.2.9: + version "3.2.9" + resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz" + integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ== flow-parser@0.*: - version "0.215.1" - resolved 
"https://registry.yarnpkg.com/flow-parser/-/flow-parser-0.215.1.tgz#a14007f404db46ac829bb6db3a22a7956d9e298f" - integrity sha512-qq3rdRToqwesrddyXf+Ml8Tuf7TdoJS+EMbJgC6fHAVoBCXjb4mHelNd3J+jD8ts0bSHX81FG3LN7Qn/dcl6pA== + version "0.218.1" + resolved "https://registry.npmjs.org/flow-parser/-/flow-parser-0.218.1.tgz" + integrity sha512-46xpXyI4Bh3K2ej+NF3V5+pAsDlB5P0DWpgIIy/0/R7ujK0syfI/xfKDCOlq2sxtfUyPrr4rxfS2Da7yWdTdwg== for-each@^0.3.3: version "0.3.3" - resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + resolved "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz" integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== dependencies: is-callable "^1.1.3" foreground-child@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-2.0.0.tgz#71b32800c9f15aa8f2f83f4a6bd9bff35d861a53" + resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz" integrity sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA== dependencies: cross-spawn "^7.0.0" @@ -6245,7 +6450,7 @@ foreground-child@^2.0.0: foreground-child@^3.1.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.1.1.tgz#1d173e776d75d2772fed08efe4a0de1ea1b12d0d" + resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz" integrity sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg== dependencies: cross-spawn "^7.0.0" @@ -6253,7 +6458,7 @@ foreground-child@^3.1.0: fork-ts-checker-webpack-plugin@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-8.0.0.tgz#dae45dfe7298aa5d553e2580096ced79b6179504" + resolved "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-8.0.0.tgz" 
integrity sha512-mX3qW3idpueT2klaQXBzrIM/pHw+T0B/V9KHEvNrqijTq9NFnMZU6oreVxDYcf33P8a5cW+67PjodNHthGnNVg== dependencies: "@babel/code-frame" "^7.16.7" @@ -6269,10 +6474,10 @@ fork-ts-checker-webpack-plugin@^8.0.0: semver "^7.3.5" tapable "^2.2.1" -form-data@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" @@ -6280,27 +6485,27 @@ form-data@^3.0.0: forwarded@0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + resolved "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz" integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== -fraction.js@^4.2.0: - version "4.3.6" - resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.6.tgz#e9e3acec6c9a28cf7bc36cbe35eea4ceb2c5c92d" - integrity sha512-n2aZ9tNfYDwaHhvFTkhFErqOMIb8uyzSQ+vGJBjZyanAKZVbGUQ1sngfk9FdkBw7G26O7AgNjLcecLffD1c7eg== +fraction.js@^4.3.6: + version "4.3.7" + resolved "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz" + integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew== fresh@0.5.2: version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== fs-constants@^1.0.0: version "1.0.0" - resolved 
"https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" + resolved "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz" integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== fs-extra@11.1.1, fs-extra@^11.1.0, fs-extra@^11.1.1: version "11.1.1" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.1.1.tgz#da69f7c39f3b002378b0954bb6ae7efdc0876e2d" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.1.tgz" integrity sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ== dependencies: graceful-fs "^4.2.0" @@ -6309,7 +6514,7 @@ fs-extra@11.1.1, fs-extra@^11.1.0, fs-extra@^11.1.1: fs-extra@^10.0.0: version "10.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== dependencies: graceful-fs "^4.2.0" @@ -6318,39 +6523,39 @@ fs-extra@^10.0.0: fs-minipass@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" + resolved "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz" integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== dependencies: minipass "^3.0.0" fs-monkey@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.4.tgz#ee8c1b53d3fe8bb7e5d2c5c5dfc0168afdd2f747" - integrity sha512-INM/fWAxMICjttnD0DX1rBvinKskj5G1w+oy/pnm9u/tSlnBrzFonJMcalKJ30P8RRsPzKcCG7Q8l0jx5Fh9YQ== + version "1.0.5" + resolved "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.5.tgz" + integrity sha512-8uMbBjrhzW76TYgEV27Y5E//W2f/lTFmx78P2w19FZSxarhI/798APGQyuGCwmkNxgwGRhrLfvWyLBvNtuOmew== 
fs-readdir-recursive@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz#e32fc030a2ccee44a6b5371308da54be0b397d27" + resolved "https://registry.npmjs.org/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz" integrity sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA== fs.realpath@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== fsevents@^2.3.2, fsevents@~2.3.2: version "2.3.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz" integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + version "1.1.2" + resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== -function.prototype.name@^1.1.5: +function.prototype.name@^1.1.5, function.prototype.name@^1.1.6: version "1.1.6" - resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.6.tgz#cdf315b7d90ee77a4c6ee216c3c3362da07533fd" + resolved "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz" integrity sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg== 
dependencies: call-bind "^1.0.2" @@ -6360,22 +6565,22 @@ function.prototype.name@^1.1.5: functions-have-names@^1.2.3: version "1.2.3" - resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + resolved "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== gensync@^1.0.0-beta.2: version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== get-caller-file@^2.0.5: version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0, get-intrinsic@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.1.tgz#d295644fed4505fc9cde952c37ee12b477a83d82" + resolved "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz" integrity sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw== dependencies: function-bind "^1.1.1" @@ -6385,47 +6590,47 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@ get-nonce@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/get-nonce/-/get-nonce-1.0.1.tgz#fdf3f0278073820d2ce9426c18f07481b1e0cdf3" + resolved "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz" integrity 
sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q== get-npm-tarball-url@^2.0.3: version "2.0.3" - resolved "https://registry.yarnpkg.com/get-npm-tarball-url/-/get-npm-tarball-url-2.0.3.tgz#67dff908d699e9e2182530ae6e939a93e5f8dfdb" + resolved "https://registry.npmjs.org/get-npm-tarball-url/-/get-npm-tarball-url-2.0.3.tgz" integrity sha512-R/PW6RqyaBQNWYaSyfrh54/qtcnOp22FHCCiRhSSZj0FP3KQWCsxxt0DzIdVTbwTqe9CtQfvl/FPD4UIPt4pqw== get-package-type@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + resolved "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== get-port@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" + resolved "https://registry.npmjs.org/get-port/-/get-port-5.1.1.tgz" integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== get-stream@^6.0.0: version "6.0.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== get-symbol-description@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + resolved "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz" integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== dependencies: call-bind "^1.0.2" get-intrinsic "^1.1.1" get-tsconfig@^4.5.0: - version "4.7.0" - resolved 
"https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.7.0.tgz#06ce112a1463e93196aa90320c35df5039147e34" - integrity sha512-pmjiZ7xtB8URYm74PlGJozDNyhvsVLUcpBa8DZBG3bWHwaHa9bPiRpiSfovw+fjhwONSCWKRyk+JQHEGZmMrzw== + version "4.7.2" + resolved "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.2.tgz" + integrity sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A== dependencies: resolve-pkg-maps "^1.0.0" gh-pages@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/gh-pages/-/gh-pages-6.0.0.tgz#3bb46ea13dc7cee306662db0d3f02bf05635cdc1" + resolved "https://registry.npmjs.org/gh-pages/-/gh-pages-6.0.0.tgz" integrity sha512-FXZWJRsvP/fK2HJGY+Di6FRNHvqFF6gOIELaopDjXXgjeOYSNURcuYwEO/6bwuq6koP5Lnkvnr5GViXzuOB89g== dependencies: async "^3.2.4" @@ -6437,45 +6642,45 @@ gh-pages@^6.0.0: globby "^6.1.0" giget@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/giget/-/giget-1.1.2.tgz#f99a49cb0ff85479c8c3612cdc7ca27f2066e818" - integrity sha512-HsLoS07HiQ5oqvObOI+Qb2tyZH4Gj5nYGfF9qQcZNrPw+uEFhdXtgJr01aO2pWadGHucajYDLxxbtQkm97ON2A== + version "1.1.3" + resolved "https://registry.npmjs.org/giget/-/giget-1.1.3.tgz" + integrity sha512-zHuCeqtfgqgDwvXlR84UNgnJDuUHQcNI5OqWqFxxuk2BshuKbYhJWdxBsEo4PvKqoGh23lUAIvBNpChMLv7/9Q== dependencies: - colorette "^2.0.19" + colorette "^2.0.20" defu "^6.1.2" - https-proxy-agent "^5.0.1" + https-proxy-agent "^7.0.2" mri "^1.2.0" - node-fetch-native "^1.0.2" - pathe "^1.1.0" - tar "^6.1.13" + node-fetch-native "^1.4.0" + pathe "^1.1.1" + tar "^6.2.0" github-slugger@^1.0.0: version "1.5.0" - resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.5.0.tgz#17891bbc73232051474d68bd867a34625c955f7d" + resolved "https://registry.npmjs.org/github-slugger/-/github-slugger-1.5.0.tgz" integrity sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw== glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" - resolved 
"https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" glob-parent@^6.0.1, glob-parent@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== dependencies: is-glob "^4.0.3" glob-to-regexp@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz" integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== glob@7.1.6: version "7.1.6" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + resolved "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" @@ -6487,7 +6692,7 @@ glob@7.1.6: glob@7.1.7: version "7.1.7" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" + resolved "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz" integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== dependencies: fs.realpath "^1.0.0" @@ -6498,19 +6703,19 @@ glob@7.1.7: path-is-absolute "^1.0.0" glob@^10.0.0: - version "10.3.4" - resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.4.tgz#c85c9c7ab98669102b6defda76d35c5b1ef9766f" - integrity 
sha512-6LFElP3A+i/Q8XQKEvZjkEWEOTgAIALR9AO2rwT8bgPhDd1anmqDJDZ6lLddI4ehxxxR1S5RIqKe1uapMQfYaQ== + version "10.3.10" + resolved "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz" + integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g== dependencies: foreground-child "^3.1.0" - jackspeak "^2.0.3" + jackspeak "^2.3.5" minimatch "^9.0.1" minipass "^5.0.0 || ^6.0.2 || ^7.0.0" path-scurry "^1.10.1" glob@^7.0.3, glob@^7.1.3, glob@^7.1.4, glob@^7.2.0: version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== dependencies: fs.realpath "^1.0.0" @@ -6522,26 +6727,26 @@ glob@^7.0.3, glob@^7.1.3, glob@^7.1.4, glob@^7.2.0: globals@^11.1.0: version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^13.19.0: - version "13.21.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.21.0.tgz#163aae12f34ef502f5153cfbdd3600f36c63c571" - integrity sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg== + version "13.23.0" + resolved "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz" + integrity sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA== dependencies: type-fest "^0.20.2" globalthis@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" + resolved "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz" integrity 
sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== dependencies: define-properties "^1.1.3" globby@^11.0.1, globby@^11.0.2, globby@^11.1.0: version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== dependencies: array-union "^2.1.0" @@ -6553,7 +6758,7 @@ globby@^11.0.1, globby@^11.0.2, globby@^11.1.0: globby@^13.1.1: version "13.2.2" - resolved "https://registry.yarnpkg.com/globby/-/globby-13.2.2.tgz#63b90b1bf68619c2135475cbd4e71e66aa090592" + resolved "https://registry.npmjs.org/globby/-/globby-13.2.2.tgz" integrity sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w== dependencies: dir-glob "^3.0.1" @@ -6564,7 +6769,7 @@ globby@^13.1.1: globby@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" + resolved "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz" integrity sha512-KVbFv2TQtbzCoxAnfD6JcHZTYCzyliEaaeM/gH8qQdkKr5s0OP9scEgvdcngyk7AVdY6YVW/TJHd+lQ/Df3Daw== dependencies: array-union "^1.0.1" @@ -6575,24 +6780,24 @@ globby@^6.1.0: gopd@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + resolved "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz" integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== dependencies: get-intrinsic "^1.1.3" graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.9: version "4.2.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + resolved 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== graphemer@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" + resolved "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== gunzip-maybe@^1.4.2: version "1.4.2" - resolved "https://registry.yarnpkg.com/gunzip-maybe/-/gunzip-maybe-1.4.2.tgz#b913564ae3be0eda6f3de36464837a9cd94b98ac" + resolved "https://registry.npmjs.org/gunzip-maybe/-/gunzip-maybe-1.4.2.tgz" integrity sha512-4haO1M4mLO91PW57BMsDFf75UmwoRX0GkdD+Faw+Lr+r/OZrOCS0pIBwOL1xCKQqnQzbNFGgK2V2CpBUPeFNTw== dependencies: browserify-zlib "^0.1.4" @@ -6604,7 +6809,7 @@ gunzip-maybe@^1.4.2: handlebars@^4.7.7: version "4.7.8" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.8.tgz#41c42c18b1be2365439188c77c6afae71c0cd9e9" + resolved "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz" integrity sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ== dependencies: minimist "^1.2.5" @@ -6616,53 +6821,51 @@ handlebars@^4.7.7: has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + resolved "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz" integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== has-flag@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" integrity 
sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== has-flag@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== has-property-descriptors@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + resolved "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz" integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== dependencies: get-intrinsic "^1.1.1" has-proto@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + resolved "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz" integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== has-tostringtag@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + resolved "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz" integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== dependencies: has-symbols "^1.0.2" has@^1.0.3: - version "1.0.3" - resolved 
"https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" + version "1.0.4" + resolved "https://registry.npmjs.org/has/-/has-1.0.4.tgz" + integrity sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ== hash-base@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" + resolved "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz" integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== dependencies: inherits "^2.0.4" @@ -6671,7 +6874,7 @@ hash-base@^3.0.0: hash.js@^1.0.0, hash.js@^1.0.3: version "1.1.7" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" + resolved "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz" integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== dependencies: inherits "^2.0.3" @@ -6679,36 +6882,43 @@ hash.js@^1.0.0, hash.js@^1.0.3: he@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== hmac-drbg@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" + resolved "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz" integrity sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg== dependencies: hash.js "^1.0.3" minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.1" +hoist-non-react-statics@^3.3.0: + 
version "3.3.2" + resolved "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz" + integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + dependencies: + react-is "^16.7.0" + hosted-git-info@^2.1.4: version "2.8.9" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" + resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== html-entities@^2.1.0: version "2.4.0" - resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.4.0.tgz#edd0cee70402584c8c76cc2c0556db09d1f45061" + resolved "https://registry.npmjs.org/html-entities/-/html-entities-2.4.0.tgz" integrity sha512-igBTJcNNNhvZFRtm8uA6xMY6xYleeDwn3PeBCkDz7tHttv4F2hsDI2aPgNERWzvRcNYHNT3ymRaQzllmXj4YsQ== html-escaper@^2.0.0: version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + resolved "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== html-minifier-terser@^6.0.2: version "6.1.0" - resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" + resolved "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== dependencies: camel-case "^4.1.2" @@ -6721,12 +6931,12 @@ html-minifier-terser@^6.0.2: html-tags@^3.1.0: version "3.3.1" - resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.3.1.tgz#a04026a18c882e4bba8a01a3d39cfe465d40b5ce" + resolved "https://registry.npmjs.org/html-tags/-/html-tags-3.3.1.tgz" 
integrity sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ== html-webpack-plugin@^5.5.0: version "5.5.3" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.3.tgz#72270f4a78e222b5825b296e5e3e1328ad525a3e" + resolved "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.3.tgz" integrity sha512-6YrDKTuqaP/TquFH7h4srYWsZx+x6k6+FbsTm0ziCwGHDP78Unr1r9F/H4+sGmMbX08GQcJ+K64x55b+7VM/jg== dependencies: "@types/html-minifier-terser" "^6.0.0" @@ -6737,7 +6947,7 @@ html-webpack-plugin@^5.5.0: htmlparser2@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz" integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== dependencies: domelementtype "^2.0.1" @@ -6747,7 +6957,7 @@ htmlparser2@^6.1.0: http-errors@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== dependencies: depd "2.0.0" @@ -6758,74 +6968,74 @@ http-errors@2.0.0: https-browserify@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" + resolved "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz" integrity sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg== https-proxy-agent@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-4.0.0.tgz#702b71fb5520a132a66de1f67541d9e62154d82b" + resolved 
"https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-4.0.0.tgz" integrity sha512-zoDhWrkR3of1l9QAL8/scJZyLu8j/gBkcwcaQOZh7Gyh/+uJQzGVETdgT30akuwkpL8HTRfssqI3BZuV18teDg== dependencies: agent-base "5" debug "4" -https-proxy-agent@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" - integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== +https-proxy-agent@^7.0.2: + version "7.0.2" + resolved "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz" + integrity sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA== dependencies: - agent-base "6" + agent-base "^7.0.2" debug "4" human-signals@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz" integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== iconv-lite@0.4.24: version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" iconv-lite@^0.6.3: version "0.6.3" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz" integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== dependencies: safer-buffer ">= 2.1.2 < 3.0.0" icss-utils@^5.0.0, icss-utils@^5.1.0: version "5.1.0" - resolved 
"https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + resolved "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz" integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== ieee754@^1.1.13, ieee754@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== ignore@^5.2.0, ignore@^5.2.4: version "5.2.4" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz" integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== image-size@^1.0.0: version "1.0.2" - resolved "https://registry.yarnpkg.com/image-size/-/image-size-1.0.2.tgz#d778b6d0ab75b2737c1556dd631652eb963bc486" + resolved "https://registry.npmjs.org/image-size/-/image-size-1.0.2.tgz" integrity sha512-xfOoWjceHntRb3qFCrh5ZFORYH8XCdYpASltMhZ/Q0KZiOwjdE/Yl2QCiWdwD+lygV5bMCvauzgu5PxBX/Yerg== dependencies: queue "6.0.2" image-size@~0.5.0: version "0.5.5" - resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" + resolved "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz" integrity sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ== import-fresh@^3.2.1, import-fresh@^3.3.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" integrity 
sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== dependencies: parent-module "^1.0.0" @@ -6833,17 +7043,17 @@ import-fresh@^3.2.1, import-fresh@^3.3.0: imurmurhash@^0.1.4: version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== indent-string@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== inflight@^1.0.4: version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: once "^1.3.0" @@ -6851,12 +7061,12 @@ inflight@^1.0.4: inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3, inherits@~2.0.4: version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== internal-slot@^1.0.4, internal-slot@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.5.tgz#f2a2ee21f668f8627a4667f309dc0f4fb6674986" + resolved "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz" integrity 
sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ== dependencies: get-intrinsic "^1.2.0" @@ -6865,34 +7075,34 @@ internal-slot@^1.0.4, internal-slot@^1.0.5: "internmap@1 - 2": version "2.0.3" - resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" + resolved "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz" integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== invariant@^2.2.4: version "2.2.4" - resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + resolved "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz" integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" ip@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da" + resolved "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz" integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ== ipaddr.js@1.9.1: version "1.9.1" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== is-absolute-url@^3.0.0: version "3.0.3" - resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" + resolved "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz" integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== is-arguments@^1.0.4, is-arguments@^1.1.1: version "1.1.1" - resolved 
"https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + resolved "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz" integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== dependencies: call-bind "^1.0.2" @@ -6900,7 +7110,7 @@ is-arguments@^1.0.4, is-arguments@^1.1.1: is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" + resolved "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz" integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== dependencies: call-bind "^1.0.2" @@ -6909,33 +7119,33 @@ is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: is-arrayish@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== is-async-function@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/is-async-function/-/is-async-function-2.0.0.tgz#8e4418efd3e5d3a6ebb0164c05ef5afb69aa9646" + resolved "https://registry.npmjs.org/is-async-function/-/is-async-function-2.0.0.tgz" integrity sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA== dependencies: has-tostringtag "^1.0.0" is-bigint@^1.0.1: version "1.0.4" - resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + resolved "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz" integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== dependencies: has-bigints "^1.0.1" 
is-binary-path@~2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-boolean-object@^1.1.0: version "1.1.2" - resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + resolved "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz" integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== dependencies: call-bind "^1.0.2" @@ -6943,82 +7153,82 @@ is-boolean-object@^1.1.0: is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: version "1.2.7" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz" integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== -is-core-module@^2.11.0, is-core-module@^2.13.0, is-core-module@^2.9.0: +is-core-module@^2.11.0, is-core-module@^2.13.0: version "2.13.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.0.tgz#bb52aa6e2cbd49a30c2ba68c42bf3435ba6072db" + resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz" integrity sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ== dependencies: has "^1.0.3" is-date-object@^1.0.1, is-date-object@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + resolved "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz" integrity 
sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== dependencies: has-tostringtag "^1.0.0" is-deflate@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-deflate/-/is-deflate-1.0.0.tgz#c862901c3c161fb09dac7cdc7e784f80e98f2f14" + resolved "https://registry.npmjs.org/is-deflate/-/is-deflate-1.0.0.tgz" integrity sha512-YDoFpuZWu1VRXlsnlYMzKyVRITXj7Ej/V9gXQ2/pAe7X1J7M/RNOqaIYi6qUn+B7nGyB9pDXrv02dsB58d2ZAQ== is-docker@^2.0.0, is-docker@^2.1.1: version "2.2.1" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + resolved "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== is-extglob@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== is-finalizationregistry@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz#c8749b65f17c133313e661b1289b95ad3dbd62e6" + resolved "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz" integrity sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw== dependencies: call-bind "^1.0.2" is-fullwidth-code-point@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== 
is-generator-function@^1.0.10, is-generator-function@^1.0.7: version "1.0.10" - resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" + resolved "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz" integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== dependencies: has-tostringtag "^1.0.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" is-gzip@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-gzip/-/is-gzip-1.0.0.tgz#6ca8b07b99c77998025900e555ced8ed80879a83" + resolved "https://registry.npmjs.org/is-gzip/-/is-gzip-1.0.0.tgz" integrity sha512-rcfALRIb1YewtnksfRIHGcIY93QnK8BIQ/2c9yDYcG/Y6+vRoJuTWBmmSEbyLLYtXm7q35pHOHbZFQBaLrhlWQ== is-interactive@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + resolved "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz" integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== is-map@^2.0.1, is-map@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/is-map/-/is-map-2.0.2.tgz#00922db8c9bf73e81b7a335827bc2a43f2b91127" + resolved "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz" integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== -is-nan@^1.2.1: +is-nan@^1.3.2: version "1.3.2" - resolved 
"https://registry.yarnpkg.com/is-nan/-/is-nan-1.3.2.tgz#043a54adea31748b55b6cd4e09aadafa69bd9e1d" + resolved "https://registry.npmjs.org/is-nan/-/is-nan-1.3.2.tgz" integrity sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w== dependencies: call-bind "^1.0.0" @@ -7026,46 +7236,46 @@ is-nan@^1.2.1: is-negative-zero@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz" integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== is-number-object@^1.0.4: version "1.0.7" - resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + resolved "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz" integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== dependencies: has-tostringtag "^1.0.0" is-number@^7.0.0: version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^2.2.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" + resolved "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== is-path-inside@^3.0.2, is-path-inside@^3.0.3: version "3.0.3" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + resolved 
"https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz" integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== is-plain-object@5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" + resolved "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz" integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== is-plain-object@^2.0.4: version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + resolved "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-regex@^1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + resolved "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz" integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== dependencies: call-bind "^1.0.2" @@ -7073,62 +7283,62 @@ is-regex@^1.1.4: is-set@^2.0.1, is-set@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.2.tgz#90755fa4c2562dc1c5d4024760d6119b94ca18ec" + resolved "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz" integrity sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g== is-shared-array-buffer@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + resolved "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz" integrity 
sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== dependencies: call-bind "^1.0.2" is-stream@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" - resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + resolved "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz" integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== dependencies: has-tostringtag "^1.0.0" is-symbol@^1.0.2, is-symbol@^1.0.3: version "1.0.4" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + resolved "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz" integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== dependencies: has-symbols "^1.0.2" -is-typed-array@^1.1.10, is-typed-array@^1.1.3, is-typed-array@^1.1.9: +is-typed-array@^1.1.10, is-typed-array@^1.1.12, is-typed-array@^1.1.3, is-typed-array@^1.1.9: version "1.1.12" - resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" + resolved "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz" integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== dependencies: which-typed-array "^1.1.11" is-unicode-supported@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + resolved 
"https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz" integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== is-weakmap@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.1.tgz#5008b59bdc43b698201d18f62b37b2ca243e8cf2" + resolved "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz" integrity sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA== is-weakref@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + resolved "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz" integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== dependencies: call-bind "^1.0.2" is-weakset@^2.0.1: version "2.0.2" - resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.2.tgz#4569d67a747a1ce5a994dfd4ef6dcea76e7c0a1d" + resolved "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz" integrity sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg== dependencies: call-bind "^1.0.2" @@ -7136,44 +7346,44 @@ is-weakset@^2.0.1: is-what@^3.14.1: version "3.14.1" - resolved "https://registry.yarnpkg.com/is-what/-/is-what-3.14.1.tgz#e1222f46ddda85dead0fd1c9df131760e77755c1" + resolved "https://registry.npmjs.org/is-what/-/is-what-3.14.1.tgz" integrity sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA== is-wsl@^2.2.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + resolved "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz" integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== dependencies: is-docker "^2.0.0" isarray@^2.0.5: version "2.0.5" - 
resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + resolved "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz" integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== isarray@~1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== isexe@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== isobject@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + resolved "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + resolved "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz" integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== istanbul-lib-instrument@^5.0.4: version "5.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz" integrity 
sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== dependencies: "@babel/core" "^7.12.3" @@ -7184,7 +7394,7 @@ istanbul-lib-instrument@^5.0.4: istanbul-lib-report@^3.0.0: version "3.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d" + resolved "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz" integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== dependencies: istanbul-lib-coverage "^3.0.0" @@ -7193,26 +7403,27 @@ istanbul-lib-report@^3.0.0: istanbul-reports@^3.1.4: version "3.1.6" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.6.tgz#2544bcab4768154281a2f0870471902704ccaa1a" + resolved "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.6.tgz" integrity sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg== dependencies: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -iterator.prototype@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/iterator.prototype/-/iterator.prototype-1.1.1.tgz#ab5b790e23ec00658f5974e032a2b05188bd3a5c" - integrity sha512-9E+nePc8C9cnQldmNl6bgpTY6zI4OPRZd97fhJ/iVZ1GifIUDVV5F6x1nEDqpe8KaMEZGT4xgrwKQDxXnjOIZQ== +iterator.prototype@^1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.2.tgz" + integrity sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w== dependencies: - define-properties "^1.2.0" + define-properties "^1.2.1" get-intrinsic "^1.2.1" has-symbols "^1.0.3" - reflect.getprototypeof "^1.0.3" + reflect.getprototypeof "^1.0.4" + set-function-name "^2.0.1" -jackspeak@^2.0.3: - version "2.3.3" - resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.3.tgz#95e4cbcc03b3eb357bf6bcce14a903fb3d1151e1" - integrity 
sha512-R2bUw+kVZFS/h1AZqBKrSgDmdmjApzgY0AlCPumopFiAlbUxE2gf+SCuBzQ0cP5hHmUmFYF5yw55T97Th5Kstg== +jackspeak@^2.3.5: + version "2.3.6" + resolved "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz" + integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ== dependencies: "@isaacs/cliui" "^8.0.2" optionalDependencies: @@ -7220,7 +7431,7 @@ jackspeak@^2.0.3: jake@^10.8.5: version "10.8.7" - resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.7.tgz#63a32821177940c33f356e0ba44ff9d34e1c7d8f" + resolved "https://registry.npmjs.org/jake/-/jake-10.8.7.tgz" integrity sha512-ZDi3aP+fG/LchyBzUM804VjddnwfSfsdeYkwt8NcbKRvo4rFkjhs456iLFn3k2ZUWvNe4i48WACDbza8fhq2+w== dependencies: async "^3.2.3" @@ -7228,10 +7439,10 @@ jake@^10.8.5: filelist "^1.0.4" minimatch "^3.1.2" -jest-haste-map@^29.6.4: - version "29.6.4" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-29.6.4.tgz#97143ce833829157ea7025204b08f9ace609b96a" - integrity sha512-12Ad+VNTDHxKf7k+M65sviyynRoZYuL1/GTuhEVb8RYsNSNln71nANRb/faSyWvx0j+gHcivChXHIoMJrGYjog== +jest-haste-map@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz" + integrity sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA== dependencies: "@jest/types" "^29.6.3" "@types/graceful-fs" "^4.1.3" @@ -7240,8 +7451,8 @@ jest-haste-map@^29.6.4: fb-watchman "^2.0.0" graceful-fs "^4.2.9" jest-regex-util "^29.6.3" - jest-util "^29.6.3" - jest-worker "^29.6.4" + jest-util "^29.7.0" + jest-worker "^29.7.0" micromatch "^4.0.4" walker "^1.0.8" optionalDependencies: @@ -7249,7 +7460,7 @@ jest-haste-map@^29.6.4: jest-mock@^27.0.6: version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" + resolved "https://registry.npmjs.org/jest-mock/-/jest-mock-27.5.1.tgz" integrity 
sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== dependencies: "@jest/types" "^27.5.1" @@ -7257,13 +7468,13 @@ jest-mock@^27.0.6: jest-regex-util@^29.6.3: version "29.6.3" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.6.3.tgz#4a556d9c776af68e1c5f48194f4d0327d24e8a52" + resolved "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz" integrity sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg== -jest-util@^29.6.3: - version "29.6.3" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.6.3.tgz#e15c3eac8716440d1ed076f09bc63ace1aebca63" - integrity sha512-QUjna/xSy4B32fzcKTSz1w7YYzgiHrjjJjevdRf61HYk998R5vVMMNmrHESYZVDS5DSWs+1srPLPKxXPkeSDOA== +jest-util@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz" + integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA== dependencies: "@jest/types" "^29.6.3" "@types/node" "*" @@ -7274,36 +7485,36 @@ jest-util@^29.6.3: jest-worker@^27.4.5: version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz" integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== dependencies: "@types/node" "*" merge-stream "^2.0.0" supports-color "^8.0.0" -jest-worker@^29.6.4: - version "29.6.4" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.6.4.tgz#f34279f4afc33c872b470d4af21b281ac616abd3" - integrity sha512-6dpvFV4WjcWbDVGgHTWo/aupl8/LbBx2NSKfiwqf79xC/yeJjKHT1+StcKy/2KTmW16hE68ccKVOtXf+WZGz7Q== +jest-worker@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz" + integrity 
sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== dependencies: "@types/node" "*" - jest-util "^29.6.3" + jest-util "^29.7.0" merge-stream "^2.0.0" supports-color "^8.0.0" jiti@^1.18.2: - version "1.19.3" - resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.19.3.tgz#ef554f76465b3c2b222dc077834a71f0d4a37569" - integrity sha512-5eEbBDQT/jF1xg6l36P+mWGGoH9Spuy0PCdSr2dtWRDGC6ph/w9ZCL4lmESW8f8F7MwT3XKescfP0wnZWAKL9w== + version "1.20.0" + resolved "https://registry.npmjs.org/jiti/-/jiti-1.20.0.tgz" + integrity sha512-3TV69ZbrvV6U5DfQimop50jE9Dl6J8O1ja1dvBbMba/sZ3YBEQqJ2VZRoQPVnhlzjNtU1vaXRZVrVjU4qtm8yA== "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-yaml@^3.13.1: version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== dependencies: argparse "^1.0.7" @@ -7311,14 +7522,14 @@ js-yaml@^3.13.1: js-yaml@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== dependencies: argparse "^2.0.1" jscodeshift@^0.14.0: version "0.14.0" - resolved "https://registry.yarnpkg.com/jscodeshift/-/jscodeshift-0.14.0.tgz#7542e6715d6d2e8bde0b4e883f0ccea358b46881" + resolved "https://registry.npmjs.org/jscodeshift/-/jscodeshift-0.14.0.tgz" integrity 
sha512-7eCC1knD7bLUPuSCwXsMZUH51O8jIcoVyKtI6P0XM0IVzlGjckPy3FIwQlorzbN0Sg79oK+RlohN32Mqf/lrYA== dependencies: "@babel/core" "^7.13.16" @@ -7343,54 +7554,54 @@ jscodeshift@^0.14.0: jsesc@^2.5.1: version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz" integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== json-buffer@3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" + resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz" integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz" integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== json-schema-traverse@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== 
json-schema-traverse@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + resolved "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz" integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== json5@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + resolved "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz" integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" json5@^2.1.2, json5@^2.2.2, json5@^2.2.3: version "2.2.3" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== jsonfile@^6.0.1: version "6.1.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz" integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== dependencies: universalify "^2.0.0" @@ -7399,7 +7610,7 @@ jsonfile@^6.0.1: "jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.3: version "3.3.5" - 
resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz#4766bd05a8e2a11af222becd19e15575e52a853a" + resolved "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz" integrity sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ== dependencies: array-includes "^3.1.6" @@ -7408,42 +7619,42 @@ jsonfile@^6.0.1: object.values "^1.1.6" keyv@^4.5.3: - version "4.5.3" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.3.tgz#00873d2b046df737963157bd04f294ca818c9c25" - integrity sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug== + version "4.5.4" + resolved "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz" + integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== dependencies: json-buffer "3.0.1" kind-of@^6.0.2: version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + resolved "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== kleur@^3.0.3: version "3.0.3" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== klona@^2.0.4: version "2.0.6" - resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.6.tgz#85bffbf819c03b2f53270412420a4555ef882e22" + resolved "https://registry.npmjs.org/klona/-/klona-2.0.6.tgz" integrity sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== language-subtag-registry@~0.3.2: version "0.3.22" - resolved 
"https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" + resolved "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz" integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== language-tags@=1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + resolved "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz" integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== dependencies: language-subtag-registry "~0.3.2" lazy-universal-dotenv@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/lazy-universal-dotenv/-/lazy-universal-dotenv-4.0.0.tgz#0b220c264e89a042a37181a4928cdd298af73422" + resolved "https://registry.npmjs.org/lazy-universal-dotenv/-/lazy-universal-dotenv-4.0.0.tgz" integrity sha512-aXpZJRnTkpK6gQ/z4nk+ZBLd/Qdp118cvPruLSIQzQNRhKwEcdXCOzXuF55VDqIiuAaY3UGZ10DJtvZzDcvsxg== dependencies: app-root-dir "^1.0.2" @@ -7452,12 +7663,12 @@ lazy-universal-dotenv@^4.0.0: less-loader@^11.1.0: version "11.1.3" - resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-11.1.3.tgz#1bb62d6ca9bf00a177c02793b54baac40f9be694" + resolved "https://registry.npmjs.org/less-loader/-/less-loader-11.1.3.tgz" integrity sha512-A5b7O8dH9xpxvkosNrP0dFp2i/dISOJa9WwGF3WJflfqIERE2ybxh1BFDj5CovC2+jCE4M354mk90hN6ziXlVw== less@^4.1.2: version "4.2.0" - resolved "https://registry.yarnpkg.com/less/-/less-4.2.0.tgz#cbefbfaa14a4cd388e2099b2b51f956e1465c450" + resolved "https://registry.npmjs.org/less/-/less-4.2.0.tgz" integrity sha512-P3b3HJDBtSzsXUl0im2L7gTO5Ubg8mEN6G8qoTS77iXxXX4Hvu4Qj540PZDvQ8V6DmX6iXo98k7Md0Cm1PrLaA== dependencies: copy-anything "^2.0.1" @@ -7474,12 +7685,12 @@ less@^4.1.2: leven@^3.1.0: version "3.1.0" - resolved 
"https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + resolved "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz" integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== levn@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + resolved "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz" integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== dependencies: prelude-ls "^1.2.1" @@ -7487,22 +7698,22 @@ levn@^0.4.1: lilconfig@^2.0.5, lilconfig@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" + resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz" integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== lines-and-columns@^1.1.6: version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz" integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== loader-runner@^4.2.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + resolved "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz" integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== loader-utils@^2.0.0, loader-utils@^2.0.4: version "2.0.4" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" + resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz" integrity 
sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== dependencies: big.js "^5.2.2" @@ -7511,12 +7722,12 @@ loader-utils@^2.0.0, loader-utils@^2.0.4: loader-utils@^3.2.0: version "3.2.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-3.2.1.tgz#4fb104b599daafd82ef3e1a41fb9265f87e1f576" + resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.1.tgz" integrity sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw== locate-path@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz" integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" @@ -7524,48 +7735,48 @@ locate-path@^3.0.0: locate-path@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== dependencies: p-locate "^4.1.0" locate-path@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz" integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== dependencies: p-locate "^5.0.0" locate-path@^7.1.0: version "7.2.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-7.2.0.tgz#69cb1779bd90b35ab1e771e1f2f89a202c2a8a8a" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz" integrity 
sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA== dependencies: p-locate "^6.0.0" lodash.camelcase@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" + resolved "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz" integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== lodash.debounce@^4.0.8: version "4.0.8" - resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz" integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== lodash.merge@^4.6.2: version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + resolved "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + resolved "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz" integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== dependencies: chalk "^4.1.0" @@ -7573,50 +7784,55 @@ log-symbols@^4.1.0: long@^5.0.0, long@^5.2.3: 
version "5.2.3" - resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1" + resolved "https://registry.npmjs.org/long/-/long-5.2.3.tgz" integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q== loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + resolved "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" lower-case@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + resolved "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz" integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== dependencies: tslib "^2.0.3" lru-cache@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" lru-cache@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" "lru-cache@^9.1.1 || ^10.0.0": version "10.0.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.0.1.tgz#0a3be479df549cca0e5d693ac402ff19537a6b7a" + resolved 
"https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz" integrity sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g== +lucide-react@^0.287.0: + version "0.287.0" + resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.287.0.tgz#efa49872a91fa97b7ef650c4b40396b6880d0088" + integrity sha512-auxP2bTGiMoELzX+6ItTeNzLmhGd/O+PHBsrXV2YwPXYCxarIFJhiMOSzFT9a1GWeYPSZtnWdLr79IVXr/5JqQ== + lz-string@^1.5.0: version "1.5.0" - resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.5.0.tgz#c1ab50f77887b712621201ba9fd4e3a6ed099941" + resolved "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz" integrity sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ== make-dir@^2.0.0, make-dir@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" + resolved "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz" integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== dependencies: pify "^4.0.1" @@ -7624,43 +7840,43 @@ make-dir@^2.0.0, make-dir@^2.1.0: make-dir@^3.0.2: version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + resolved "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz" integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== dependencies: semver "^6.0.0" make-dir@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" + resolved "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz" integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== dependencies: semver "^7.5.3" makeerror@1.0.12: version "1.0.12" - resolved 
"https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + resolved "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz" integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== dependencies: tmpl "1.0.5" map-or-similar@^1.5.0: version "1.5.0" - resolved "https://registry.yarnpkg.com/map-or-similar/-/map-or-similar-1.5.0.tgz#6de2653174adfb5d9edc33c69d3e92a1b76faf08" + resolved "https://registry.npmjs.org/map-or-similar/-/map-or-similar-1.5.0.tgz" integrity sha512-0aF7ZmVon1igznGI4VS30yugpduQW3y3GkcgGJOp7d8x8QrizhigUxjI/m2UojsXXto+jLAH3KSz+xOJTiORjg== markdown-to-jsx@^7.1.8: version "7.3.2" - resolved "https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-7.3.2.tgz#f286b4d112dad3028acc1e77dfe1f653b347e131" + resolved "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-7.3.2.tgz" integrity sha512-B+28F5ucp83aQm+OxNrPkS8z0tMKaeHiy0lHJs3LqCyDQFtWuenaIrkaVTgAm1pf1AU85LXltva86hlaT17i8Q== material-symbols@0.11.0: version "0.11.0" - resolved "https://registry.yarnpkg.com/material-symbols/-/material-symbols-0.11.0.tgz#b3bfa073946c61098a59108fd38a6c5e60efb8b0" + resolved "https://registry.npmjs.org/material-symbols/-/material-symbols-0.11.0.tgz" integrity sha512-YJ7EyB7KMZFPQAKi0x1A+tH4zXEYw8FSGy1aAheqLm/ELzdotKwYe1hXUpmMcz87g7OoqikDeKEjRB+uGv9LrQ== md5.js@^1.3.4: version "1.3.5" - resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" + resolved "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz" integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== dependencies: hash-base "^3.0.0" @@ -7669,58 +7885,58 @@ md5.js@^1.3.4: mdast-util-definitions@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz#c5c1a84db799173b4dcf7643cda999e440c24db2" + resolved 
"https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz" integrity sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ== dependencies: unist-util-visit "^2.0.0" mdast-util-to-string@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/mdast-util-to-string/-/mdast-util-to-string-1.1.0.tgz#27055500103f51637bd07d01da01eb1967a43527" + resolved "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-1.1.0.tgz" integrity sha512-jVU0Nr2B9X3MU4tSK7JP1CMkSvOj7X5l/GboG1tKRw52lLF1x2Ju92Ms9tNetCcbfX3hzlM73zYo2NKkWSfF/A== media-typer@0.3.0: version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + resolved "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== memfs@^3.4.1, memfs@^3.4.12: version "3.6.0" - resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.6.0.tgz#d7a2110f86f79dd950a8b6df6d57bc984aa185f6" + resolved "https://registry.npmjs.org/memfs/-/memfs-3.6.0.tgz" integrity sha512-EGowvkkgbMcIChjMTMkESFDbZeSh8xZ7kNSF0hAiAN4Jh6jgHCRS0Ga/+C8y6Au+oqpezRHCfPsmJ2+DwAgiwQ== dependencies: fs-monkey "^1.0.4" memoizerific@^1.11.3: version "1.11.3" - resolved "https://registry.yarnpkg.com/memoizerific/-/memoizerific-1.11.3.tgz#7c87a4646444c32d75438570905f2dbd1b1a805a" + resolved "https://registry.npmjs.org/memoizerific/-/memoizerific-1.11.3.tgz" integrity sha512-/EuHYwAPdLtXwAwSZkh/Gutery6pD2KYd44oQLhAvQp/50mpyduZh8Q7PYHXTCJ+wuXxt7oij2LXyIJOOYFPog== dependencies: map-or-similar "^1.5.0" merge-descriptors@1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" integrity 
sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== merge-stream@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== methods@~1.1.2: version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + resolved "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz" integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== dependencies: braces "^3.0.2" @@ -7728,7 +7944,7 @@ micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: miller-rabin@^4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" + resolved "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz" integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== dependencies: bn.js "^4.0.0" @@ -7736,161 +7952,166 @@ miller-rabin@^4.0.0: mime-db@1.52.0, 
"mime-db@>= 1.43.0 < 2": version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== mime-types@^2.1.12, mime-types@^2.1.25, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.24, mime-types@~2.1.34: version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: mime-db "1.52.0" mime@1.6.0, mime@^1.4.1: version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + resolved "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.0.3: version "2.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" + resolved "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz" integrity sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg== mimic-fn@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== min-indent@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + resolved "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz" integrity 
sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimalistic-crypto-utils@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" + resolved "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz" integrity sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg== minimatch@^3.0.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" minimatch@^5.0.1: version "5.1.6" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz" integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== dependencies: brace-expansion "^2.0.1" minimatch@^9.0.1: version "9.0.3" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz" integrity 
sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== dependencies: brace-expansion "^2.0.1" minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== minipass@^3.0.0: version "3.3.6" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" + resolved "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz" integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== dependencies: yallist "^4.0.0" minipass@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" + resolved "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz" integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== "minipass@^5.0.0 || ^6.0.2 || ^7.0.0": - version "7.0.3" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.3.tgz#05ea638da44e475037ed94d1c7efcc76a25e1974" - integrity sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg== + version "7.0.4" + resolved "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz" + integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ== minizlib@^2.1.1: version "2.1.2" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" + resolved "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz" integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== dependencies: minipass "^3.0.0" 
yallist "^4.0.0" +mitt@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mitt/-/mitt-3.0.1.tgz#ea36cf0cc30403601ae074c8f77b7092cdab36d1" + integrity sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw== + mkdirp-classic@^0.5.2: version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" + resolved "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz" integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== mkdirp@^0.5.4: version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz" integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== dependencies: minimist "^1.2.6" mkdirp@^1.0.3: version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== +moment@^2.29.4: + version "2.29.4" + resolved "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz" + integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== + mri@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/mri/-/mri-1.2.0.tgz#6721480fec2a11a4889861115a48b6cbe7cc8f0b" + resolved "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz" integrity sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA== ms@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" integrity 
sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== -ms@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" - integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== - ms@2.1.2: version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== ms@2.1.3, ms@^2.1.1: version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== mz@^2.7.0: version "2.7.0" - resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + resolved "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz" integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== dependencies: any-promise "^1.0.0" object-assign "^4.0.1" thenify-all "^1.0.0" -nanoid@^3.3.4, nanoid@^3.3.6: +nanoid@^3.3.6: version "3.3.6" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c" + resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz" integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== natural-compare@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" integrity 
sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== needle@^3.1.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/needle/-/needle-3.2.0.tgz#07d240ebcabfd65c76c03afae7f6defe6469df44" + resolved "https://registry.npmjs.org/needle/-/needle-3.2.0.tgz" integrity sha512-oUvzXnyLiVyVGoianLijF9O/RecZUf7TkBfimjGrLM4eQhXyeJwM6GeAWccwfQ9aa4gMCZKqhAOuLaMIcQxajQ== dependencies: debug "^3.2.6" @@ -7899,41 +8120,47 @@ needle@^3.1.0: negotiator@0.6.3: version "0.6.3" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + resolved "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz" integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== neo-async@^2.5.0, neo-async@^2.6.1, neo-async@^2.6.2: version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + resolved "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== -next@13.4.16: - version "13.4.16" - resolved "https://registry.yarnpkg.com/next/-/next-13.4.16.tgz#327ef6885b22161ed001cd5943c20b5e409a9406" - integrity sha512-1xaA/5DrfpPu0eV31Iro7JfPeqO8uxQWb1zYNTe+KDKdzqkAGapLcDYHMLNKXKB7lHjZ7LfKUOf9dyuzcibrhA== +next-usequerystate@^1.8.4: + version "1.8.4" + resolved "https://registry.yarnpkg.com/next-usequerystate/-/next-usequerystate-1.8.4.tgz#81125aa3dd5a0c6afbc21da8c1f663ee2e0a0350" + integrity sha512-V4xMh87cu950Zy1Jpw/H8GwWxAeAmqnLNJ8hAl5bdEWpyZV4UIKdkJePKMCUy1+h254EXGmY83BuCGJOASJRVg== + dependencies: + mitt "^3.0.1" + +next@13.5.5: + version "13.5.5" + resolved "https://registry.yarnpkg.com/next/-/next-13.5.5.tgz#65addd98a1ae42845d455e08bc491448bb34929b" + integrity sha512-LddFJjpfrtrMMw8Q9VLhIURuSidiCNcMQjRqcPtrKd+Fx07MsG7hYndJb/f2d3I+mTbTotsTJfCnn0eZ/YPk8w== 
dependencies: - "@next/env" "13.4.16" - "@swc/helpers" "0.5.1" + "@next/env" "13.5.5" + "@swc/helpers" "0.5.2" busboy "1.6.0" caniuse-lite "^1.0.30001406" - postcss "8.4.14" + postcss "8.4.31" styled-jsx "5.1.1" watchpack "2.4.0" - zod "3.21.4" optionalDependencies: - "@next/swc-darwin-arm64" "13.4.16" - "@next/swc-darwin-x64" "13.4.16" - "@next/swc-linux-arm64-gnu" "13.4.16" - "@next/swc-linux-arm64-musl" "13.4.16" - "@next/swc-linux-x64-gnu" "13.4.16" - "@next/swc-linux-x64-musl" "13.4.16" - "@next/swc-win32-arm64-msvc" "13.4.16" - "@next/swc-win32-ia32-msvc" "13.4.16" - "@next/swc-win32-x64-msvc" "13.4.16" + "@next/swc-darwin-arm64" "13.5.5" + "@next/swc-darwin-x64" "13.5.5" + "@next/swc-linux-arm64-gnu" "13.5.5" + "@next/swc-linux-arm64-musl" "13.5.5" + "@next/swc-linux-x64-gnu" "13.5.5" + "@next/swc-linux-x64-musl" "13.5.5" + "@next/swc-win32-arm64-msvc" "13.5.5" + "@next/swc-win32-ia32-msvc" "13.5.5" + "@next/swc-win32-x64-msvc" "13.5.5" no-case@^3.0.4: version "3.0.4" - resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + resolved "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz" integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== dependencies: lower-case "^2.0.2" @@ -7941,36 +8168,36 @@ no-case@^3.0.4: node-abort-controller@^3.0.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.1.1.tgz#a94377e964a9a37ac3976d848cb5c765833b8548" + resolved "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz" integrity sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ== node-dir@^0.1.10, node-dir@^0.1.17: version "0.1.17" - resolved "https://registry.yarnpkg.com/node-dir/-/node-dir-0.1.17.tgz#5f5665d93351335caabef8f1c554516cf5f1e4e5" + resolved "https://registry.npmjs.org/node-dir/-/node-dir-0.1.17.tgz" integrity 
sha512-tmPX422rYgofd4epzrNoOXiE8XFZYOcCq1vD7MAXCDO+O+zndlA2ztdKKMa+EeuBG5tHETpr4ml4RGgpqDCCAg== dependencies: minimatch "^3.0.2" -node-fetch-native@^1.0.2: +node-fetch-native@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/node-fetch-native/-/node-fetch-native-1.4.0.tgz#fbe8ac033cb6aa44bd106b5e4fd2b6277ba70fa1" + resolved "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.4.0.tgz" integrity sha512-F5kfEj95kX8tkDhUCYdV8dg3/8Olx/94zB8+ZNthFs6Bz31UpUi8Xh40TN3thLwXgrwXry1pEg9lJ++tLWTcqA== node-fetch@^2.0.0: version "2.7.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz" integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== dependencies: whatwg-url "^5.0.0" node-int64@^0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + resolved "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz" integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== node-polyfill-webpack-plugin@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/node-polyfill-webpack-plugin/-/node-polyfill-webpack-plugin-2.0.1.tgz#141d86f177103a8517c71d99b7c6a46edbb1bb58" + resolved "https://registry.npmjs.org/node-polyfill-webpack-plugin/-/node-polyfill-webpack-plugin-2.0.1.tgz" integrity sha512-ZUMiCnZkP1LF0Th2caY6J/eKKoA0TefpoVa68m/LQU1I/mE8rGt4fNYGgNuCcK+aG8P8P43nbeJ2RqJMOL/Y1A== dependencies: assert "^2.0.0" @@ -8001,12 +8228,12 @@ node-polyfill-webpack-plugin@^2.0.1: node-releases@^2.0.13: version "2.0.13" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.13.tgz#d5ed1627c23e3461e819b02e57b75e4899b1c81d" + resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz" integrity 
sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ== normalize-package-data@^2.5.0: version "2.5.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + resolved "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" @@ -8016,46 +8243,46 @@ normalize-package-data@^2.5.0: normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" - resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + resolved "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== npm-run-path@^4.0.1: version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz" integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== dependencies: path-key "^3.0.0" nth-check@^2.0.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + resolved "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz" integrity 
sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== dependencies: boolbase "^1.0.0" object-assign@^4.0.1, object-assign@^4.1.1: version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz" integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== object-hash@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + resolved "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz" integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== object-inspect@^1.12.3, object-inspect@^1.9.0: version "1.12.3" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" + resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz" integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== -object-is@^1.0.1, object-is@^1.1.5: +object-is@^1.1.5: version "1.1.5" - resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" + resolved "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz" integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== dependencies: call-bind "^1.0.2" @@ -8063,12 +8290,12 @@ object-is@^1.0.1, object-is@^1.1.5: object-keys@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz" integrity 
sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object.assign@^4.1.4: version "4.1.4" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz" integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== dependencies: call-bind "^1.0.2" @@ -8078,7 +8305,7 @@ object.assign@^4.1.4: object.entries@^1.1.6: version "1.1.7" - resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.7.tgz#2b47760e2a2e3a752f39dd874655c61a7f03c131" + resolved "https://registry.npmjs.org/object.entries/-/object.entries-1.1.7.tgz" integrity sha512-jCBs/0plmPsOnrKAfFQXRG2NFjlhZgjjcBLSmTnEhU8U6vVTsVe8ANeQJCHTl3gSsI4J+0emOoCgoKlmQPMgmA== dependencies: call-bind "^1.0.2" @@ -8087,7 +8314,7 @@ object.entries@^1.1.6: object.fromentries@^2.0.6: version "2.0.7" - resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.7.tgz#71e95f441e9a0ea6baf682ecaaf37fa2a8d7e616" + resolved "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz" integrity sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA== dependencies: call-bind "^1.0.2" @@ -8096,7 +8323,7 @@ object.fromentries@^2.0.6: object.groupby@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/object.groupby/-/object.groupby-1.0.1.tgz#d41d9f3c8d6c778d9cbac86b4ee9f5af103152ee" + resolved "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz" integrity sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ== dependencies: call-bind "^1.0.2" @@ -8106,7 +8333,7 @@ object.groupby@^1.0.0: object.hasown@^1.1.2: version "1.1.3" - resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.3.tgz#6a5f2897bb4d3668b8e79364f98ccf971bda55ae" + 
resolved "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.3.tgz" integrity sha512-fFI4VcYpRHvSLXxP7yiZOMAd331cPfd2p7PFDVbgUsYOfCT3tICVqXWngbjr4m49OvsBwUBQ6O2uQoJvy3RexA== dependencies: define-properties "^1.2.0" @@ -8114,7 +8341,7 @@ object.hasown@^1.1.2: object.values@^1.1.6: version "1.1.7" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.7.tgz#617ed13272e7e1071b43973aa1655d9291b8442a" + resolved "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz" integrity sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng== dependencies: call-bind "^1.0.2" @@ -8123,38 +8350,38 @@ object.values@^1.1.6: objectorarray@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/objectorarray/-/objectorarray-1.0.5.tgz#2c05248bbefabd8f43ad13b41085951aac5e68a5" + resolved "https://registry.npmjs.org/objectorarray/-/objectorarray-1.0.5.tgz" integrity sha512-eJJDYkhJFFbBBAxeh8xW+weHlkI28n2ZdQV/J/DNfWfSKlGEf2xcfAbZTv3riEXHAhL9SVOTs2pRmXiSTf78xg== on-finished@2.4.1: version "2.4.1" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + resolved "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz" integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + resolved "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" integrity 
sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: wrappy "1" onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== dependencies: mimic-fn "^2.1.0" open@^8.0.4, open@^8.4.0: version "8.4.2" - resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" + resolved "https://registry.npmjs.org/open/-/open-8.4.2.tgz" integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== dependencies: define-lazy-prop "^2.0.0" @@ -8163,7 +8390,7 @@ open@^8.0.4, open@^8.4.0: optionator@^0.9.3: version "0.9.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" + resolved "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz" integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== dependencies: "@aashutoshrathi/word-wrap" "^1.2.3" @@ -8175,7 +8402,7 @@ optionator@^0.9.3: ora@^5.4.1: version "5.4.1" - resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + resolved "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz" integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== dependencies: bl "^4.1.0" @@ -8190,83 +8417,83 @@ ora@^5.4.1: os-browserify@^0.3.0: version "0.3.0" - resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" + resolved "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz" integrity 
sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A== p-limit@^2.0.0, p-limit@^2.2.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" p-limit@^3.0.2: version "3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== dependencies: yocto-queue "^0.1.0" p-limit@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-4.0.0.tgz#914af6544ed32bfa54670b061cafcbd04984b644" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz" integrity sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ== dependencies: yocto-queue "^1.0.0" p-locate@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz" integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-locate@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== dependencies: p-limit "^2.2.0" p-locate@^5.0.0: version "5.0.0" - resolved 
"https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz" integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== dependencies: p-limit "^3.0.2" p-locate@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-6.0.0.tgz#3da9a49d4934b901089dca3302fa65dc5a05c04f" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz" integrity sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw== dependencies: p-limit "^4.0.0" p-map@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + resolved "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz" integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== dependencies: aggregate-error "^3.0.0" p-try@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@~0.2.0: version "0.2.9" - resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75" + resolved "https://registry.npmjs.org/pako/-/pako-0.2.9.tgz" integrity sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA== pako@~1.0.5: version "1.0.11" - resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" + resolved "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz" integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== param-case@^3.0.4: version "3.0.4" - resolved 
"https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + resolved "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz" integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== dependencies: dot-case "^3.0.4" @@ -8274,14 +8501,14 @@ param-case@^3.0.4: parent-module@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== dependencies: callsites "^3.0.0" parse-asn1@^5.0.0, parse-asn1@^5.1.5: version "5.1.6" - resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.6.tgz#385080a3ec13cb62a62d39409cb3e88844cdaed4" + resolved "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz" integrity sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw== dependencies: asn1.js "^5.2.0" @@ -8292,7 +8519,7 @@ parse-asn1@^5.0.0, parse-asn1@^5.1.5: parse-json@^5.0.0, parse-json@^5.2.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== dependencies: "@babel/code-frame" "^7.0.0" @@ -8302,17 +8529,17 @@ parse-json@^5.0.0, parse-json@^5.2.0: parse-node-version@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/parse-node-version/-/parse-node-version-1.0.1.tgz#e2b5dbede00e7fa9bc363607f53327e8b073189b" + resolved "https://registry.npmjs.org/parse-node-version/-/parse-node-version-1.0.1.tgz" integrity 
sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA== -parseurl@~1.3.2, parseurl@~1.3.3: +parseurl@~1.3.3: version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + resolved "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascal-case@^3.1.2: version "3.1.2" - resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + resolved "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz" integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== dependencies: no-case "^3.0.4" @@ -8320,42 +8547,42 @@ pascal-case@^3.1.2: path-browserify@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-1.0.1.tgz#d98454a9c3753d5790860f16f68867b9e46be1fd" + resolved "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz" integrity sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g== path-exists@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz" integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== path-exists@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== path-exists@^5.0.0: version "5.0.0" - resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-5.0.0.tgz#a6aad9489200b21fab31e49cf09277e5116fb9e7" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz" integrity sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ== path-is-absolute@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-parse@^1.0.7: version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-scurry@^1.10.1: version "1.10.1" - resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.1.tgz#9ba6bf5aa8500fe9fd67df4f0d9483b2b0bfc698" + resolved "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz" integrity sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ== dependencies: lru-cache "^9.1.1 || ^10.0.0" @@ -8363,22 +8590,22 @@ path-scurry@^1.10.1: path-to-regexp@0.1.7: version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" 
integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== path-type@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -pathe@^1.1.0: +pathe@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.1.tgz#1dd31d382b974ba69809adc9a7a347e65d84829a" + resolved "https://registry.npmjs.org/pathe/-/pathe-1.1.1.tgz" integrity sha512-d+RQGp0MAYTIaDBIMmOfMwz3E+LOZnxx1HZd5R18mmCZY0QBlK0LDZfPc8FW8Ed2DlvsuE6PRjroDY+wg4+j/Q== pbkdf2@^3.0.3: version "3.1.2" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075" + resolved "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz" integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== dependencies: create-hash "^1.1.2" @@ -8389,7 +8616,7 @@ pbkdf2@^3.0.3: peek-stream@^1.1.0: version "1.1.3" - resolved "https://registry.yarnpkg.com/peek-stream/-/peek-stream-1.1.3.tgz#3b35d84b7ccbbd262fff31dc10da56856ead6d67" + resolved "https://registry.npmjs.org/peek-stream/-/peek-stream-1.1.3.tgz" integrity sha512-FhJ+YbOSBb9/rIl2ZeE/QHEsWn7PqNYt8ARAY3kIgNGOk13g9FGyIY6JIl/xB/3TFRVoTv5as0l11weORrTekA== dependencies: buffer-from "^1.0.0" @@ -8398,91 +8625,91 @@ peek-stream@^1.1.0: pend@~1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" + resolved "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz" integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== picocolors@^1.0.0: version "1.0.0" - resolved 
"https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.0, picomatch@^2.3.1: version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== pify@^2.0.0, pify@^2.3.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== pify@^4.0.1: version "4.0.1" - resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" + resolved "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + resolved "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz" integrity sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw== dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + resolved "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" integrity sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg== 
pirates@^4.0.1, pirates@^4.0.4, pirates@^4.0.5: version "4.0.6" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" + resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz" integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== pkg-dir@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" + resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz" integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== dependencies: find-up "^3.0.0" pkg-dir@^4.1.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz" integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== dependencies: find-up "^4.0.0" pkg-dir@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-5.0.0.tgz#a02d6aebe6ba133a928f74aec20bafdfe6b8e760" + resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-5.0.0.tgz" integrity sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA== dependencies: find-up "^5.0.0" pkg-dir@^7.0.0: version "7.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-7.0.0.tgz#8f0c08d6df4476756c5ff29b3282d0bab7517d11" + resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-7.0.0.tgz" integrity sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA== dependencies: find-up "^6.3.0" pnp-webpack-plugin@^1.7.0: version "1.7.0" - resolved "https://registry.yarnpkg.com/pnp-webpack-plugin/-/pnp-webpack-plugin-1.7.0.tgz#65741384f6d8056f36e2255a8d67ffc20866f5c9" + resolved 
"https://registry.npmjs.org/pnp-webpack-plugin/-/pnp-webpack-plugin-1.7.0.tgz" integrity sha512-2Rb3vm+EXble/sMXNSu6eoBx8e79gKqhNq9F5ZWW6ERNCTE/Q0wQNne5541tE5vKjfM8hpNCYL+LGc1YTfI0dg== dependencies: ts-pnp "^1.1.6" polished@^4.2.2: version "4.2.2" - resolved "https://registry.yarnpkg.com/polished/-/polished-4.2.2.tgz#2529bb7c3198945373c52e34618c8fe7b1aa84d1" + resolved "https://registry.npmjs.org/polished/-/polished-4.2.2.tgz" integrity sha512-Sz2Lkdxz6F2Pgnpi9U5Ng/WdWAUZxmHrNPoVlm3aAemxoy2Qy7LGjQg4uf8qKelDAUW94F4np3iH2YPf2qefcQ== dependencies: "@babel/runtime" "^7.17.8" postcss-import@^15.1.0: version "15.1.0" - resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-15.1.0.tgz#41c64ed8cc0e23735a9698b3249ffdbf704adc70" + resolved "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz" integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew== dependencies: postcss-value-parser "^4.0.0" @@ -8491,14 +8718,14 @@ postcss-import@^15.1.0: postcss-js@^4.0.1: version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.1.tgz#61598186f3703bab052f1c4f7d805f3991bee9d2" + resolved "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz" integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw== dependencies: camelcase-css "^2.0.1" postcss-load-config@^4.0.1: version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.1.tgz#152383f481c2758274404e4962743191d73875bd" + resolved "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.1.tgz" integrity sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA== dependencies: lilconfig "^2.0.5" @@ -8506,7 +8733,7 @@ postcss-load-config@^4.0.1: postcss-loader@^7.0.2, postcss-loader@^7.2.4: version "7.3.3" - resolved 
"https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-7.3.3.tgz#6da03e71a918ef49df1bb4be4c80401df8e249dd" + resolved "https://registry.npmjs.org/postcss-loader/-/postcss-loader-7.3.3.tgz" integrity sha512-YgO/yhtevGO/vJePCQmTxiaEwER94LABZN0ZMT4A0vsak9TpO+RvKRs7EmJ8peIlB9xfXCsS7M8LjqncsUZ5HA== dependencies: cosmiconfig "^8.2.0" @@ -8515,12 +8742,12 @@ postcss-loader@^7.0.2, postcss-loader@^7.2.4: postcss-modules-extract-imports@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + resolved "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz" integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== postcss-modules-local-by-default@^4.0.3: version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.3.tgz#b08eb4f083050708998ba2c6061b50c2870ca524" + resolved "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.3.tgz" integrity sha512-2/u2zraspoACtrbFRnTijMiQtb4GW4BvatjaG/bCjYQo8kLTdevCUlwuBHx2sCnSyrI3x3qj4ZK1j5LQBgzmwA== dependencies: icss-utils "^5.0.0" @@ -8529,28 +8756,28 @@ postcss-modules-local-by-default@^4.0.3: postcss-modules-scope@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + resolved "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz" integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== dependencies: postcss-selector-parser "^6.0.4" postcss-modules-values@^4.0.0: version "4.0.0" - resolved 
"https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + resolved "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz" integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== dependencies: icss-utils "^5.0.0" postcss-nested@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.0.1.tgz#f83dc9846ca16d2f4fa864f16e9d9f7d0961662c" + resolved "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.0.1.tgz" integrity sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ== dependencies: postcss-selector-parser "^6.0.11" postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4: version "6.0.13" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz#d05d8d76b1e8e173257ef9d60b706a8e5e99bf1b" + resolved "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz" integrity sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ== dependencies: cssesc "^3.0.0" @@ -8558,27 +8785,18 @@ postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.2, postcss-selecto postcss-value-parser@^3.3.0: version "3.3.1" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" + resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz" integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== postcss-value-parser@^4.0.0, postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: version "4.2.0" - resolved 
"https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz" integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== -postcss@8.4.14: - version "8.4.14" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.14.tgz#ee9274d5622b4858c1007a74d76e42e56fd21caf" - integrity sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig== - dependencies: - nanoid "^3.3.4" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -postcss@^8.2.14, postcss@^8.4.21, postcss@^8.4.23, postcss@^8.4.28: - version "8.4.29" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.29.tgz#33bc121cf3b3688d4ddef50be869b2a54185a1dd" - integrity sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw== +postcss@8.4.31, postcss@^8.2.14, postcss@^8.4.21, postcss@^8.4.23, postcss@^8.4.28: + version "8.4.31" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d" + integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== dependencies: nanoid "^3.3.6" picocolors "^1.0.0" @@ -8586,27 +8804,27 @@ postcss@^8.2.14, postcss@^8.4.21, postcss@^8.4.23, postcss@^8.4.28: prelude-ls@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== prettier-plugin-organize-imports@^3.2.3: version "3.2.3" - resolved "https://registry.yarnpkg.com/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.3.tgz#6b0141ac71f7ee9a673ce83e95456319e3a7cf0d" + resolved 
"https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.3.tgz" integrity sha512-KFvk8C/zGyvUaE3RvxN2MhCLwzV6OBbFSkwZ2OamCrs9ZY4i5L77jQ/w4UmUr+lqX8qbaqVq6bZZkApn+IgJSg== prettier@^2.8.0: version "2.8.8" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" + resolved "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz" integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== prettier@^3.0.1: version "3.0.3" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.3.tgz#432a51f7ba422d1469096c0fdc28e235db8f9643" + resolved "https://registry.npmjs.org/prettier/-/prettier-3.0.3.tgz" integrity sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg== pretty-error@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + resolved "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz" integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== dependencies: lodash "^4.17.20" @@ -8614,7 +8832,7 @@ pretty-error@^4.0.0: pretty-format@^27.0.2: version "27.5.1" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" + resolved "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz" integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== dependencies: ansi-regex "^5.0.1" @@ -8623,27 +8841,34 @@ pretty-format@^27.0.2: pretty-hrtime@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz#b7e3ea42435a4c9b2759d99e0f201eb195802ee1" + resolved "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz" integrity 
sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A== +prisma@^5.4.2: + version "5.4.2" + resolved "https://registry.yarnpkg.com/prisma/-/prisma-5.4.2.tgz#7eac9276439ec7073ec697c6c0dfa259d96e955e" + integrity sha512-GDMZwZy7mysB2oXU+angQqJ90iaPFdD0rHaZNkn+dio5NRkGLmMqmXs31//tg/qXT3iB0cTQwnGGQNuirhSTZg== + dependencies: + "@prisma/engines" "5.4.2" + process-nextick-args@~2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process@^0.11.10: version "0.11.10" - resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + resolved "https://registry.npmjs.org/process/-/process-0.11.10.tgz" integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== progress@^2.0.1: version "2.0.3" - resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + resolved "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz" integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== prompts@^2.4.0: version "2.4.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz" integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== dependencies: kleur "^3.0.3" @@ -8651,7 +8876,7 @@ prompts@^2.4.0: prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: version "15.8.1" - resolved 
"https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + resolved "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz" integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== dependencies: loose-envify "^1.4.0" @@ -8660,7 +8885,7 @@ prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: protobufjs@^7.2.4, protobufjs@^7.2.5: version "7.2.5" - resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.5.tgz#45d5c57387a6d29a17aab6846dcc283f9b8e7f2d" + resolved "https://registry.npmjs.org/protobufjs/-/protobufjs-7.2.5.tgz" integrity sha512-gGXRSXvxQ7UiPgfw8gevrfRWcTlSbOFg+p/N+JVJEK5VhueL2miT6qTymqAmjr1Q5WbOCyJbyrk6JfWKwlFn6A== dependencies: "@protobufjs/aspromise" "^1.1.2" @@ -8678,7 +8903,7 @@ protobufjs@^7.2.4, protobufjs@^7.2.5: proxy-addr@~2.0.7: version "2.0.7" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + resolved "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz" integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== dependencies: forwarded "0.2.0" @@ -8686,17 +8911,17 @@ proxy-addr@~2.0.7: proxy-from-env@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + resolved "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== prr@~1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" + resolved "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz" integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw== public-encrypt@^4.0.0: version "4.0.3" - resolved 
"https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" + resolved "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz" integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== dependencies: bn.js "^4.1.0" @@ -8708,7 +8933,7 @@ public-encrypt@^4.0.0: pump@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" + resolved "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz" integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" @@ -8716,7 +8941,7 @@ pump@^2.0.0: pump@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" + resolved "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz" integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== dependencies: end-of-stream "^1.1.0" @@ -8724,7 +8949,7 @@ pump@^3.0.0: pumpify@^1.3.3: version "1.5.1" - resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" + resolved "https://registry.npmjs.org/pumpify/-/pumpify-1.5.1.tgz" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" @@ -8733,17 +8958,17 @@ pumpify@^1.3.3: punycode@^1.4.1: version "1.4.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + resolved "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== punycode@^2.1.0, punycode@^2.1.1: version "2.3.0" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" 
+ resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== puppeteer-core@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/puppeteer-core/-/puppeteer-core-2.1.1.tgz#e9b3fbc1237b4f66e25999832229e9db3e0b90ed" + resolved "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-2.1.1.tgz" integrity sha512-n13AWriBMPYxnpbb6bnaY5YoY6rGj8vPLrz6CZF3o0qJNEwlcfJVxBzYZ0NJsQ21UbdJoijPCDrM++SUVEz7+w== dependencies: "@types/mime-types" "^2.1.0" @@ -8759,50 +8984,50 @@ puppeteer-core@^2.1.1: qs@6.11.0: version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + resolved "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz" integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== dependencies: side-channel "^1.0.4" -qs@^6.10.0, qs@^6.11.0: +qs@^6.10.0, qs@^6.11.2: version "6.11.2" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.2.tgz#64bea51f12c1f5da1bc01496f48ffcff7c69d7d9" + resolved "https://registry.npmjs.org/qs/-/qs-6.11.2.tgz" integrity sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA== dependencies: side-channel "^1.0.4" querystring-es3@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" + resolved "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz" integrity sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA== queue-microtask@^1.2.2: version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" integrity 
sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== queue@6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/queue/-/queue-6.0.2.tgz#b91525283e2315c7553d2efa18d83e76432fed65" + resolved "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz" integrity sha512-iHZWu+q3IdFZFX36ro/lKBkSvfkztY5Y7HMiPlOUjhupPcG2JMfst2KKEpu5XndviX/3UhFbRngUPNKtgvtZiA== dependencies: inherits "~2.0.3" ramda@0.29.0: version "0.29.0" - resolved "https://registry.yarnpkg.com/ramda/-/ramda-0.29.0.tgz#fbbb67a740a754c8a4cbb41e2a6e0eb8507f55fb" + resolved "https://registry.npmjs.org/ramda/-/ramda-0.29.0.tgz" integrity sha512-BBea6L67bYLtdbOqfp8f58fPMqEwx0doL+pAi8TZyp2YWz8R9G8z9x75CZI8W+ftqhFHCpEX2cRnUUXK130iKA== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + resolved "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" randomfill@^1.0.3: version "1.0.4" - resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" + resolved "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz" integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== dependencies: randombytes "^2.0.5" @@ -8810,12 +9035,12 @@ randomfill@^1.0.3: range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.5.1: version "2.5.1" - resolved 
"https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz" integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== dependencies: bytes "3.1.2" @@ -8825,22 +9050,22 @@ raw-body@2.5.1: react-colorful@^5.1.2: version "5.6.1" - resolved "https://registry.yarnpkg.com/react-colorful/-/react-colorful-5.6.1.tgz#7dc2aed2d7c72fac89694e834d179e32f3da563b" + resolved "https://registry.npmjs.org/react-colorful/-/react-colorful-5.6.1.tgz" integrity sha512-1exovf0uGTGyq5mXQT0zgQ80uvj2PCwvF8zY1RN9/vbJVSjSo3fsB/4L3ObbF7u70NduSiK4xu4Y6q1MHoUGEw== react-day-picker@^8.7.1: - version "8.8.1" - resolved "https://registry.yarnpkg.com/react-day-picker/-/react-day-picker-8.8.1.tgz#44474c6f7f346ea28796974d7a065d5572f4cd38" - integrity sha512-U7RsRoRI5pyMXhKq54hS9yM11WEGkPf8hIdrxIM/sefgmQjuxazqgwcZFMiPZW/K9vtmzLZFf9bLW0wVsGYd5w== + version "8.8.2" + resolved "https://registry.npmjs.org/react-day-picker/-/react-day-picker-8.8.2.tgz" + integrity sha512-sK5M5PNZaLiszmACUKUpVu1eX3eFDVV+WLdWQ3BxTPbEC9jhuawmlgpbSXX5dIIQQwJpZ4wwP5+vsMVOwa1IRw== react-docgen-typescript@^2.2.2: version "2.2.2" - resolved "https://registry.yarnpkg.com/react-docgen-typescript/-/react-docgen-typescript-2.2.2.tgz#4611055e569edc071204aadb20e1c93e1ab1659c" + resolved "https://registry.npmjs.org/react-docgen-typescript/-/react-docgen-typescript-2.2.2.tgz" integrity sha512-tvg2ZtOpOi6QDwsb3GZhOjDkkX0h8Z2gipvTg6OVMUyoYoURhEiRNePT8NZItTVCDh39JJHnLdfCOkzoLbFnTg== react-docgen@^5.0.0: version "5.4.3" - resolved "https://registry.yarnpkg.com/react-docgen/-/react-docgen-5.4.3.tgz#7d297f73b977d0c7611402e5fc2a168acf332b26" + resolved "https://registry.npmjs.org/react-docgen/-/react-docgen-5.4.3.tgz" integrity sha512-xlLJyOlnfr8lLEEeaDZ+X2J/KJoe6Nr9AzxnkdQWush5hz2ZSu66w6iLMOScMmxoSHWpWMn+k3v5ZiyCfcWsOA== dependencies: "@babel/core" "^7.7.5" @@ -8856,7 +9081,7 @@ 
react-docgen@^5.0.0: react-dom@18.2.0: version "18.2.0" - resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" + resolved "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz" integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== dependencies: loose-envify "^1.1.0" @@ -8864,7 +9089,7 @@ react-dom@18.2.0: react-element-to-jsx-string@^15.0.0: version "15.0.0" - resolved "https://registry.yarnpkg.com/react-element-to-jsx-string/-/react-element-to-jsx-string-15.0.0.tgz#1cafd5b6ad41946ffc8755e254da3fc752a01ac6" + resolved "https://registry.npmjs.org/react-element-to-jsx-string/-/react-element-to-jsx-string-15.0.0.tgz" integrity sha512-UDg4lXB6BzlobN60P8fHWVPX3Kyw8ORrTeBtClmIlGdkOOE+GYQSFvmEU5iLLpwp/6v42DINwNcwOhOLfQ//FQ== dependencies: "@base2/pretty-print-object" "1.0.1" @@ -8873,37 +9098,37 @@ react-element-to-jsx-string@^15.0.0: react-inspector@^6.0.0: version "6.0.2" - resolved "https://registry.yarnpkg.com/react-inspector/-/react-inspector-6.0.2.tgz#aa3028803550cb6dbd7344816d5c80bf39d07e9d" + resolved "https://registry.npmjs.org/react-inspector/-/react-inspector-6.0.2.tgz" integrity sha512-x+b7LxhmHXjHoU/VrFAzw5iutsILRoYyDq97EDYdFpPLcvqtEzk4ZSZSQjnFPbr5T57tLXnHcqFYoN1pI6u8uQ== react-is@18.1.0: version "18.1.0" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.1.0.tgz#61aaed3096d30eacf2a2127118b5b41387d32a67" + resolved "https://registry.npmjs.org/react-is/-/react-is-18.1.0.tgz" integrity sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg== -react-is@^16.10.2, react-is@^16.13.1: +react-is@^16.10.2, react-is@^16.13.1, react-is@^16.7.0: version "16.13.1" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz" integrity 
sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== react-is@^17.0.1: version "17.0.2" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + resolved "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz" integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== react-lifecycles-compat@^3.0.4: version "3.0.4" - resolved "https://registry.yarnpkg.com/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz#4f1a273afdfc8f3488a8c516bfda78f872352362" + resolved "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz" integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA== react-refresh@^0.11.0: version "0.11.0" - resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" + resolved "https://registry.npmjs.org/react-refresh/-/react-refresh-0.11.0.tgz" integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== react-remove-scroll-bar@^2.3.3: version "2.3.4" - resolved "https://registry.yarnpkg.com/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.4.tgz#53e272d7a5cb8242990c7f144c44d8bd8ab5afd9" + resolved "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.4.tgz" integrity sha512-63C4YQBUt0m6ALadE9XV56hV8BgJWDmmTPY758iIJjfQKt2nYwoUrPk0LXRXcB/yIj82T1/Ixfdpdk68LwIB0A== dependencies: react-style-singleton "^2.2.1" @@ -8911,7 +9136,7 @@ react-remove-scroll-bar@^2.3.3: react-remove-scroll@2.5.5: version "2.5.5" - resolved "https://registry.yarnpkg.com/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz#1e31a1260df08887a8a0e46d09271b52b3a37e77" + resolved "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz" integrity 
sha512-ImKhrzJJsyXJfBZ4bzu8Bwpka14c/fQt0k+cyFp/PBhTfyDnU5hjOtM4AG/0AMyy8oKzOTR0lDgJIM7pYXI0kw== dependencies: react-remove-scroll-bar "^2.3.3" @@ -8922,22 +9147,27 @@ react-remove-scroll@2.5.5: react-resize-detector@^8.0.4: version "8.1.0" - resolved "https://registry.yarnpkg.com/react-resize-detector/-/react-resize-detector-8.1.0.tgz#1c7817db8bc886e2dbd3fbe3b26ea8e56be0524a" + resolved "https://registry.npmjs.org/react-resize-detector/-/react-resize-detector-8.1.0.tgz" integrity sha512-S7szxlaIuiy5UqLhLL1KY3aoyGHbZzsTpYal9eYMwCyKqoqoVLCmIgAgNyIM1FhnP2KyBygASJxdhejrzjMb+w== dependencies: lodash "^4.17.21" react-smooth@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/react-smooth/-/react-smooth-2.0.3.tgz#2845fa8f22914f2e4445856d5688fb8a7d72f3ae" - integrity sha512-yl4y3XiMorss7ayF5QnBiSprig0+qFHui8uh7Hgg46QX5O+aRMRKlfGGNGLHno35JkQSvSYY8eCWkBfHfrSHfg== + version "2.0.5" + resolved "https://registry.npmjs.org/react-smooth/-/react-smooth-2.0.5.tgz" + integrity sha512-BMP2Ad42tD60h0JW6BFaib+RJuV5dsXJK9Baxiv/HlNFjvRLqA9xrNKxVWnUIZPQfzUwGXIlU/dSYLU+54YGQA== dependencies: fast-equals "^5.0.0" react-transition-group "2.9.0" +react-spinners@^0.13.8: + version "0.13.8" + resolved "https://registry.yarnpkg.com/react-spinners/-/react-spinners-0.13.8.tgz#5262571be0f745d86bbd49a1e6b49f9f9cb19acc" + integrity sha512-3e+k56lUkPj0vb5NDXPVFAOkPC//XyhKPJjvcGjyMNPWsBKpplfeyialP74G7H7+It7KzhtET+MvGqbKgAqpZA== + react-style-singleton@^2.2.1: version "2.2.1" - resolved "https://registry.yarnpkg.com/react-style-singleton/-/react-style-singleton-2.2.1.tgz#f99e420492b2d8f34d38308ff660b60d0b1205b4" + resolved "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz" integrity sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g== dependencies: get-nonce "^1.0.0" @@ -8946,7 +9176,7 @@ react-style-singleton@^2.2.1: react-transition-group@2.9.0: version "2.9.0" - resolved 
"https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-2.9.0.tgz#df9cdb025796211151a436c69a8f3b97b5b07c8d" + resolved "https://registry.npmjs.org/react-transition-group/-/react-transition-group-2.9.0.tgz" integrity sha512-+HzNTCHpeQyl4MJ/bdE0u6XRMe9+XG/+aL4mCxVN4DnPBQ0/5bfHWPDuOZUzYdMj94daZaZdCCc1Dzt9R/xSSg== dependencies: dom-helpers "^3.4.0" @@ -8956,7 +9186,7 @@ react-transition-group@2.9.0: react-transition-group@^4.4.5: version "4.4.5" - resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.5.tgz#e53d4e3f3344da8521489fbef8f2581d42becdd1" + resolved "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz" integrity sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g== dependencies: "@babel/runtime" "^7.5.5" @@ -8966,21 +9196,21 @@ react-transition-group@^4.4.5: react@18.2.0: version "18.2.0" - resolved "https://registry.yarnpkg.com/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" + resolved "https://registry.npmjs.org/react/-/react-18.2.0.tgz" integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== dependencies: loose-envify "^1.1.0" read-cache@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" + resolved "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz" integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== dependencies: pify "^2.3.0" read-pkg-up@^7.0.1: version "7.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" + resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz" integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== dependencies: find-up "^4.1.0" @@ -8989,7 
+9219,7 @@ read-pkg-up@^7.0.1: read-pkg@^5.2.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" + resolved "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz" integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== dependencies: "@types/normalize-package-data" "^2.4.0" @@ -8999,7 +9229,7 @@ read-pkg@^5.2.0: readable-stream@^2.0.0, readable-stream@^2.2.2, readable-stream@~2.3.6: version "2.3.8" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== dependencies: core-util-is "~1.0.0" @@ -9012,7 +9242,7 @@ readable-stream@^2.0.0, readable-stream@^2.2.2, readable-stream@~2.3.6: readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0: version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz" integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== dependencies: inherits "^2.0.3" @@ -9021,7 +9251,7 @@ readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable readable-stream@^4.0.0: version "4.4.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-4.4.2.tgz#e6aced27ad3b9d726d8308515b9a1b98dc1b9d13" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.2.tgz" integrity sha512-Lk/fICSyIhodxy1IDK2HazkeGjSmezAWX2egdtJnYhtzKEsBPJowlI6F6LPb5tqIQILrMbx22S5o3GuJavPusA== dependencies: abort-controller "^3.0.0" @@ -9032,14 +9262,14 @@ readable-stream@^4.0.0: 
readdirp@~3.6.0: version "3.6.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== dependencies: picomatch "^2.2.1" recast@^0.21.0: version "0.21.5" - resolved "https://registry.yarnpkg.com/recast/-/recast-0.21.5.tgz#e8cd22bb51bcd6130e54f87955d33a2b2e57b495" + resolved "https://registry.npmjs.org/recast/-/recast-0.21.5.tgz" integrity sha512-hjMmLaUXAm1hIuTqOdeYObMslq/q+Xff6QE3Y2P+uoHAg2nmVlLBps2hzh1UJDdMtDTMXOFewK6ky51JQIeECg== dependencies: ast-types "0.15.2" @@ -9049,7 +9279,7 @@ recast@^0.21.0: recast@^0.23.1: version "0.23.4" - resolved "https://registry.yarnpkg.com/recast/-/recast-0.23.4.tgz#ca1bac7bfd3011ea5a28dfecb5df678559fb1ddf" + resolved "https://registry.npmjs.org/recast/-/recast-0.23.4.tgz" integrity sha512-qtEDqIZGVcSZCHniWwZWbRy79Dc6Wp3kT/UmDA2RJKBPg7+7k51aQBZirHmUGn5uvHf2rg8DkjizrN26k61ATw== dependencies: assert "^2.0.0" @@ -9060,14 +9290,14 @@ recast@^0.23.1: recharts-scale@^0.4.4: version "0.4.5" - resolved "https://registry.yarnpkg.com/recharts-scale/-/recharts-scale-0.4.5.tgz#0969271f14e732e642fcc5bd4ab270d6e87dd1d9" + resolved "https://registry.npmjs.org/recharts-scale/-/recharts-scale-0.4.5.tgz" integrity sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w== dependencies: decimal.js-light "^2.4.1" recharts@^2.7.1: version "2.8.0" - resolved "https://registry.yarnpkg.com/recharts/-/recharts-2.8.0.tgz#90c95136e2cb6930224c94a51adce607701284fc" + resolved "https://registry.npmjs.org/recharts/-/recharts-2.8.0.tgz" integrity sha512-nciXqQDh3aW8abhwUlA4EBOBusRHLNiKHfpRZiG/yjups1x+auHb2zWPuEcTn/IMiN47vVMMuF8Sr+vcQJtsmw== dependencies: classnames "^2.2.5" @@ -9082,15 +9312,15 @@ recharts@^2.7.1: reduce-css-calc@^2.1.8: version "2.1.8" - resolved 
"https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-2.1.8.tgz#7ef8761a28d614980dc0c982f772c93f7a99de03" + resolved "https://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-2.1.8.tgz" integrity sha512-8liAVezDmUcH+tdzoEGrhfbGcP7nOV4NkGE3a74+qqvE7nt9i4sKLGBuZNOnpI4WiGksiNPklZxva80061QiPg== dependencies: css-unit-converter "^1.1.1" postcss-value-parser "^3.3.0" -reflect.getprototypeof@^1.0.3: +reflect.getprototypeof@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz#aaccbf41aca3821b87bb71d9dcbc7ad0ba50a3f3" + resolved "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz" integrity sha512-ECkTw8TmJwW60lOTR+ZkODISW6RQ8+2CL3COqtiJKLd6MmB45hN51HprHFziKLGkAuTGQhBb91V8cy+KHlaCjw== dependencies: call-bind "^1.0.2" @@ -9101,46 +9331,46 @@ reflect.getprototypeof@^1.0.3: which-builtin-type "^1.1.3" regenerate-unicode-properties@^10.1.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" - integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== + version "10.1.1" + resolved "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz" + integrity sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q== dependencies: regenerate "^1.4.2" regenerate@^1.4.2: version "1.4.2" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + resolved "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz" integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== regenerator-runtime@^0.14.0: version "0.14.0" - resolved 
"https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45" + resolved "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz" integrity sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA== regenerator-transform@^0.15.2: version "0.15.2" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.2.tgz#5bbae58b522098ebdf09bca2f83838929001c7a4" + resolved "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.2.tgz" integrity sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg== dependencies: "@babel/runtime" "^7.8.4" regex-parser@^2.2.11: version "2.2.11" - resolved "https://registry.yarnpkg.com/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" + resolved "https://registry.npmjs.org/regex-parser/-/regex-parser-2.2.11.tgz" integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== -regexp.prototype.flags@^1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz#fe7ce25e7e4cca8db37b6634c8a2c7009199b9cb" - integrity sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA== +regexp.prototype.flags@^1.5.0, regexp.prototype.flags@^1.5.1: + version "1.5.1" + resolved "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz" + integrity sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg== dependencies: call-bind "^1.0.2" define-properties "^1.2.0" - functions-have-names "^1.2.3" + set-function-name "^2.0.0" regexpu-core@^5.3.1: version "5.3.2" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.2.tgz#11a2b06884f3527aec3e93dbbf4a3b958a95546b" + resolved 
"https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.3.2.tgz" integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ== dependencies: "@babel/regjsgen" "^0.8.0" @@ -9152,19 +9382,19 @@ regexpu-core@^5.3.1: regjsparser@^0.9.1: version "0.9.1" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" + resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz" integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== dependencies: jsesc "~0.5.0" relateurl@^0.2.7: version "0.2.7" - resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" + resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz" integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== remark-external-links@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/remark-external-links/-/remark-external-links-8.0.0.tgz#308de69482958b5d1cd3692bc9b725ce0240f345" + resolved "https://registry.npmjs.org/remark-external-links/-/remark-external-links-8.0.0.tgz" integrity sha512-5vPSX0kHoSsqtdftSHhIYofVINC8qmp0nctkeU9YoJwV3YfiBRiI6cbFRJ0oI/1F9xS+bopXG0m2KS8VFscuKA== dependencies: extend "^3.0.0" @@ -9175,7 +9405,7 @@ remark-external-links@^8.0.0: remark-slug@^6.0.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/remark-slug/-/remark-slug-6.1.0.tgz#0503268d5f0c4ecb1f33315c00465ccdd97923ce" + resolved "https://registry.npmjs.org/remark-slug/-/remark-slug-6.1.0.tgz" integrity sha512-oGCxDF9deA8phWvxFuyr3oSJsdyUAxMFbA0mZ7Y1Sas+emILtO+e5WutF9564gDsEN4IXaQXm5pFo6MLH+YmwQ== dependencies: github-slugger "^1.0.0" @@ -9184,7 +9414,7 @@ remark-slug@^6.0.0: renderkid@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + resolved 
"https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz" integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== dependencies: css-select "^4.1.3" @@ -9195,37 +9425,37 @@ renderkid@^3.0.0: require-directory@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== require-from-string@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + resolved "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz" integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== requireindex@^1.1.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/requireindex/-/requireindex-1.2.0.tgz#3463cdb22ee151902635aa6c9535d4de9c2ef1ef" + resolved "https://registry.npmjs.org/requireindex/-/requireindex-1.2.0.tgz" integrity sha512-L9jEkOi3ASd9PYit2cwRfyppc9NoABujTP8/5gFcbERmo5jUoAKovIC3fsF17pkTnGsrByysqX+Kxd2OTNI1ww== resolve-from@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== resolve-from@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz" integrity 
sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== resolve-pkg-maps@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz#616b3dc2c57056b5588c31cdf4b3d64db133720f" + resolved "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz" integrity sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw== resolve-url-loader@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-5.0.0.tgz#ee3142fb1f1e0d9db9524d539cfa166e9314f795" + resolved "https://registry.npmjs.org/resolve-url-loader/-/resolve-url-loader-5.0.0.tgz" integrity sha512-uZtduh8/8srhBoMx//5bwqjQ+rfYOUq8zC9NrMUGtjBiGTtFJM42s58/36+hTqeqINcnYe08Nj3LkK9lW4N8Xg== dependencies: adjust-sourcemap-loader "^4.0.0" @@ -9235,26 +9465,26 @@ resolve-url-loader@^5.0.0: source-map "0.6.1" resolve@^1.1.7, resolve@^1.10.0, resolve@^1.14.2, resolve@^1.22.2, resolve@^1.22.4: - version "1.22.4" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.4.tgz#1dc40df46554cdaf8948a486a10f6ba1e2026c34" - integrity sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg== + version "1.22.8" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== dependencies: is-core-module "^2.13.0" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" resolve@^2.0.0-next.4: - version "2.0.0-next.4" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" - integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== + version "2.0.0-next.5" + resolved "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz" + integrity 
sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA== dependencies: - is-core-module "^2.9.0" + is-core-module "^2.13.0" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" restore-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + resolved "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz" integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== dependencies: onetime "^5.1.0" @@ -9262,33 +9492,33 @@ restore-cursor@^3.1.0: reusify@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== rimraf@^2.6.1: version "2.7.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: glob "^7.1.3" rimraf@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== dependencies: glob "^7.1.3" rimraf@~2.6.2: version "2.6.3" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz" integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== dependencies: glob "^7.1.3" 
ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" + resolved "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz" integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== dependencies: hash-base "^3.0.0" @@ -9296,39 +9526,34 @@ ripemd160@^2.0.0, ripemd160@^2.0.1: run-parallel@^1.1.9: version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== dependencies: queue-microtask "^1.2.2" -safe-array-concat@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.0.0.tgz#2064223cba3c08d2ee05148eedbc563cd6d84060" - integrity sha512-9dVEFruWIsnie89yym+xWTAYASdpw3CJV7Li/6zBewGf9z2i1j31rP6jnY0pHEO4QZh6N0K11bFjWmdR8UGdPQ== +safe-array-concat@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.1.tgz" + integrity sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q== dependencies: call-bind "^1.0.2" - get-intrinsic "^1.2.0" + get-intrinsic "^1.2.1" has-symbols "^1.0.3" isarray "^2.0.5" -safe-buffer@5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" - integrity sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg== - safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + resolved 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0: version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex-test@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + resolved "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz" integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== dependencies: call-bind "^1.0.2" @@ -9337,12 +9562,12 @@ safe-regex-test@^1.0.0: "safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.1.0: version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sass-loader@^12.4.0: version "12.6.0" - resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" + resolved "https://registry.npmjs.org/sass-loader/-/sass-loader-12.6.0.tgz" integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== dependencies: klona "^2.0.4" @@ -9350,26 +9575,26 @@ sass-loader@^12.4.0: sass-loader@^13.2.2: version "13.3.2" - resolved 
"https://registry.yarnpkg.com/sass-loader/-/sass-loader-13.3.2.tgz#460022de27aec772480f03de17f5ba88fa7e18c6" + resolved "https://registry.npmjs.org/sass-loader/-/sass-loader-13.3.2.tgz" integrity sha512-CQbKl57kdEv+KDLquhC+gE3pXt74LEAzm+tzywcA0/aHZuub8wTErbjAoNI57rPUWRYRNC5WUnNl8eGJNbDdwg== dependencies: neo-async "^2.6.2" sax@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + version "1.3.0" + resolved "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz" + integrity sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA== scheduler@^0.23.0: version "0.23.0" - resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" + resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz" integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== dependencies: loose-envify "^1.1.0" schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== dependencies: "@types/json-schema" "^7.0.8" @@ -9378,7 +9603,7 @@ schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: schema-utils@^4.0.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.2.0.tgz#70d7c93e153a273a805801882ebd3bff20d89c8b" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz" integrity sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw== dependencies: 
"@types/json-schema" "^7.0.9" @@ -9388,24 +9613,24 @@ schema-utils@^4.0.0: "semver@2 || 3 || 4 || 5", semver@^5.6.0: version "5.7.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" + resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== semver@^6.0.0, semver@^6.3.0, semver@^6.3.1: version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4: version "7.5.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + resolved "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" send@0.18.0: version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz" integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== dependencies: debug "2.6.9" @@ -9424,25 +9649,14 @@ send@0.18.0: serialize-javascript@^6.0.0, serialize-javascript@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.1.tgz#b206efb27c3da0b0ab6b52f48d170b7996458e5c" + resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz" integrity sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w== dependencies: randombytes "^2.1.0" -serve-favicon@^2.5.0: 
- version "2.5.0" - resolved "https://registry.yarnpkg.com/serve-favicon/-/serve-favicon-2.5.0.tgz#935d240cdfe0f5805307fdfe967d88942a2cbcf0" - integrity sha512-FMW2RvqNr03x+C0WxTyu6sOv21oOjkq5j8tjquWccwa6ScNyGFOGJVpuS1NmTVGBAHS07xnSKotgf2ehQmf9iA== - dependencies: - etag "~1.8.1" - fresh "0.5.2" - ms "2.1.1" - parseurl "~1.3.2" - safe-buffer "5.1.1" - serve-static@1.15.0: version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + resolved "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz" integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== dependencies: encodeurl "~1.0.2" @@ -9450,19 +9664,28 @@ serve-static@1.15.0: parseurl "~1.3.3" send "0.18.0" +set-function-name@^2.0.0, set-function-name@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz" + integrity sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA== + dependencies: + define-data-property "^1.0.1" + functions-have-names "^1.2.3" + has-property-descriptors "^1.0.0" + setimmediate@^1.0.4: version "1.0.5" - resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" + resolved "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz" integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== setprototypeof@1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz" integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== sha.js@^2.4.0, sha.js@^2.4.8: version "2.4.11" - resolved 
"https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" + resolved "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz" integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== dependencies: inherits "^2.0.1" @@ -9470,31 +9693,31 @@ sha.js@^2.4.0, sha.js@^2.4.8: shallow-clone@^3.0.0: version "3.0.1" - resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + resolved "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz" integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== dependencies: kind-of "^6.0.2" shallowequal@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8" + resolved "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz" integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== shebang-command@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== dependencies: shebang-regex "^3.0.0" shebang-regex@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== side-channel@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + resolved 
"https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz" integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== dependencies: call-bind "^1.0.0" @@ -9503,49 +9726,49 @@ side-channel@^1.0.4: signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== signal-exit@^4.0.1: version "4.1.0" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz" integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== simple-update-notifier@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz#d70b92bdab7d6d90dfd73931195a30b6e3d7cebb" + resolved "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz" integrity sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w== dependencies: semver "^7.5.3" sisteransi@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + resolved "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz" integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== slash@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-2.0.0.tgz#de552851a1759df3a8f206535442f5ec4ddeab44" + resolved "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz" integrity 
sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A== slash@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== slash@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + resolved "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz" integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== source-map-js@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz" integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== source-map-support@^0.5.16, source-map-support@~0.5.20: version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== dependencies: buffer-from "^1.0.0" @@ -9553,22 +9776,22 @@ source-map-support@^0.5.16, source-map-support@~0.5.20: source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== 
source-map@^0.7.3: version "0.7.4" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz" integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== space-separated-tokens@^1.0.0: version "1.1.5" - resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz#85f32c3d10d9682007e917414ddc5c26d1aa6899" + resolved "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz" integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== spdx-correct@^3.0.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c" + resolved "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz" integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA== dependencies: spdx-expression-parse "^3.0.0" @@ -9576,59 +9799,64 @@ spdx-correct@^3.0.0: spdx-exceptions@^2.1.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" + resolved "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz" integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== spdx-expression-parse@^3.0.0: version "3.0.1" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" + resolved "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz" integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== dependencies: spdx-exceptions "^2.1.0" spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: - version 
"3.0.13" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz#7189a474c46f8d47c7b0da4b987bb45e908bd2d5" - integrity sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w== + version "3.0.16" + resolved "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.16.tgz" + integrity sha512-eWN+LnM3GR6gPu35WxNgbGl8rmY1AEmoMDvL/QD6zYmPWgywxWqJWNdLGT+ke8dKNWrcYgYjPpG5gbTfghP8rw== sprintf-js@~1.0.2: version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== stackframe@^1.3.4: version "1.3.4" - resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" + resolved "https://registry.npmjs.org/stackframe/-/stackframe-1.3.4.tgz" integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== +state-local@^1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/state-local/-/state-local-1.0.7.tgz#da50211d07f05748d53009bee46307a37db386d5" + integrity sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w== + statuses@2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== stop-iteration-iterator@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz#6a60be0b4ee757d1ed5254858ec66b10c49285e4" + resolved 
"https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz" integrity sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ== dependencies: internal-slot "^1.0.4" store2@^2.14.2: version "2.14.2" - resolved "https://registry.yarnpkg.com/store2/-/store2-2.14.2.tgz#56138d200f9fe5f582ad63bc2704dbc0e4a45068" + resolved "https://registry.npmjs.org/store2/-/store2-2.14.2.tgz" integrity sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w== storybook@^7.3.0: - version "7.4.0" - resolved "https://registry.yarnpkg.com/storybook/-/storybook-7.4.0.tgz#f1b64222e3d474bc6e258eb7e48c675685829873" - integrity sha512-jSwbyxHlr2dTY51Pv0mzenjrMDJNZH7DQhHu4ZezpjV+QK/rLCnD+Gt/7iDSaNlsmZJejQcmURDoEybWggMOqw== + version "7.4.6" + resolved "https://registry.npmjs.org/storybook/-/storybook-7.4.6.tgz" + integrity sha512-YkFSpnR47j5zz7yElA+2axLjXN7K7TxDGJRHHlqXmG5iQ0PXzmjrj2RxMDKFz4Ybp/QjEUoJ4rx//ESEY0Nb5A== dependencies: - "@storybook/cli" "7.4.0" + "@storybook/cli" "7.4.6" stream-browserify@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-3.0.0.tgz#22b0a2850cdf6503e73085da1fc7b7d0c2122f2f" + resolved "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz" integrity sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA== dependencies: inherits "~2.0.4" @@ -9636,7 +9864,7 @@ stream-browserify@^3.0.0: stream-http@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-3.2.0.tgz#1872dfcf24cb15752677e40e5c3f9cc1926028b5" + resolved "https://registry.npmjs.org/stream-http/-/stream-http-3.2.0.tgz" integrity sha512-Oq1bLqisTyK3TSCXpPbT4sdeYNdmyZJv1LxpEm2vu1ZhK89kSE5YXwZc3cWk0MagGaKriBh9mCFbVGtO+vY29A== dependencies: builtin-status-codes "^3.0.0" @@ -9646,17 +9874,17 @@ stream-http@^3.2.0: stream-shift@^1.0.0: version "1.0.1" - resolved 
"https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" + resolved "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz" integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== streamsearch@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + resolved "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz" integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== "string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== dependencies: emoji-regex "^8.0.0" @@ -9665,7 +9893,7 @@ streamsearch@^1.1.0: string-width@^5.0.1, string-width@^5.1.2: version "5.1.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + resolved "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz" integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== dependencies: eastasianwidth "^0.2.0" @@ -9674,7 +9902,7 @@ string-width@^5.0.1, string-width@^5.1.2: string-width@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-6.1.0.tgz#96488d6ed23f9ad5d82d13522af9e4c4c3fd7518" + resolved "https://registry.npmjs.org/string-width/-/string-width-6.1.0.tgz" integrity sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ== dependencies: eastasianwidth "^0.2.0" @@ -9682,9 
+9910,9 @@ string-width@^6.1.0: strip-ansi "^7.0.1" string.prototype.matchall@^4.0.8: - version "4.0.9" - resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.9.tgz#148779de0f75d36b13b15885fec5cadde994520d" - integrity sha512-6i5hL3MqG/K2G43mWXWgP+qizFW/QH/7kCNN13JrJS5q48FN5IKksLDscexKP3dnmB6cdm9jlNgAsWNLpSykmA== + version "4.0.10" + resolved "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.10.tgz" + integrity sha512-rGXbGmOEosIQi6Qva94HUjgPs9vKW+dkG7Y8Q5O2OYkWL6wFaTRZO8zM4mhP94uX55wgyrXzfS2aGtGzUL7EJQ== dependencies: call-bind "^1.0.2" define-properties "^1.2.0" @@ -9693,29 +9921,30 @@ string.prototype.matchall@^4.0.8: has-symbols "^1.0.3" internal-slot "^1.0.5" regexp.prototype.flags "^1.5.0" + set-function-name "^2.0.0" side-channel "^1.0.4" -string.prototype.trim@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz#a68352740859f6893f14ce3ef1bb3037f7a90533" - integrity sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg== +string.prototype.trim@^1.2.8: + version "1.2.8" + resolved "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz" + integrity sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ== dependencies: call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.4" + define-properties "^1.2.0" + es-abstract "^1.22.1" -string.prototype.trimend@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz#c4a27fa026d979d79c04f17397f250a462944533" - integrity sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ== +string.prototype.trimend@^1.0.7: + version "1.0.7" + resolved "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz" + integrity 
sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA== dependencies: call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.4" + define-properties "^1.2.0" + es-abstract "^1.22.1" -string.prototype.trimstart@^1.0.6: +string.prototype.trimstart@^1.0.7: version "1.0.7" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz#d4cdb44b83a4737ffbac2d406e405d43d0184298" + resolved "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz" integrity sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg== dependencies: call-bind "^1.0.2" @@ -9724,71 +9953,70 @@ string.prototype.trimstart@^1.0.6: string_decoder@^1.1.1, string_decoder@^1.3.0: version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" "strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: - name strip-ansi-cjs version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: 
ansi-regex "^5.0.1" strip-ansi@^7.0.1: version "7.1.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz" integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== dependencies: ansi-regex "^6.0.1" strip-bom@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz" integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== strip-final-newline@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== strip-indent@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + resolved "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz" integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== dependencies: min-indent "^1.0.0" strip-json-comments@^3.0.1, strip-json-comments@^3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== strip-outer@^1.0.1: version "1.0.1" - resolved 
"https://registry.yarnpkg.com/strip-outer/-/strip-outer-1.0.1.tgz#b2fd2abf6604b9d1e6013057195df836b8a9d631" + resolved "https://registry.npmjs.org/strip-outer/-/strip-outer-1.0.1.tgz" integrity sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg== dependencies: escape-string-regexp "^1.0.2" style-loader@^3.3.1, style-loader@^3.3.2: version "3.3.3" - resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.3.tgz#bba8daac19930169c0c9c96706749a597ae3acff" + resolved "https://registry.npmjs.org/style-loader/-/style-loader-3.3.3.tgz" integrity sha512-53BiGLXAcll9maCYtZi2RCQZKa8NQQai5C4horqKyRmHj9H7QmcUyucrH+4KW/gBQbXM2AsB0axoEcFZPlfPcw== styled-components@^6.0.7: - version "6.0.7" - resolved "https://registry.yarnpkg.com/styled-components/-/styled-components-6.0.7.tgz#1cf4a5e6b6181b29f941934df54af19b7ef05ab0" - integrity sha512-xIwWuiRMYR43mskVsW9MGTRjSo7ol4bcVjT595fGUp3OLBJOlOgaiKaxsHdC4a2HqWKqKnh0CmcRbk5ogyDjTg== + version "6.0.9" + resolved "https://registry.npmjs.org/styled-components/-/styled-components-6.0.9.tgz" + integrity sha512-dDEXXF66b4iQhI1YHgvkBqfdJPGj2EifyLd298PVs50nz7KDfBKnAmWVnkZtw6+Nb6Izf19BAUyfYy8p434JAg== dependencies: "@babel/cli" "^7.21.0" "@babel/core" "^7.21.0" @@ -9812,19 +10040,19 @@ styled-components@^6.0.7: styled-jsx@5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.1.1.tgz#839a1c3aaacc4e735fed0781b8619ea5d0009d1f" + resolved "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.1.tgz" integrity sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw== dependencies: client-only "0.0.1" stylis@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.3.0.tgz#abe305a669fc3d8777e10eefcfc73ad861c5588c" + resolved "https://registry.npmjs.org/stylis/-/stylis-4.3.0.tgz" integrity sha512-E87pIogpwUsUwXw7dNyU4QDjdgVMy52m+XEOPEKUn161cCzWjjhPSQhByfd1CcNvrOLnXQ6OnnZDwnJrz/Z4YQ== sucrase@^3.32.0: 
version "3.34.0" - resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.34.0.tgz#1e0e2d8fcf07f8b9c3569067d92fbd8690fb576f" + resolved "https://registry.npmjs.org/sucrase/-/sucrase-3.34.0.tgz" integrity sha512-70/LQEZ07TEcxiU2dz51FKaE6hCTWC6vr7FOk3Gr0U60C3shtAN+H+BFr9XlYe5xqf3RA8nrc+VIwzCfnxuXJw== dependencies: "@jridgewell/gen-mapping" "^0.3.2" @@ -9837,53 +10065,66 @@ sucrase@^3.32.0: supports-color@^5.3.0: version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" supports-color@^7.1.0: version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== dependencies: has-flag "^4.0.0" supports-color@^8.0.0: version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== dependencies: has-flag "^4.0.0" supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== 
swc-loader@^0.2.3: version "0.2.3" - resolved "https://registry.yarnpkg.com/swc-loader/-/swc-loader-0.2.3.tgz#6792f1c2e4c9ae9bf9b933b3e010210e270c186d" + resolved "https://registry.npmjs.org/swc-loader/-/swc-loader-0.2.3.tgz" integrity sha512-D1p6XXURfSPleZZA/Lipb3A8pZ17fP4NObZvFCDjK/OKljroqDpPmsBdTraWhVBqUNpcWBQY1imWdoPScRlQ7A== +swr@^2.2.4: + version "2.2.4" + resolved "https://registry.npmjs.org/swr/-/swr-2.2.4.tgz" + integrity sha512-njiZ/4RiIhoOlAaLYDqwz5qH/KZXVilRLvomrx83HjzCWTfa+InyfAjv05PSFxnmLzZkNO9ZfvgoqzAaEI4sGQ== + dependencies: + client-only "^0.0.1" + use-sync-external-store "^1.2.0" + synchronous-promise@^2.0.15: version "2.0.17" - resolved "https://registry.yarnpkg.com/synchronous-promise/-/synchronous-promise-2.0.17.tgz#38901319632f946c982152586f2caf8ddc25c032" + resolved "https://registry.npmjs.org/synchronous-promise/-/synchronous-promise-2.0.17.tgz" integrity sha512-AsS729u2RHUfEra9xJrE39peJcc2stq2+poBXX8bcM08Y6g9j/i/PUzwNQqkaJde7Ntg1TO7bSREbR5sdosQ+g== tabbable@^6.0.1: version "6.2.0" - resolved "https://registry.yarnpkg.com/tabbable/-/tabbable-6.2.0.tgz#732fb62bc0175cfcec257330be187dcfba1f3b97" + resolved "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz" integrity sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew== tailwind-merge@^1.9.1: version "1.14.0" - resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-1.14.0.tgz#e677f55d864edc6794562c63f5001f45093cdb8b" + resolved "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-1.14.0.tgz" integrity sha512-3mFKyCo/MBcgyOTlrY8T7odzZFx+w+qKSMAmdFzRvqBfLlSigU6TZnlFHK0lkMwj9Bj8OYU+9yW9lmGuS0QEnQ== +tailwindcss-animate@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/tailwindcss-animate/-/tailwindcss-animate-1.0.7.tgz#318b692c4c42676cc9e67b19b78775742388bef4" + integrity sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA== + tailwindcss@^3.3.3: version "3.3.3" - 
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.3.tgz#90da807393a2859189e48e9e7000e6880a736daf" + resolved "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.3.3.tgz" integrity sha512-A0KgSkef7eE4Mf+nKJ83i75TMyq8HqY3qmFIJSWy8bNt0v1lG7jUcpGpoTFxAwYcWOphcTBLPPJg+bDfhDf52w== dependencies: "@alloc/quick-lru" "^5.2.0" @@ -9911,12 +10152,12 @@ tailwindcss@^3.3.3: tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0, tapable@^2.2.1: version "2.2.1" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz" integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== tar-fs@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" + resolved "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz" integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== dependencies: chownr "^1.1.1" @@ -9926,7 +10167,7 @@ tar-fs@^2.1.1: tar-stream@^2.1.4: version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" + resolved "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz" integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== dependencies: bl "^4.0.3" @@ -9935,9 +10176,9 @@ tar-stream@^2.1.4: inherits "^2.0.3" readable-stream "^3.1.1" -tar@^6.1.13: +tar@^6.2.0: version "6.2.0" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.0.tgz#b14ce49a79cb1cd23bc9b016302dea5474493f73" + resolved "https://registry.npmjs.org/tar/-/tar-6.2.0.tgz" integrity sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ== dependencies: chownr "^2.0.0" @@ -9949,26 +10190,26 @@ tar@^6.1.13: telejson@^7.2.0: version "7.2.0" - resolved 
"https://registry.yarnpkg.com/telejson/-/telejson-7.2.0.tgz#3994f6c9a8f8d7f2dba9be2c7c5bbb447e876f32" + resolved "https://registry.npmjs.org/telejson/-/telejson-7.2.0.tgz" integrity sha512-1QTEcJkJEhc8OnStBx/ILRu5J2p0GjvWsBx56bmZRqnrkdBMUe+nX92jxV+p3dB4CP6PZCdJMQJwCggkNBMzkQ== dependencies: memoizerific "^1.11.3" temp-dir@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" + resolved "https://registry.npmjs.org/temp-dir/-/temp-dir-2.0.0.tgz" integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== temp@^0.8.4: version "0.8.4" - resolved "https://registry.yarnpkg.com/temp/-/temp-0.8.4.tgz#8c97a33a4770072e0a05f919396c7665a7dd59f2" + resolved "https://registry.npmjs.org/temp/-/temp-0.8.4.tgz" integrity sha512-s0ZZzd0BzYv5tLSptZooSjK8oj6C+c19p7Vqta9+6NPOf7r+fxq0cJe6/oN4LTC79sy5NY8ucOJNgwsKCSbfqg== dependencies: rimraf "~2.6.2" tempy@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/tempy/-/tempy-1.0.1.tgz#30fe901fd869cfb36ee2bd999805aa72fbb035de" + resolved "https://registry.npmjs.org/tempy/-/tempy-1.0.1.tgz" integrity sha512-biM9brNqxSc04Ee71hzFbryD11nX7VPhQQY32AdDmjFvodsRFz/3ufeoTZ6uYkRFfGo188tENcASNs3vTdsM0w== dependencies: del "^6.0.0" @@ -9979,7 +10220,7 @@ tempy@^1.0.1: terser-webpack-plugin@^5.3.1, terser-webpack-plugin@^5.3.7: version "5.3.9" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz#832536999c51b46d468067f9e37662a3b96adfe1" + resolved "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz" integrity sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA== dependencies: "@jridgewell/trace-mapping" "^0.3.17" @@ -9989,9 +10230,9 @@ terser-webpack-plugin@^5.3.1, terser-webpack-plugin@^5.3.7: terser "^5.16.8" terser@^5.10.0, terser@^5.16.8: - version "5.19.4" - resolved 
"https://registry.yarnpkg.com/terser/-/terser-5.19.4.tgz#941426fa482bf9b40a0308ab2b3cd0cf7c775ebd" - integrity sha512-6p1DjHeuluwxDXcuT9VR8p64klWJKo1ILiy19s6C9+0Bh2+NWTX6nD9EPppiER4ICkHDVB1RkVpin/YW2nQn/g== + version "5.21.0" + resolved "https://registry.npmjs.org/terser/-/terser-5.21.0.tgz" + integrity sha512-WtnFKrxu9kaoXuiZFSGrcAvvBqAdmKx0SFNmVNYdJamMu9yyN3I/QF0FbH4QcqJQ+y1CJnzxGIKH0cSj+FGYRw== dependencies: "@jridgewell/source-map" "^0.3.3" acorn "^8.8.2" @@ -10000,7 +10241,7 @@ terser@^5.10.0, terser@^5.16.8: test-exclude@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + resolved "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz" integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== dependencies: "@istanbuljs/schema" "^0.1.2" @@ -10009,26 +10250,26 @@ test-exclude@^6.0.0: text-table@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== thenify-all@^1.0.0: version "1.6.0" - resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + resolved "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz" integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== dependencies: thenify ">= 3.1.0 < 4" "thenify@>= 3.1.0 < 4": version "3.3.1" - resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + resolved "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz" integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== dependencies: 
any-promise "^1.0.0" through2@^2.0.3: version "2.0.5" - resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" + resolved "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" @@ -10036,78 +10277,78 @@ through2@^2.0.3: timers-browserify@^2.0.12: version "2.0.12" - resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.12.tgz#44a45c11fbf407f34f97bccd1577c652361b00ee" + resolved "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz" integrity sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ== dependencies: setimmediate "^1.0.4" tiny-invariant@^1.3.1: version "1.3.1" - resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.3.1.tgz#8560808c916ef02ecfd55e66090df23a4b7aa642" + resolved "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.1.tgz" integrity sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw== tmpl@1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + resolved "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz" integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-fast-properties@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== to-regex-range@^5.0.1: version "5.0.1" - resolved 
"https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" tocbot@^4.20.1: - version "4.21.1" - resolved "https://registry.yarnpkg.com/tocbot/-/tocbot-4.21.1.tgz#7b667bef1c3ea1a07e4f400b742aa71e7e7e5ba0" - integrity sha512-IfajhBTeg0HlMXu1f+VMbPef05QpDTsZ9X2Yn1+8npdaXsXg/+wrm9Ze1WG5OS1UDC3qJ5EQN/XOZ3gfXjPFCw== + version "4.21.2" + resolved "https://registry.npmjs.org/tocbot/-/tocbot-4.21.2.tgz" + integrity sha512-R5Muhi/TUu4i4snWVrMgNoXyJm2f8sJfdgIkQvqb+cuIXQEIMAiWGWgCgYXHqX4+XiS/Bnm7IYZ9Zy6NVe6lhw== toidentifier@1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + resolved "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== tr46@~0.0.3: version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz" integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== trim-repeated@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/trim-repeated/-/trim-repeated-1.0.0.tgz#e3646a2ea4e891312bf7eace6cfb05380bc01c21" + resolved "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz" integrity sha512-pkonvlKk8/ZuR0D5tLW8ljt5I8kmxp2XKymhepUeOdCEfKpZaktSArkLHZt76OB1ZvO9bssUsDty4SWhLvZpLg== dependencies: escape-string-regexp "^1.0.2" ts-api-utils@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.0.2.tgz#7c094f753b6705ee4faee25c3c684ade52d66d99" - integrity 
sha512-Cbu4nIqnEdd+THNEsBdkolnOXhg0I8XteoHaEKgvsxpsbWda4IsUut2c187HxywQCvveojow0Dgw/amxtSKVkQ== + version "1.0.3" + resolved "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.3.tgz" + integrity sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg== ts-dedent@^2.0.0, ts-dedent@^2.2.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/ts-dedent/-/ts-dedent-2.2.0.tgz#39e4bd297cd036292ae2394eb3412be63f563bb5" + resolved "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz" integrity sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ== ts-interface-checker@^0.1.9: version "0.1.13" - resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" + resolved "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz" integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== ts-pnp@^1.1.6: version "1.2.0" - resolved "https://registry.yarnpkg.com/ts-pnp/-/ts-pnp-1.2.0.tgz#a500ad084b0798f1c3071af391e65912c86bca92" + resolved "https://registry.npmjs.org/ts-pnp/-/ts-pnp-1.2.0.tgz" integrity sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw== tsconfig-paths-webpack-plugin@^4.0.1: version "4.1.0" - resolved "https://registry.yarnpkg.com/tsconfig-paths-webpack-plugin/-/tsconfig-paths-webpack-plugin-4.1.0.tgz#3c6892c5e7319c146eee1e7302ed9e6f2be4f763" + resolved "https://registry.npmjs.org/tsconfig-paths-webpack-plugin/-/tsconfig-paths-webpack-plugin-4.1.0.tgz" integrity sha512-xWFISjviPydmtmgeUAuXp4N1fky+VCtfhOkDUFIv5ea7p4wuTomI4QTrXvFBX2S4jZsmyTSrStQl+E+4w+RzxA== dependencies: chalk "^4.1.0" @@ -10116,7 +10357,7 @@ tsconfig-paths-webpack-plugin@^4.0.1: tsconfig-paths@^3.14.2: version "3.14.2" - resolved 
"https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" + resolved "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz" integrity sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g== dependencies: "@types/json5" "^0.0.29" @@ -10126,7 +10367,7 @@ tsconfig-paths@^3.14.2: tsconfig-paths@^4.0.0, tsconfig-paths@^4.1.2: version "4.2.0" - resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz#ef78e19039133446d244beac0fd6a1632e2d107c" + resolved "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz" integrity sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg== dependencies: json5 "^2.2.2" @@ -10135,61 +10376,61 @@ tsconfig-paths@^4.0.0, tsconfig-paths@^4.1.2: tslib@^1.13.0, tslib@^1.8.1: version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== tslib@^2.0.0, tslib@^2.0.1, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0, tslib@^2.5.0: version "2.6.2" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + resolved "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== tsutils@^3.21.0: version "3.21.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + resolved "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz" integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== dependencies: tslib "^1.8.1" tty-browserify@^0.0.1: version "0.0.1" - resolved 
"https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.1.tgz#3f05251ee17904dfd0677546670db9651682b811" + resolved "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz" integrity sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw== type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz" integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== dependencies: prelude-ls "^1.2.1" type-fest@^0.16.0: version "0.16.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.16.0.tgz" integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== type-fest@^0.20.2: version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== type-fest@^0.6.0: version "0.6.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz" integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== type-fest@^0.8.1: version "0.8.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz" integrity 
sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== type-fest@^2.14.0, type-fest@^2.19.0, type-fest@~2.19: version "2.19.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz" integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA== type-is@~1.6.18: version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + resolved "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" @@ -10197,7 +10438,7 @@ type-is@~1.6.18: typed-array-buffer@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz#18de3e7ed7974b0a729d3feecb94338d1472cd60" + resolved "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz" integrity sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw== dependencies: call-bind "^1.0.2" @@ -10206,7 +10447,7 @@ typed-array-buffer@^1.0.0: typed-array-byte-length@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz#d787a24a995711611fb2b87a4052799517b230d0" + resolved "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz" integrity sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA== dependencies: call-bind "^1.0.2" @@ -10216,7 +10457,7 @@ typed-array-byte-length@^1.0.0: typed-array-byte-offset@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz#cbbe89b51fdef9cd6aaf07ad4707340abbc4ea0b" + 
resolved "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz" integrity sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg== dependencies: available-typed-arrays "^1.0.5" @@ -10227,7 +10468,7 @@ typed-array-byte-offset@^1.0.0: typed-array-length@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" + resolved "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz" integrity sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng== dependencies: call-bind "^1.0.2" @@ -10236,22 +10477,22 @@ typed-array-length@^1.0.4: typedarray@^0.0.6: version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + resolved "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz" integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== typescript@^5.2.2: version "5.2.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78" + resolved "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz" integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w== uglify-js@^3.1.4: version "3.17.4" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" + resolved "https://registry.npmjs.org/uglify-js/-/uglify-js-3.17.4.tgz" integrity sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g== unbox-primitive@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + resolved 
"https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz" integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== dependencies: call-bind "^1.0.2" @@ -10259,14 +10500,19 @@ unbox-primitive@^1.0.2: has-symbols "^1.0.3" which-boxed-primitive "^1.0.2" +undici-types@~5.25.1: + version "5.25.3" + resolved "https://registry.npmjs.org/undici-types/-/undici-types-5.25.3.tgz" + integrity sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA== + unicode-canonical-property-names-ecmascript@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + resolved "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== unicode-match-property-ecmascript@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + resolved "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz" integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== dependencies: unicode-canonical-property-names-ecmascript "^2.0.0" @@ -10274,29 +10520,29 @@ unicode-match-property-ecmascript@^2.0.0: unicode-match-property-value-ecmascript@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz#cb5fffdcd16a05124f5a4b0bf7c3770208acbbe0" + resolved "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz" 
integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA== unicode-property-aliases-ecmascript@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" + resolved "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz" integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== unique-string@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + resolved "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz" integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== dependencies: crypto-random-string "^2.0.0" unist-util-is@^4.0.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-4.1.0.tgz#976e5f462a7a5de73d94b706bac1b90671b57797" + resolved "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz" integrity sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg== unist-util-visit-parents@^3.0.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz#65a6ce698f78a6b0f56aa0e88f13801886cdaef6" + resolved "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz" integrity sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg== dependencies: "@types/unist" "^2.0.0" @@ -10304,7 +10550,7 @@ unist-util-visit-parents@^3.0.0: unist-util-visit@^2.0.0: version "2.0.3" - resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-2.0.3.tgz#c3703893146df47203bb8a9795af47d7b971208c" + resolved 
"https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz" integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q== dependencies: "@types/unist" "^2.0.0" @@ -10313,82 +10559,87 @@ unist-util-visit@^2.0.0: universalify@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== unplugin@^1.3.1: - version "1.4.0" - resolved "https://registry.yarnpkg.com/unplugin/-/unplugin-1.4.0.tgz#b771373aa1bc664f50a044ee8009bd3a7aa04d85" - integrity sha512-5x4eIEL6WgbzqGtF9UV8VEC/ehKptPXDS6L2b0mv4FRMkJxRtjaJfOWDd6a8+kYbqsjklix7yWP0N3SUepjXcg== + version "1.5.0" + resolved "https://registry.npmjs.org/unplugin/-/unplugin-1.5.0.tgz" + integrity sha512-9ZdRwbh/4gcm1JTOkp9lAkIDrtOyOxgHmY7cjuwI8L/2RTikMcVG25GsZwNAgRuap3iDw2jeq7eoqtAsz5rW3A== dependencies: - acorn "^8.9.0" + acorn "^8.10.0" chokidar "^3.5.3" webpack-sources "^3.2.3" webpack-virtual-modules "^0.5.0" untildify@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" + resolved "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz" integrity sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw== -update-browserslist-db@^1.0.11: - version "1.0.11" - resolved 
"https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940" - integrity sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA== +update-browserslist-db@^1.0.13: + version "1.0.13" + resolved "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz" + integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== dependencies: escalade "^3.1.1" picocolors "^1.0.0" uri-js@^4.2.2: version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz" integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== dependencies: punycode "^2.1.0" url@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/url/-/url-0.11.1.tgz#26f90f615427eca1b9f4d6a28288c147e2302a32" - integrity sha512-rWS3H04/+mzzJkv0eZ7vEDGiQbgquI1fGfOad6zKvgYQi1SzMmhl7c/DdRGxhaWrVH6z0qWITo8rpnxK/RfEhA== + version "0.11.3" + resolved "https://registry.npmjs.org/url/-/url-0.11.3.tgz" + integrity sha512-6hxOLGfZASQK/cijlZnZJTq8OXAkt/3YGfQX45vvMYXpZoo8NdWZcY73K108Jf759lS1Bv/8wXnHDTSz17dSRw== dependencies: punycode "^1.4.1" - qs "^6.11.0" + qs "^6.11.2" use-callback-ref@^1.3.0: version "1.3.0" - resolved "https://registry.yarnpkg.com/use-callback-ref/-/use-callback-ref-1.3.0.tgz#772199899b9c9a50526fedc4993fc7fa1f7e32d5" + resolved "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.0.tgz" integrity sha512-3FT9PRuRdbB9HfXhEq35u4oZkvpJ5kuYbpqhCfmiZyReuRgpnhDlbr2ZEnnuS0RrJAPn6l23xjFg9kpDM+Ms7w== dependencies: tslib "^2.0.0" use-resize-observer@^9.1.0: version "9.1.0" - resolved "https://registry.yarnpkg.com/use-resize-observer/-/use-resize-observer-9.1.0.tgz#14735235cf3268569c1ea468f8a90c5789fc5c6c" + resolved 
"https://registry.npmjs.org/use-resize-observer/-/use-resize-observer-9.1.0.tgz" integrity sha512-R25VqO9Wb3asSD4eqtcxk8sJalvIOYBqS8MNZlpDSQ4l4xMQxC/J7Id9HoTqPq8FwULIn0PVW+OAqF2dyYbjow== dependencies: "@juggle/resize-observer" "^3.3.1" use-sidecar@^1.1.2: version "1.1.2" - resolved "https://registry.yarnpkg.com/use-sidecar/-/use-sidecar-1.1.2.tgz#2f43126ba2d7d7e117aa5855e5d8f0276dfe73c2" + resolved "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz" integrity sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw== dependencies: detect-node-es "^1.1.0" tslib "^2.0.0" +use-sync-external-store@^1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz" + integrity sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA== + util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== -util@^0.12.0, util@^0.12.4: +util@^0.12.4, util@^0.12.5: version "0.12.5" - resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" + resolved "https://registry.npmjs.org/util/-/util-0.12.5.tgz" integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== dependencies: inherits "^2.0.3" @@ -10399,31 +10650,31 @@ util@^0.12.0, util@^0.12.4: utila@~0.4: version "0.4.0" - resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" + resolved "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz" integrity 
sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== utils-merge@1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + resolved "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== uuid@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.0.tgz#592f550650024a38ceb0c562f2f6aa435761efb5" - integrity sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg== + version "9.0.1" + resolved "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz" + integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== v8-to-istanbul@^9.0.0: - version "9.1.0" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz#1b83ed4e397f58c85c266a570fc2558b5feb9265" - integrity sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA== + version "9.1.3" + resolved "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.3.tgz" + integrity sha512-9lDD+EVI2fjFsMWXc6dy5JJzBsVTcQ2fVkfBvncZ6xJWG9wtBhOldG+mHkSL0+V1K/xgZz0JDO5UT5hFwHUghg== dependencies: "@jridgewell/trace-mapping" "^0.3.12" "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" + convert-source-map "^2.0.0" validate-npm-package-license@^3.0.1: version "3.0.4" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + resolved "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" @@ -10431,12 +10682,12 @@ 
validate-npm-package-license@^3.0.1: vary@~1.1.2: version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== victory-vendor@^36.6.8: version "36.6.11" - resolved "https://registry.yarnpkg.com/victory-vendor/-/victory-vendor-36.6.11.tgz#acae770717c2dae541a54929c304ecab5ab6ac2a" + resolved "https://registry.npmjs.org/victory-vendor/-/victory-vendor-36.6.11.tgz" integrity sha512-nT8kCiJp8dQh8g991J/R5w5eE2KnO8EAIP0xocWlh9l2okngMWglOPoMZzJvek8Q1KUc4XE/mJxTZnvOB1sTYg== dependencies: "@types/d3-array" "^3.0.3" @@ -10456,19 +10707,19 @@ victory-vendor@^36.6.8: vm-browserify@^1.1.2: version "1.1.2" - resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" + resolved "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== walker@^1.0.8: version "1.0.8" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + resolved "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz" integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== dependencies: makeerror "1.0.12" watchpack@2.4.0, watchpack@^2.2.0, watchpack@^2.4.0: version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + resolved "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz" integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== dependencies: glob-to-regexp "^0.4.1" @@ -10476,19 +10727,19 @@ watchpack@2.4.0, watchpack@^2.2.0, watchpack@^2.4.0: wcwidth@^1.0.1: version "1.0.1" - 
resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + resolved "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz" integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== dependencies: defaults "^1.0.3" webidl-conversions@^3.0.0: version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz" integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== webpack-dev-middleware@^6.1.1: version "6.1.1" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-6.1.1.tgz#6bbc257ec83ae15522de7a62f995630efde7cc3d" + resolved "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.1.tgz" integrity sha512-y51HrHaFeeWir0YO4f0g+9GwZawuigzcAdRNon6jErXy/SqV/+O6eaVAzDqE6t3e3NpGeR5CS+cCDaTC+V3yEQ== dependencies: colorette "^2.0.10" @@ -10499,7 +10750,7 @@ webpack-dev-middleware@^6.1.1: webpack-hot-middleware@^2.25.1: version "2.25.4" - resolved "https://registry.yarnpkg.com/webpack-hot-middleware/-/webpack-hot-middleware-2.25.4.tgz#d8bc9e9cb664fc3105c8e83d2b9ed436bee4e193" + resolved "https://registry.npmjs.org/webpack-hot-middleware/-/webpack-hot-middleware-2.25.4.tgz" integrity sha512-IRmTspuHM06aZh98OhBJtqLpeWFM8FXJS5UYpKYxCJzyFoyWj1w6VGFfomZU7OPA55dMLrQK0pRT1eQ3PACr4w== dependencies: ansi-html-community "0.0.8" @@ -10508,17 +10759,17 @@ webpack-hot-middleware@^2.25.1: webpack-sources@^3.2.3: version "3.2.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz" integrity 
sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack-virtual-modules@^0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/webpack-virtual-modules/-/webpack-virtual-modules-0.5.0.tgz#362f14738a56dae107937ab98ea7062e8bdd3b6c" + resolved "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.5.0.tgz" integrity sha512-kyDivFZ7ZM0BVOUteVbDFhlRt7Ah/CSPwJdi8hBpkK7QLumUqdLtVfm/PX/hkcnrvr0i77fO5+TjZ94Pe+C9iw== webpack@5, webpack@^5.76.0: version "5.88.2" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.88.2.tgz#f62b4b842f1c6ff580f3fcb2ed4f0b579f4c210e" + resolved "https://registry.npmjs.org/webpack/-/webpack-5.88.2.tgz" integrity sha512-JmcgNZ1iKj+aiR0OvTYtWQqJwq37Pf683dY9bVORwVbUrDhLhdn/PlO2sHsFHPkj7sHNQF3JwaAkp49V+Sq1tQ== dependencies: "@types/eslint-scope" "^3.7.3" @@ -10548,7 +10799,7 @@ webpack@5, webpack@^5.76.0: whatwg-url@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz" integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== dependencies: tr46 "~0.0.3" @@ -10556,7 +10807,7 @@ whatwg-url@^5.0.0: which-boxed-primitive@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz" integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== dependencies: is-bigint "^1.0.1" @@ -10567,7 +10818,7 @@ which-boxed-primitive@^1.0.2: which-builtin-type@^1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/which-builtin-type/-/which-builtin-type-1.1.3.tgz#b1b8443707cc58b6e9bf98d32110ff0c2cbd029b" + resolved 
"https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.3.tgz" integrity sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw== dependencies: function.prototype.name "^1.1.5" @@ -10585,7 +10836,7 @@ which-builtin-type@^1.1.3: which-collection@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.1.tgz#70eab71ebbbd2aefaf32f917082fc62cdcb70906" + resolved "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz" integrity sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A== dependencies: is-map "^2.0.1" @@ -10593,9 +10844,9 @@ which-collection@^1.0.1: is-weakmap "^2.0.1" is-weakset "^2.0.1" -which-typed-array@^1.1.10, which-typed-array@^1.1.11, which-typed-array@^1.1.2, which-typed-array@^1.1.9: +which-typed-array@^1.1.11, which-typed-array@^1.1.2, which-typed-array@^1.1.9: version "1.1.11" - resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.11.tgz#99d691f23c72aab6768680805a271b69761ed61a" + resolved "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.11.tgz" integrity sha512-qe9UWWpkeG5yzZ0tNYxDmd7vo58HDBc39mZ0xWWpolAGADdFOzkfamWLDxkOWcvHQKVmdTyQdLD4NOfjLWTKew== dependencies: available-typed-arrays "^1.0.5" @@ -10606,20 +10857,19 @@ which-typed-array@^1.1.10, which-typed-array@^1.1.11, which-typed-array@^1.1.2, which@^2.0.1: version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" wordwrap@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + resolved "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz" 
integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: - name wrap-ansi-cjs version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== dependencies: ansi-styles "^4.0.0" @@ -10628,7 +10878,7 @@ wordwrap@^1.0.0: wrap-ansi@^8.1.0: version "8.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz" integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== dependencies: ansi-styles "^6.1.0" @@ -10637,12 +10887,12 @@ wrap-ansi@^8.1.0: wrappy@1: version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== write-file-atomic@^2.3.0: version "2.4.3" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.4.3.tgz#1fd2e9ae1df3e75b8d8c367443c692d4ca81f481" + resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz" integrity sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ== dependencies: graceful-fs "^4.1.11" @@ -10651,7 +10901,7 @@ write-file-atomic@^2.3.0: write-file-atomic@^4.0.2: version "4.0.2" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" + resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz" 
integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== dependencies: imurmurhash "^0.1.4" @@ -10659,59 +10909,59 @@ write-file-atomic@^4.0.2: ws@^6.1.0: version "6.2.2" - resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.2.tgz#dd5cdbd57a9979916097652d78f1cc5faea0c32e" + resolved "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz" integrity sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw== dependencies: async-limiter "~1.0.0" ws@^8.2.3: - version "8.13.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" - integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== + version "8.14.2" + resolved "https://registry.npmjs.org/ws/-/ws-8.14.2.tgz" + integrity sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g== xtend@^4.0.2, xtend@~4.0.1: version "4.0.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== y18n@^5.0.5: version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + resolved "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz" integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== yallist@^3.0.2: version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + resolved "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yallist@^4.0.0: version "4.0.0" - resolved 
"https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== yaml@^1.10.0: version "1.10.2" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== yaml@^2.1.1: version "2.3.2" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.2.tgz#f522db4313c671a0ca963a75670f1c12ea909144" + resolved "https://registry.npmjs.org/yaml/-/yaml-2.3.2.tgz" integrity sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg== yargs-parser@^20.2.2, yargs-parser@^20.2.9: version "20.2.9" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz" integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== yargs-parser@^21.1.1: version "21.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz" integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== yargs@^16.2.0: version "16.2.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + resolved "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz" integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== dependencies: cliui "^7.0.2" @@ -10724,7 +10974,7 @@ yargs@^16.2.0: yargs@^17.7.2: version 
"17.7.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + resolved "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== dependencies: cliui "^8.0.1" @@ -10737,7 +10987,7 @@ yargs@^17.7.2: yauzl@^2.10.0: version "2.10.0" - resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" + resolved "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz" integrity sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g== dependencies: buffer-crc32 "~0.2.3" @@ -10745,15 +10995,15 @@ yauzl@^2.10.0: yocto-queue@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== yocto-queue@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.0.0.tgz#7f816433fb2cbc511ec8bf7d263c3b58a1a3c251" + resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz" integrity sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g== -zod@3.21.4: - version "3.21.4" - resolved "https://registry.yarnpkg.com/zod/-/zod-3.21.4.tgz#10882231d992519f0a10b5dd58a38c9dabbb64db" - integrity sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw== +zod@^3.22.4: + version "3.22.4" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff" + integrity sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==