diff --git a/.editorconfig b/.editorconfig index 7bb8092..fd277fc 100644 --- a/.editorconfig +++ b/.editorconfig @@ -25,8 +25,6 @@ indent_size = 2 trim_trailing_whitespace = false insert_final_newline = true -eclint_indent_style = unset - [Dockerfile] indent_size = 4 @@ -73,3 +71,6 @@ indent_size = unset [**/LICENSE-3RD-PARTY.md] insert_final_newline = unset + +[**/docs.go] +indent_style = unset diff --git a/.github/workflows/libs/pr.yaml b/.github/workflows/libs/pr.yaml deleted file mode 100644 index 17e8474..0000000 --- a/.github/workflows/libs/pr.yaml +++ /dev/null @@ -1,198 +0,0 @@ -name: PR CI - -on: - pull_request: - branches: [ develop, main ] - -env: - GO111MODULE: on - GO_VERSION: 1.19 - NODE_VERSION: 22 - LINT_ARGS: -v --timeout 5m0s --out-${NO_FUTURE}format colored-line-number - TEST_ARGS: -v -short -coverprofile=coverage.out - TEST_PATH: ./... - -jobs: - commitlint: - name: Commit Lint Job - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ env.NODE_VERSION }} - - - name: Install commitlint - run: | - npm install --save-dev @commitlint/{cli,config-conventional} - - - name: Validate PR commits with commitlint - run: npx commitlint --from ${{ github.event.pull_request.head.sha }}~${{ github.event.pull_request.commits }} --to ${{ github.event.pull_request.head.sha }} --verbose - - editor_config_job: - name: Editor Config Job - runs-on: ubuntu-latest - steps: - - name: Check out repository - uses: actions/checkout@v4 - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ env.NODE_VERSION }} - - - name: Editor Config - run: | - npm install --save-dev editorconfig-checker - ./node_modules/.bin/editorconfig-checker - - lint_job: - name: Go Lint Job - if: ${{ ! contains(github.head_ref, 'release-please--branches--main') }} - runs-on: ubuntu-latest - steps: - - name: Check out code into the Go module directory - uses: actions/checkout@v4 - - - name: Set up Go - uses: actions/setup-go@v3 - with: - go-version: ${{ env.GO_VERSION }} - - - name: Tidy - run: go mod tidy - - - name: Go Lint - uses: golangci/golangci-lint-action@v3 - with: - version: v1.50.0 - args: ${{ env.LINT_ARGS }} - skip-pkg-cache: true - skip-build-cache: true - - licenses_check: - name: 3rd Party Licenses Check - if: ${{ github.event.head_commit.committer.name != 'github-actions[bot]' || ! 
contains(github.head_ref, 'release-please--branches--main') }} - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Retrieve PR and branch info - run: | - PR_TITLE="chore: update 3rd-party licenses (#${{ github.event.number }})" - - PR_INFO=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ - "https://api.github.com/repos/${{ github.repository }}/pulls?state=open" | \ - jq --arg TITLE "$PR_TITLE" '.[] | select(.title == $TITLE) | { number: .number, head: .head.ref }') - - echo "PR_INFO=$PR_INFO" - - PR_NUMBER=$(echo "$PR_INFO" | jq -r .number) - BRANCH_NAME=$(echo "$PR_INFO" | jq -r .head) - - echo "PR_TITLE=$PR_TITLE" >> $GITHUB_ENV - echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV - echo "BRANCH_NAME=${BRANCH_NAME:-update-third-party-licenses-${{ github.run_id }}}" >> $GITHUB_ENV - echo "PARENT_BRANCH=${{ github.head_ref }}" >> $GITHUB_ENV - - git config user.name "github-actions[bot]" - git config user.email "github-actions[bot]@users.noreply.github.com" - - # if PR already exists - - name: Pull latest changes to existing branch - if: env.PR_NUMBER != '' - run: | - git fetch origin - git switch ${{ env.BRANCH_NAME }} - git pull origin ${{ env.PARENT_BRANCH }} --no-rebase - - - name: Set up Go - uses: actions/setup-go@v4 - with: - go-version: ${{ env.GO_VERSION }} - - - name: Tidy - run: go mod tidy - - - name: Vendor - run: go mod vendor - - - name: Install Go licenses - run: go install github.com/google/go-licenses@v1.4.0 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.9' - cache: 'pip' - cache-dependency-path: '.github/workflows/requirements.txt' - - - name: Install Python dependencies - run: | - python -m pip install --upgrade pip - pip install -r .github/workflows/requirements.txt - - - name: Run license check - run: | - go-licenses report ./... 2>/dev/null | python .github/workflows/generate_and_check_licenses.py - - - name: Check and Commit changes - run: | - if [ -d "./licenses" ]; then - git add ./licenses - fi - - if ! git diff-index --quiet HEAD; then - git commit -m "chore: update third party licenses" - echo "changes_committed=true" >> $GITHUB_ENV - else - echo "changes_committed=false" >> $GITHUB_ENV - fi - - # This will fail if the incorrect go.mod or go.sum is committed - - name: Push changes - if: env.changes_committed == 'true' - run: | - git diff - - if [[ -z "$PR_NUMBER" ]]; then - git switch -c ${{ env.BRANCH_NAME }} - fi - git push origin HEAD - - - name: Create new PR - if: env.changes_committed == 'true' && env.PR_NUMBER == '' - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - echo "Creating a new PR..." - gh pr create --base "${{ env.PARENT_BRANCH }}" --head "update-third-party-licenses-${{ github.run_id }}" --title "${{ env.PR_TITLE }}" --body "This is an automated PR that updates the list of 3rd party licenses." - - test_job: - name: Test Job - if: ${{ github.base_ref == 'main' && ! 
contains(github.head_ref, 'release-please--branches--main') }} - runs-on: ubuntu-latest - steps: - - name: Check out code into the Go module directory - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Set up Go - uses: actions/setup-go@v3 - with: - go-version: ${{ env.GO_VERSION }} - - - name: Tidy - run: go mod tidy - - - name: Go Test - run: go test ${{ env.TEST_ARGS }} ${{ env.TEST_PATH }} diff --git a/.github/workflows/libs/push.yaml b/.github/workflows/libs/push.yaml deleted file mode 100644 index 6c8f3a4..0000000 --- a/.github/workflows/libs/push.yaml +++ /dev/null @@ -1,90 +0,0 @@ -name: PUSH CI - -on: - push: - branches: [ develop, main ] - -env: - GO_VERSION: 1.21 - -jobs: - commitlint: - name: Commit Lint Job - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: '22' - - - name: Install commitlint - run: | - npm install --save-dev @commitlint/{cli,config-conventional} - - - name: Validate current commit (last commit) with commitlint - run: npx commitlint --last --verbose - - license_headers: - name: Add License Headers - if: github.event.head_commit.committer.name != 'github-actions[bot]' - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up Go environment - uses: actions/setup-go@v4 - with: - go-version: ${{ env.GO_VERSION }} - - - name: Install NWA tool - run: go install github.com/B1NARY-GR0UP/nwa@latest - - - name: Add missing license headers - run: nwa add -c "Syntio Ltd." # WRITE FOLDER PATHS FOR ALL FOLDERS THAT CONTAIN FILES THAT REQUIRE HEADERS eg. ./persistor - - - name: Check and Commit changes - id: check_commit - run: | - git config user.name "github-actions[bot]" - git config user.email "github-actions[bot]@users.noreply.github.com" - git add . - if ! git diff-index --quiet HEAD; then - git commit -m "style: add license headers" - echo "changes_committed=true" >> $GITHUB_ENV - else - echo "changes_committed=false" >> $GITHUB_ENV - echo "All necessary headers present." - fi - - - name: Create a new branch for the PR - if: env.changes_committed == 'true' - run: | - git checkout -b "add-license-headers-${{ github.run_id }}" - git push origin HEAD - - - name: Create Pull Request - if: env.changes_committed == 'true' - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - gh pr create --base ${{ github.ref_name }} --head "add-license-headers-${{ github.run_id }}" --title "style: add license headers" --body "This PR adds license headers to the affected files. 
Recommendation: Merge this PR using the rebase-merge method" - - release-please: - if: github.ref_name == 'main' - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - steps: - - uses: googleapis/release-please-action@v4 - with: - token: ${{ secrets.RELEASE_PLEASE_TOKEN }} - release-type: simple diff --git a/.github/workflows/products/pr.yaml b/.github/workflows/pr.yaml similarity index 77% rename from .github/workflows/products/pr.yaml rename to .github/workflows/pr.yaml index 7b439b5..8771fd1 100644 --- a/.github/workflows/products/pr.yaml +++ b/.github/workflows/pr.yaml @@ -6,9 +6,9 @@ on: env: GO111MODULE: on - GO_VERSION: 1.19 + GO_VERSION: 1.21 NODE_VERSION: 22 - LINT_ARGS: -v --skip-files .*_test.go --timeout 5m0s --out-${NO_FUTURE}format colored-line-number + LINT_ARGS: -v --skip-files .*_test.go --timeout 5m0s --out-format colored-line-number GOLANGCI_LINT_VERSION: v1.50 TEST_ARGS: -v -short -coverprofile=coverage.out @@ -39,9 +39,16 @@ jobs: if: ${{ ! contains(github.head_ref, 'release-please--branches--main') }} runs-on: ubuntu-latest strategy: + fail-fast: false matrix: - dockerfile: # Add dockerfile paths eg. './persistor/docker/persistor/Dockerfile' - - + dockerfile: + - ./validator/docker/csv-validator/Dockerfile + - ./validator/docker/validator/Dockerfile + - ./validator/docker/xml-validator/Dockerfile + - ./registry/docker/compatibility-checker/Dockerfile + - ./registry/docker/initdb/Dockerfile + - ./registry/docker/registry/Dockerfile + - ./registry/docker/validity-checker/Dockerfile steps: - name: Check out code @@ -70,25 +77,17 @@ jobs: npm install --save-dev editorconfig-checker ./node_modules/.bin/editorconfig-checker - # Ensures that java and python code adhere to coding styles and conventions - java_and_python_lint_job: - name: Java and Python lint - if: ${{ ! contains(github.head_ref, 'release-please--branches--main') }} - uses: github/super-linter@v4 - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - VALIDATE_PYTHON_BLACK: true - VALIDATE_JAVA: true - # Ensures that the code adheres to the lint checks defined in .golangci.yaml. lint_job: name: Go lint job for all components if: ${{ ! contains(github.head_ref, 'release-please--branches--main') }} runs-on: ubuntu-latest strategy: + fail-fast: false matrix: component: - - # Add file paths eg. 
'./persistor + - ./registry + - ./validator steps: - name: Check out code into the Go module directory uses: actions/checkout@v4 @@ -108,22 +107,23 @@ jobs: go-version: ${{ env.GO_VERSION }} # Add all component folders for monorepos cache-dependency-path: | - /go.sum + ${{ matrix.component }}/go.sum - name: Tidy Go mod for ${{ matrix.component }} if: steps.check_changed_files.outputs.any_changed == 'true' - working-directory: ${{ matrix.component }} - run: go mod tidy + run: | + cd ${{ matrix.component }} + go mod tidy - name: Run Go Lint for ${{ matrix.component }} if: steps.check_changed_files.outputs.any_changed == 'true' - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v6 with: - version: v1.50.0 - args: ${{ env.LINT_ARGS }} - skip-pkg-cache: true - skip-build-cache: true - working-directory: ${{ matrix.component }} + version: v1.61.0 + args: ${{env.LINT_ARGS}} + skip-cache: true + skip-save-cache: true + working-directory: ${{ matrix.component }} licenses_check: name: 3rd Party Licenses Check @@ -171,7 +171,8 @@ jobs: go-version: ${{ env.GO_VERSION }} # Add all component folders for monorepos cache-dependency-path: | - /go.sum + ./registry/go.sum + ./validator/go.sum - name: Install Go licenses run: go install github.com/google/go-licenses@v1.4.0 @@ -189,18 +190,20 @@ jobs: pip install -r .github/workflows/requirements.txt - name: Run go mod tidy, go mod vendor & license check - # switch to each component folder first e.g. "./persistor" + # switch to each component folder first run: | - cd + cd ./registry + go mod tidy + go mod vendor + go-licenses report ./... 2>/dev/null | python ../.github/workflows/generate_and_check_licenses.py + cd ../validator go mod tidy go mod vendor - go-licenses report ./... 2>/dev/null | python .github/workflows/generate_and_check_licenses.py - cd ../ + go-licenses report ./... 2>/dev/null | python ../.github/workflows/generate_and_check_licenses.py - name: Check and Commit changes - # add licenses for each component run: | - git add ./persistor/licenses ./indexer-api/licenses ./resubmitter-api/licenses + git add ./registry/licenses ./validator/licenses if ! git diff-index --quiet HEAD; then git commit -m "chore: update third party licenses" @@ -237,7 +240,8 @@ jobs: strategy: matrix: component: - - # Add file paths eg. './persistor + - ./registry + - ./validator steps: - name: Check out code into the Go module directory @@ -249,7 +253,7 @@ jobs: go-version: ${{ env.GO_VERSION }} # Add all component folders for monorepos cache-dependency-path: | - /go.sum + ${{ matrix.component }}/go.sum - name: Tidy Go mod for ${{ matrix.component }} working-directory: ${{ matrix.component }} @@ -266,10 +270,23 @@ jobs: runs-on: ubuntu-latest strategy: + fail-fast: false matrix: component: - - dockerfile-path: # Add dockerfile path eg. './persistor/docker/persistor/Dockerfile' - image-name: # Add image name eg. 
'persistor-core' + - dockerfile-path: ./validator/docker/csv-validator/Dockerfile + image-name: schema-registry-csv-val + - dockerfile-path: ./validator/docker/validator/Dockerfile + image-name: schema-registry-validator + - dockerfile-path: ./validator/docker/xml-validator/Dockerfile + image-name: schema-registry-xml-val + - dockerfile-path: ./registry/docker/compatibility-checker/Dockerfile + image-name: schema-registry-compatibility + - dockerfile-path: ./registry/docker/initdb/Dockerfile + image-name: schema-registry-initdb + - dockerfile-path: ./registry/docker/registry/Dockerfile + image-name: schema-registry-api + - dockerfile-path: ./registry/docker/validity-checker/Dockerfile + image-name: schema-registry-validity steps: - name: Check out code diff --git a/.github/workflows/products/push.yaml b/.github/workflows/push.yaml similarity index 81% rename from .github/workflows/products/push.yaml rename to .github/workflows/push.yaml index b3e3e5f..e721192 100644 --- a/.github/workflows/products/push.yaml +++ b/.github/workflows/push.yaml @@ -43,12 +43,16 @@ jobs: uses: actions/setup-go@v4 with: go-version: ${{ env.GO_VERSION }} + # Add all component folders for monorepos + cache-dependency-path: | + ./registry/go.sum + ./validator/go.sum - name: Install NWA tool run: go install github.com/B1NARY-GR0UP/nwa@latest - name: Add missing license headers - run: nwa add -c "Syntio Ltd." # WRITE FOLDER PATHS FOR ALL FOLDERS THAT CONTAIN FILES THAT REQUIRE HEADERS eg. ./persistor + run: nwa add -c "Syntio Ltd." ./registry ./validator - name: Check and Commit changes id: check_commit @@ -87,8 +91,20 @@ jobs: strategy: matrix: component: - - dockerfile-path: # Add dockerfile path eg. './persistor/docker/persistor/Dockerfile' - image-name: # Add image name eg. 'persistor-core' + - dockerfile-path: ./validator/docker/csv-validator/Dockerfile + image-name: schema-registry-csv-val + - dockerfile-path: ./validator/docker/validator/Dockerfile + image-name: schema-registry-validator + - dockerfile-path: ./validator/docker/xml-validator/Dockerfile + image-name: schema-registry-xml-val + - dockerfile-path: ./registry/docker/compatibility-checker/Dockerfile + image-name: schema-registry-compatibility + - dockerfile-path: ./registry/docker/initdb/Dockerfile + image-name: schema-registry-initdb + - dockerfile-path: ./registry/docker/registry/Dockerfile + image-name: schema-registry-api + - dockerfile-path: ./registry/docker/validity-checker/Dockerfile + image-name: schema-registry-validity steps: - name: Check out code diff --git a/.golangci.yaml b/.golangci.yaml index 7e64123..8d22db3 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -2,59 +2,12 @@ # with their default values. # options for analysis running run: - # The default concurrency value is the number of available CPU. - concurrency: 4 - # Timeout for analysis, e.g. 30s, 5m. - # Default: 1m - timeout: 5m - # Exit code when at least one issue was found. - # Default: 1 + # default concurrency is a available CPU number issues-exit-code: 1 - # Include test files or not. - # Default: true + # include test files or not, default is true tests: true - # List of build tags, all linters use it. - # Default: []. - build-tags: - - mytag - # Which dirs to skip: issues from them won't be reported. - # Can use regexp here: `generated.*`, regexp is applied on full path. - # Default value is empty list, - # but default dirs are skipped independently of this option's value (see skip-dirs-use-default). 
- # "/" will be replaced by current OS file path separator to properly work on Windows. - skip-dirs: [ ] - # Enables skipping of directories: - # - vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ - # Default: true - skip-dirs-use-default: true - # Which files to skip: they will be analyzed, but issues from them won't be reported. - # Default value is empty list, - # but there is no need to include all autogenerated files, - # we confidently recognize autogenerated files. - # If it's not please let us know. - # "/" will be replaced by current OS file path separator to properly work on Windows. - skip-files: [ ] - # If set we pass it to "go list -mod={option}". From "go help modules": - # If invoked with -mod=readonly, the go command is disallowed from the implicit - # automatic updating of go.mod described above. Instead, it fails when any changes - # to go.mod are needed. This setting is most useful to check that go.mod does - # not need updates, such as in a continuous integration and testing system. - # If invoked with -mod=vendor, the go command assumes that the vendor - # directory holds the correct copies of dependencies and ignores - # the dependency descriptions in go.mod. - # - # Allowed values: readonly|vendor|mod - # By default, it isn't set. - # modules-download-mode: - # Allow multiple parallel golangci-lint instances running. - # If false (default) - golangci-lint acquires file lock on start. - allow-parallel-runners: false - # Define the Go version limit. - # Mainly related to generics support since go1.18. - # Default: use Go version from the go.mod file, fallback on the env var `GOVERSION`, fallback on 1.18 - # go: 'GOVERSION' -# output configuration options +# output configuration options output: # colored-line-number|line-number|json|tab|checkstyle|code-climate, default is "colored-line-number" format: colored-line-number @@ -66,361 +19,62 @@ output: uniq-by-line: true # add a prefix to the output file references; default is no prefix path-prefix: "" - # Sort results by: filepath, line and column. 
- sort-results: false # all available settings of specific linters - -linters: - disable-all: true - enable: - - asasalint - - asciicheck - - bidichk - - bodyclose - - containedctx - - contextcheck - # - cyclop - - decorder - - depguard - - dogsled - # - dupl - - durationcheck - - errchkjson - - errcheck - - errname - - errorlint - - execinquery - - exhaustive - # - exhaustruct - - exportloopref - - forcetypeassert - # - funlen - - gosimple - - govet - - gci - # - gochecknoglobals - - gochecknoinits - # - gocognit - - goconst - # - gocritic - # - gocyclo - - godot - - godox - - goerr113 - - gofumpt - - goheader - # - gomnd - - gomoddirectives - - gomodguard - - goprintffuncname - - gosec - - grouper - - ineffassign - - importas - # - ireturn - # - lll - # - maintidx - - makezero - - misspell - - nakedret - # - nestif - - nilerr - - nilnil - - nlreturn - - noctx - # - nolintlint - - nonamedreturns - - nosprintfhostport - # - paralleltest - - prealloc - - predeclared - - promlinter - # - revive - - rowserrcheck - - sqlclosecheck - - stylecheck - - staticcheck - - tagliatelle - - tenv - - testpackage - - thelper - - tparallel - - typecheck - - unconvert - - unparam - - unused - - varnamelen - - wastedassign - - whitespace - # - wrapcheck - - wsl - presets: - - bugs - - unused - # Run only fast linters from enabled linters set (first run won't be fast) - # Default: false - fast: false - linters-settings: - asasalint: - # to specify a set of function names to exclude - # the values are merged with the builtin exclusions - # the builtin exclusions can be disabled by setting `use-builtin-exclusions` to `false` - # default: ["^(fmt|log|logger|t|)\.(Print|Fprint|Sprint|Fatal|Panic|Error|Warn|Warning|Info|Debug|Log)(|f|ln)$"] - exclude: - - Append - - \.Wrapf - # to enable/disable the asasalint builtin exclusions of function names - # see the default value of `exclude` to get the builtin exclusions, true by default - use-builtin-exclusions: true - # ignore *_test.go files, false by default - ignore-test: true - bidichk: - # the following configurations check for all mentioned invisible unicode runes. - # all runes are enabled by default. 
- left-to-right-embedding: true - right-to-left-embedding: true - pop-directional-formatting: true - left-to-right-override: true - right-to-left-override: true - left-to-right-isolate: true - right-to-left-isolate: true - first-strong-isolate: true - pop-directional-isolate: true - cyclop: - # the maximal code complexity to report, 10 by default - max-complexity: 10 - # the maximal average package complexity, 0.0 by default - # if it's higher than 0.0 (float) the check is enabled - package-average: 0.0 - # should ignore tests, false by default - skip-tests: true - decorder: - # required order of `type`, `const`, `var` and `func` declarations inside a file - # default: types before constants before variables before functions - dec-order: - - type - - const - - var - - func - # if true, order of declarations is not checked at all, true (disabled) by default - disable-dec-order-check: true - # if true, `init` func can be anywhere in file (does not have to be declared before all other functions), true (disabled) by default - disable-init-func-first-check: false - # if true, multiple global `type`, `const` and `var` declarations are allowed, true (disabled) by default - disable-dec-num-check: true depguard: - # kind of list is passed in - # allowed values: allowlist|denylist, default: denylist list-type: blacklist - # check the list against standard lib, false by default include-go-root: false - # a list of packages for the list type specified - # can accept both string prefixes and string glob patterns - # default: [] - packages: [ ] - # a list of packages for the list type specifyed - # specify an error message to output when a denied package is used - # default: [] - packages-with-error-message: [ ] - # specify rules by which the linter ignores certain files for consideration - # can accept both string prefixes and string glob patterns - # the ! character in front of the rule is a special character - # which signals that the linter should negate the rule - # this allows for more precise control, but it is only available for glob patterns - # default: [] - ignore-file-rules: [ ] - # create additional guards that follow the same configuration pattern - # results from all guards are aggregated together - additional-guards: [ ] - # for example - # - list-type: denylist - # include-go-root: false - # packages: - # - github.com/stretchr/testify - # specify rules by which the linter ignores certain files for consideration - # ignore-file-rules: - # - "**/*_test.go" - # - "**/mock/**/*.go" + packages: + - github.com/sirupsen/logrus + packages-with-error-message: + # specify an error message to output when a blacklisted package is used + - github.com/sirupsen/logrus: "logging is allowed only by logutils.Log" dogsled: - # checks assignments with too many blank identifiers; 2 by default + # checks assignments with too many blank identifiers; default is 2 max-blank-identifiers: 2 dupl: # tokens count to trigger issue, 150 by default threshold: 100 errcheck: - # report about not checking of errors in type assertions: `a := b.(MyStruct)` - # false by default: such cases aren't reported by default - check-type-assertions: true - # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)` - # false by default: such cases aren't reported by default + # report about not checking of errors in type assertions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. 
+ check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. check-blank: false - # path to a file containing a list of functions to exclude from checking - # see https://github.com/kisielk/errcheck#excluding-functions for details - disable-default-exclusions: true - # list of functions to exclude from checking, where each entry is a single function to exclude + + # list of functions to exclude from checking, where each entry is a single function to exclude. # see https://github.com/kisielk/errcheck#excluding-functions for details - exclude-functions: [ ] - errchkjson: - # with check-error-free-encoding set to true, errchkjson does warn about errors - # from json encoding functions that are safe to be ignored, - # because they are not possible to happen - # - # if check-error-free-encoding is set to true and errcheck linter is enabled, - # it is recommended to add the following exceptions to prevent from false positives: - # - # linters-settings: - # errcheck: - # exclude-functions: - # - encoding/json.Marshal - # - encoding/json.MarshalIndent - # - # false by default - check-error-free-encoding: false - # issue on struct encoding that doesn't have exported fields, false by default - report-no-exported: false - errorlint: - # check whether fmt.Errorf uses the %w verb for formatting errors - # see the https://github.com/polyfloyd/go-errorlint for caveats, true by default - errorf: true - # check for plain type assertions and type switches, true by default - asserts: true - # check for plain error comparisons, true by default - comparison: true + exclude-functions: + - io/os.ReadFile + - io.Copy(*bytes.Buffer) + - io.Copy(os.Stdout) + - (io.ReadCloser).Close exhaustive: - # check switch statements in generated files also, false by default + # check switch statements in generated files also check-generated: false # indicates that switch statements are to be considered exhaustive if a # 'default' case is present, even if all enum members aren't listed in the # switch default-signifies-exhaustive: false - # enum members matching the supplied regex do not have to be listed in - # switch statements to satisfy exhaustiveness - # default: "" - ignore-enum-members: "" - # consider enums only in package scopes, not in inner scopes, false by default - package-scope-only: false funlen: - # checks the number of lines in a function. - # if lower than 0, disable the check, 60 by default - lines: 100 - # checks the number of statements in a function. - # if lower than 0, disable the check, 40 by default - statements: 50 - gci: - # section configuration to compare against - # section names are case-insensitive and may contain parameters in () - # the default order of sections is `standard > default > custom > blank > dot`, - # if `custom-order` is `true`, it follows the order of `sections` option - # custom section: groups all imports with the specified Prefix - # blank section: contains all blank imports. This section is not present unless explicitly enabled - # dot section: contains all dot imports. 
This section is not present unless explicitly enabled - # default: ["standard", "default"] - sections: - - standard # Standard section: captures all standard packages - - default # Default section: contains all imports that could not be matched to another section type - - prefix(github.com/dataphos) - # skip generated files, true by default - skip-generated: true - # enable custom order of sections - # if `true`, make the section order the same as the order of `sections`, false by default - custom-order: false + lines: 60 + statements: 40 gocognit: # minimal code complexity to report, 30 by default (but we recommend 10-20) - min-complexity: 15 + min-complexity: 10 goconst: # minimal length of string constant, 3 by default min-len: 3 # minimal occurrences count to trigger, 3 by default min-occurrences: 3 - # ignore test files, false by default - ignore-tests: true - # look for existing constants matching the values, true by default - match-constant: true - # search also for duplicated numbers, false by default - numbers: false - # minimum value, only works with goconst.numbers, 3 by default - min: 3 - # maximum value, only works with goconst.numbers, 3 by default - max: 3 - # ignore when constant is not used as function argument, true by default - ignore-calls: true - gocritic: - # which checks should be enabled; can't be combined with 'disabled-checks' - # see https://go-critic.github.io/overview#checks-overview - # to check which checks are enabled run `GL_DEBUG=gocritic golangci-lint run` - # by default, list of stable checks is used - enabled-checks: - - nestingReduce - - unnamedResult - - ruleguard - - truncateCmp - # which checks should be disabled; can't be combined with 'enabled-checks' - # default: [] - disabled-checks: [ ] - # enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint run` to see all tags and checks - # see https://github.com/go-critic/go-critic#usage -> section "Tags" - # default: [] - enabled-tags: [ ] - disabled-tags: [ ] - # settings passed to gocritic. - # the settings key is the name of a supported gocritic checker. - # the list of supported checkers can be find in https://go-critic.github.io/overview. - settings: - # must be valid enabled check name. 
- nestingReduce: - # min number of statements inside a branch to trigger a warning, 5 by default - bodyWidth: 5 - # whether to check test functions, true by default - # skipTestFuncs: true - ruleguard: - # enable debug to identify which 'Where' condition was rejected - # the value of the parameter is the name of a function in a ruleguard file - # - # when a rule is evaluated: - # If: - # the Match() clause is accepted; and - # one of the conditions in the Where() clause is rejected, - # Then: - # ruleguard prints the specific Where() condition that was rejected - # - # The flag is passed to the ruleguard 'debug-group' argument - # Default: "" - debug: "" - # determines the behavior when an error occurs while parsing ruleguard files - # if flag is not set, log error and skip rule files that contain an error - # if flag is set, the value must be a comma-separated list of error conditions - # - 'all': fail on all errors - # - 'import': ruleguard rule imports a package that cannot be found - # - 'dsl': gorule file does not comply with the ruleguard DSL - # default: "" - failOn: "" - # comma-separated list of file paths containing ruleguard rules - # if a path is relative, it is relative to the directory where the golangci-lint command is executed - # the special '${configDir}' variable is substituted with the absolute directory containing the golangci config file - # glob patterns such as 'rules-*.go' may be specified - # default: "" - rules: "" - # comma-separated list of enabled groups or skip empty to enable everything - # tags can be defined with # character prefix - # default: "" - enable: "" - # comma-separated list of disabled groups or skip empty to enable everything - # tags can be defined with # character prefix - # default: "" - disable: "" - truncateCmp: - # whether to skip int/uint/uintptr types, true by deafult - skipArchDependent: true - unnamedResult: - # whether to check exported functions, false by default - checkExported: false gocyclo: # minimal code complexity to report, 30 by default (but we recommend 10-20) min-complexity: 10 godot: # check all top-level comments, not only declarations - check-all: true + check-all: false godox: # report any comments starting with keywords, this is useful for TODO or FIXME comments that # might be left in the code accidentally and should be resolved before merging @@ -429,599 +83,134 @@ linters-settings: - OPTIMIZE # marks code that should be optimized before merging - HACK # marks hack-arounds that should be removed before merging gofmt: - # Simplify code: gofmt with `-s` option. - # Default: true + # simplify code: gofmt with `-s` option, true by default simplify: true - # Apply the rewrite rules to the source before reformatting. 
- # https://pkg.go.dev/cmd/gofmt - # Default: [] - rewrite-rules: [ ] - gofumpt: - # module path which contains the source code being formatted - # default: "" - module-path: "" - # choose whether to use the extra rules - # false by default - extra-rules: false - goheader: - # supports two types 'const` and `regexp` - # values can be used recursively - # default: {} - values: { } - # the template use for checking - # default: "" - template: "" - # ss alternative of directive 'template', you may put the path to file with the template source - # useful if you need to load the template from a specific file - # default: "" - template-path: "" - goimports: - # put imports beginning with prefix after 3rd-party packages - # it's a comma-separated list of prefixes - # default: "" - local-prefixes: "" + golint: + # minimal confidence for issues, default is 0.8 + min-confidence: 0.8 gomnd: - # list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description - # default: ["argument", "case", "condition", "operation", "return", "assign"] - checks: [ argument,case,condition,operation,return,assign ] - # list of numbers to exclude from analysis - # the numbers should be written as string - # values always ignored: "1", "1.0", "0" and "0.0" - # default: [] - ignored-numbers: [ ] - # list of file patterns to exclude from analysis - # values always ignored: `.+_test.go` - # default: [] - ignored-files: [ ] - # list of function patterns to exclude from analysis - # values always ignored: `time.Date` - # default: [] - ignored-functions: [ ] - gomoddirectives: - # allow local `replace` directives. - # false by default - replace-local: false - # list of allowed `replace` directives. - # default: [] - replace-allow-list: [ ] - # allow to not explain why the version has been retracted in the `retract` directives. - # false by default - retract-allow-no-explanation: false - # forbid the use of the `exclude` directives. - # false by default - exclude-forbidden: false - gomodguard: - allowed: - # list of allowed modules - # default: [] - modules: [ ] - # list of allowed module domains - # default: [] - domains: [ ] - blocked: - # list of blocked modules - # default: [] - modules: - # blocked module - - github.com/uudashr/go-module: - # recommended modules that should be used instead. (Optional) - recommendations: - - golang.org/x/mod - # reason why the recommended module should be used. (Optional) - reason: "`mod` is the official go.mod parser library." - # list of blocked module version constraints - # default: [] - versions: [ ] - # set to true to raise lint issues for packages that are loaded from a local path via replace directive - # false by default - local_replace_directives: false - gosimple: - # https://staticcheck.io/docs/configuration/options/#checks - # default: ["*"] - checks: [ "*" ] - gosec: - # to select a subset of rules to run. - # available rules: https://github.com/securego/gosec#available-rules - # default: [] - means include all rules - includes: [ ] - # to specify a set of rules to explicitly exclude. - # available rules: https://github.com/securego/gosec#available-rules - # default: [] - excludes: [ ] - # exclude generated files - # false by default - exclude-generated: false - # filter out the issues with a lower severity than the given value - # valid options are: low, medium, high. - # low by default - severity: low - # filter out the issues with a lower confidence than the given value - # valid options are: low, medium, high. 
- # low by default - confidence: low - # concurrency value. - # default: the number of logical CPUs usable by the current process - concurrency: 12 - # to specify the configuration of rules - config: - # globals are applicable to all rules - global: - # if true, ignore #nosec in comments (and an alternative as well) - # false by default - nosec: false - # add an alternative comment prefix to #nosec (both will work at the same time) - # default: "" - "#nosec": "" - # define whether nosec issues are counted as finding or not - # default: false - show-ignored: false - # audit mode enables addition checks that for normal code analysis might be too nosy - # default: false - audit: false - G101: - # regexp pattern for variables and constants to find - # default: "(?i)passwd|pass|password|pwd|secret|token|pw|apiKey|bearer|cred" - pattern: "(?i)passwd|pass|password|pwd|secret|token|pw|apiKey|bearer|cred" - # if true, complain about all cases (even with low entropy) - # false by default - ignore_entropy: false - # maximum allowed entropy of the string - # "80.0" by default - entropy_threshold: "80.0" - # maximum allowed value of entropy/string length - # is taken into account if entropy >= entropy_threshold/2 - # "3.0" by default - per_char_threshold: "3.0" - # calculate entropy for first N chars of the string - # "16" by default - truncate: "16" - # additional functions to ignore while checking unhandled errors - # following functions always ignored: - # bytes.Buffer: - # - Write - # - WriteByte - # - WriteRune - # - WriteString - # fmt: - # - Print - # - Printf - # - Println - # - Fprint - # - Fprintf - # - Fprintln - # strings.Builder: - # - Write - # - WriteByte - # - WriteRune - # - WriteString - # io.PipeWriter: - # - CloseWithError - # hash.Hash: - # - Write - # os: - # - Unsetenv - # default: {} - G104: { } - G111: - # Regexp pattern to find potential directory traversal. - # Default: "http\\.Dir\\(\"\\/\"\\)|http\\.Dir\\('\\/'\\)" - pattern: "http\\.Dir\\(\"\\/\"\\)|http\\.Dir\\('\\/'\\)" - # maximum allowed permissions mode for os.Mkdir and os.MkdirAll - # "0750" by default - G301: "0750" - # maximum allowed permissions mode for os.OpenFile and os.Chmod - # "0600" by default - G302: "0600" - # maximum allowed permissions mode for os.WriteFile and ioutil.WriteFile - # "0600" by default - G306: "0600" - govet: - # report about shadowed variables. - # false by default - check-shadowing: false - # settings per analyzer. 
- # settings: - # analyzer name, run `go tool vet help` to see all analyzers - # printf: - # comma-separated list of print function names to check (in addition to default, see `go tool vet help printf`) - # default: [] - # funcs: [] - # shadow: - # Whether to be strict about shadowing; can be noisy - # false by default - # strict: false - # unusedresult: - # comma-separated list of functions whose results must be used - # (in addition to defaults context.WithCancel,context.WithDeadline,context.WithTimeout,context.WithValue, - # errors.New,fmt.Errorf,fmt.Sprint,fmt.Sprintf,sort.Reverse) - # default: [] - # funcs: [] - # comma-separated list of names of methods of type func() string whose results must be used - # (in addition to default Error,String) - # default: [] - # stringmethods: [] - # disable all analyzers - # false by default - disable-all: true - # enable analyzers by name (in addition to default) - # run `go tool vet help` to see all analyzers - # default: [] - enable: [ ] - # enable all analyzers - # false by default - enable-all: false - # disable analyzers by name - # run `go tool vet help` to see all analyzers - # default: [] - disable: [ ] - importas: - # so not allow unaliased imports of aliased packages, false by default - no-unaliased: false - # so not allow non-required aliases, false by default - no-extra-aliases: false - # list of aliases, default: [] - alias: [ ] - interfacebloat: - # the maximum number of methods allowed for an interface - # 10 by default - max: 10 + settings: + mnd: + # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. + checks: + - argument + - case + - condition + - operation + - return + - assign lll: - # max line length, lines longer will be reported, 120 by default + # max line length, lines longer will be reported. Default is 120. # '\t' is counted as 1 character by default, and can be changed with the tab-width option - line-length: 130 - # tab width in spaces, 1 by default + line-length: 120 + # tab width in spaces. Default to 1. tab-width: 2 - maintidx: - # show functions with maintainability index lower than N - # a high index indicates better maintainability (it's kind of the opposite of complexity) - # 20 by default - under: 20 - makezero: - # allow only slices initialized with a length of zero - # false by default - always: false - misspell: - # correct spellings using locale preferences for US or UK - # setting locale to US will correct the British spelling of 'colour' to 'color' - # default is to use a neutral variety of English. - locale: US - # default: [] - ignore-words: [ ] - nakedret: - # make an issue if func has more lines of code than this setting, and it has naked returns - # 30 by default - max-func-lines: 30 + maligned: + # print struct with more effective memory layout or not, false by default + suggest-new: true nestif: # minimal complexity of if statements to report, 5 by default min-complexity: 4 - nilnil: - # checks that there is no simultaneous return of `nil` error and an invalid value - # default: ["ptr", "func", "iface", "map", "chan"] - checked-types: - - ptr - - func - - iface - - map - - chan nolintlint: - # enable to ensure that nolint directives are all used. Default is true. - allow-unused: false - # disable to ensure that nolint directives don't have a leading space, true by default + # Enable to ensure that nolint directives are all used. Default is true. + allow-unused: true + # Disable to ensure that nolint directives don't have a leading space. Default is true. 
allow-leading-space: true - # exclude following linters from requiring an explanation. Default is []. + # Exclude following linters from requiring an explanation. Default is []. allow-no-explanation: [ ] - # enable to require an explanation of nonzero length after each nolint directive, false by default + # Enable to require an explanation of nonzero length after each nolint directive. Default is false. require-explanation: true - # enable to require nolint directives to mention the specific linter being suppressed, false by default + # Enable to require nolint directives to mention the specific linter being suppressed. Default is false. require-specific: true - nonamedreturns: - # report named error if it is assigned inside defer - # false by default - report-error-in-defer: false - prealloc: - # IMPORTANT: we don't recommend using this linter before doing performance profiling - # for most programs usage of prealloc will be a premature optimization - # report pre-allocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them, true by default - simple: true - # report pre-allocation suggestions on range loops, true by default - range-loops: true - # Report pre-allocation suggestions on for loops, false by default - for-loops: false - promlinter: - # promlinter cannot infer all metrics name in static analysis - # enable strict mode will also include the errors caused by failing to parse the args - # false by default - strict: false - # please refer to https://github.com/yeya24/promlinter#usage for detailed usage - # default: [] - disabled-linters: [ ] - reassign: - # patterns for global variable names that are checked for reassignment - # see https://github.com/curioswitch/go-reassign#usage - # default: ["EOF", "Err.*"] - patterns: [ "EOF", "Err.*" ] - revive: - # maximum number of open files at the same time - # see https://github.com/mgechev/revive#command-line-flags - # defaults to unlimited. - max-open-files: 2048 - # when set to false, ignores files with "GENERATED" header, similar to golint - # see https://github.com/mgechev/revive#available-rules for details - # false by default - ignore-generated-header: false - # sets the default severity. 
- # see https://github.com/mgechev/revive#configuration - # warning by default - severity: warning - # enable all available rules - # default: false - enable-all-rules: false - # sets the default failure confidence - # this means that linting errors with less than 0.8 confidence will be ignored - # 0.8 by default - confidence: 0.8 - rules: - - name: atomic - - name: blank-imports - - name: bool-literal-in-expr - - name: call-to-gc - - name: constant-logical-expr - - name: context-as-argument - - name: context-keys-type - - name: defer - - name: dot-imports - - name: duplicated-imports - - name: early-return - - name: empty-block - - name: empty-lines - - name: error-naming - - name: error-return - - name: error-strings - - name: errorf - - name: get-return - - name: identical-branches - - name: if-return - - name: increment-decrement - - name: indent-error-flow - - name: optimize-operands-order - - name: package-comments - - name: range - - name: range-val-in-closure - - name: receiver-naming - - name: string-of-int - - name: struct-tag - - name: superfluous-else - - name: time-equal - - name: time-naming - - name: var-declaration - - name: unconditional-recursion - - name: unexported-naming - - name: unexported-return - - name: unnecessary-stmt - - name: unreachable-code - - name: unused-parameter - - name: unused-receiver - - name: useless-break - - name: waitgroup-by-value - rowserrcheck: - # database/sql is always checked - # default: [] - packages: [ ] staticcheck: - # https://staticcheck.io/docs/configuration/options/#checks - # default: ["*"] - checks: [ "*" ] - stylecheck: - # https://staticcheck.io/docs/configuration/options/#checks - # default: ["*"] - checks: [ "*" ] - # https://staticcheck.io/docs/configuration/options/#dot_import_whitelist - # default: ["github.com/mmcloughlin/avo/build", "github.com/mmcloughlin/avo/operand", "github.com/mmcloughlin/avo/reg"] - dot-import-whitelist: [ ] - # https://staticcheck.io/docs/configuration/options/#initialisms - # default: ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", "IP", "JSON", "QPS", "RAM", "RPC", "SLA", "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", "XSS", "SIP", "RTP", "AMQP", "DB", "TS"] - initialisms: [ "ACL", "API", "ASCII", "CPU", "CSS", "DNS", "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", "IP", "JSON", "QPS", "RAM", "RPC", "SLA", "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", "XSS", "SIP", "RTP", "AMQP", "DB", "TS" ] - # https://staticcheck.io/docs/configuration/options/#http_status_code_whitelist - # default: ["200", "400", "404", "500"] - http-status-code-whitelist: [ "200", "400", "404", "500" ] - tagliatelle: - # check the struck tag name case. 
- case: - # use the struct field name to check the name of the struct tag - # false by default - use-field-name: false - # `camel` is used for `json` and `yaml` (can be overridden) - # default: {} - rules: { } - tenv: - # the option `all` will run against whole test files (`_test.go`) regardless of method/function signatures - # otherwise, only methods that take `*testing.T`, `*testing.B`, and `testing.TB` as arguments are checked - # false by default - all: false - thelper: - test: - # check *testing.T is first param (or after context.Context) of helper function - # true by default - first: true - # check *testing.T param has name t - # true by default - name: true - # check t.Helper() begins helper function - # true by default - begin: true - benchmark: - # check *testing.B is first param (or after context.Context) of helper function - # true by default - first: true - # check *testing.B param has name b - # true by default - name: true - # check b.Helper() begins helper function - # true by default - begin: true - tb: - # check *testing.TB is first param (or after context.Context) of helper function - # true by default - first: true - # check *testing.TB param has name tb - # true by default - name: true - # check tb.Helper() begins helper function - # true by default - begin: true - fuzz: - # check *testing.F is first param (or after context.Context) of helper function - # true by default - first: true - # check *testing.F param has name f - # true by default - name: true - # check f.Helper() begins helper function - # true by default - begin: true - usestdlibvars: - # suggest the use of http.MethodXX - # true by default - http-method: true - # suggest the use of http.StatusXX - # true by default - http-status-code: true - # suggest the use of time.Weekday - # true by default - time-weekday: true - # suggest the use of time.Month - # false by default - time-month: false - # suggest the use of time.Layout - # false by default - time-layout: false - # suggest the use of crypto.Hash - # false by default - crypto-hash: false - # suggest the use of rpc.DefaultXXPath - # false by default - default-rpc-path: false - unparam: - # inspect exported functions. - # - # set to true if no external program/library imports your code. - # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: - # if it's called for subdir of a project it can't find external interfaces. All text editor integrations - # with golangci-lint call it on a directory with the changed file - # - # false by default - check-exported: false - varnamelen: - # the longest distance, in source lines, that is being considered a "small scope". 
- # variables used in at most this many lines will be ignored - # 5 by default - max-distance: 5 - # the minimum length of a variable's name that is considered "long" - # variable names that are at least this long will be ignored - # 3 by default - min-name-length: 3 - # check method receivers - # false by default - check-receiver: false - # check named return values - # false by default - check-return: false - # check type parameters - # false by default - check-type-param: false - # ignore "ok" variables that hold the bool return value of a type assertion - # false by default - ignore-type-assert-ok: false - # Ignore "ok" variables that hold the bool return value of a map index - # false by default - ignore-map-index-ok: false - # ignore "ok" variables that hold the bool return value of a channel receive - # false by default - ignore-chan-recv-ok: false - # optional list of variable names that should be ignored completely - # default: [] - ignore-names: [ ] - # optional list of variable declarations that should be ignored completely - # entries must be in one of the following forms (see below for examples): - # - for variables, parameters, named return values, method receivers, or type parameters: - # ( can also be a pointer/slice/map/chan/...) - # - for constants: const - # - # default: [] - ignore-decls: [ ] + # ignore some false positives for this project + checks: + - all + - '-SA9003' # disable the rule SA9003 + - '-SA4009' # disable the rule SA4009 + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature wsl: - # if true append is only allowed to be cuddled if appending value is - # matching variables, fields or types online above, true by default + # If true append is only allowed to be cuddled if appending value is + # matching variables, fields or types on line above. Default is true. strict-append: true - # allow calls and assignments to be cuddled as long as the lines have any - # matching variables, fields or types, true by default + # Allow calls and assignments to be cuddled as long as the lines have any + # matching variables, fields or types. Default is true. allow-assign-and-call: true - # allow multiline assignments to be cuddled, true by default + # Allow multiline assignments to be cuddled. Default is true. allow-multiline-assign: true - # allow declarations (var) to be cuddled + # Allow declarations (var) to be cuddled. allow-cuddle-declarations: false - # allow trailing comments in ending of blocks + # Allow trailing comments in ending of blocks allow-trailing-comment: false - # force newlines in end of case at this limit (0 = never) + # Force newlines in end of case at this limit (0 = never). 
force-case-trailing-whitespace: 0 - # force cuddling of err checks with err var assignment + # Force cuddling of err checks with err var assignment force-err-cuddling: false - # allow leading comments to be separated with empty liens + # Allow leading comments to be separated with empty liens allow-separated-leading-comment: false - wrapcheck: - # an array of strings that specify substrings of signatures to ignore - # if this set, it will override the default set of ignored signatures - # see https://github.com/tomarrell/wrapcheck#configuration for more information - # default: [".Errorf(", "errors.New(", "errors.Unwrap(", ".Wrap(", ".Wrapf(", ".WithMessage(", ".WithMessagef(", ".WithStack("] - ignoreSigs: - - .Errorf( - - errors.New( - - errors.Unwrap( - - .Wrap( - - .Wrapf( - - .WithMessage( - - .WithMessagef( - - .WithStack( - # an array of strings that specify regular expressions of signatures to ignore - # default: [] - ignoreSigRegexps: [ ] - # an array of strings that specify globs of packages to ignore - # default: [] - ignorePackageGlobs: [ ] - # an array of strings that specify regular expressions of interfaces to ignore - # default: [] - ignoreInterfaceRegexps: [ ] - - # the custom section can be used to define linter plugins to be loaded at runtime - # see README documentation for more info + errorlint: + # Report non-wrapping error creation using fmt.Errorf + errorf: true + # The custom section can be used to define linter plugins to be loaded at runtime. See README doc + # for more info. +(io: + ReadCloser): + Close:linters: + enable: + - megacheck + - govet + - gosec + - gocritic + - goconst + disable: + - asciicheck + - maligned + - prealloc + - unused + disable-all: false + presets: + - bugs + # - unused + fast: false issues: # Excluding configuration per-path, per-linter, per-text and per-source exclude-rules: + # Exclude some linters from running on tests files. - path: _test\.go linters: - # - gocyclo + - gocyclo - errcheck - - gosec - dupl - # - gocognit - - funlen - # independently of option `exclude` we use default exclude patterns, + - gosec + # Exclude known linters from partially hard-vendored code, + # which is impossible to exclude via "nolint" comments. + - path: internal/hmac/ + text: "weak cryptographic primitive" + linters: + - gosec + # Exclude lll issues for long lines with go:generate + - linters: + - lll + source: "^//go:generate " + # Independently from option `exclude` we use default exclude patterns, # it can be disabled by this option. To list all - # excluded by default patterns execute `golangci-lint run --help` - # default value for this option is true + # excluded by default patterns execute `golangci-lint run --help`. + # Default value for this option is true. exclude-use-default: false - # the default value is false. If set to true exclude and exclude-rules - # regular expressions become case sensitive + # The default value is false. If set to true exclude and exclude-rules + # regular expressions become case sensitive. exclude-case-sensitive: false - # the list of ids of default excludes to include or disable. Default is empty. + # The list of ids of default excludes to include or disable. By default it's empty. include: - EXC0002 # disable excluding of issues about comments from golint - # maximum issues count per one linter. Set to 0 to disable. Default is 50 - max-issues-per-linter: 0 - # maximum count of issues with the same text. Set to 0 to disable. Default is 3 - max-same-issues: 0 + # Maximum issues count per one linter. 
Set to 0 to disable. Default is 50. + max-issues-per-linter: 50 + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. + max-same-issues: 3 # Show only new issues: if there are unstaged changes or untracked files, - # only those changes are analyzed, else only changes in HEAD~ are analyzed + # only those changes are analyzed, else only changes in HEAD~ are analyzed. # It's a super-useful option for integration of golangci-lint into existing # large codebase. It's not practical to fix all existing issues at the moment # of integration: much better don't allow issues in new code. @@ -1038,7 +227,7 @@ severity: # - Github: https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message default-severity: error # The default value is false. - # If set to true severity-rules regular expressions become case-sensitive. + # If set to true severity-rules regular expressions become case sensitive. case-sensitive: false # Default value is empty list. # When a list of severity rules are provided, severity information will be added to lint diff --git a/.hadolint.yaml b/.hadolint.yaml new file mode 100644 index 0000000..ad3cb58 --- /dev/null +++ b/.hadolint.yaml @@ -0,0 +1,19 @@ +# name of threshold level (error | warning | info | style | ignore | none) +failure-threshold: error + +# list of rules you want to ignore +ignored: [ ] + +# override the severity of specific rules by writing the error code into the list +override: + error: [ ] + warning: [ ] + info: [ ] + style: [ ] + +# do not permit labels other than specified in `label-schema` +strict-labels: false + +# hadolint can warn you when images from untrusted repositories are being used in Dockerfiles, +# you can append the trustedRegistries keys to the configuration file +# trustedRegistries: [ ] diff --git a/README.md b/README.md new file mode 100644 index 0000000..85a7783 --- /dev/null +++ b/README.md @@ -0,0 +1,46 @@ +# Schema Registry + +[![Apache 2.0 License](https://img.shields.io/github/license/dataphos/schema-registry)](./LICENSE) +[![GitHub Release](https://img.shields.io/github/v/release/dataphos/schema-registry?sort=semver)](https://github.com/dataphos/schema-registry/releases/latest) + +Schema Registry is a product used for **schema management** and **message validation**. + +Schema management itself consists of 2 steps - *schema registration* and *schema versioning*, while message validation consists of validators that validate messages for the given message schema. The core components are a server with HTTP RESTful interface used to manage the schemas, and lightweight message validators, which verify the schema and validity of the incoming messages. + +It allows developers to define and manage standard schemas for events, share them across the organization, evolve the schemas while preserving compatibility, as well as validate events with the given event schema. For each schema used, the product stores their own versioned history while also providing an easy-to-use RESTful interface to work with them. + +Apart from the general idea of the product, its main features are split across two major components - Registry and Validator. + +The official Schema Registry documentation is available [here](https://docs.dataphos.com/schema_registry/). It contains an in-depth overview of each component, a quickstart setup, detailed deployment instructions, configuration options, usage guides and more, so be sure to check it out for better understanding. 
+
+## Registry
+The Registry component represents the main database, called Schema History, which is used for handling schemas, and the REST API on top of the database to enable the other major component, [Validator](#validator), to fetch all the necessary information regarding the schemas.
+
+## Validator
+The Validator component, in essence, does what the title suggests - validates messages. It performs this by retrieving and caching message schemas from the [Registry](#registry) database, using a message's metadata.
+
+## Installation
+In order to use Schema Registry as a whole with both of its components, the only major requirement from the user is to have a running project on one of the two major cloud providers: GCP or Azure.
+
+All of the other requirements for the product to be fully functional (message broker instance, incoming message type definition, identity and access management of the particular cloud) are further explained and can be analyzed in the [Quickstart section](https://docs.dataphos.com/schema_registry/quickstart/) of its official documentation.
+
+## Usage
+### Registry
+- Takes care of everything related to the schemas themselves - registration, updates, retrieval, deletion of an entire schema or its particular version, as well as performing schema checks for validity and compatibility (backward, forward and transitive).
+- Its REST API provides handles for clients and communicates via HTTP requests.
+- With regards to the message schemas themselves, the Schema History database where they are stored can be anything from a standard SQL database like Oracle or PostgreSQL, to a NoSQL database like MongoDB.
+
+### Validator
+- In order for the Validator to work, the message schema needs to be registered in the Schema History database.
+  - Each of the incoming messages needs to have its metadata enriched with the information of the schema stored in the Schema History database, with the main attributes being the *ID*, the *schema version* and the *message format*.
+- Once that scenario is set up, the Validator can then filter incoming messages and route them to the appropriate destination - a valid topic for successfully validated messages and a dead-letter topic for unsuccessfully validated messages.
+  - The list of supported message brokers can be found in the [Validator section](https://docs.dataphos.com/schema_registry/what-is-schema-registry/#worker) of its official documentation.
+- Similarly to the various message brokers, the Validator also enables the use of different protocols for producers and consumers of messages.
+  - This in turn enables protocol conversion through the system.
+  - The list of supported protocols can also be found in the [Validator section](https://docs.dataphos.com/schema_registry/what-is-schema-registry/#worker) of its official documentation.
+
+## Contributing
+For all the inquiries regarding contributing to the project, be sure to check out the information in the [CONTRIBUTING.md](CONTRIBUTING.md) file.
+
+## License
+This project is licensed under the [Apache 2.0 License](LICENSE).
diff --git a/registry/README.md b/registry/README.md
new file mode 100644
index 0000000..fabf3ef
--- /dev/null
+++ b/registry/README.md
@@ -0,0 +1,221 @@
+# Dataphos Schema Registry - Registry component
+
+Repository of the Dataphos Schema Registry API.
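+
+As a quick orientation before the detailed walkthrough, the following is a minimal client sketch of registering a schema against this API from Go. The endpoint and payload fields are the ones documented in the Usage section below; the in-cluster service URL and the use of the standard net/http client are assumptions for illustration.
+
+```go
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"log"
+	"net/http"
+)
+
+// Mirrors the request body documented under "Register a schema" below.
+type schemaRegistrationRequest struct {
+	Description       string `json:"description"`
+	SchemaType        string `json:"schema_type"`
+	Specification     string `json:"specification"`
+	Name              string `json:"name"`
+	CompatibilityMode string `json:"compatibility_mode"`
+	ValidityMode      string `json:"validity_mode"`
+}
+
+func main() {
+	body, err := json.Marshal(schemaRegistrationRequest{
+		Description:       "new json schema for testing",
+		SchemaType:        "json",
+		Specification:     `{"$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object"}`,
+		Name:              "schema json",
+		CompatibilityMode: "none",
+		ValidityMode:      "none",
+	})
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	// schema-registry-svc:8080 is the service address used throughout this README.
+	resp, err := http.Post("http://schema-registry-svc:8080/schemas", "application/json", bytes.NewReader(body))
+	if err != nil {
+		log.Fatal(err)
+	}
+	defer resp.Body.Close()
+
+	// On success the Registry answers with 201 Created plus the new schema id and version.
+	var out struct {
+		Identification string `json:"identification"`
+		Version        string `json:"version"`
+		Message        string `json:"message"`
+	}
+	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
+		log.Fatal(err)
+	}
+	fmt.Printf("status=%d id=%s version=%s message=%s\n", resp.StatusCode, out.Identification, out.Version, out.Message)
+}
+```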
+
+
+## Registry
+
+The Registry, which itself is a database with a REST API on top, is deployed as a deployment on a Kubernetes cluster and supports the following:
+- Schema registration
+- Schema updating (adding a new version of an existing schema)
+- Retrieval of existing schemas (specified version or latest version)
+- Deleting the whole schema or just specified versions of a schema
+- Checking for schema validity (syntactically and semantically)
+- Checking for schema compatibility (backward, forward, transitive)
+- Schema search
+
+The main component of the Schema Registry product is entirely independent of the implementation of the data-streaming platform. It is implemented as a REST API that provides handles (via URL) for clients and communicates via HTTP requests.
+
+The worker component communicates with the REST API by sending an HTTP GET request that retrieves a message schema from the Registry using the necessary parameters. The message schemas themselves can be stored in any type of database (Schema History), whether in tables like in standard SQL databases, such as Oracle or PostgreSQL, or in NoSQL databases like MongoDB. The component itself has an interface with the database connector that can be easily modified to work with databases that fit the client's needs.
+
+## Getting Started
+### Prerequisites
+Schema Registry components run in a Kubernetes environment. This quickstart guide assumes that you have the ```kubectl``` tool installed, a running Kubernetes cluster on one of the major cloud providers (GCP, Azure) and a connection with the cluster.
+
+#### Namespace
+Before deploying the Schema Registry, the namespace where the components will be deployed should be created if it doesn't exist.
+
+---
+Open a command line tool of your choice and connect to your cluster. Create the namespace where Schema Registry will be deployed. We will use the namespace "dataphos" in this quickstart guide.
+
+```bash
+kubectl create namespace dataphos
+```
+
+### Quick Start
+
+Deploy the Schema Registry - registry component using the following script. The required arguments are:
+
+- the namespace
+- the Schema History Postgres password
+
+#### Deployment
+
+The script is located in the ```./scripts/registry/``` folder from the content root. To run the script, run the following command:
+
+```bash
+# "dataphos" is an example of the namespace name
+# "p4sSw0rD" is an example of the Schema History Postgres password
+./sr_registry.sh dataphos p4sSw0rD
+```
+
+## Usage
+Even though the Schema Registry provides a REST API for registering, updating and fetching a schema, fetching all the versions or the latest version, deleting a schema, etc., we will showcase here only the requests to register, update and fetch a schema.
+
+### Register a schema
+
+After the Schema Registry is deployed you will have access to its API endpoint. To register a schema, you have to send a POST request to the endpoint ```http://schema-registry-svc:8080/schemas``` in whose body you need to provide the name of the schema, description, schema_type, specification (the schema), compatibility and validity mode.
+ +``` +{ + "description": "new json schema for testing", + "schema_type": "json", + "specification": "{\r\n \"$id\": \"https://example.com/person.schema.json\",\r\n \"$schema\": \"https://json-schema.org/draft/2020-12/schema\",\r\n \"title\": \"Person\",\r\n \"type\": \"object\",\r\n \"properties\": {\r\n \"firstName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's first name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"age\": {\r\n \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n \"type\": \"integer\",\r\n \"minimum\": 0\r\n }\r\n }\r\n}\r\n", + "name": "schema json", + "compatibility_mode": "none", + "validity_mode": "none" +} +``` + +or using curl: + +``` +curl -XPOST -H "Content-type: application/json" -d '{ + "description": "new json schema for testing", + "schema_type": "json", + "specification": "{\r\n \"$id\": \"https://example.com/person.schema.json\",\r\n \"$schema\": \"https://json-schema.org/draft/2020-12/schema\",\r\n \"title\": \"Person\",\r\n \"type\": \"object\",\r\n \"properties\": {\r\n \"firstName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's first name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"age\": {\r\n \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n \"type\": \"integer\",\r\n \"minimum\": 0\r\n }\r\n }\r\n}\r\n", + "name": "schema json", + "compatibility_mode": "none", + "validity_mode": "none" +}' 'http://schema-registry-svc:8080/schemas/' +``` + +The response to the schema registration request will be: + +- STATUS 201 Created + ``` + { + "identification": "32", + "version": "1", + "message": "schema successfully created" + } + ``` + +- STATUS 409 Conflict -> indicating that the schema already exists + ``` + { + "identification": "32", + "version": "1", + "message": "schema already exists at id=32" + } + ``` + +- STATUS 500 Internal Server Error -> indicating a server error, which means that either the request is not correct ( +missing fields) or that the server is down. + ``` + { + "message": "Internal Server Error" + } + ``` + +### Update a schema + +After the Schema Registry is registered you can update it by registering a new version under that schema ID. 
To update a +schema, you have to send a PUT request to the endpoint ```http://schema-registry-svc:8080/schemas/``` in whose body +you need to provide the description (optional) of the version and the specification (the schema) + +``` +{ + "description": "added field for middle name", + "specification": "{\r\n \"$id\": \"https://example.com/person.schema.json\",\r\n \"$schema\": \"https://json-schema.org/draft/2020-12/schema\",\r\n \"title\": \"Person\",\r\n \"type\": \"object\",\r\n \"properties\": {\r\n \"firstName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's first name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"age\": {\r\n \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n \"type\": \"integer\",\r\n \"minimum\": 0\r\n }\r\n }\r\n}\r\n" +} +``` + +or using curl: + +``` +curl -XPUT -H "Content-type: application/json" -d '{ + "description": "added field for middle name", + "specification": "{\r\n \"$id\": \"https://example.com/person.schema.json\",\r\n \"$schema\": \"https://json-schema.org/draft/2020-12/schema\",\r\n \"title\": \"Person\",\r\n \"type\": \"object\",\r\n \"properties\": {\r\n \"firstName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's first name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"age\": {\r\n \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n \"type\": \"integer\",\r\n \"minimum\": 0\r\n }\r\n }\r\n}\r\n" +}' 'http://schema-registry-svc:8080/schemas/' +``` + +The response to the schema updating request will be the same as for registering except when the updating is done +successfully it will be status 200 OK and a new version will be provided. 
+ +``` +{ + "identification": "32", + "version": "2", + "message": "schema successfully updated" +} +``` + +### Fetch a schema version + +To get a schema version and its relevant details, a GET request needs to be made and the endpoint needs to be: + +```http://schema-registry-svc:8080/schemas//versions/``` + +or using curl: + +``` curl -XGET -H "Content-type: application/json" 'http://schema-registry-svc:8080/schemas//versions/' ``` + +The response to the schema registration request will be: + +- STATUS 200 OK + ``` + { + "id": "32", + "version": "1", + "schema_id": "32", + "specification": "ew0KICAiJHNjaGVtYSI6ICJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSIsDQogICJ0eXBlIjogIm9iamVjdCIsDQogICJ0aXRsZSI6ICJUaGUgUm9vdCBTY2hlbWEiLA0KICAiZGVzY3JpcHRpb24iOiAiVGhlIHJvb3Qgc2NoZW1hIGNvbXByaXNlcyB0aGUgZW50aXJlIEpTT04gZG9jdW1lbnQuIiwNCiAgImRlZmF1bHQiOiB7fSwNCiAgImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjogdHJ1ZSwNCiAgInJlcXVpcmVkIjogWw0KICAgICJwaG9uZSINCiAgXSwNCiAgInByb3BlcnRpZXMiOiB7DQogICAgInBob25lIjogew0KICAgICAgInR5cGUiOiAiaW50ZWdlciIsDQogICAgICAidGl0bGUiOiAiVGhlIFBob25lIFNjaGVtYSIsDQogICAgICAiZGVzY3JpcHRpb24iOiAiQW4gZXhwbGFuYXRpb24gYWJvdXQgdGhlIHB1cnBvc2Ugb2YgdGhpcyBpbnN0YW5jZS4iLA0KICAgICAgImRlZmF1bHQiOiAiIiwNCiAgICAgICJleGFtcGxlcyI6IFsNCiAgICAgICAgMQ0KICAgICAgXQ0KICAgIH0sDQogICAgInJvb20iOiB7DQogICAgICAidHlwZSI6ICJpbnRlZ2VyIiwNCiAgICAgICJ0aXRsZSI6ICJUaGUgUm9vbSBTY2hlbWEiLA0KICAgICAgImRlc2NyaXB0aW9uIjogIkFuIGV4cGxhbmF0aW9uIGFib3V0IHRoZSBwdXJwb3NlIG9mIHRoaXMgaW5zdGFuY2UuIiwNCiAgICAgICJkZWZhdWx0IjogIiIsDQogICAgICAiZXhhbXBsZXMiOiBbDQogICAgICAgIDEyMw0KICAgICAgXQ0KICAgIH0NCiAgfQ0KfQ==", + "description": "new json schema for testing", + "schema_hash": "72966008fdcec8627a0e43c5d9a247501fc4ab45687dd2929aebf8ef3eb06ccd", + "created_at": "2023-05-09T08:38:54.5515Z", + "autogenerated": false + } + ``` +- STATUS 404 Not Found -> indicating that the wrong schema ID or schema version was provided +- STATUS 500 Internal Server Error -> indicating a server error, which means that either the request is not correct ( +wrong endpoint) or that the server is down. + + +### Other requests + +| Description | Method | URL | Headers | Body | +|:-------------------------------------------------:|--------|:---------------------------------------------------------------:|:----------------------------------:|:---------------------------------:| +| Get all the schemas | GET | http://schema-registry-svc/schemas | Content-Type: application/json | This request does not have a body | +| Get all the schema versions of the specified ID | GET | http://schema-registry-svc/schemas/{id}/versions | Content-Type: application/json | This request does not have a body | +| Get the latest schema version of the specified ID | GET | http://schema-registry-svc/schemas/{id}/versions/latest | Content-Type: application/json | This request does not have a body | +| Get schema specification by id and version | GET | http://schema-registry-svc/schemas/{id}/versions/{version}/spec | Content-Type: application/json
| This request does not have a body |
+| Delete the schema under the ID | DELETE | http://schema-registry-svc/schemas/{id} | Content-Type: application/json | This request does not have a body |
+| Delete the schema by id and version | DELETE | http://schema-registry-svc/schemas/{id}/versions/{version} | Content-Type: application/json | This request does not have a body |
+
+
+### Schema search
+With schema search, users can swiftly locate relevant data schemas by sending a GET request to
+```http://schema-registry-svc/schemas/search```
+with one or more of the following query parameters:
+
+| Query parameters | Example |
+|:----------------:|---------|
+| id | search by id 5 <br> URL: http://schema-registry-svc/schemas/search?id=5 |
+| version | search by id 5 and version 2 <br> URL: http://schema-registry-svc/schemas/search?id=5&version=2 |
+| type | search by type JSON <br> URL: http://schema-registry-svc/schemas/search?type=json |
+| name | search by name "json_schema_name" <br> URL: http://schema-registry-svc/schemas/search?name=json_schema_name |
+| orderBy | name, type, id or version (if a sort value is given but orderBy isn't, the default value is id) |
+| sort | asc or desc (if an orderBy value is given but sort isn't, the default value is asc) |
+| limit | |
+| attributes | search by attributes crs and type
URL: http://schema-registry-svc/schemas/search?attributes=crs,type | + + + + diff --git a/registry/cmd/initdb/initdb.go b/registry/cmd/initdb/initdb.go new file mode 100644 index 0000000..6da91d0 --- /dev/null +++ b/registry/cmd/initdb/initdb.go @@ -0,0 +1,56 @@ +package main + +import ( + "runtime/debug" + + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/config" + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/errcodes" + "github.com/dataphos/aquarium-janitor-standalone-sr/registry/repository/postgres" + "github.com/dataphos/lib-logger/logger" + "github.com/dataphos/lib-logger/standardlogger" +) + +func main() { + labels := logger.Labels{ + "product": "Schema Registry", + "component": "initdb", + } + var Commit = func() string { + if info, ok := debug.ReadBuildInfo(); ok { + for _, setting := range info.Settings { + if setting.Key == "vcs.revision" { + return setting.Value + } + } + } + + return "" + }() + if Commit != "" { + labels["commit"] = Commit + } + + logLevel, logConfigWarnings := config.GetLogLevel() + log := standardlogger.New(labels, standardlogger.WithLogLevel(logLevel)) + + for _, w := range logConfigWarnings { + log.Warn(w) + } + + db, err := postgres.InitializeGormFromEnv() + if err != nil { + log.Fatal(err.Error(), errcodes.DatabaseConnectionInitialization) + return + } + + if postgres.HealthCheck(db) { + log.Warn("database already initialized") + return + } + + if err = postgres.Initdb(db); err != nil { + log.Fatal(err.Error(), errcodes.DatabaseInitialization) + return + } + log.Info("database initialized successfully") +} diff --git a/registry/cmd/janitorsr/main.go b/registry/cmd/janitorsr/main.go new file mode 100644 index 0000000..cdf28fc --- /dev/null +++ b/registry/cmd/janitorsr/main.go @@ -0,0 +1,144 @@ +package main + +import ( + "context" + "fmt" + "net/http" + "os" + "os/signal" + "runtime/debug" + "strconv" + "syscall" + "time" + + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus/promhttp" + + "github.com/dataphos/aquarium-janitor-standalone-sr/compatibility" + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/config" + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/errcodes" + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/errtemplates" + "github.com/dataphos/aquarium-janitor-standalone-sr/registry" + "github.com/dataphos/aquarium-janitor-standalone-sr/registry/repository/postgres" + "github.com/dataphos/aquarium-janitor-standalone-sr/server" + "github.com/dataphos/aquarium-janitor-standalone-sr/validity" + "github.com/dataphos/lib-logger/logger" + "github.com/dataphos/lib-logger/standardlogger" +) + +const ( + serverPortEnvKey = "SERVER_PORT" +) + +const ( + defaultServerPort = 8080 +) + +// @title Schema Registry API +// @version 1.0 +func main() { + labels := logger.Labels{ + "product": "Schema Registry", + "component": "registry", + } + var Commit = func() string { + if info, ok := debug.ReadBuildInfo(); ok { + for _, setting := range info.Settings { + if setting.Key == "vcs.revision" { + return setting.Value + } + } + } + + return "" + }() + if Commit != "" { + labels["commit"] = Commit + } + + logLevel, logConfigWarnings := config.GetLogLevel() + log := standardlogger.New(labels, standardlogger.WithLogLevel(logLevel)) + for _, w := range logConfigWarnings { + log.Warn(w) + } + + db, err := postgres.InitializeGormFromEnv() + if err != nil { + log.Error(err.Error(), errcodes.DatabaseConnectionInitialization) + return + } + if !postgres.HealthCheck(db) { + 
log.Error("database state invalid", errcodes.InvalidDatabaseState) + return + } + + var port int + portStr := os.Getenv(serverPortEnvKey) + if portStr == "" { + port = defaultServerPort + } else { + port, err = strconv.Atoi(portStr) + if err != nil { + log.Error(errtemplates.ExpectedInt(serverPortEnvKey, portStr).Error(), errcodes.ServerInitialization) + return + } + } + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + compChecker, globalCompMode, err := compatibility.InitCompatibilityChecker(ctx) + if err != nil { + log.Error(err.Error(), errcodes.ExternalCheckerInitialization) + return + } + log.Info("Successfully connected compatibility checker.") + + valChecker, globalValMode, err := validity.InitExternalValidityChecker(ctx) + if err != nil { + log.Error(err.Error(), errcodes.ExternalCheckerInitialization) + return + } + log.Info("Successfully connected validity checker.") + + srv := http.Server{ + Addr: fmt.Sprintf(":%d", port), + Handler: server.New(server.NewHandler(registry.New(postgres.New(db), compChecker, valChecker, globalCompMode, globalValMode), log)), + } + + idleConnsClosed := make(chan struct{}) + go func() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT) + + <-c + + log.Info("initiating graceful shutdown") + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + if err = srv.Shutdown(ctx); err != nil { + log.Error(errors.Wrap(err, "graceful shutdown failed").Error(), errcodes.ServerShutdown) + } + close(idleConnsClosed) + }() + go func() { + http.Handle("/metrics", promhttp.Handler()) + + log.Infow("starting Prometheus server", logger.F{"port": 2112}) + err1 := http.ListenAndServe(":2112", nil) + if err1 != nil { + log.Error(errors.Wrap(err, "an error occurred starting Prometheus server").Error(), errcodes.ServerShutdown) + } + }() + + log.Infow("starting server", logger.F{"port": srv.Addr}) + if err = srv.ListenAndServe(); err != nil { + if err != http.ErrServerClosed { + log.Error(errors.Wrap(err, "an error occurred starting or closing server").Error(), errcodes.ServerShutdown) + } + } + + <-idleConnsClosed + + log.Info("shutting down") +} diff --git a/registry/cmd/sr-cli/main.go b/registry/cmd/sr-cli/main.go new file mode 100644 index 0000000..4b4c51e --- /dev/null +++ b/registry/cmd/sr-cli/main.go @@ -0,0 +1,159 @@ +package main + +import ( + "context" + "flag" + "log" + "os" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-sr/compatibility" + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/errcodes" + "github.com/dataphos/aquarium-janitor-standalone-sr/registry" + "github.com/dataphos/aquarium-janitor-standalone-sr/registry/repository/postgres" + "github.com/dataphos/aquarium-janitor-standalone-sr/validity" +) + +func main() { + registerCommand := flag.NewFlagSet("register", flag.ExitOnError) + updateCommand := flag.NewFlagSet("update", flag.ExitOnError) + + if len(os.Args) < 2 { + log.Fatal("register or update command must be provided") + } + + switch os.Args[1] { + case "register": + registerSchema(registerCommand) + case "update": + updateSchema(updateCommand) + default: + log.Fatal("command not supported") + } +} + +func registerSchema(registerCommand *flag.FlagSet) { + filename := registerCommand.String("f", "", "the json file containing schema specification") + schemaType := registerCommand.String("t", "", "schema type") + name := registerCommand.String("n", "schema-janitor", "schema name") + 
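	// Optional metadata flags (description, publisher id) plus the compatibility and
	// validity modes, which are checked as required once the flags are parsed below.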
description := registerCommand.String("d", "description of the schema", "schema description") + publisherId := registerCommand.String("p", "publisherId", "publisher id") + compMode := registerCommand.String("c", "", "compatibility mode") + valMode := registerCommand.String("v", "", "validity mode") + + err := registerCommand.Parse(os.Args[2:]) + if err != nil { + log.Fatal(err) + } + + if *filename == "" { + log.Fatal("filename must be provided") + } + + if *schemaType == "" { + log.Fatal("type must be provided") + } + + if *valMode == "" { + log.Fatal("validity mode must be provided") + } + + if *compMode == "" { + log.Fatal("compatibility mode must be provided") + } + + file, err := os.ReadFile(*filename) + if err != nil { + log.Fatal(err) + } + + schemaRegistrationRequest := registry.SchemaRegistrationRequest{ + Description: *description, + Specification: string(file), + Name: *name, + SchemaType: *schemaType, + PublisherID: *publisherId, + CompatibilityMode: *compMode, + ValidityMode: *valMode, + } + + service := createService() + details, added, err := service.CreateSchema(schemaRegistrationRequest) + + if err != nil { + log.Fatal(err) + } + if !added { + log.Print("schema already exists") + } else { + log.Print("created schema under the id ", details.VersionID) + } +} + +func updateSchema(updateCommand *flag.FlagSet) { + filename := updateCommand.String("f", "", "the json file containing updated schema specification") + description := updateCommand.String("d", "", "updated schema description") + id := updateCommand.String("id", "", "id of the schema") + + err := updateCommand.Parse(os.Args[2:]) + if err != nil { + log.Fatal(err) + } + + if *filename == "" { + log.Fatal("filename must be provided") + } + + if *id == "" { + log.Fatal("id must be provided") + } + + file, err := os.ReadFile(*filename) + if err != nil { + log.Fatal(err) + } + + schemaUpdateRequest := registry.SchemaUpdateRequest{ + Specification: string(file), + } + if *description != "" { + schemaUpdateRequest.Description = *description + } + + service := createService() + details, updated, err := service.UpdateSchema(*id, schemaUpdateRequest) + if err != nil { + log.Fatal(err) + } + if !updated { + log.Print("schema already exists") + } else { + log.Print("schema successfully updated, added version ", details.Version) + } +} + +func createService() *registry.Service { + db, err := postgres.InitializeGormFromEnv() + if err != nil { + log.Fatal(err, errcodes.DatabaseConnectionInitialization) + } + if !postgres.HealthCheck(db) { + log.Fatal("database state invalid", errcodes.InvalidDatabaseState) + } + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + compChecker, globalCompMode, err := compatibility.InitCompatibilityChecker(ctx) + if err != nil { + log.Fatal(err, errcodes.ExternalCheckerInitialization) + } + + valChecker, globalValMode, err := validity.InitExternalValidityChecker(ctx) + if err != nil { + log.Fatal(err, errcodes.ExternalCheckerInitialization) + } + + service := registry.New(postgres.New(db), compChecker, valChecker, globalCompMode, globalValMode) + return service +} diff --git a/registry/cmd/sr-cli/testdata/avro/schema-1.avsc b/registry/cmd/sr-cli/testdata/avro/schema-1.avsc new file mode 100644 index 0000000..52a3a44 --- /dev/null +++ b/registry/cmd/sr-cli/testdata/avro/schema-1.avsc @@ -0,0 +1,62 @@ +{ + "type": "record", + "name": "mySchema", + "fields": [ + { + "name": "MERCHANT_NAME", + "type": "string" + }, + { + "name": "rows", + "type": { + "type": 
"array", + "items": { + "type": "record", + "name": "rows", + "fields": [ + { + "name": "AMOUNT", + "type": "string" + }, + { + "name": "CARD_NUMBER", + "type": "string" + }, + { + "name": "CARD_TYPE", + "type": "string" + }, + { + "name": "IS_FRAUD", + "type": "string" + }, + { + "name": "MCC", + "type": "string" + }, + { + "name": "MERCHANT_CITY", + "type": "string" + }, + { + "name": "TRANSACTION_DATE", + "type": "string" + }, + { + "name": "TRANSACTION_ID", + "type": "string" + }, + { + "name": "USE_CHIP", + "type": "string" + }, + { + "name": "YEAR", + "type": "string" + } + ] + } + } + } + ] +} \ No newline at end of file diff --git a/registry/cmd/sr-cli/testdata/json/schema-1.json b/registry/cmd/sr-cli/testdata/json/schema-1.json new file mode 100644 index 0000000..687ae74 --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-1.json @@ -0,0 +1,21 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Person", + "type": "object", + "properties": { + "firstName": { + "type": "string", + "description": "The person's first name." + }, + "lastName": { + "type": "string", + "description": "The person's last name." + }, + "age": { + "description": "Age in years which must be equal to or greater than zero.", + "type": "integer", + "minimum": 0 + } + } +} diff --git a/registry/cmd/sr-cli/testdata/json/schema-10.json b/registry/cmd/sr-cli/testdata/json/schema-10.json new file mode 100644 index 0000000..67f3ff9 --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-10.json @@ -0,0 +1,7505 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": false, + "type": "object", + "properties": { + "+1": { + "type": "string" + }, + "-1": { + "type": "string" + }, + "100": { + "type": "string" + }, + "1234": { + "type": "string" + }, + "1st_place_medal": { + "type": "string" + }, + "2nd_place_medal": { + "type": "string" + }, + "3rd_place_medal": { + "type": "string" + }, + "8ball": { + "type": "string" + }, + "a": { + "type": "string" + }, + "ab": { + "type": "string" + }, + "abacus": { + "type": "string" + }, + "abc": { + "type": "string" + }, + "abcd": { + "type": "string" + }, + "accept": { + "type": "string" + }, + "accordion": { + "type": "string" + }, + "adhesive_bandage": { + "type": "string" + }, + "adult": { + "type": "string" + }, + "aerial_tramway": { + "type": "string" + }, + "afghanistan": { + "type": "string" + }, + "airplane": { + "type": "string" + }, + "aland_islands": { + "type": "string" + }, + "alarm_clock": { + "type": "string" + }, + "albania": { + "type": "string" + }, + "alembic": { + "type": "string" + }, + "algeria": { + "type": "string" + }, + "alien": { + "type": "string" + }, + "ambulance": { + "type": "string" + }, + "american_samoa": { + "type": "string" + }, + "amphora": { + "type": "string" + }, + "anatomical_heart": { + "type": "string" + }, + "anchor": { + "type": "string" + }, + "andorra": { + "type": "string" + }, + "angel": { + "type": "string" + }, + "anger": { + "type": "string" + }, + "angola": { + "type": "string" + }, + "angry": { + "type": "string" + }, + "anguilla": { + "type": "string" + }, + "anguished": { + "type": "string" + }, + "ant": { + "type": "string" + }, + "antarctica": { + "type": "string" + }, + "antigua_barbuda": { + "type": "string" + }, + "apple": { + "type": "string" + }, + "aquarius": { + "type": "string" + }, + "argentina": { + "type": "string" + }, + "aries": { + "type": "string" + }, + "armenia": { + "type": "string" + }, + 
"arrow_backward": { + "type": "string" + }, + "arrow_double_down": { + "type": "string" + }, + "arrow_double_up": { + "type": "string" + }, + "arrow_down": { + "type": "string" + }, + "arrow_down_small": { + "type": "string" + }, + "arrow_forward": { + "type": "string" + }, + "arrow_heading_down": { + "type": "string" + }, + "arrow_heading_up": { + "type": "string" + }, + "arrow_left": { + "type": "string" + }, + "arrow_lower_left": { + "type": "string" + }, + "arrow_lower_right": { + "type": "string" + }, + "arrow_right": { + "type": "string" + }, + "arrow_right_hook": { + "type": "string" + }, + "arrow_up": { + "type": "string" + }, + "arrow_up_down": { + "type": "string" + }, + "arrow_up_small": { + "type": "string" + }, + "arrow_upper_left": { + "type": "string" + }, + "arrow_upper_right": { + "type": "string" + }, + "arrows_clockwise": { + "type": "string" + }, + "arrows_counterclockwise": { + "type": "string" + }, + "art": { + "type": "string" + }, + "articulated_lorry": { + "type": "string" + }, + "artificial_satellite": { + "type": "string" + }, + "artist": { + "type": "string" + }, + "aruba": { + "type": "string" + }, + "ascension_island": { + "type": "string" + }, + "asterisk": { + "type": "string" + }, + "astonished": { + "type": "string" + }, + "astronaut": { + "type": "string" + }, + "athletic_shoe": { + "type": "string" + }, + "atm": { + "type": "string" + }, + "atom": { + "type": "string" + }, + "atom_symbol": { + "type": "string" + }, + "australia": { + "type": "string" + }, + "austria": { + "type": "string" + }, + "auto_rickshaw": { + "type": "string" + }, + "avocado": { + "type": "string" + }, + "axe": { + "type": "string" + }, + "azerbaijan": { + "type": "string" + }, + "b": { + "type": "string" + }, + "baby": { + "type": "string" + }, + "baby_bottle": { + "type": "string" + }, + "baby_chick": { + "type": "string" + }, + "baby_symbol": { + "type": "string" + }, + "back": { + "type": "string" + }, + "bacon": { + "type": "string" + }, + "badger": { + "type": "string" + }, + "badminton": { + "type": "string" + }, + "bagel": { + "type": "string" + }, + "baggage_claim": { + "type": "string" + }, + "baguette_bread": { + "type": "string" + }, + "bahamas": { + "type": "string" + }, + "bahrain": { + "type": "string" + }, + "balance_scale": { + "type": "string" + }, + "bald_man": { + "type": "string" + }, + "bald_woman": { + "type": "string" + }, + "ballet_shoes": { + "type": "string" + }, + "balloon": { + "type": "string" + }, + "ballot_box": { + "type": "string" + }, + "ballot_box_with_check": { + "type": "string" + }, + "bamboo": { + "type": "string" + }, + "banana": { + "type": "string" + }, + "bangbang": { + "type": "string" + }, + "bangladesh": { + "type": "string" + }, + "banjo": { + "type": "string" + }, + "bank": { + "type": "string" + }, + "bar_chart": { + "type": "string" + }, + "barbados": { + "type": "string" + }, + "barber": { + "type": "string" + }, + "baseball": { + "type": "string" + }, + "basecamp": { + "type": "string" + }, + "basecampy": { + "type": "string" + }, + "basket": { + "type": "string" + }, + "basketball": { + "type": "string" + }, + "basketball_man": { + "type": "string" + }, + "basketball_woman": { + "type": "string" + }, + "bat": { + "type": "string" + }, + "bath": { + "type": "string" + }, + "bathtub": { + "type": "string" + }, + "battery": { + "type": "string" + }, + "beach_umbrella": { + "type": "string" + }, + "bear": { + "type": "string" + }, + "bearded_person": { + "type": "string" + }, + "beaver": { + "type": "string" + }, + "bed": { + 
"type": "string" + }, + "bee": { + "type": "string" + }, + "beer": { + "type": "string" + }, + "beers": { + "type": "string" + }, + "beetle": { + "type": "string" + }, + "beginner": { + "type": "string" + }, + "belarus": { + "type": "string" + }, + "belgium": { + "type": "string" + }, + "belize": { + "type": "string" + }, + "bell": { + "type": "string" + }, + "bell_pepper": { + "type": "string" + }, + "bellhop_bell": { + "type": "string" + }, + "benin": { + "type": "string" + }, + "bento": { + "type": "string" + }, + "bermuda": { + "type": "string" + }, + "beverage_box": { + "type": "string" + }, + "bhutan": { + "type": "string" + }, + "bicyclist": { + "type": "string" + }, + "bike": { + "type": "string" + }, + "biking_man": { + "type": "string" + }, + "biking_woman": { + "type": "string" + }, + "bikini": { + "type": "string" + }, + "billed_cap": { + "type": "string" + }, + "biohazard": { + "type": "string" + }, + "bird": { + "type": "string" + }, + "birthday": { + "type": "string" + }, + "bison": { + "type": "string" + }, + "black_cat": { + "type": "string" + }, + "black_circle": { + "type": "string" + }, + "black_flag": { + "type": "string" + }, + "black_heart": { + "type": "string" + }, + "black_joker": { + "type": "string" + }, + "black_large_square": { + "type": "string" + }, + "black_medium_small_square": { + "type": "string" + }, + "black_medium_square": { + "type": "string" + }, + "black_nib": { + "type": "string" + }, + "black_small_square": { + "type": "string" + }, + "black_square_button": { + "type": "string" + }, + "blond_haired_man": { + "type": "string" + }, + "blond_haired_person": { + "type": "string" + }, + "blond_haired_woman": { + "type": "string" + }, + "blonde_woman": { + "type": "string" + }, + "blossom": { + "type": "string" + }, + "blowfish": { + "type": "string" + }, + "blue_book": { + "type": "string" + }, + "blue_car": { + "type": "string" + }, + "blue_heart": { + "type": "string" + }, + "blue_square": { + "type": "string" + }, + "blueberries": { + "type": "string" + }, + "blush": { + "type": "string" + }, + "boar": { + "type": "string" + }, + "boat": { + "type": "string" + }, + "bolivia": { + "type": "string" + }, + "bomb": { + "type": "string" + }, + "bone": { + "type": "string" + }, + "book": { + "type": "string" + }, + "bookmark": { + "type": "string" + }, + "bookmark_tabs": { + "type": "string" + }, + "books": { + "type": "string" + }, + "boom": { + "type": "string" + }, + "boomerang": { + "type": "string" + }, + "boot": { + "type": "string" + }, + "bosnia_herzegovina": { + "type": "string" + }, + "botswana": { + "type": "string" + }, + "bouncing_ball_man": { + "type": "string" + }, + "bouncing_ball_person": { + "type": "string" + }, + "bouncing_ball_woman": { + "type": "string" + }, + "bouquet": { + "type": "string" + }, + "bouvet_island": { + "type": "string" + }, + "bow": { + "type": "string" + }, + "bow_and_arrow": { + "type": "string" + }, + "bowing_man": { + "type": "string" + }, + "bowing_woman": { + "type": "string" + }, + "bowl_with_spoon": { + "type": "string" + }, + "bowling": { + "type": "string" + }, + "bowtie": { + "type": "string" + }, + "boxing_glove": { + "type": "string" + }, + "boy": { + "type": "string" + }, + "brain": { + "type": "string" + }, + "brazil": { + "type": "string" + }, + "bread": { + "type": "string" + }, + "breast_feeding": { + "type": "string" + }, + "bricks": { + "type": "string" + }, + "bride_with_veil": { + "type": "string" + }, + "bridge_at_night": { + "type": "string" + }, + "briefcase": { + "type": "string" + }, + 
"british_indian_ocean_territory": { + "type": "string" + }, + "british_virgin_islands": { + "type": "string" + }, + "broccoli": { + "type": "string" + }, + "broken_heart": { + "type": "string" + }, + "broom": { + "type": "string" + }, + "brown_circle": { + "type": "string" + }, + "brown_heart": { + "type": "string" + }, + "brown_square": { + "type": "string" + }, + "brunei": { + "type": "string" + }, + "bubble_tea": { + "type": "string" + }, + "bucket": { + "type": "string" + }, + "bug": { + "type": "string" + }, + "building_construction": { + "type": "string" + }, + "bulb": { + "type": "string" + }, + "bulgaria": { + "type": "string" + }, + "bullettrain_front": { + "type": "string" + }, + "bullettrain_side": { + "type": "string" + }, + "burkina_faso": { + "type": "string" + }, + "burrito": { + "type": "string" + }, + "burundi": { + "type": "string" + }, + "bus": { + "type": "string" + }, + "business_suit_levitating": { + "type": "string" + }, + "busstop": { + "type": "string" + }, + "bust_in_silhouette": { + "type": "string" + }, + "busts_in_silhouette": { + "type": "string" + }, + "butter": { + "type": "string" + }, + "butterfly": { + "type": "string" + }, + "cactus": { + "type": "string" + }, + "cake": { + "type": "string" + }, + "calendar": { + "type": "string" + }, + "call_me_hand": { + "type": "string" + }, + "calling": { + "type": "string" + }, + "cambodia": { + "type": "string" + }, + "camel": { + "type": "string" + }, + "camera": { + "type": "string" + }, + "camera_flash": { + "type": "string" + }, + "cameroon": { + "type": "string" + }, + "camping": { + "type": "string" + }, + "canada": { + "type": "string" + }, + "canary_islands": { + "type": "string" + }, + "cancer": { + "type": "string" + }, + "candle": { + "type": "string" + }, + "candy": { + "type": "string" + }, + "canned_food": { + "type": "string" + }, + "canoe": { + "type": "string" + }, + "cape_verde": { + "type": "string" + }, + "capital_abcd": { + "type": "string" + }, + "capricorn": { + "type": "string" + }, + "car": { + "type": "string" + }, + "card_file_box": { + "type": "string" + }, + "card_index": { + "type": "string" + }, + "card_index_dividers": { + "type": "string" + }, + "caribbean_netherlands": { + "type": "string" + }, + "carousel_horse": { + "type": "string" + }, + "carpentry_saw": { + "type": "string" + }, + "carrot": { + "type": "string" + }, + "cartwheeling": { + "type": "string" + }, + "cat": { + "type": "string" + }, + "cat2": { + "type": "string" + }, + "cayman_islands": { + "type": "string" + }, + "cd": { + "type": "string" + }, + "central_african_republic": { + "type": "string" + }, + "ceuta_melilla": { + "type": "string" + }, + "chad": { + "type": "string" + }, + "chains": { + "type": "string" + }, + "chair": { + "type": "string" + }, + "champagne": { + "type": "string" + }, + "chart": { + "type": "string" + }, + "chart_with_downwards_trend": { + "type": "string" + }, + "chart_with_upwards_trend": { + "type": "string" + }, + "checkered_flag": { + "type": "string" + }, + "cheese": { + "type": "string" + }, + "cherries": { + "type": "string" + }, + "cherry_blossom": { + "type": "string" + }, + "chess_pawn": { + "type": "string" + }, + "chestnut": { + "type": "string" + }, + "chicken": { + "type": "string" + }, + "child": { + "type": "string" + }, + "children_crossing": { + "type": "string" + }, + "chile": { + "type": "string" + }, + "chipmunk": { + "type": "string" + }, + "chocolate_bar": { + "type": "string" + }, + "chopsticks": { + "type": "string" + }, + "christmas_island": { + "type": "string" 
+ }, + "christmas_tree": { + "type": "string" + }, + "church": { + "type": "string" + }, + "cinema": { + "type": "string" + }, + "circus_tent": { + "type": "string" + }, + "city_sunrise": { + "type": "string" + }, + "city_sunset": { + "type": "string" + }, + "cityscape": { + "type": "string" + }, + "cl": { + "type": "string" + }, + "clamp": { + "type": "string" + }, + "clap": { + "type": "string" + }, + "clapper": { + "type": "string" + }, + "classical_building": { + "type": "string" + }, + "climbing": { + "type": "string" + }, + "climbing_man": { + "type": "string" + }, + "climbing_woman": { + "type": "string" + }, + "clinking_glasses": { + "type": "string" + }, + "clipboard": { + "type": "string" + }, + "clipperton_island": { + "type": "string" + }, + "clock1": { + "type": "string" + }, + "clock10": { + "type": "string" + }, + "clock1030": { + "type": "string" + }, + "clock11": { + "type": "string" + }, + "clock1130": { + "type": "string" + }, + "clock12": { + "type": "string" + }, + "clock1230": { + "type": "string" + }, + "clock130": { + "type": "string" + }, + "clock2": { + "type": "string" + }, + "clock230": { + "type": "string" + }, + "clock3": { + "type": "string" + }, + "clock330": { + "type": "string" + }, + "clock4": { + "type": "string" + }, + "clock430": { + "type": "string" + }, + "clock5": { + "type": "string" + }, + "clock530": { + "type": "string" + }, + "clock6": { + "type": "string" + }, + "clock630": { + "type": "string" + }, + "clock7": { + "type": "string" + }, + "clock730": { + "type": "string" + }, + "clock8": { + "type": "string" + }, + "clock830": { + "type": "string" + }, + "clock9": { + "type": "string" + }, + "clock930": { + "type": "string" + }, + "closed_book": { + "type": "string" + }, + "closed_lock_with_key": { + "type": "string" + }, + "closed_umbrella": { + "type": "string" + }, + "cloud": { + "type": "string" + }, + "cloud_with_lightning": { + "type": "string" + }, + "cloud_with_lightning_and_rain": { + "type": "string" + }, + "cloud_with_rain": { + "type": "string" + }, + "cloud_with_snow": { + "type": "string" + }, + "clown_face": { + "type": "string" + }, + "clubs": { + "type": "string" + }, + "cn": { + "type": "string" + }, + "coat": { + "type": "string" + }, + "cockroach": { + "type": "string" + }, + "cocktail": { + "type": "string" + }, + "coconut": { + "type": "string" + }, + "cocos_islands": { + "type": "string" + }, + "coffee": { + "type": "string" + }, + "coffin": { + "type": "string" + }, + "coin": { + "type": "string" + }, + "cold_face": { + "type": "string" + }, + "cold_sweat": { + "type": "string" + }, + "collision": { + "type": "string" + }, + "colombia": { + "type": "string" + }, + "comet": { + "type": "string" + }, + "comoros": { + "type": "string" + }, + "compass": { + "type": "string" + }, + "computer": { + "type": "string" + }, + "computer_mouse": { + "type": "string" + }, + "confetti_ball": { + "type": "string" + }, + "confounded": { + "type": "string" + }, + "confused": { + "type": "string" + }, + "congo_brazzaville": { + "type": "string" + }, + "congo_kinshasa": { + "type": "string" + }, + "congratulations": { + "type": "string" + }, + "construction": { + "type": "string" + }, + "construction_worker": { + "type": "string" + }, + "construction_worker_man": { + "type": "string" + }, + "construction_worker_woman": { + "type": "string" + }, + "control_knobs": { + "type": "string" + }, + "convenience_store": { + "type": "string" + }, + "cook": { + "type": "string" + }, + "cook_islands": { + "type": "string" + }, + "cookie": { + "type": 
"string" + }, + "cool": { + "type": "string" + }, + "cop": { + "type": "string" + }, + "copyright": { + "type": "string" + }, + "corn": { + "type": "string" + }, + "costa_rica": { + "type": "string" + }, + "cote_divoire": { + "type": "string" + }, + "couch_and_lamp": { + "type": "string" + }, + "couple": { + "type": "string" + }, + "couple_with_heart": { + "type": "string" + }, + "couple_with_heart_man_man": { + "type": "string" + }, + "couple_with_heart_woman_man": { + "type": "string" + }, + "couple_with_heart_woman_woman": { + "type": "string" + }, + "couplekiss": { + "type": "string" + }, + "couplekiss_man_man": { + "type": "string" + }, + "couplekiss_man_woman": { + "type": "string" + }, + "couplekiss_woman_woman": { + "type": "string" + }, + "cow": { + "type": "string" + }, + "cow2": { + "type": "string" + }, + "cowboy_hat_face": { + "type": "string" + }, + "crab": { + "type": "string" + }, + "crayon": { + "type": "string" + }, + "credit_card": { + "type": "string" + }, + "crescent_moon": { + "type": "string" + }, + "cricket": { + "type": "string" + }, + "cricket_game": { + "type": "string" + }, + "croatia": { + "type": "string" + }, + "crocodile": { + "type": "string" + }, + "croissant": { + "type": "string" + }, + "crossed_fingers": { + "type": "string" + }, + "crossed_flags": { + "type": "string" + }, + "crossed_swords": { + "type": "string" + }, + "crown": { + "type": "string" + }, + "cry": { + "type": "string" + }, + "crying_cat_face": { + "type": "string" + }, + "crystal_ball": { + "type": "string" + }, + "cuba": { + "type": "string" + }, + "cucumber": { + "type": "string" + }, + "cup_with_straw": { + "type": "string" + }, + "cupcake": { + "type": "string" + }, + "cupid": { + "type": "string" + }, + "curacao": { + "type": "string" + }, + "curling_stone": { + "type": "string" + }, + "curly_haired_man": { + "type": "string" + }, + "curly_haired_woman": { + "type": "string" + }, + "curly_loop": { + "type": "string" + }, + "currency_exchange": { + "type": "string" + }, + "curry": { + "type": "string" + }, + "cursing_face": { + "type": "string" + }, + "custard": { + "type": "string" + }, + "customs": { + "type": "string" + }, + "cut_of_meat": { + "type": "string" + }, + "cyclone": { + "type": "string" + }, + "cyprus": { + "type": "string" + }, + "czech_republic": { + "type": "string" + }, + "dagger": { + "type": "string" + }, + "dancer": { + "type": "string" + }, + "dancers": { + "type": "string" + }, + "dancing_men": { + "type": "string" + }, + "dancing_women": { + "type": "string" + }, + "dango": { + "type": "string" + }, + "dark_sunglasses": { + "type": "string" + }, + "dart": { + "type": "string" + }, + "dash": { + "type": "string" + }, + "date": { + "type": "string" + }, + "de": { + "type": "string" + }, + "deaf_man": { + "type": "string" + }, + "deaf_person": { + "type": "string" + }, + "deaf_woman": { + "type": "string" + }, + "deciduous_tree": { + "type": "string" + }, + "deer": { + "type": "string" + }, + "denmark": { + "type": "string" + }, + "department_store": { + "type": "string" + }, + "derelict_house": { + "type": "string" + }, + "desert": { + "type": "string" + }, + "desert_island": { + "type": "string" + }, + "desktop_computer": { + "type": "string" + }, + "detective": { + "type": "string" + }, + "diamond_shape_with_a_dot_inside": { + "type": "string" + }, + "diamonds": { + "type": "string" + }, + "diego_garcia": { + "type": "string" + }, + "disappointed": { + "type": "string" + }, + "disappointed_relieved": { + "type": "string" + }, + "disguised_face": { + "type": 
"string" + }, + "diving_mask": { + "type": "string" + }, + "diya_lamp": { + "type": "string" + }, + "dizzy": { + "type": "string" + }, + "dizzy_face": { + "type": "string" + }, + "djibouti": { + "type": "string" + }, + "dna": { + "type": "string" + }, + "do_not_litter": { + "type": "string" + }, + "dodo": { + "type": "string" + }, + "dog": { + "type": "string" + }, + "dog2": { + "type": "string" + }, + "dollar": { + "type": "string" + }, + "dolls": { + "type": "string" + }, + "dolphin": { + "type": "string" + }, + "dominica": { + "type": "string" + }, + "dominican_republic": { + "type": "string" + }, + "door": { + "type": "string" + }, + "doughnut": { + "type": "string" + }, + "dove": { + "type": "string" + }, + "dragon": { + "type": "string" + }, + "dragon_face": { + "type": "string" + }, + "dress": { + "type": "string" + }, + "dromedary_camel": { + "type": "string" + }, + "drooling_face": { + "type": "string" + }, + "drop_of_blood": { + "type": "string" + }, + "droplet": { + "type": "string" + }, + "drum": { + "type": "string" + }, + "duck": { + "type": "string" + }, + "dumpling": { + "type": "string" + }, + "dvd": { + "type": "string" + }, + "e-mail": { + "type": "string" + }, + "eagle": { + "type": "string" + }, + "ear": { + "type": "string" + }, + "ear_of_rice": { + "type": "string" + }, + "ear_with_hearing_aid": { + "type": "string" + }, + "earth_africa": { + "type": "string" + }, + "earth_americas": { + "type": "string" + }, + "earth_asia": { + "type": "string" + }, + "ecuador": { + "type": "string" + }, + "egg": { + "type": "string" + }, + "eggplant": { + "type": "string" + }, + "egypt": { + "type": "string" + }, + "eight": { + "type": "string" + }, + "eight_pointed_black_star": { + "type": "string" + }, + "eight_spoked_asterisk": { + "type": "string" + }, + "eject_button": { + "type": "string" + }, + "el_salvador": { + "type": "string" + }, + "electric_plug": { + "type": "string" + }, + "electron": { + "type": "string" + }, + "elephant": { + "type": "string" + }, + "elevator": { + "type": "string" + }, + "elf": { + "type": "string" + }, + "elf_man": { + "type": "string" + }, + "elf_woman": { + "type": "string" + }, + "email": { + "type": "string" + }, + "end": { + "type": "string" + }, + "england": { + "type": "string" + }, + "envelope": { + "type": "string" + }, + "envelope_with_arrow": { + "type": "string" + }, + "equatorial_guinea": { + "type": "string" + }, + "eritrea": { + "type": "string" + }, + "es": { + "type": "string" + }, + "estonia": { + "type": "string" + }, + "ethiopia": { + "type": "string" + }, + "eu": { + "type": "string" + }, + "euro": { + "type": "string" + }, + "european_castle": { + "type": "string" + }, + "european_post_office": { + "type": "string" + }, + "european_union": { + "type": "string" + }, + "evergreen_tree": { + "type": "string" + }, + "exclamation": { + "type": "string" + }, + "exploding_head": { + "type": "string" + }, + "expressionless": { + "type": "string" + }, + "eye": { + "type": "string" + }, + "eye_speech_bubble": { + "type": "string" + }, + "eyeglasses": { + "type": "string" + }, + "eyes": { + "type": "string" + }, + "face_exhaling": { + "type": "string" + }, + "face_in_clouds": { + "type": "string" + }, + "face_with_head_bandage": { + "type": "string" + }, + "face_with_spiral_eyes": { + "type": "string" + }, + "face_with_thermometer": { + "type": "string" + }, + "facepalm": { + "type": "string" + }, + "facepunch": { + "type": "string" + }, + "factory": { + "type": "string" + }, + "factory_worker": { + "type": "string" + }, + "fairy": { + 
"type": "string" + }, + "fairy_man": { + "type": "string" + }, + "fairy_woman": { + "type": "string" + }, + "falafel": { + "type": "string" + }, + "falkland_islands": { + "type": "string" + }, + "fallen_leaf": { + "type": "string" + }, + "family": { + "type": "string" + }, + "family_man_boy": { + "type": "string" + }, + "family_man_boy_boy": { + "type": "string" + }, + "family_man_girl": { + "type": "string" + }, + "family_man_girl_boy": { + "type": "string" + }, + "family_man_girl_girl": { + "type": "string" + }, + "family_man_man_boy": { + "type": "string" + }, + "family_man_man_boy_boy": { + "type": "string" + }, + "family_man_man_girl": { + "type": "string" + }, + "family_man_man_girl_boy": { + "type": "string" + }, + "family_man_man_girl_girl": { + "type": "string" + }, + "family_man_woman_boy": { + "type": "string" + }, + "family_man_woman_boy_boy": { + "type": "string" + }, + "family_man_woman_girl": { + "type": "string" + }, + "family_man_woman_girl_boy": { + "type": "string" + }, + "family_man_woman_girl_girl": { + "type": "string" + }, + "family_woman_boy": { + "type": "string" + }, + "family_woman_boy_boy": { + "type": "string" + }, + "family_woman_girl": { + "type": "string" + }, + "family_woman_girl_boy": { + "type": "string" + }, + "family_woman_girl_girl": { + "type": "string" + }, + "family_woman_woman_boy": { + "type": "string" + }, + "family_woman_woman_boy_boy": { + "type": "string" + }, + "family_woman_woman_girl": { + "type": "string" + }, + "family_woman_woman_girl_boy": { + "type": "string" + }, + "family_woman_woman_girl_girl": { + "type": "string" + }, + "farmer": { + "type": "string" + }, + "faroe_islands": { + "type": "string" + }, + "fast_forward": { + "type": "string" + }, + "fax": { + "type": "string" + }, + "fearful": { + "type": "string" + }, + "feather": { + "type": "string" + }, + "feelsgood": { + "type": "string" + }, + "feet": { + "type": "string" + }, + "female_detective": { + "type": "string" + }, + "female_sign": { + "type": "string" + }, + "ferris_wheel": { + "type": "string" + }, + "ferry": { + "type": "string" + }, + "field_hockey": { + "type": "string" + }, + "fiji": { + "type": "string" + }, + "file_cabinet": { + "type": "string" + }, + "file_folder": { + "type": "string" + }, + "film_projector": { + "type": "string" + }, + "film_strip": { + "type": "string" + }, + "finland": { + "type": "string" + }, + "finnadie": { + "type": "string" + }, + "fire": { + "type": "string" + }, + "fire_engine": { + "type": "string" + }, + "fire_extinguisher": { + "type": "string" + }, + "firecracker": { + "type": "string" + }, + "firefighter": { + "type": "string" + }, + "fireworks": { + "type": "string" + }, + "first_quarter_moon": { + "type": "string" + }, + "first_quarter_moon_with_face": { + "type": "string" + }, + "fish": { + "type": "string" + }, + "fish_cake": { + "type": "string" + }, + "fishing_pole_and_fish": { + "type": "string" + }, + "fist": { + "type": "string" + }, + "fist_left": { + "type": "string" + }, + "fist_oncoming": { + "type": "string" + }, + "fist_raised": { + "type": "string" + }, + "fist_right": { + "type": "string" + }, + "five": { + "type": "string" + }, + "flags": { + "type": "string" + }, + "flamingo": { + "type": "string" + }, + "flashlight": { + "type": "string" + }, + "flat_shoe": { + "type": "string" + }, + "flatbread": { + "type": "string" + }, + "fleur_de_lis": { + "type": "string" + }, + "flight_arrival": { + "type": "string" + }, + "flight_departure": { + "type": "string" + }, + "flipper": { + "type": "string" + }, + 
"floppy_disk": { + "type": "string" + }, + "flower_playing_cards": { + "type": "string" + }, + "flushed": { + "type": "string" + }, + "fly": { + "type": "string" + }, + "flying_disc": { + "type": "string" + }, + "flying_saucer": { + "type": "string" + }, + "fog": { + "type": "string" + }, + "foggy": { + "type": "string" + }, + "fondue": { + "type": "string" + }, + "foot": { + "type": "string" + }, + "football": { + "type": "string" + }, + "footprints": { + "type": "string" + }, + "fork_and_knife": { + "type": "string" + }, + "fortune_cookie": { + "type": "string" + }, + "fountain": { + "type": "string" + }, + "fountain_pen": { + "type": "string" + }, + "four": { + "type": "string" + }, + "four_leaf_clover": { + "type": "string" + }, + "fox_face": { + "type": "string" + }, + "fr": { + "type": "string" + }, + "framed_picture": { + "type": "string" + }, + "free": { + "type": "string" + }, + "french_guiana": { + "type": "string" + }, + "french_polynesia": { + "type": "string" + }, + "french_southern_territories": { + "type": "string" + }, + "fried_egg": { + "type": "string" + }, + "fried_shrimp": { + "type": "string" + }, + "fries": { + "type": "string" + }, + "frog": { + "type": "string" + }, + "frowning": { + "type": "string" + }, + "frowning_face": { + "type": "string" + }, + "frowning_man": { + "type": "string" + }, + "frowning_person": { + "type": "string" + }, + "frowning_woman": { + "type": "string" + }, + "fu": { + "type": "string" + }, + "fuelpump": { + "type": "string" + }, + "full_moon": { + "type": "string" + }, + "full_moon_with_face": { + "type": "string" + }, + "funeral_urn": { + "type": "string" + }, + "gabon": { + "type": "string" + }, + "gambia": { + "type": "string" + }, + "game_die": { + "type": "string" + }, + "garlic": { + "type": "string" + }, + "gb": { + "type": "string" + }, + "gear": { + "type": "string" + }, + "gem": { + "type": "string" + }, + "gemini": { + "type": "string" + }, + "genie": { + "type": "string" + }, + "genie_man": { + "type": "string" + }, + "genie_woman": { + "type": "string" + }, + "georgia": { + "type": "string" + }, + "ghana": { + "type": "string" + }, + "ghost": { + "type": "string" + }, + "gibraltar": { + "type": "string" + }, + "gift": { + "type": "string" + }, + "gift_heart": { + "type": "string" + }, + "giraffe": { + "type": "string" + }, + "girl": { + "type": "string" + }, + "globe_with_meridians": { + "type": "string" + }, + "gloves": { + "type": "string" + }, + "goal_net": { + "type": "string" + }, + "goat": { + "type": "string" + }, + "goberserk": { + "type": "string" + }, + "godmode": { + "type": "string" + }, + "goggles": { + "type": "string" + }, + "golf": { + "type": "string" + }, + "golfing": { + "type": "string" + }, + "golfing_man": { + "type": "string" + }, + "golfing_woman": { + "type": "string" + }, + "gorilla": { + "type": "string" + }, + "grapes": { + "type": "string" + }, + "greece": { + "type": "string" + }, + "green_apple": { + "type": "string" + }, + "green_book": { + "type": "string" + }, + "green_circle": { + "type": "string" + }, + "green_heart": { + "type": "string" + }, + "green_salad": { + "type": "string" + }, + "green_square": { + "type": "string" + }, + "greenland": { + "type": "string" + }, + "grenada": { + "type": "string" + }, + "grey_exclamation": { + "type": "string" + }, + "grey_question": { + "type": "string" + }, + "grimacing": { + "type": "string" + }, + "grin": { + "type": "string" + }, + "grinning": { + "type": "string" + }, + "guadeloupe": { + "type": "string" + }, + "guam": { + "type": "string" + }, 
+ "guard": { + "type": "string" + }, + "guardsman": { + "type": "string" + }, + "guardswoman": { + "type": "string" + }, + "guatemala": { + "type": "string" + }, + "guernsey": { + "type": "string" + }, + "guide_dog": { + "type": "string" + }, + "guinea": { + "type": "string" + }, + "guinea_bissau": { + "type": "string" + }, + "guitar": { + "type": "string" + }, + "gun": { + "type": "string" + }, + "guyana": { + "type": "string" + }, + "haircut": { + "type": "string" + }, + "haircut_man": { + "type": "string" + }, + "haircut_woman": { + "type": "string" + }, + "haiti": { + "type": "string" + }, + "hamburger": { + "type": "string" + }, + "hammer": { + "type": "string" + }, + "hammer_and_pick": { + "type": "string" + }, + "hammer_and_wrench": { + "type": "string" + }, + "hamster": { + "type": "string" + }, + "hand": { + "type": "string" + }, + "hand_over_mouth": { + "type": "string" + }, + "handbag": { + "type": "string" + }, + "handball_person": { + "type": "string" + }, + "handshake": { + "type": "string" + }, + "hankey": { + "type": "string" + }, + "hash": { + "type": "string" + }, + "hatched_chick": { + "type": "string" + }, + "hatching_chick": { + "type": "string" + }, + "headphones": { + "type": "string" + }, + "headstone": { + "type": "string" + }, + "health_worker": { + "type": "string" + }, + "hear_no_evil": { + "type": "string" + }, + "heard_mcdonald_islands": { + "type": "string" + }, + "heart": { + "type": "string" + }, + "heart_decoration": { + "type": "string" + }, + "heart_eyes": { + "type": "string" + }, + "heart_eyes_cat": { + "type": "string" + }, + "heart_on_fire": { + "type": "string" + }, + "heartbeat": { + "type": "string" + }, + "heartpulse": { + "type": "string" + }, + "hearts": { + "type": "string" + }, + "heavy_check_mark": { + "type": "string" + }, + "heavy_division_sign": { + "type": "string" + }, + "heavy_dollar_sign": { + "type": "string" + }, + "heavy_exclamation_mark": { + "type": "string" + }, + "heavy_heart_exclamation": { + "type": "string" + }, + "heavy_minus_sign": { + "type": "string" + }, + "heavy_multiplication_x": { + "type": "string" + }, + "heavy_plus_sign": { + "type": "string" + }, + "hedgehog": { + "type": "string" + }, + "helicopter": { + "type": "string" + }, + "herb": { + "type": "string" + }, + "hibiscus": { + "type": "string" + }, + "high_brightness": { + "type": "string" + }, + "high_heel": { + "type": "string" + }, + "hiking_boot": { + "type": "string" + }, + "hindu_temple": { + "type": "string" + }, + "hippopotamus": { + "type": "string" + }, + "hocho": { + "type": "string" + }, + "hole": { + "type": "string" + }, + "honduras": { + "type": "string" + }, + "honey_pot": { + "type": "string" + }, + "honeybee": { + "type": "string" + }, + "hong_kong": { + "type": "string" + }, + "hook": { + "type": "string" + }, + "horse": { + "type": "string" + }, + "horse_racing": { + "type": "string" + }, + "hospital": { + "type": "string" + }, + "hot_face": { + "type": "string" + }, + "hot_pepper": { + "type": "string" + }, + "hotdog": { + "type": "string" + }, + "hotel": { + "type": "string" + }, + "hotsprings": { + "type": "string" + }, + "hourglass": { + "type": "string" + }, + "hourglass_flowing_sand": { + "type": "string" + }, + "house": { + "type": "string" + }, + "house_with_garden": { + "type": "string" + }, + "houses": { + "type": "string" + }, + "hugs": { + "type": "string" + }, + "hungary": { + "type": "string" + }, + "hurtrealbad": { + "type": "string" + }, + "hushed": { + "type": "string" + }, + "hut": { + "type": "string" + }, + "ice_cream": 
{ + "type": "string" + }, + "ice_cube": { + "type": "string" + }, + "ice_hockey": { + "type": "string" + }, + "ice_skate": { + "type": "string" + }, + "icecream": { + "type": "string" + }, + "iceland": { + "type": "string" + }, + "id": { + "type": "string" + }, + "ideograph_advantage": { + "type": "string" + }, + "imp": { + "type": "string" + }, + "inbox_tray": { + "type": "string" + }, + "incoming_envelope": { + "type": "string" + }, + "india": { + "type": "string" + }, + "indonesia": { + "type": "string" + }, + "infinity": { + "type": "string" + }, + "information_desk_person": { + "type": "string" + }, + "information_source": { + "type": "string" + }, + "innocent": { + "type": "string" + }, + "interrobang": { + "type": "string" + }, + "iphone": { + "type": "string" + }, + "iran": { + "type": "string" + }, + "iraq": { + "type": "string" + }, + "ireland": { + "type": "string" + }, + "isle_of_man": { + "type": "string" + }, + "israel": { + "type": "string" + }, + "it": { + "type": "string" + }, + "izakaya_lantern": { + "type": "string" + }, + "jack_o_lantern": { + "type": "string" + }, + "jamaica": { + "type": "string" + }, + "japan": { + "type": "string" + }, + "japanese_castle": { + "type": "string" + }, + "japanese_goblin": { + "type": "string" + }, + "japanese_ogre": { + "type": "string" + }, + "jeans": { + "type": "string" + }, + "jersey": { + "type": "string" + }, + "jigsaw": { + "type": "string" + }, + "jordan": { + "type": "string" + }, + "joy": { + "type": "string" + }, + "joy_cat": { + "type": "string" + }, + "joystick": { + "type": "string" + }, + "jp": { + "type": "string" + }, + "judge": { + "type": "string" + }, + "juggling_person": { + "type": "string" + }, + "kaaba": { + "type": "string" + }, + "kangaroo": { + "type": "string" + }, + "kazakhstan": { + "type": "string" + }, + "kenya": { + "type": "string" + }, + "key": { + "type": "string" + }, + "keyboard": { + "type": "string" + }, + "keycap_ten": { + "type": "string" + }, + "kick_scooter": { + "type": "string" + }, + "kimono": { + "type": "string" + }, + "kiribati": { + "type": "string" + }, + "kiss": { + "type": "string" + }, + "kissing": { + "type": "string" + }, + "kissing_cat": { + "type": "string" + }, + "kissing_closed_eyes": { + "type": "string" + }, + "kissing_heart": { + "type": "string" + }, + "kissing_smiling_eyes": { + "type": "string" + }, + "kite": { + "type": "string" + }, + "kiwi_fruit": { + "type": "string" + }, + "kneeling_man": { + "type": "string" + }, + "kneeling_person": { + "type": "string" + }, + "kneeling_woman": { + "type": "string" + }, + "knife": { + "type": "string" + }, + "knot": { + "type": "string" + }, + "koala": { + "type": "string" + }, + "koko": { + "type": "string" + }, + "kosovo": { + "type": "string" + }, + "kr": { + "type": "string" + }, + "kuwait": { + "type": "string" + }, + "kyrgyzstan": { + "type": "string" + }, + "lab_coat": { + "type": "string" + }, + "label": { + "type": "string" + }, + "lacrosse": { + "type": "string" + }, + "ladder": { + "type": "string" + }, + "lady_beetle": { + "type": "string" + }, + "lantern": { + "type": "string" + }, + "laos": { + "type": "string" + }, + "large_blue_circle": { + "type": "string" + }, + "large_blue_diamond": { + "type": "string" + }, + "large_orange_diamond": { + "type": "string" + }, + "last_quarter_moon": { + "type": "string" + }, + "last_quarter_moon_with_face": { + "type": "string" + }, + "latin_cross": { + "type": "string" + }, + "latvia": { + "type": "string" + }, + "laughing": { + "type": "string" + }, + "leafy_green": { + 
"type": "string" + }, + "leaves": { + "type": "string" + }, + "lebanon": { + "type": "string" + }, + "ledger": { + "type": "string" + }, + "left_luggage": { + "type": "string" + }, + "left_right_arrow": { + "type": "string" + }, + "left_speech_bubble": { + "type": "string" + }, + "leftwards_arrow_with_hook": { + "type": "string" + }, + "leg": { + "type": "string" + }, + "lemon": { + "type": "string" + }, + "leo": { + "type": "string" + }, + "leopard": { + "type": "string" + }, + "lesotho": { + "type": "string" + }, + "level_slider": { + "type": "string" + }, + "liberia": { + "type": "string" + }, + "libra": { + "type": "string" + }, + "libya": { + "type": "string" + }, + "liechtenstein": { + "type": "string" + }, + "light_rail": { + "type": "string" + }, + "link": { + "type": "string" + }, + "lion": { + "type": "string" + }, + "lips": { + "type": "string" + }, + "lipstick": { + "type": "string" + }, + "lithuania": { + "type": "string" + }, + "lizard": { + "type": "string" + }, + "llama": { + "type": "string" + }, + "lobster": { + "type": "string" + }, + "lock": { + "type": "string" + }, + "lock_with_ink_pen": { + "type": "string" + }, + "lollipop": { + "type": "string" + }, + "long_drum": { + "type": "string" + }, + "loop": { + "type": "string" + }, + "lotion_bottle": { + "type": "string" + }, + "lotus_position": { + "type": "string" + }, + "lotus_position_man": { + "type": "string" + }, + "lotus_position_woman": { + "type": "string" + }, + "loud_sound": { + "type": "string" + }, + "loudspeaker": { + "type": "string" + }, + "love_hotel": { + "type": "string" + }, + "love_letter": { + "type": "string" + }, + "love_you_gesture": { + "type": "string" + }, + "low_brightness": { + "type": "string" + }, + "luggage": { + "type": "string" + }, + "lungs": { + "type": "string" + }, + "luxembourg": { + "type": "string" + }, + "lying_face": { + "type": "string" + }, + "m": { + "type": "string" + }, + "macau": { + "type": "string" + }, + "macedonia": { + "type": "string" + }, + "madagascar": { + "type": "string" + }, + "mag": { + "type": "string" + }, + "mag_right": { + "type": "string" + }, + "mage": { + "type": "string" + }, + "mage_man": { + "type": "string" + }, + "mage_woman": { + "type": "string" + }, + "magic_wand": { + "type": "string" + }, + "magnet": { + "type": "string" + }, + "mahjong": { + "type": "string" + }, + "mailbox": { + "type": "string" + }, + "mailbox_closed": { + "type": "string" + }, + "mailbox_with_mail": { + "type": "string" + }, + "mailbox_with_no_mail": { + "type": "string" + }, + "malawi": { + "type": "string" + }, + "malaysia": { + "type": "string" + }, + "maldives": { + "type": "string" + }, + "male_detective": { + "type": "string" + }, + "male_sign": { + "type": "string" + }, + "mali": { + "type": "string" + }, + "malta": { + "type": "string" + }, + "mammoth": { + "type": "string" + }, + "man": { + "type": "string" + }, + "man_artist": { + "type": "string" + }, + "man_astronaut": { + "type": "string" + }, + "man_beard": { + "type": "string" + }, + "man_cartwheeling": { + "type": "string" + }, + "man_cook": { + "type": "string" + }, + "man_dancing": { + "type": "string" + }, + "man_facepalming": { + "type": "string" + }, + "man_factory_worker": { + "type": "string" + }, + "man_farmer": { + "type": "string" + }, + "man_feeding_baby": { + "type": "string" + }, + "man_firefighter": { + "type": "string" + }, + "man_health_worker": { + "type": "string" + }, + "man_in_manual_wheelchair": { + "type": "string" + }, + "man_in_motorized_wheelchair": { + "type": "string" + }, + 
"man_in_tuxedo": { + "type": "string" + }, + "man_judge": { + "type": "string" + }, + "man_juggling": { + "type": "string" + }, + "man_mechanic": { + "type": "string" + }, + "man_office_worker": { + "type": "string" + }, + "man_pilot": { + "type": "string" + }, + "man_playing_handball": { + "type": "string" + }, + "man_playing_water_polo": { + "type": "string" + }, + "man_scientist": { + "type": "string" + }, + "man_shrugging": { + "type": "string" + }, + "man_singer": { + "type": "string" + }, + "man_student": { + "type": "string" + }, + "man_teacher": { + "type": "string" + }, + "man_technologist": { + "type": "string" + }, + "man_with_gua_pi_mao": { + "type": "string" + }, + "man_with_probing_cane": { + "type": "string" + }, + "man_with_turban": { + "type": "string" + }, + "man_with_veil": { + "type": "string" + }, + "mandarin": { + "type": "string" + }, + "mango": { + "type": "string" + }, + "mans_shoe": { + "type": "string" + }, + "mantelpiece_clock": { + "type": "string" + }, + "manual_wheelchair": { + "type": "string" + }, + "maple_leaf": { + "type": "string" + }, + "marshall_islands": { + "type": "string" + }, + "martial_arts_uniform": { + "type": "string" + }, + "martinique": { + "type": "string" + }, + "mask": { + "type": "string" + }, + "massage": { + "type": "string" + }, + "massage_man": { + "type": "string" + }, + "massage_woman": { + "type": "string" + }, + "mate": { + "type": "string" + }, + "mauritania": { + "type": "string" + }, + "mauritius": { + "type": "string" + }, + "mayotte": { + "type": "string" + }, + "meat_on_bone": { + "type": "string" + }, + "mechanic": { + "type": "string" + }, + "mechanical_arm": { + "type": "string" + }, + "mechanical_leg": { + "type": "string" + }, + "medal_military": { + "type": "string" + }, + "medal_sports": { + "type": "string" + }, + "medical_symbol": { + "type": "string" + }, + "mega": { + "type": "string" + }, + "melon": { + "type": "string" + }, + "memo": { + "type": "string" + }, + "men_wrestling": { + "type": "string" + }, + "mending_heart": { + "type": "string" + }, + "menorah": { + "type": "string" + }, + "mens": { + "type": "string" + }, + "mermaid": { + "type": "string" + }, + "merman": { + "type": "string" + }, + "merperson": { + "type": "string" + }, + "metal": { + "type": "string" + }, + "metro": { + "type": "string" + }, + "mexico": { + "type": "string" + }, + "microbe": { + "type": "string" + }, + "micronesia": { + "type": "string" + }, + "microphone": { + "type": "string" + }, + "microscope": { + "type": "string" + }, + "middle_finger": { + "type": "string" + }, + "military_helmet": { + "type": "string" + }, + "milk_glass": { + "type": "string" + }, + "milky_way": { + "type": "string" + }, + "minibus": { + "type": "string" + }, + "minidisc": { + "type": "string" + }, + "mirror": { + "type": "string" + }, + "mobile_phone_off": { + "type": "string" + }, + "moldova": { + "type": "string" + }, + "monaco": { + "type": "string" + }, + "money_mouth_face": { + "type": "string" + }, + "money_with_wings": { + "type": "string" + }, + "moneybag": { + "type": "string" + }, + "mongolia": { + "type": "string" + }, + "monkey": { + "type": "string" + }, + "monkey_face": { + "type": "string" + }, + "monocle_face": { + "type": "string" + }, + "monorail": { + "type": "string" + }, + "montenegro": { + "type": "string" + }, + "montserrat": { + "type": "string" + }, + "moon": { + "type": "string" + }, + "moon_cake": { + "type": "string" + }, + "morocco": { + "type": "string" + }, + "mortar_board": { + "type": "string" + }, + "mosque": { + 
"type": "string" + }, + "mosquito": { + "type": "string" + }, + "motor_boat": { + "type": "string" + }, + "motor_scooter": { + "type": "string" + }, + "motorcycle": { + "type": "string" + }, + "motorized_wheelchair": { + "type": "string" + }, + "motorway": { + "type": "string" + }, + "mount_fuji": { + "type": "string" + }, + "mountain": { + "type": "string" + }, + "mountain_bicyclist": { + "type": "string" + }, + "mountain_biking_man": { + "type": "string" + }, + "mountain_biking_woman": { + "type": "string" + }, + "mountain_cableway": { + "type": "string" + }, + "mountain_railway": { + "type": "string" + }, + "mountain_snow": { + "type": "string" + }, + "mouse": { + "type": "string" + }, + "mouse2": { + "type": "string" + }, + "mouse_trap": { + "type": "string" + }, + "movie_camera": { + "type": "string" + }, + "moyai": { + "type": "string" + }, + "mozambique": { + "type": "string" + }, + "mrs_claus": { + "type": "string" + }, + "muscle": { + "type": "string" + }, + "mushroom": { + "type": "string" + }, + "musical_keyboard": { + "type": "string" + }, + "musical_note": { + "type": "string" + }, + "musical_score": { + "type": "string" + }, + "mute": { + "type": "string" + }, + "mx_claus": { + "type": "string" + }, + "myanmar": { + "type": "string" + }, + "nail_care": { + "type": "string" + }, + "name_badge": { + "type": "string" + }, + "namibia": { + "type": "string" + }, + "national_park": { + "type": "string" + }, + "nauru": { + "type": "string" + }, + "nauseated_face": { + "type": "string" + }, + "nazar_amulet": { + "type": "string" + }, + "neckbeard": { + "type": "string" + }, + "necktie": { + "type": "string" + }, + "negative_squared_cross_mark": { + "type": "string" + }, + "nepal": { + "type": "string" + }, + "nerd_face": { + "type": "string" + }, + "nesting_dolls": { + "type": "string" + }, + "netherlands": { + "type": "string" + }, + "neutral_face": { + "type": "string" + }, + "new": { + "type": "string" + }, + "new_caledonia": { + "type": "string" + }, + "new_moon": { + "type": "string" + }, + "new_moon_with_face": { + "type": "string" + }, + "new_zealand": { + "type": "string" + }, + "newspaper": { + "type": "string" + }, + "newspaper_roll": { + "type": "string" + }, + "next_track_button": { + "type": "string" + }, + "ng": { + "type": "string" + }, + "ng_man": { + "type": "string" + }, + "ng_woman": { + "type": "string" + }, + "nicaragua": { + "type": "string" + }, + "niger": { + "type": "string" + }, + "nigeria": { + "type": "string" + }, + "night_with_stars": { + "type": "string" + }, + "nine": { + "type": "string" + }, + "ninja": { + "type": "string" + }, + "niue": { + "type": "string" + }, + "no_bell": { + "type": "string" + }, + "no_bicycles": { + "type": "string" + }, + "no_entry": { + "type": "string" + }, + "no_entry_sign": { + "type": "string" + }, + "no_good": { + "type": "string" + }, + "no_good_man": { + "type": "string" + }, + "no_good_woman": { + "type": "string" + }, + "no_mobile_phones": { + "type": "string" + }, + "no_mouth": { + "type": "string" + }, + "no_pedestrians": { + "type": "string" + }, + "no_smoking": { + "type": "string" + }, + "non-potable_water": { + "type": "string" + }, + "norfolk_island": { + "type": "string" + }, + "north_korea": { + "type": "string" + }, + "northern_mariana_islands": { + "type": "string" + }, + "norway": { + "type": "string" + }, + "nose": { + "type": "string" + }, + "notebook": { + "type": "string" + }, + "notebook_with_decorative_cover": { + "type": "string" + }, + "notes": { + "type": "string" + }, + "nut_and_bolt": { + 
"type": "string" + }, + "o": { + "type": "string" + }, + "o2": { + "type": "string" + }, + "ocean": { + "type": "string" + }, + "octocat": { + "type": "string" + }, + "octopus": { + "type": "string" + }, + "oden": { + "type": "string" + }, + "office": { + "type": "string" + }, + "office_worker": { + "type": "string" + }, + "oil_drum": { + "type": "string" + }, + "ok": { + "type": "string" + }, + "ok_hand": { + "type": "string" + }, + "ok_man": { + "type": "string" + }, + "ok_person": { + "type": "string" + }, + "ok_woman": { + "type": "string" + }, + "old_key": { + "type": "string" + }, + "older_adult": { + "type": "string" + }, + "older_man": { + "type": "string" + }, + "older_woman": { + "type": "string" + }, + "olive": { + "type": "string" + }, + "om": { + "type": "string" + }, + "oman": { + "type": "string" + }, + "on": { + "type": "string" + }, + "oncoming_automobile": { + "type": "string" + }, + "oncoming_bus": { + "type": "string" + }, + "oncoming_police_car": { + "type": "string" + }, + "oncoming_taxi": { + "type": "string" + }, + "one": { + "type": "string" + }, + "one_piece_swimsuit": { + "type": "string" + }, + "onion": { + "type": "string" + }, + "open_book": { + "type": "string" + }, + "open_file_folder": { + "type": "string" + }, + "open_hands": { + "type": "string" + }, + "open_mouth": { + "type": "string" + }, + "open_umbrella": { + "type": "string" + }, + "ophiuchus": { + "type": "string" + }, + "orange": { + "type": "string" + }, + "orange_book": { + "type": "string" + }, + "orange_circle": { + "type": "string" + }, + "orange_heart": { + "type": "string" + }, + "orange_square": { + "type": "string" + }, + "orangutan": { + "type": "string" + }, + "orthodox_cross": { + "type": "string" + }, + "otter": { + "type": "string" + }, + "outbox_tray": { + "type": "string" + }, + "owl": { + "type": "string" + }, + "ox": { + "type": "string" + }, + "oyster": { + "type": "string" + }, + "package": { + "type": "string" + }, + "page_facing_up": { + "type": "string" + }, + "page_with_curl": { + "type": "string" + }, + "pager": { + "type": "string" + }, + "paintbrush": { + "type": "string" + }, + "pakistan": { + "type": "string" + }, + "palau": { + "type": "string" + }, + "palestinian_territories": { + "type": "string" + }, + "palm_tree": { + "type": "string" + }, + "palms_up_together": { + "type": "string" + }, + "panama": { + "type": "string" + }, + "pancakes": { + "type": "string" + }, + "panda_face": { + "type": "string" + }, + "paperclip": { + "type": "string" + }, + "paperclips": { + "type": "string" + }, + "papua_new_guinea": { + "type": "string" + }, + "parachute": { + "type": "string" + }, + "paraguay": { + "type": "string" + }, + "parasol_on_ground": { + "type": "string" + }, + "parking": { + "type": "string" + }, + "parrot": { + "type": "string" + }, + "part_alternation_mark": { + "type": "string" + }, + "partly_sunny": { + "type": "string" + }, + "partying_face": { + "type": "string" + }, + "passenger_ship": { + "type": "string" + }, + "passport_control": { + "type": "string" + }, + "pause_button": { + "type": "string" + }, + "paw_prints": { + "type": "string" + }, + "peace_symbol": { + "type": "string" + }, + "peach": { + "type": "string" + }, + "peacock": { + "type": "string" + }, + "peanuts": { + "type": "string" + }, + "pear": { + "type": "string" + }, + "pen": { + "type": "string" + }, + "pencil": { + "type": "string" + }, + "pencil2": { + "type": "string" + }, + "penguin": { + "type": "string" + }, + "pensive": { + "type": "string" + }, + "people_holding_hands": { + 
"type": "string" + }, + "people_hugging": { + "type": "string" + }, + "performing_arts": { + "type": "string" + }, + "persevere": { + "type": "string" + }, + "person_bald": { + "type": "string" + }, + "person_curly_hair": { + "type": "string" + }, + "person_feeding_baby": { + "type": "string" + }, + "person_fencing": { + "type": "string" + }, + "person_in_manual_wheelchair": { + "type": "string" + }, + "person_in_motorized_wheelchair": { + "type": "string" + }, + "person_in_tuxedo": { + "type": "string" + }, + "person_red_hair": { + "type": "string" + }, + "person_white_hair": { + "type": "string" + }, + "person_with_probing_cane": { + "type": "string" + }, + "person_with_turban": { + "type": "string" + }, + "person_with_veil": { + "type": "string" + }, + "peru": { + "type": "string" + }, + "petri_dish": { + "type": "string" + }, + "philippines": { + "type": "string" + }, + "phone": { + "type": "string" + }, + "pick": { + "type": "string" + }, + "pickup_truck": { + "type": "string" + }, + "pie": { + "type": "string" + }, + "pig": { + "type": "string" + }, + "pig2": { + "type": "string" + }, + "pig_nose": { + "type": "string" + }, + "pill": { + "type": "string" + }, + "pilot": { + "type": "string" + }, + "pinata": { + "type": "string" + }, + "pinched_fingers": { + "type": "string" + }, + "pinching_hand": { + "type": "string" + }, + "pineapple": { + "type": "string" + }, + "ping_pong": { + "type": "string" + }, + "pirate_flag": { + "type": "string" + }, + "pisces": { + "type": "string" + }, + "pitcairn_islands": { + "type": "string" + }, + "pizza": { + "type": "string" + }, + "placard": { + "type": "string" + }, + "place_of_worship": { + "type": "string" + }, + "plate_with_cutlery": { + "type": "string" + }, + "play_or_pause_button": { + "type": "string" + }, + "pleading_face": { + "type": "string" + }, + "plunger": { + "type": "string" + }, + "point_down": { + "type": "string" + }, + "point_left": { + "type": "string" + }, + "point_right": { + "type": "string" + }, + "point_up": { + "type": "string" + }, + "point_up_2": { + "type": "string" + }, + "poland": { + "type": "string" + }, + "polar_bear": { + "type": "string" + }, + "police_car": { + "type": "string" + }, + "police_officer": { + "type": "string" + }, + "policeman": { + "type": "string" + }, + "policewoman": { + "type": "string" + }, + "poodle": { + "type": "string" + }, + "poop": { + "type": "string" + }, + "popcorn": { + "type": "string" + }, + "portugal": { + "type": "string" + }, + "post_office": { + "type": "string" + }, + "postal_horn": { + "type": "string" + }, + "postbox": { + "type": "string" + }, + "potable_water": { + "type": "string" + }, + "potato": { + "type": "string" + }, + "potted_plant": { + "type": "string" + }, + "pouch": { + "type": "string" + }, + "poultry_leg": { + "type": "string" + }, + "pound": { + "type": "string" + }, + "pout": { + "type": "string" + }, + "pouting_cat": { + "type": "string" + }, + "pouting_face": { + "type": "string" + }, + "pouting_man": { + "type": "string" + }, + "pouting_woman": { + "type": "string" + }, + "pray": { + "type": "string" + }, + "prayer_beads": { + "type": "string" + }, + "pregnant_woman": { + "type": "string" + }, + "pretzel": { + "type": "string" + }, + "previous_track_button": { + "type": "string" + }, + "prince": { + "type": "string" + }, + "princess": { + "type": "string" + }, + "printer": { + "type": "string" + }, + "probing_cane": { + "type": "string" + }, + "puerto_rico": { + "type": "string" + }, + "punch": { + "type": "string" + }, + "purple_circle": { + 
"type": "string" + }, + "purple_heart": { + "type": "string" + }, + "purple_square": { + "type": "string" + }, + "purse": { + "type": "string" + }, + "pushpin": { + "type": "string" + }, + "put_litter_in_its_place": { + "type": "string" + }, + "qatar": { + "type": "string" + }, + "question": { + "type": "string" + }, + "rabbit": { + "type": "string" + }, + "rabbit2": { + "type": "string" + }, + "raccoon": { + "type": "string" + }, + "racehorse": { + "type": "string" + }, + "racing_car": { + "type": "string" + }, + "radio": { + "type": "string" + }, + "radio_button": { + "type": "string" + }, + "radioactive": { + "type": "string" + }, + "rage": { + "type": "string" + }, + "rage1": { + "type": "string" + }, + "rage2": { + "type": "string" + }, + "rage3": { + "type": "string" + }, + "rage4": { + "type": "string" + }, + "railway_car": { + "type": "string" + }, + "railway_track": { + "type": "string" + }, + "rainbow": { + "type": "string" + }, + "rainbow_flag": { + "type": "string" + }, + "raised_back_of_hand": { + "type": "string" + }, + "raised_eyebrow": { + "type": "string" + }, + "raised_hand": { + "type": "string" + }, + "raised_hand_with_fingers_splayed": { + "type": "string" + }, + "raised_hands": { + "type": "string" + }, + "raising_hand": { + "type": "string" + }, + "raising_hand_man": { + "type": "string" + }, + "raising_hand_woman": { + "type": "string" + }, + "ram": { + "type": "string" + }, + "ramen": { + "type": "string" + }, + "rat": { + "type": "string" + }, + "razor": { + "type": "string" + }, + "receipt": { + "type": "string" + }, + "record_button": { + "type": "string" + }, + "recycle": { + "type": "string" + }, + "red_car": { + "type": "string" + }, + "red_circle": { + "type": "string" + }, + "red_envelope": { + "type": "string" + }, + "red_haired_man": { + "type": "string" + }, + "red_haired_woman": { + "type": "string" + }, + "red_square": { + "type": "string" + }, + "registered": { + "type": "string" + }, + "relaxed": { + "type": "string" + }, + "relieved": { + "type": "string" + }, + "reminder_ribbon": { + "type": "string" + }, + "repeat": { + "type": "string" + }, + "repeat_one": { + "type": "string" + }, + "rescue_worker_helmet": { + "type": "string" + }, + "restroom": { + "type": "string" + }, + "reunion": { + "type": "string" + }, + "revolving_hearts": { + "type": "string" + }, + "rewind": { + "type": "string" + }, + "rhinoceros": { + "type": "string" + }, + "ribbon": { + "type": "string" + }, + "rice": { + "type": "string" + }, + "rice_ball": { + "type": "string" + }, + "rice_cracker": { + "type": "string" + }, + "rice_scene": { + "type": "string" + }, + "right_anger_bubble": { + "type": "string" + }, + "ring": { + "type": "string" + }, + "ringed_planet": { + "type": "string" + }, + "robot": { + "type": "string" + }, + "rock": { + "type": "string" + }, + "rocket": { + "type": "string" + }, + "rofl": { + "type": "string" + }, + "roll_eyes": { + "type": "string" + }, + "roll_of_paper": { + "type": "string" + }, + "roller_coaster": { + "type": "string" + }, + "roller_skate": { + "type": "string" + }, + "romania": { + "type": "string" + }, + "rooster": { + "type": "string" + }, + "rose": { + "type": "string" + }, + "rosette": { + "type": "string" + }, + "rotating_light": { + "type": "string" + }, + "round_pushpin": { + "type": "string" + }, + "rowboat": { + "type": "string" + }, + "rowing_man": { + "type": "string" + }, + "rowing_woman": { + "type": "string" + }, + "ru": { + "type": "string" + }, + "rugby_football": { + "type": "string" + }, + "runner": { + "type": 
"string" + }, + "running": { + "type": "string" + }, + "running_man": { + "type": "string" + }, + "running_shirt_with_sash": { + "type": "string" + }, + "running_woman": { + "type": "string" + }, + "rwanda": { + "type": "string" + }, + "sa": { + "type": "string" + }, + "safety_pin": { + "type": "string" + }, + "safety_vest": { + "type": "string" + }, + "sagittarius": { + "type": "string" + }, + "sailboat": { + "type": "string" + }, + "sake": { + "type": "string" + }, + "salt": { + "type": "string" + }, + "samoa": { + "type": "string" + }, + "san_marino": { + "type": "string" + }, + "sandal": { + "type": "string" + }, + "sandwich": { + "type": "string" + }, + "santa": { + "type": "string" + }, + "sao_tome_principe": { + "type": "string" + }, + "sari": { + "type": "string" + }, + "sassy_man": { + "type": "string" + }, + "sassy_woman": { + "type": "string" + }, + "satellite": { + "type": "string" + }, + "satisfied": { + "type": "string" + }, + "saudi_arabia": { + "type": "string" + }, + "sauna_man": { + "type": "string" + }, + "sauna_person": { + "type": "string" + }, + "sauna_woman": { + "type": "string" + }, + "sauropod": { + "type": "string" + }, + "saxophone": { + "type": "string" + }, + "scarf": { + "type": "string" + }, + "school": { + "type": "string" + }, + "school_satchel": { + "type": "string" + }, + "scientist": { + "type": "string" + }, + "scissors": { + "type": "string" + }, + "scorpion": { + "type": "string" + }, + "scorpius": { + "type": "string" + }, + "scotland": { + "type": "string" + }, + "scream": { + "type": "string" + }, + "scream_cat": { + "type": "string" + }, + "screwdriver": { + "type": "string" + }, + "scroll": { + "type": "string" + }, + "seal": { + "type": "string" + }, + "seat": { + "type": "string" + }, + "secret": { + "type": "string" + }, + "see_no_evil": { + "type": "string" + }, + "seedling": { + "type": "string" + }, + "selfie": { + "type": "string" + }, + "senegal": { + "type": "string" + }, + "serbia": { + "type": "string" + }, + "service_dog": { + "type": "string" + }, + "seven": { + "type": "string" + }, + "sewing_needle": { + "type": "string" + }, + "seychelles": { + "type": "string" + }, + "shallow_pan_of_food": { + "type": "string" + }, + "shamrock": { + "type": "string" + }, + "shark": { + "type": "string" + }, + "shaved_ice": { + "type": "string" + }, + "sheep": { + "type": "string" + }, + "shell": { + "type": "string" + }, + "shield": { + "type": "string" + }, + "shinto_shrine": { + "type": "string" + }, + "ship": { + "type": "string" + }, + "shipit": { + "type": "string" + }, + "shirt": { + "type": "string" + }, + "shit": { + "type": "string" + }, + "shoe": { + "type": "string" + }, + "shopping": { + "type": "string" + }, + "shopping_cart": { + "type": "string" + }, + "shorts": { + "type": "string" + }, + "shower": { + "type": "string" + }, + "shrimp": { + "type": "string" + }, + "shrug": { + "type": "string" + }, + "shushing_face": { + "type": "string" + }, + "sierra_leone": { + "type": "string" + }, + "signal_strength": { + "type": "string" + }, + "singapore": { + "type": "string" + }, + "singer": { + "type": "string" + }, + "sint_maarten": { + "type": "string" + }, + "six": { + "type": "string" + }, + "six_pointed_star": { + "type": "string" + }, + "skateboard": { + "type": "string" + }, + "ski": { + "type": "string" + }, + "skier": { + "type": "string" + }, + "skull": { + "type": "string" + }, + "skull_and_crossbones": { + "type": "string" + }, + "skunk": { + "type": "string" + }, + "sled": { + "type": "string" + }, + "sleeping": { + "type": 
"string" + }, + "sleeping_bed": { + "type": "string" + }, + "sleepy": { + "type": "string" + }, + "slightly_frowning_face": { + "type": "string" + }, + "slightly_smiling_face": { + "type": "string" + }, + "slot_machine": { + "type": "string" + }, + "sloth": { + "type": "string" + }, + "slovakia": { + "type": "string" + }, + "slovenia": { + "type": "string" + }, + "small_airplane": { + "type": "string" + }, + "small_blue_diamond": { + "type": "string" + }, + "small_orange_diamond": { + "type": "string" + }, + "small_red_triangle": { + "type": "string" + }, + "small_red_triangle_down": { + "type": "string" + }, + "smile": { + "type": "string" + }, + "smile_cat": { + "type": "string" + }, + "smiley": { + "type": "string" + }, + "smiley_cat": { + "type": "string" + }, + "smiling_face_with_tear": { + "type": "string" + }, + "smiling_face_with_three_hearts": { + "type": "string" + }, + "smiling_imp": { + "type": "string" + }, + "smirk": { + "type": "string" + }, + "smirk_cat": { + "type": "string" + }, + "smoking": { + "type": "string" + }, + "snail": { + "type": "string" + }, + "snake": { + "type": "string" + }, + "sneezing_face": { + "type": "string" + }, + "snowboarder": { + "type": "string" + }, + "snowflake": { + "type": "string" + }, + "snowman": { + "type": "string" + }, + "snowman_with_snow": { + "type": "string" + }, + "soap": { + "type": "string" + }, + "sob": { + "type": "string" + }, + "soccer": { + "type": "string" + }, + "socks": { + "type": "string" + }, + "softball": { + "type": "string" + }, + "solomon_islands": { + "type": "string" + }, + "somalia": { + "type": "string" + }, + "soon": { + "type": "string" + }, + "sos": { + "type": "string" + }, + "sound": { + "type": "string" + }, + "south_africa": { + "type": "string" + }, + "south_georgia_south_sandwich_islands": { + "type": "string" + }, + "south_sudan": { + "type": "string" + }, + "space_invader": { + "type": "string" + }, + "spades": { + "type": "string" + }, + "spaghetti": { + "type": "string" + }, + "sparkle": { + "type": "string" + }, + "sparkler": { + "type": "string" + }, + "sparkles": { + "type": "string" + }, + "sparkling_heart": { + "type": "string" + }, + "speak_no_evil": { + "type": "string" + }, + "speaker": { + "type": "string" + }, + "speaking_head": { + "type": "string" + }, + "speech_balloon": { + "type": "string" + }, + "speedboat": { + "type": "string" + }, + "spider": { + "type": "string" + }, + "spider_web": { + "type": "string" + }, + "spiral_calendar": { + "type": "string" + }, + "spiral_notepad": { + "type": "string" + }, + "sponge": { + "type": "string" + }, + "spoon": { + "type": "string" + }, + "squid": { + "type": "string" + }, + "sri_lanka": { + "type": "string" + }, + "st_barthelemy": { + "type": "string" + }, + "st_helena": { + "type": "string" + }, + "st_kitts_nevis": { + "type": "string" + }, + "st_lucia": { + "type": "string" + }, + "st_martin": { + "type": "string" + }, + "st_pierre_miquelon": { + "type": "string" + }, + "st_vincent_grenadines": { + "type": "string" + }, + "stadium": { + "type": "string" + }, + "standing_man": { + "type": "string" + }, + "standing_person": { + "type": "string" + }, + "standing_woman": { + "type": "string" + }, + "star": { + "type": "string" + }, + "star2": { + "type": "string" + }, + "star_and_crescent": { + "type": "string" + }, + "star_of_david": { + "type": "string" + }, + "star_struck": { + "type": "string" + }, + "stars": { + "type": "string" + }, + "station": { + "type": "string" + }, + "statue_of_liberty": { + "type": "string" + }, + 
"steam_locomotive": { + "type": "string" + }, + "stethoscope": { + "type": "string" + }, + "stew": { + "type": "string" + }, + "stop_button": { + "type": "string" + }, + "stop_sign": { + "type": "string" + }, + "stopwatch": { + "type": "string" + }, + "straight_ruler": { + "type": "string" + }, + "strawberry": { + "type": "string" + }, + "stuck_out_tongue": { + "type": "string" + }, + "stuck_out_tongue_closed_eyes": { + "type": "string" + }, + "stuck_out_tongue_winking_eye": { + "type": "string" + }, + "student": { + "type": "string" + }, + "studio_microphone": { + "type": "string" + }, + "stuffed_flatbread": { + "type": "string" + }, + "sudan": { + "type": "string" + }, + "sun_behind_large_cloud": { + "type": "string" + }, + "sun_behind_rain_cloud": { + "type": "string" + }, + "sun_behind_small_cloud": { + "type": "string" + }, + "sun_with_face": { + "type": "string" + }, + "sunflower": { + "type": "string" + }, + "sunglasses": { + "type": "string" + }, + "sunny": { + "type": "string" + }, + "sunrise": { + "type": "string" + }, + "sunrise_over_mountains": { + "type": "string" + }, + "superhero": { + "type": "string" + }, + "superhero_man": { + "type": "string" + }, + "superhero_woman": { + "type": "string" + }, + "supervillain": { + "type": "string" + }, + "supervillain_man": { + "type": "string" + }, + "supervillain_woman": { + "type": "string" + }, + "surfer": { + "type": "string" + }, + "surfing_man": { + "type": "string" + }, + "surfing_woman": { + "type": "string" + }, + "suriname": { + "type": "string" + }, + "sushi": { + "type": "string" + }, + "suspect": { + "type": "string" + }, + "suspension_railway": { + "type": "string" + }, + "svalbard_jan_mayen": { + "type": "string" + }, + "swan": { + "type": "string" + }, + "swaziland": { + "type": "string" + }, + "sweat": { + "type": "string" + }, + "sweat_drops": { + "type": "string" + }, + "sweat_smile": { + "type": "string" + }, + "sweden": { + "type": "string" + }, + "sweet_potato": { + "type": "string" + }, + "swim_brief": { + "type": "string" + }, + "swimmer": { + "type": "string" + }, + "swimming_man": { + "type": "string" + }, + "swimming_woman": { + "type": "string" + }, + "switzerland": { + "type": "string" + }, + "symbols": { + "type": "string" + }, + "synagogue": { + "type": "string" + }, + "syria": { + "type": "string" + }, + "syringe": { + "type": "string" + }, + "t-rex": { + "type": "string" + }, + "taco": { + "type": "string" + }, + "tada": { + "type": "string" + }, + "taiwan": { + "type": "string" + }, + "tajikistan": { + "type": "string" + }, + "takeout_box": { + "type": "string" + }, + "tamale": { + "type": "string" + }, + "tanabata_tree": { + "type": "string" + }, + "tangerine": { + "type": "string" + }, + "tanzania": { + "type": "string" + }, + "taurus": { + "type": "string" + }, + "taxi": { + "type": "string" + }, + "tea": { + "type": "string" + }, + "teacher": { + "type": "string" + }, + "teapot": { + "type": "string" + }, + "technologist": { + "type": "string" + }, + "teddy_bear": { + "type": "string" + }, + "telephone": { + "type": "string" + }, + "telephone_receiver": { + "type": "string" + }, + "telescope": { + "type": "string" + }, + "tennis": { + "type": "string" + }, + "tent": { + "type": "string" + }, + "test_tube": { + "type": "string" + }, + "thailand": { + "type": "string" + }, + "thermometer": { + "type": "string" + }, + "thinking": { + "type": "string" + }, + "thong_sandal": { + "type": "string" + }, + "thought_balloon": { + "type": "string" + }, + "thread": { + "type": "string" + }, + "three": { + 
"type": "string" + }, + "thumbsdown": { + "type": "string" + }, + "thumbsup": { + "type": "string" + }, + "ticket": { + "type": "string" + }, + "tickets": { + "type": "string" + }, + "tiger": { + "type": "string" + }, + "tiger2": { + "type": "string" + }, + "timer_clock": { + "type": "string" + }, + "timor_leste": { + "type": "string" + }, + "tipping_hand_man": { + "type": "string" + }, + "tipping_hand_person": { + "type": "string" + }, + "tipping_hand_woman": { + "type": "string" + }, + "tired_face": { + "type": "string" + }, + "tm": { + "type": "string" + }, + "togo": { + "type": "string" + }, + "toilet": { + "type": "string" + }, + "tokelau": { + "type": "string" + }, + "tokyo_tower": { + "type": "string" + }, + "tomato": { + "type": "string" + }, + "tonga": { + "type": "string" + }, + "tongue": { + "type": "string" + }, + "toolbox": { + "type": "string" + }, + "tooth": { + "type": "string" + }, + "toothbrush": { + "type": "string" + }, + "top": { + "type": "string" + }, + "tophat": { + "type": "string" + }, + "tornado": { + "type": "string" + }, + "tr": { + "type": "string" + }, + "trackball": { + "type": "string" + }, + "tractor": { + "type": "string" + }, + "traffic_light": { + "type": "string" + }, + "train": { + "type": "string" + }, + "train2": { + "type": "string" + }, + "tram": { + "type": "string" + }, + "transgender_flag": { + "type": "string" + }, + "transgender_symbol": { + "type": "string" + }, + "triangular_flag_on_post": { + "type": "string" + }, + "triangular_ruler": { + "type": "string" + }, + "trident": { + "type": "string" + }, + "trinidad_tobago": { + "type": "string" + }, + "tristan_da_cunha": { + "type": "string" + }, + "triumph": { + "type": "string" + }, + "trolleybus": { + "type": "string" + }, + "trollface": { + "type": "string" + }, + "trophy": { + "type": "string" + }, + "tropical_drink": { + "type": "string" + }, + "tropical_fish": { + "type": "string" + }, + "truck": { + "type": "string" + }, + "trumpet": { + "type": "string" + }, + "tshirt": { + "type": "string" + }, + "tulip": { + "type": "string" + }, + "tumbler_glass": { + "type": "string" + }, + "tunisia": { + "type": "string" + }, + "turkey": { + "type": "string" + }, + "turkmenistan": { + "type": "string" + }, + "turks_caicos_islands": { + "type": "string" + }, + "turtle": { + "type": "string" + }, + "tuvalu": { + "type": "string" + }, + "tv": { + "type": "string" + }, + "twisted_rightwards_arrows": { + "type": "string" + }, + "two": { + "type": "string" + }, + "two_hearts": { + "type": "string" + }, + "two_men_holding_hands": { + "type": "string" + }, + "two_women_holding_hands": { + "type": "string" + }, + "u5272": { + "type": "string" + }, + "u5408": { + "type": "string" + }, + "u55b6": { + "type": "string" + }, + "u6307": { + "type": "string" + }, + "u6708": { + "type": "string" + }, + "u6709": { + "type": "string" + }, + "u6e80": { + "type": "string" + }, + "u7121": { + "type": "string" + }, + "u7533": { + "type": "string" + }, + "u7981": { + "type": "string" + }, + "u7a7a": { + "type": "string" + }, + "uganda": { + "type": "string" + }, + "uk": { + "type": "string" + }, + "ukraine": { + "type": "string" + }, + "umbrella": { + "type": "string" + }, + "unamused": { + "type": "string" + }, + "underage": { + "type": "string" + }, + "unicorn": { + "type": "string" + }, + "united_arab_emirates": { + "type": "string" + }, + "united_nations": { + "type": "string" + }, + "unlock": { + "type": "string" + }, + "up": { + "type": "string" + }, + "upside_down_face": { + "type": "string" + }, + "uruguay": { + 
"type": "string" + }, + "us": { + "type": "string" + }, + "us_outlying_islands": { + "type": "string" + }, + "us_virgin_islands": { + "type": "string" + }, + "uzbekistan": { + "type": "string" + }, + "v": { + "type": "string" + }, + "vampire": { + "type": "string" + }, + "vampire_man": { + "type": "string" + }, + "vampire_woman": { + "type": "string" + }, + "vanuatu": { + "type": "string" + }, + "vatican_city": { + "type": "string" + }, + "venezuela": { + "type": "string" + }, + "vertical_traffic_light": { + "type": "string" + }, + "vhs": { + "type": "string" + }, + "vibration_mode": { + "type": "string" + }, + "video_camera": { + "type": "string" + }, + "video_game": { + "type": "string" + }, + "vietnam": { + "type": "string" + }, + "violin": { + "type": "string" + }, + "virgo": { + "type": "string" + }, + "volcano": { + "type": "string" + }, + "volleyball": { + "type": "string" + }, + "vomiting_face": { + "type": "string" + }, + "vs": { + "type": "string" + }, + "vulcan_salute": { + "type": "string" + }, + "waffle": { + "type": "string" + }, + "wales": { + "type": "string" + }, + "walking": { + "type": "string" + }, + "walking_man": { + "type": "string" + }, + "walking_woman": { + "type": "string" + }, + "wallis_futuna": { + "type": "string" + }, + "waning_crescent_moon": { + "type": "string" + }, + "waning_gibbous_moon": { + "type": "string" + }, + "warning": { + "type": "string" + }, + "wastebasket": { + "type": "string" + }, + "watch": { + "type": "string" + }, + "water_buffalo": { + "type": "string" + }, + "water_polo": { + "type": "string" + }, + "watermelon": { + "type": "string" + }, + "wave": { + "type": "string" + }, + "wavy_dash": { + "type": "string" + }, + "waxing_crescent_moon": { + "type": "string" + }, + "waxing_gibbous_moon": { + "type": "string" + }, + "wc": { + "type": "string" + }, + "weary": { + "type": "string" + }, + "wedding": { + "type": "string" + }, + "weight_lifting": { + "type": "string" + }, + "weight_lifting_man": { + "type": "string" + }, + "weight_lifting_woman": { + "type": "string" + }, + "western_sahara": { + "type": "string" + }, + "whale": { + "type": "string" + }, + "whale2": { + "type": "string" + }, + "wheel_of_dharma": { + "type": "string" + }, + "wheelchair": { + "type": "string" + }, + "white_check_mark": { + "type": "string" + }, + "white_circle": { + "type": "string" + }, + "white_flag": { + "type": "string" + }, + "white_flower": { + "type": "string" + }, + "white_haired_man": { + "type": "string" + }, + "white_haired_woman": { + "type": "string" + }, + "white_heart": { + "type": "string" + }, + "white_large_square": { + "type": "string" + }, + "white_medium_small_square": { + "type": "string" + }, + "white_medium_square": { + "type": "string" + }, + "white_small_square": { + "type": "string" + }, + "white_square_button": { + "type": "string" + }, + "wilted_flower": { + "type": "string" + }, + "wind_chime": { + "type": "string" + }, + "wind_face": { + "type": "string" + }, + "window": { + "type": "string" + }, + "wine_glass": { + "type": "string" + }, + "wink": { + "type": "string" + }, + "wolf": { + "type": "string" + }, + "woman": { + "type": "string" + }, + "woman_artist": { + "type": "string" + }, + "woman_astronaut": { + "type": "string" + }, + "woman_beard": { + "type": "string" + }, + "woman_cartwheeling": { + "type": "string" + }, + "woman_cook": { + "type": "string" + }, + "woman_dancing": { + "type": "string" + }, + "woman_facepalming": { + "type": "string" + }, + "woman_factory_worker": { + "type": "string" + }, + "woman_farmer": { 
+ "type": "string" + }, + "woman_feeding_baby": { + "type": "string" + }, + "woman_firefighter": { + "type": "string" + }, + "woman_health_worker": { + "type": "string" + }, + "woman_in_manual_wheelchair": { + "type": "string" + }, + "woman_in_motorized_wheelchair": { + "type": "string" + }, + "woman_in_tuxedo": { + "type": "string" + }, + "woman_judge": { + "type": "string" + }, + "woman_juggling": { + "type": "string" + }, + "woman_mechanic": { + "type": "string" + }, + "woman_office_worker": { + "type": "string" + }, + "woman_pilot": { + "type": "string" + }, + "woman_playing_handball": { + "type": "string" + }, + "woman_playing_water_polo": { + "type": "string" + }, + "woman_scientist": { + "type": "string" + }, + "woman_shrugging": { + "type": "string" + }, + "woman_singer": { + "type": "string" + }, + "woman_student": { + "type": "string" + }, + "woman_teacher": { + "type": "string" + }, + "woman_technologist": { + "type": "string" + }, + "woman_with_headscarf": { + "type": "string" + }, + "woman_with_probing_cane": { + "type": "string" + }, + "woman_with_turban": { + "type": "string" + }, + "woman_with_veil": { + "type": "string" + }, + "womans_clothes": { + "type": "string" + }, + "womans_hat": { + "type": "string" + }, + "women_wrestling": { + "type": "string" + }, + "womens": { + "type": "string" + }, + "wood": { + "type": "string" + }, + "woozy_face": { + "type": "string" + }, + "world_map": { + "type": "string" + }, + "worm": { + "type": "string" + }, + "worried": { + "type": "string" + }, + "wrench": { + "type": "string" + }, + "wrestling": { + "type": "string" + }, + "writing_hand": { + "type": "string" + }, + "x": { + "type": "string" + }, + "yarn": { + "type": "string" + }, + "yawning_face": { + "type": "string" + }, + "yellow_circle": { + "type": "string" + }, + "yellow_heart": { + "type": "string" + }, + "yellow_square": { + "type": "string" + }, + "yemen": { + "type": "string" + }, + "yen": { + "type": "string" + }, + "yin_yang": { + "type": "string" + }, + "yo_yo": { + "type": "string" + }, + "yum": { + "type": "string" + }, + "zambia": { + "type": "string" + }, + "zany_face": { + "type": "string" + }, + "zap": { + "type": "string" + }, + "zebra": { + "type": "string" + }, + "zero": { + "type": "string" + }, + "zimbabwe": { + "type": "string" + }, + "zipper_mouth_face": { + "type": "string" + }, + "zombie": { + "type": "string" + }, + "zombie_man": { + "type": "string" + }, + "zombie_woman": { + "type": "string" + }, + "zzz": { + "type": "string" + } + }, + "required": [ + "+1", + "-1", + "100", + "1234", + "1st_place_medal", + "2nd_place_medal", + "3rd_place_medal", + "8ball", + "a", + "ab", + "abacus", + "abc", + "abcd", + "accept", + "accordion", + "adhesive_bandage", + "adult", + "aerial_tramway", + "afghanistan", + "airplane", + "aland_islands", + "alarm_clock", + "albania", + "alembic", + "algeria", + "alien", + "ambulance", + "american_samoa", + "amphora", + "anatomical_heart", + "anchor", + "andorra", + "angel", + "anger", + "angola", + "angry", + "anguilla", + "anguished", + "ant", + "antarctica", + "antigua_barbuda", + "apple", + "aquarius", + "argentina", + "aries", + "armenia", + "arrow_backward", + "arrow_double_down", + "arrow_double_up", + "arrow_down", + "arrow_down_small", + "arrow_forward", + "arrow_heading_down", + "arrow_heading_up", + "arrow_left", + "arrow_lower_left", + "arrow_lower_right", + "arrow_right", + "arrow_right_hook", + "arrow_up", + "arrow_up_down", + "arrow_up_small", + "arrow_upper_left", + "arrow_upper_right", + "arrows_clockwise", 
+ "arrows_counterclockwise", + "art", + "articulated_lorry", + "artificial_satellite", + "artist", + "aruba", + "ascension_island", + "asterisk", + "astonished", + "astronaut", + "athletic_shoe", + "atm", + "atom", + "atom_symbol", + "australia", + "austria", + "auto_rickshaw", + "avocado", + "axe", + "azerbaijan", + "b", + "baby", + "baby_bottle", + "baby_chick", + "baby_symbol", + "back", + "bacon", + "badger", + "badminton", + "bagel", + "baggage_claim", + "baguette_bread", + "bahamas", + "bahrain", + "balance_scale", + "bald_man", + "bald_woman", + "ballet_shoes", + "balloon", + "ballot_box", + "ballot_box_with_check", + "bamboo", + "banana", + "bangbang", + "bangladesh", + "banjo", + "bank", + "bar_chart", + "barbados", + "barber", + "baseball", + "basecamp", + "basecampy", + "basket", + "basketball", + "basketball_man", + "basketball_woman", + "bat", + "bath", + "bathtub", + "battery", + "beach_umbrella", + "bear", + "bearded_person", + "beaver", + "bed", + "bee", + "beer", + "beers", + "beetle", + "beginner", + "belarus", + "belgium", + "belize", + "bell", + "bell_pepper", + "bellhop_bell", + "benin", + "bento", + "bermuda", + "beverage_box", + "bhutan", + "bicyclist", + "bike", + "biking_man", + "biking_woman", + "bikini", + "billed_cap", + "biohazard", + "bird", + "birthday", + "bison", + "black_cat", + "black_circle", + "black_flag", + "black_heart", + "black_joker", + "black_large_square", + "black_medium_small_square", + "black_medium_square", + "black_nib", + "black_small_square", + "black_square_button", + "blond_haired_man", + "blond_haired_person", + "blond_haired_woman", + "blonde_woman", + "blossom", + "blowfish", + "blue_book", + "blue_car", + "blue_heart", + "blue_square", + "blueberries", + "blush", + "boar", + "boat", + "bolivia", + "bomb", + "bone", + "book", + "bookmark", + "bookmark_tabs", + "books", + "boom", + "boomerang", + "boot", + "bosnia_herzegovina", + "botswana", + "bouncing_ball_man", + "bouncing_ball_person", + "bouncing_ball_woman", + "bouquet", + "bouvet_island", + "bow", + "bow_and_arrow", + "bowing_man", + "bowing_woman", + "bowl_with_spoon", + "bowling", + "bowtie", + "boxing_glove", + "boy", + "brain", + "brazil", + "bread", + "breast_feeding", + "bricks", + "bride_with_veil", + "bridge_at_night", + "briefcase", + "british_indian_ocean_territory", + "british_virgin_islands", + "broccoli", + "broken_heart", + "broom", + "brown_circle", + "brown_heart", + "brown_square", + "brunei", + "bubble_tea", + "bucket", + "bug", + "building_construction", + "bulb", + "bulgaria", + "bullettrain_front", + "bullettrain_side", + "burkina_faso", + "burrito", + "burundi", + "bus", + "business_suit_levitating", + "busstop", + "bust_in_silhouette", + "busts_in_silhouette", + "butter", + "butterfly", + "cactus", + "cake", + "calendar", + "call_me_hand", + "calling", + "cambodia", + "camel", + "camera", + "camera_flash", + "cameroon", + "camping", + "canada", + "canary_islands", + "cancer", + "candle", + "candy", + "canned_food", + "canoe", + "cape_verde", + "capital_abcd", + "capricorn", + "car", + "card_file_box", + "card_index", + "card_index_dividers", + "caribbean_netherlands", + "carousel_horse", + "carpentry_saw", + "carrot", + "cartwheeling", + "cat", + "cat2", + "cayman_islands", + "cd", + "central_african_republic", + "ceuta_melilla", + "chad", + "chains", + "chair", + "champagne", + "chart", + "chart_with_downwards_trend", + "chart_with_upwards_trend", + "checkered_flag", + "cheese", + "cherries", + "cherry_blossom", + "chess_pawn", + "chestnut", + "chicken", 
+ "child", + "children_crossing", + "chile", + "chipmunk", + "chocolate_bar", + "chopsticks", + "christmas_island", + "christmas_tree", + "church", + "cinema", + "circus_tent", + "city_sunrise", + "city_sunset", + "cityscape", + "cl", + "clamp", + "clap", + "clapper", + "classical_building", + "climbing", + "climbing_man", + "climbing_woman", + "clinking_glasses", + "clipboard", + "clipperton_island", + "clock1", + "clock10", + "clock1030", + "clock11", + "clock1130", + "clock12", + "clock1230", + "clock130", + "clock2", + "clock230", + "clock3", + "clock330", + "clock4", + "clock430", + "clock5", + "clock530", + "clock6", + "clock630", + "clock7", + "clock730", + "clock8", + "clock830", + "clock9", + "clock930", + "closed_book", + "closed_lock_with_key", + "closed_umbrella", + "cloud", + "cloud_with_lightning", + "cloud_with_lightning_and_rain", + "cloud_with_rain", + "cloud_with_snow", + "clown_face", + "clubs", + "cn", + "coat", + "cockroach", + "cocktail", + "coconut", + "cocos_islands", + "coffee", + "coffin", + "coin", + "cold_face", + "cold_sweat", + "collision", + "colombia", + "comet", + "comoros", + "compass", + "computer", + "computer_mouse", + "confetti_ball", + "confounded", + "confused", + "congo_brazzaville", + "congo_kinshasa", + "congratulations", + "construction", + "construction_worker", + "construction_worker_man", + "construction_worker_woman", + "control_knobs", + "convenience_store", + "cook", + "cook_islands", + "cookie", + "cool", + "cop", + "copyright", + "corn", + "costa_rica", + "cote_divoire", + "couch_and_lamp", + "couple", + "couple_with_heart", + "couple_with_heart_man_man", + "couple_with_heart_woman_man", + "couple_with_heart_woman_woman", + "couplekiss", + "couplekiss_man_man", + "couplekiss_man_woman", + "couplekiss_woman_woman", + "cow", + "cow2", + "cowboy_hat_face", + "crab", + "crayon", + "credit_card", + "crescent_moon", + "cricket", + "cricket_game", + "croatia", + "crocodile", + "croissant", + "crossed_fingers", + "crossed_flags", + "crossed_swords", + "crown", + "cry", + "crying_cat_face", + "crystal_ball", + "cuba", + "cucumber", + "cup_with_straw", + "cupcake", + "cupid", + "curacao", + "curling_stone", + "curly_haired_man", + "curly_haired_woman", + "curly_loop", + "currency_exchange", + "curry", + "cursing_face", + "custard", + "customs", + "cut_of_meat", + "cyclone", + "cyprus", + "czech_republic", + "dagger", + "dancer", + "dancers", + "dancing_men", + "dancing_women", + "dango", + "dark_sunglasses", + "dart", + "dash", + "date", + "de", + "deaf_man", + "deaf_person", + "deaf_woman", + "deciduous_tree", + "deer", + "denmark", + "department_store", + "derelict_house", + "desert", + "desert_island", + "desktop_computer", + "detective", + "diamond_shape_with_a_dot_inside", + "diamonds", + "diego_garcia", + "disappointed", + "disappointed_relieved", + "disguised_face", + "diving_mask", + "diya_lamp", + "dizzy", + "dizzy_face", + "djibouti", + "dna", + "do_not_litter", + "dodo", + "dog", + "dog2", + "dollar", + "dolls", + "dolphin", + "dominica", + "dominican_republic", + "door", + "doughnut", + "dove", + "dragon", + "dragon_face", + "dress", + "dromedary_camel", + "drooling_face", + "drop_of_blood", + "droplet", + "drum", + "duck", + "dumpling", + "dvd", + "e-mail", + "eagle", + "ear", + "ear_of_rice", + "ear_with_hearing_aid", + "earth_africa", + "earth_americas", + "earth_asia", + "ecuador", + "egg", + "eggplant", + "egypt", + "eight", + "eight_pointed_black_star", + "eight_spoked_asterisk", + "eject_button", + "el_salvador", + 
"electric_plug", + "electron", + "elephant", + "elevator", + "elf", + "elf_man", + "elf_woman", + "email", + "end", + "england", + "envelope", + "envelope_with_arrow", + "equatorial_guinea", + "eritrea", + "es", + "estonia", + "ethiopia", + "eu", + "euro", + "european_castle", + "european_post_office", + "european_union", + "evergreen_tree", + "exclamation", + "exploding_head", + "expressionless", + "eye", + "eye_speech_bubble", + "eyeglasses", + "eyes", + "face_exhaling", + "face_in_clouds", + "face_with_head_bandage", + "face_with_spiral_eyes", + "face_with_thermometer", + "facepalm", + "facepunch", + "factory", + "factory_worker", + "fairy", + "fairy_man", + "fairy_woman", + "falafel", + "falkland_islands", + "fallen_leaf", + "family", + "family_man_boy", + "family_man_boy_boy", + "family_man_girl", + "family_man_girl_boy", + "family_man_girl_girl", + "family_man_man_boy", + "family_man_man_boy_boy", + "family_man_man_girl", + "family_man_man_girl_boy", + "family_man_man_girl_girl", + "family_man_woman_boy", + "family_man_woman_boy_boy", + "family_man_woman_girl", + "family_man_woman_girl_boy", + "family_man_woman_girl_girl", + "family_woman_boy", + "family_woman_boy_boy", + "family_woman_girl", + "family_woman_girl_boy", + "family_woman_girl_girl", + "family_woman_woman_boy", + "family_woman_woman_boy_boy", + "family_woman_woman_girl", + "family_woman_woman_girl_boy", + "family_woman_woman_girl_girl", + "farmer", + "faroe_islands", + "fast_forward", + "fax", + "fearful", + "feather", + "feelsgood", + "feet", + "female_detective", + "female_sign", + "ferris_wheel", + "ferry", + "field_hockey", + "fiji", + "file_cabinet", + "file_folder", + "film_projector", + "film_strip", + "finland", + "finnadie", + "fire", + "fire_engine", + "fire_extinguisher", + "firecracker", + "firefighter", + "fireworks", + "first_quarter_moon", + "first_quarter_moon_with_face", + "fish", + "fish_cake", + "fishing_pole_and_fish", + "fist", + "fist_left", + "fist_oncoming", + "fist_raised", + "fist_right", + "five", + "flags", + "flamingo", + "flashlight", + "flat_shoe", + "flatbread", + "fleur_de_lis", + "flight_arrival", + "flight_departure", + "flipper", + "floppy_disk", + "flower_playing_cards", + "flushed", + "fly", + "flying_disc", + "flying_saucer", + "fog", + "foggy", + "fondue", + "foot", + "football", + "footprints", + "fork_and_knife", + "fortune_cookie", + "fountain", + "fountain_pen", + "four", + "four_leaf_clover", + "fox_face", + "fr", + "framed_picture", + "free", + "french_guiana", + "french_polynesia", + "french_southern_territories", + "fried_egg", + "fried_shrimp", + "fries", + "frog", + "frowning", + "frowning_face", + "frowning_man", + "frowning_person", + "frowning_woman", + "fu", + "fuelpump", + "full_moon", + "full_moon_with_face", + "funeral_urn", + "gabon", + "gambia", + "game_die", + "garlic", + "gb", + "gear", + "gem", + "gemini", + "genie", + "genie_man", + "genie_woman", + "georgia", + "ghana", + "ghost", + "gibraltar", + "gift", + "gift_heart", + "giraffe", + "girl", + "globe_with_meridians", + "gloves", + "goal_net", + "goat", + "goberserk", + "godmode", + "goggles", + "golf", + "golfing", + "golfing_man", + "golfing_woman", + "gorilla", + "grapes", + "greece", + "green_apple", + "green_book", + "green_circle", + "green_heart", + "green_salad", + "green_square", + "greenland", + "grenada", + "grey_exclamation", + "grey_question", + "grimacing", + "grin", + "grinning", + "guadeloupe", + "guam", + "guard", + "guardsman", + "guardswoman", + "guatemala", + "guernsey", + "guide_dog", + 
"guinea", + "guinea_bissau", + "guitar", + "gun", + "guyana", + "haircut", + "haircut_man", + "haircut_woman", + "haiti", + "hamburger", + "hammer", + "hammer_and_pick", + "hammer_and_wrench", + "hamster", + "hand", + "hand_over_mouth", + "handbag", + "handball_person", + "handshake", + "hankey", + "hash", + "hatched_chick", + "hatching_chick", + "headphones", + "headstone", + "health_worker", + "hear_no_evil", + "heard_mcdonald_islands", + "heart", + "heart_decoration", + "heart_eyes", + "heart_eyes_cat", + "heart_on_fire", + "heartbeat", + "heartpulse", + "hearts", + "heavy_check_mark", + "heavy_division_sign", + "heavy_dollar_sign", + "heavy_exclamation_mark", + "heavy_heart_exclamation", + "heavy_minus_sign", + "heavy_multiplication_x", + "heavy_plus_sign", + "hedgehog", + "helicopter", + "herb", + "hibiscus", + "high_brightness", + "high_heel", + "hiking_boot", + "hindu_temple", + "hippopotamus", + "hocho", + "hole", + "honduras", + "honey_pot", + "honeybee", + "hong_kong", + "hook", + "horse", + "horse_racing", + "hospital", + "hot_face", + "hot_pepper", + "hotdog", + "hotel", + "hotsprings", + "hourglass", + "hourglass_flowing_sand", + "house", + "house_with_garden", + "houses", + "hugs", + "hungary", + "hurtrealbad", + "hushed", + "hut", + "ice_cream", + "ice_cube", + "ice_hockey", + "ice_skate", + "icecream", + "iceland", + "id", + "ideograph_advantage", + "imp", + "inbox_tray", + "incoming_envelope", + "india", + "indonesia", + "infinity", + "information_desk_person", + "information_source", + "innocent", + "interrobang", + "iphone", + "iran", + "iraq", + "ireland", + "isle_of_man", + "israel", + "it", + "izakaya_lantern", + "jack_o_lantern", + "jamaica", + "japan", + "japanese_castle", + "japanese_goblin", + "japanese_ogre", + "jeans", + "jersey", + "jigsaw", + "jordan", + "joy", + "joy_cat", + "joystick", + "jp", + "judge", + "juggling_person", + "kaaba", + "kangaroo", + "kazakhstan", + "kenya", + "key", + "keyboard", + "keycap_ten", + "kick_scooter", + "kimono", + "kiribati", + "kiss", + "kissing", + "kissing_cat", + "kissing_closed_eyes", + "kissing_heart", + "kissing_smiling_eyes", + "kite", + "kiwi_fruit", + "kneeling_man", + "kneeling_person", + "kneeling_woman", + "knife", + "knot", + "koala", + "koko", + "kosovo", + "kr", + "kuwait", + "kyrgyzstan", + "lab_coat", + "label", + "lacrosse", + "ladder", + "lady_beetle", + "lantern", + "laos", + "large_blue_circle", + "large_blue_diamond", + "large_orange_diamond", + "last_quarter_moon", + "last_quarter_moon_with_face", + "latin_cross", + "latvia", + "laughing", + "leafy_green", + "leaves", + "lebanon", + "ledger", + "left_luggage", + "left_right_arrow", + "left_speech_bubble", + "leftwards_arrow_with_hook", + "leg", + "lemon", + "leo", + "leopard", + "lesotho", + "level_slider", + "liberia", + "libra", + "libya", + "liechtenstein", + "light_rail", + "link", + "lion", + "lips", + "lipstick", + "lithuania", + "lizard", + "llama", + "lobster", + "lock", + "lock_with_ink_pen", + "lollipop", + "long_drum", + "loop", + "lotion_bottle", + "lotus_position", + "lotus_position_man", + "lotus_position_woman", + "loud_sound", + "loudspeaker", + "love_hotel", + "love_letter", + "love_you_gesture", + "low_brightness", + "luggage", + "lungs", + "luxembourg", + "lying_face", + "m", + "macau", + "macedonia", + "madagascar", + "mag", + "mag_right", + "mage", + "mage_man", + "mage_woman", + "magic_wand", + "magnet", + "mahjong", + "mailbox", + "mailbox_closed", + "mailbox_with_mail", + "mailbox_with_no_mail", + "malawi", + "malaysia", + 
"maldives", + "male_detective", + "male_sign", + "mali", + "malta", + "mammoth", + "man", + "man_artist", + "man_astronaut", + "man_beard", + "man_cartwheeling", + "man_cook", + "man_dancing", + "man_facepalming", + "man_factory_worker", + "man_farmer", + "man_feeding_baby", + "man_firefighter", + "man_health_worker", + "man_in_manual_wheelchair", + "man_in_motorized_wheelchair", + "man_in_tuxedo", + "man_judge", + "man_juggling", + "man_mechanic", + "man_office_worker", + "man_pilot", + "man_playing_handball", + "man_playing_water_polo", + "man_scientist", + "man_shrugging", + "man_singer", + "man_student", + "man_teacher", + "man_technologist", + "man_with_gua_pi_mao", + "man_with_probing_cane", + "man_with_turban", + "man_with_veil", + "mandarin", + "mango", + "mans_shoe", + "mantelpiece_clock", + "manual_wheelchair", + "maple_leaf", + "marshall_islands", + "martial_arts_uniform", + "martinique", + "mask", + "massage", + "massage_man", + "massage_woman", + "mate", + "mauritania", + "mauritius", + "mayotte", + "meat_on_bone", + "mechanic", + "mechanical_arm", + "mechanical_leg", + "medal_military", + "medal_sports", + "medical_symbol", + "mega", + "melon", + "memo", + "men_wrestling", + "mending_heart", + "menorah", + "mens", + "mermaid", + "merman", + "merperson", + "metal", + "metro", + "mexico", + "microbe", + "micronesia", + "microphone", + "microscope", + "middle_finger", + "military_helmet", + "milk_glass", + "milky_way", + "minibus", + "minidisc", + "mirror", + "mobile_phone_off", + "moldova", + "monaco", + "money_mouth_face", + "money_with_wings", + "moneybag", + "mongolia", + "monkey", + "monkey_face", + "monocle_face", + "monorail", + "montenegro", + "montserrat", + "moon", + "moon_cake", + "morocco", + "mortar_board", + "mosque", + "mosquito", + "motor_boat", + "motor_scooter", + "motorcycle", + "motorized_wheelchair", + "motorway", + "mount_fuji", + "mountain", + "mountain_bicyclist", + "mountain_biking_man", + "mountain_biking_woman", + "mountain_cableway", + "mountain_railway", + "mountain_snow", + "mouse", + "mouse2", + "mouse_trap", + "movie_camera", + "moyai", + "mozambique", + "mrs_claus", + "muscle", + "mushroom", + "musical_keyboard", + "musical_note", + "musical_score", + "mute", + "mx_claus", + "myanmar", + "nail_care", + "name_badge", + "namibia", + "national_park", + "nauru", + "nauseated_face", + "nazar_amulet", + "neckbeard", + "necktie", + "negative_squared_cross_mark", + "nepal", + "nerd_face", + "nesting_dolls", + "netherlands", + "neutral_face", + "new", + "new_caledonia", + "new_moon", + "new_moon_with_face", + "new_zealand", + "newspaper", + "newspaper_roll", + "next_track_button", + "ng", + "ng_man", + "ng_woman", + "nicaragua", + "niger", + "nigeria", + "night_with_stars", + "nine", + "ninja", + "niue", + "no_bell", + "no_bicycles", + "no_entry", + "no_entry_sign", + "no_good", + "no_good_man", + "no_good_woman", + "no_mobile_phones", + "no_mouth", + "no_pedestrians", + "no_smoking", + "non-potable_water", + "norfolk_island", + "north_korea", + "northern_mariana_islands", + "norway", + "nose", + "notebook", + "notebook_with_decorative_cover", + "notes", + "nut_and_bolt", + "o", + "o2", + "ocean", + "octocat", + "octopus", + "oden", + "office", + "office_worker", + "oil_drum", + "ok", + "ok_hand", + "ok_man", + "ok_person", + "ok_woman", + "old_key", + "older_adult", + "older_man", + "older_woman", + "olive", + "om", + "oman", + "on", + "oncoming_automobile", + "oncoming_bus", + "oncoming_police_car", + "oncoming_taxi", + "one", + "one_piece_swimsuit", + 
"onion", + "open_book", + "open_file_folder", + "open_hands", + "open_mouth", + "open_umbrella", + "ophiuchus", + "orange", + "orange_book", + "orange_circle", + "orange_heart", + "orange_square", + "orangutan", + "orthodox_cross", + "otter", + "outbox_tray", + "owl", + "ox", + "oyster", + "package", + "page_facing_up", + "page_with_curl", + "pager", + "paintbrush", + "pakistan", + "palau", + "palestinian_territories", + "palm_tree", + "palms_up_together", + "panama", + "pancakes", + "panda_face", + "paperclip", + "paperclips", + "papua_new_guinea", + "parachute", + "paraguay", + "parasol_on_ground", + "parking", + "parrot", + "part_alternation_mark", + "partly_sunny", + "partying_face", + "passenger_ship", + "passport_control", + "pause_button", + "paw_prints", + "peace_symbol", + "peach", + "peacock", + "peanuts", + "pear", + "pen", + "pencil", + "pencil2", + "penguin", + "pensive", + "people_holding_hands", + "people_hugging", + "performing_arts", + "persevere", + "person_bald", + "person_curly_hair", + "person_feeding_baby", + "person_fencing", + "person_in_manual_wheelchair", + "person_in_motorized_wheelchair", + "person_in_tuxedo", + "person_red_hair", + "person_white_hair", + "person_with_probing_cane", + "person_with_turban", + "person_with_veil", + "peru", + "petri_dish", + "philippines", + "phone", + "pick", + "pickup_truck", + "pie", + "pig", + "pig2", + "pig_nose", + "pill", + "pilot", + "pinata", + "pinched_fingers", + "pinching_hand", + "pineapple", + "ping_pong", + "pirate_flag", + "pisces", + "pitcairn_islands", + "pizza", + "placard", + "place_of_worship", + "plate_with_cutlery", + "play_or_pause_button", + "pleading_face", + "plunger", + "point_down", + "point_left", + "point_right", + "point_up", + "point_up_2", + "poland", + "polar_bear", + "police_car", + "police_officer", + "policeman", + "policewoman", + "poodle", + "poop", + "popcorn", + "portugal", + "post_office", + "postal_horn", + "postbox", + "potable_water", + "potato", + "potted_plant", + "pouch", + "poultry_leg", + "pound", + "pout", + "pouting_cat", + "pouting_face", + "pouting_man", + "pouting_woman", + "pray", + "prayer_beads", + "pregnant_woman", + "pretzel", + "previous_track_button", + "prince", + "princess", + "printer", + "probing_cane", + "puerto_rico", + "punch", + "purple_circle", + "purple_heart", + "purple_square", + "purse", + "pushpin", + "put_litter_in_its_place", + "qatar", + "question", + "rabbit", + "rabbit2", + "raccoon", + "racehorse", + "racing_car", + "radio", + "radio_button", + "radioactive", + "rage", + "rage1", + "rage2", + "rage3", + "rage4", + "railway_car", + "railway_track", + "rainbow", + "rainbow_flag", + "raised_back_of_hand", + "raised_eyebrow", + "raised_hand", + "raised_hand_with_fingers_splayed", + "raised_hands", + "raising_hand", + "raising_hand_man", + "raising_hand_woman", + "ram", + "ramen", + "rat", + "razor", + "receipt", + "record_button", + "recycle", + "red_car", + "red_circle", + "red_envelope", + "red_haired_man", + "red_haired_woman", + "red_square", + "registered", + "relaxed", + "relieved", + "reminder_ribbon", + "repeat", + "repeat_one", + "rescue_worker_helmet", + "restroom", + "reunion", + "revolving_hearts", + "rewind", + "rhinoceros", + "ribbon", + "rice", + "rice_ball", + "rice_cracker", + "rice_scene", + "right_anger_bubble", + "ring", + "ringed_planet", + "robot", + "rock", + "rocket", + "rofl", + "roll_eyes", + "roll_of_paper", + "roller_coaster", + "roller_skate", + "romania", + "rooster", + "rose", + "rosette", + "rotating_light", + 
"round_pushpin", + "rowboat", + "rowing_man", + "rowing_woman", + "ru", + "rugby_football", + "runner", + "running", + "running_man", + "running_shirt_with_sash", + "running_woman", + "rwanda", + "sa", + "safety_pin", + "safety_vest", + "sagittarius", + "sailboat", + "sake", + "salt", + "samoa", + "san_marino", + "sandal", + "sandwich", + "santa", + "sao_tome_principe", + "sari", + "sassy_man", + "sassy_woman", + "satellite", + "satisfied", + "saudi_arabia", + "sauna_man", + "sauna_person", + "sauna_woman", + "sauropod", + "saxophone", + "scarf", + "school", + "school_satchel", + "scientist", + "scissors", + "scorpion", + "scorpius", + "scotland", + "scream", + "scream_cat", + "screwdriver", + "scroll", + "seal", + "seat", + "secret", + "see_no_evil", + "seedling", + "selfie", + "senegal", + "serbia", + "service_dog", + "seven", + "sewing_needle", + "seychelles", + "shallow_pan_of_food", + "shamrock", + "shark", + "shaved_ice", + "sheep", + "shell", + "shield", + "shinto_shrine", + "ship", + "shipit", + "shirt", + "shit", + "shoe", + "shopping", + "shopping_cart", + "shorts", + "shower", + "shrimp", + "shrug", + "shushing_face", + "sierra_leone", + "signal_strength", + "singapore", + "singer", + "sint_maarten", + "six", + "six_pointed_star", + "skateboard", + "ski", + "skier", + "skull", + "skull_and_crossbones", + "skunk", + "sled", + "sleeping", + "sleeping_bed", + "sleepy", + "slightly_frowning_face", + "slightly_smiling_face", + "slot_machine", + "sloth", + "slovakia", + "slovenia", + "small_airplane", + "small_blue_diamond", + "small_orange_diamond", + "small_red_triangle", + "small_red_triangle_down", + "smile", + "smile_cat", + "smiley", + "smiley_cat", + "smiling_face_with_tear", + "smiling_face_with_three_hearts", + "smiling_imp", + "smirk", + "smirk_cat", + "smoking", + "snail", + "snake", + "sneezing_face", + "snowboarder", + "snowflake", + "snowman", + "snowman_with_snow", + "soap", + "sob", + "soccer", + "socks", + "softball", + "solomon_islands", + "somalia", + "soon", + "sos", + "sound", + "south_africa", + "south_georgia_south_sandwich_islands", + "south_sudan", + "space_invader", + "spades", + "spaghetti", + "sparkle", + "sparkler", + "sparkles", + "sparkling_heart", + "speak_no_evil", + "speaker", + "speaking_head", + "speech_balloon", + "speedboat", + "spider", + "spider_web", + "spiral_calendar", + "spiral_notepad", + "sponge", + "spoon", + "squid", + "sri_lanka", + "st_barthelemy", + "st_helena", + "st_kitts_nevis", + "st_lucia", + "st_martin", + "st_pierre_miquelon", + "st_vincent_grenadines", + "stadium", + "standing_man", + "standing_person", + "standing_woman", + "star", + "star2", + "star_and_crescent", + "star_of_david", + "star_struck", + "stars", + "station", + "statue_of_liberty", + "steam_locomotive", + "stethoscope", + "stew", + "stop_button", + "stop_sign", + "stopwatch", + "straight_ruler", + "strawberry", + "stuck_out_tongue", + "stuck_out_tongue_closed_eyes", + "stuck_out_tongue_winking_eye", + "student", + "studio_microphone", + "stuffed_flatbread", + "sudan", + "sun_behind_large_cloud", + "sun_behind_rain_cloud", + "sun_behind_small_cloud", + "sun_with_face", + "sunflower", + "sunglasses", + "sunny", + "sunrise", + "sunrise_over_mountains", + "superhero", + "superhero_man", + "superhero_woman", + "supervillain", + "supervillain_man", + "supervillain_woman", + "surfer", + "surfing_man", + "surfing_woman", + "suriname", + "sushi", + "suspect", + "suspension_railway", + "svalbard_jan_mayen", + "swan", + "swaziland", + "sweat", + "sweat_drops", + 
"sweat_smile", + "sweden", + "sweet_potato", + "swim_brief", + "swimmer", + "swimming_man", + "swimming_woman", + "switzerland", + "symbols", + "synagogue", + "syria", + "syringe", + "t-rex", + "taco", + "tada", + "taiwan", + "tajikistan", + "takeout_box", + "tamale", + "tanabata_tree", + "tangerine", + "tanzania", + "taurus", + "taxi", + "tea", + "teacher", + "teapot", + "technologist", + "teddy_bear", + "telephone", + "telephone_receiver", + "telescope", + "tennis", + "tent", + "test_tube", + "thailand", + "thermometer", + "thinking", + "thong_sandal", + "thought_balloon", + "thread", + "three", + "thumbsdown", + "thumbsup", + "ticket", + "tickets", + "tiger", + "tiger2", + "timer_clock", + "timor_leste", + "tipping_hand_man", + "tipping_hand_person", + "tipping_hand_woman", + "tired_face", + "tm", + "togo", + "toilet", + "tokelau", + "tokyo_tower", + "tomato", + "tonga", + "tongue", + "toolbox", + "tooth", + "toothbrush", + "top", + "tophat", + "tornado", + "tr", + "trackball", + "tractor", + "traffic_light", + "train", + "train2", + "tram", + "transgender_flag", + "transgender_symbol", + "triangular_flag_on_post", + "triangular_ruler", + "trident", + "trinidad_tobago", + "tristan_da_cunha", + "triumph", + "trolleybus", + "trollface", + "trophy", + "tropical_drink", + "tropical_fish", + "truck", + "trumpet", + "tshirt", + "tulip", + "tumbler_glass", + "tunisia", + "turkey", + "turkmenistan", + "turks_caicos_islands", + "turtle", + "tuvalu", + "tv", + "twisted_rightwards_arrows", + "two", + "two_hearts", + "two_men_holding_hands", + "two_women_holding_hands", + "u5272", + "u5408", + "u55b6", + "u6307", + "u6708", + "u6709", + "u6e80", + "u7121", + "u7533", + "u7981", + "u7a7a", + "uganda", + "uk", + "ukraine", + "umbrella", + "unamused", + "underage", + "unicorn", + "united_arab_emirates", + "united_nations", + "unlock", + "up", + "upside_down_face", + "uruguay", + "us", + "us_outlying_islands", + "us_virgin_islands", + "uzbekistan", + "v", + "vampire", + "vampire_man", + "vampire_woman", + "vanuatu", + "vatican_city", + "venezuela", + "vertical_traffic_light", + "vhs", + "vibration_mode", + "video_camera", + "video_game", + "vietnam", + "violin", + "virgo", + "volcano", + "volleyball", + "vomiting_face", + "vs", + "vulcan_salute", + "waffle", + "wales", + "walking", + "walking_man", + "walking_woman", + "wallis_futuna", + "waning_crescent_moon", + "waning_gibbous_moon", + "warning", + "wastebasket", + "watch", + "water_buffalo", + "water_polo", + "watermelon", + "wave", + "wavy_dash", + "waxing_crescent_moon", + "waxing_gibbous_moon", + "wc", + "weary", + "wedding", + "weight_lifting", + "weight_lifting_man", + "weight_lifting_woman", + "western_sahara", + "whale", + "whale2", + "wheel_of_dharma", + "wheelchair", + "white_check_mark", + "white_circle", + "white_flag", + "white_flower", + "white_haired_man", + "white_haired_woman", + "white_heart", + "white_large_square", + "white_medium_small_square", + "white_medium_square", + "white_small_square", + "white_square_button", + "wilted_flower", + "wind_chime", + "wind_face", + "window", + "wine_glass", + "wink", + "wolf", + "woman", + "woman_artist", + "woman_astronaut", + "woman_beard", + "woman_cartwheeling", + "woman_cook", + "woman_dancing", + "woman_facepalming", + "woman_factory_worker", + "woman_farmer", + "woman_feeding_baby", + "woman_firefighter", + "woman_health_worker", + "woman_in_manual_wheelchair", + "woman_in_motorized_wheelchair", + "woman_in_tuxedo", + "woman_judge", + "woman_juggling", + "woman_mechanic", + 
"woman_office_worker", + "woman_pilot", + "woman_playing_handball", + "woman_playing_water_polo", + "woman_scientist", + "woman_shrugging", + "woman_singer", + "woman_student", + "woman_teacher", + "woman_technologist", + "woman_with_headscarf", + "woman_with_probing_cane", + "woman_with_turban", + "woman_with_veil", + "womans_clothes", + "womans_hat", + "women_wrestling", + "womens", + "wood", + "woozy_face", + "world_map", + "worm", + "worried", + "wrench", + "wrestling", + "writing_hand", + "x", + "yarn", + "yawning_face", + "yellow_circle", + "yellow_heart", + "yellow_square", + "yemen", + "yen", + "yin_yang", + "yo_yo", + "yum", + "zambia", + "zany_face", + "zap", + "zebra", + "zero", + "zimbabwe", + "zipper_mouth_face", + "zombie", + "zombie_man", + "zombie_woman", + "zzz" + ] +} \ No newline at end of file diff --git a/registry/cmd/sr-cli/testdata/json/schema-2.json b/registry/cmd/sr-cli/testdata/json/schema-2.json new file mode 100644 index 0000000..72ab606 --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-2.json @@ -0,0 +1,34 @@ +{ + "$id": "https://example.com/arrays.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "description": "A representation of a person, company, organization, or place", + "type": "object", + "properties": { + "fruits": { + "type": "array", + "items": { + "type": "string" + } + }, + "vegetables": { + "type": "array", + "items": { "$ref": "#/$defs/veggie" } + } + }, + "$defs": { + "veggie": { + "type": "object", + "required": [ "veggieName", "veggieLike" ], + "properties": { + "veggieName": { + "type": "string", + "description": "The name of the vegetable." + }, + "veggieLike": { + "type": "boolean", + "description": "Do I like this vegetable?" + } + } + } + } +} diff --git a/registry/cmd/sr-cli/testdata/json/schema-3.json b/registry/cmd/sr-cli/testdata/json/schema-3.json new file mode 100644 index 0000000..3a8ad3f --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-3.json @@ -0,0 +1,41 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$id": "https://example.com/employee.schema.json", + "title": "Record of employee", + "description": "This document records the details of an employee", + "type": "object", + "properties": { + "id": { + "description": "A unique identifier for an employee", + "type": "number" + }, + "name": { + "description": "Full name of the employee", + "type": "string" + }, + "age": { + "description": "Age of the employee", + "type": "number" + }, + "hobbies": { + "description": "Hobbies of the employee", + "type": "object", + "properties": { + "indoor": { + "type": "array", + "items": { + "description": "List of indoor hobbies", + "type": "string" + } + }, + "outdoor": { + "type": "array", + "items": { + "description": "List of outdoor hobbies", + "type": "string" + } + } + } + } + } +} diff --git a/registry/cmd/sr-cli/testdata/json/schema-4.json b/registry/cmd/sr-cli/testdata/json/schema-4.json new file mode 100644 index 0000000..f46f61d --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-4.json @@ -0,0 +1,253 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": false, + "type": "object", + "properties": { + "web-app": { + "type": "object", + "properties": { + "servlet": { + "type": "array", + "items": { + "type": "object", + "properties": { + "servlet-name": { + "type": "string" + }, + "servlet-class": { + "type": "string" + }, + "init-param": { + "type": "object", + "properties": { + "configGlossary:installationAt": { + "type": "string" + }, 
+ "configGlossary:adminEmail": { + "type": "string" + }, + "configGlossary:poweredBy": { + "type": "string" + }, + "configGlossary:poweredByIcon": { + "type": "string" + }, + "configGlossary:staticPath": { + "type": "string" + }, + "templateProcessorClass": { + "type": "string" + }, + "templateLoaderClass": { + "type": "string" + }, + "templatePath": { + "type": "string" + }, + "templateOverridePath": { + "type": "string" + }, + "defaultListTemplate": { + "type": "string" + }, + "defaultFileTemplate": { + "type": "string" + }, + "useJSP": { + "type": "boolean" + }, + "jspListTemplate": { + "type": "string" + }, + "jspFileTemplate": { + "type": "string" + }, + "cachePackageTagsTrack": { + "type": "integer" + }, + "cachePackageTagsStore": { + "type": "integer" + }, + "cachePackageTagsRefresh": { + "type": "integer" + }, + "cacheTemplatesTrack": { + "type": "integer" + }, + "cacheTemplatesStore": { + "type": "integer" + }, + "cacheTemplatesRefresh": { + "type": "integer" + }, + "cachePagesTrack": { + "type": "integer" + }, + "cachePagesStore": { + "type": "integer" + }, + "cachePagesRefresh": { + "type": "integer" + }, + "cachePagesDirtyRead": { + "type": "integer" + }, + "searchEngineListTemplate": { + "type": "string" + }, + "searchEngineFileTemplate": { + "type": "string" + }, + "searchEngineRobotsDb": { + "type": "string" + }, + "useDataStore": { + "type": "boolean" + }, + "dataStoreClass": { + "type": "string" + }, + "redirectionClass": { + "type": "string" + }, + "dataStoreName": { + "type": "string" + }, + "dataStoreDriver": { + "type": "string" + }, + "dataStoreUrl": { + "type": "string" + }, + "dataStoreUser": { + "type": "string" + }, + "dataStorePassword": { + "type": "string" + }, + "dataStoreTestQuery": { + "type": "string" + }, + "dataStoreLogFile": { + "type": "string" + }, + "dataStoreInitConns": { + "type": "integer" + }, + "dataStoreMaxConns": { + "type": "integer" + }, + "dataStoreConnUsageLimit": { + "type": "integer" + }, + "dataStoreLogLevel": { + "type": "string" + }, + "maxUrlLength": { + "type": "integer" + }, + "mailHost": { + "type": "string" + }, + "mailHostOverride": { + "type": "string" + }, + "log": { + "type": "integer" + }, + "logLocation": { + "type": "string" + }, + "logMaxSize": { + "type": "string" + }, + "dataLog": { + "type": "integer" + }, + "dataLogLocation": { + "type": "string" + }, + "dataLogMaxSize": { + "type": "string" + }, + "removePageCache": { + "type": "string" + }, + "removeTemplateCache": { + "type": "string" + }, + "fileTransferFolder": { + "type": "string" + }, + "lookInContext": { + "type": "integer" + }, + "adminGroupID": { + "type": "integer" + }, + "betaServer": { + "type": "boolean" + } + } + } + }, + "required": [ + "servlet-class", + "servlet-name" + ] + } + }, + "servlet-mapping": { + "type": "object", + "properties": { + "cofaxCDS": { + "type": "string" + }, + "cofaxEmail": { + "type": "string" + }, + "cofaxAdmin": { + "type": "string" + }, + "fileServlet": { + "type": "string" + }, + "cofaxTools": { + "type": "string" + } + }, + "required": [ + "cofaxAdmin", + "cofaxCDS", + "cofaxEmail", + "cofaxTools", + "fileServlet" + ] + }, + "taglib": { + "type": "object", + "properties": { + "taglib-uri": { + "type": "string" + }, + "taglib-location": { + "type": "string" + } + }, + "required": [ + "taglib-location", + "taglib-uri" + ] + } + }, + "required": [ + "servlet", + "servlet-mapping", + "taglib" + ] + } + }, + "required": [ + "web-app" + ] +} \ No newline at end of file diff --git a/registry/cmd/sr-cli/testdata/json/schema-5.json 
b/registry/cmd/sr-cli/testdata/json/schema-5.json new file mode 100644 index 0000000..628a55f --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-5.json @@ -0,0 +1,157 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": false, + "type": "object", + "properties": { + "GroupDefinitions": { + "type": "array", + "items": { + "type": "object", + "properties": { + "Type": { + "type": "string" + }, + "Key": { + "type": "string" + } + }, + "required": [ + "Key", + "Type" + ] + } + }, + "ResultsByTime": { + "type": "array", + "items": { + "type": "object", + "properties": { + "TimePeriod": { + "type": "object", + "properties": { + "Start": { + "type": "string" + }, + "End": { + "type": "string" + } + }, + "required": [ + "End", + "Start" + ] + }, + "Total": { + "type": "object" + }, + "Groups": { + "type": "array", + "items": { + "type": "object", + "properties": { + "Keys": { + "type": "array", + "items": { + "type": "string" + } + }, + "Metrics": { + "type": "object", + "properties": { + "UnblendedCost": { + "type": "object", + "properties": { + "Amount": { + "type": "string" + }, + "Unit": { + "type": "string" + } + }, + "required": [ + "Amount", + "Unit" + ] + } + }, + "required": [ + "UnblendedCost" + ] + } + }, + "required": [ + "Keys", + "Metrics" + ] + } + }, + "Estimated": { + "type": "boolean" + } + }, + "required": [ + "Estimated", + "Groups", + "TimePeriod", + "Total" + ] + } + }, + "ResponseMetadata": { + "type": "object", + "properties": { + "RequestId": { + "type": "string" + }, + "HTTPStatusCode": { + "type": "integer" + }, + "HTTPHeaders": { + "type": "object", + "properties": { + "date": { + "type": "string" + }, + "content-type": { + "type": "string" + }, + "content-length": { + "type": "string" + }, + "connection": { + "type": "string" + }, + "x-amzn-requestid": { + "type": "string" + }, + "cache-control": { + "type": "string" + } + }, + "required": [ + "cache-control", + "connection", + "content-length", + "content-type", + "date", + "x-amzn-requestid" + ] + }, + "RetryAttempts": { + "type": "integer" + } + }, + "required": [ + "HTTPHeaders", + "HTTPStatusCode", + "RequestId", + "RetryAttempts" + ] + } + }, + "required": [ + "GroupDefinitions", + "ResponseMetadata", + "ResultsByTime" + ] +} \ No newline at end of file diff --git a/registry/cmd/sr-cli/testdata/json/schema-6.json b/registry/cmd/sr-cli/testdata/json/schema-6.json new file mode 100644 index 0000000..e711952 --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-6.json @@ -0,0 +1,546 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": false, + "type": "object", + "properties": { + "servers": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "status": { + "type": "string" + }, + "created": { + "type": "string" + }, + "public_net": { + "type": "object", + "properties": { + "ipv4": { + "type": "object", + "properties": { + "ip": { + "type": "string" + }, + "blocked": { + "type": "boolean" + }, + "dns_ptr": { + "type": "string" + } + }, + "required": [ + "blocked", + "dns_ptr", + "ip" + ] + }, + "ipv6": { + "type": "object", + "properties": { + "ip": { + "type": "string" + }, + "blocked": { + "type": "boolean" + }, + "dns_ptr": { + "type": "array" + } + }, + "required": [ + "blocked", + "dns_ptr", + "ip" + ] + }, + "floating_ips": { + "type": "array" + }, + "firewalls": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + 
"type": "integer" + }, + "status": { + "type": "string" + } + }, + "required": [ + "id", + "status" + ] + } + } + }, + "required": [ + "firewalls", + "floating_ips", + "ipv4", + "ipv6" + ] + }, + "private_net": { + "type": "array", + "items": { + "type": "object", + "properties": { + "network": { + "type": "integer" + }, + "ip": { + "type": "string" + }, + "alias_ips": { + "type": "array" + }, + "mac_address": { + "type": "string" + } + }, + "required": [ + "alias_ips", + "ip", + "mac_address", + "network" + ] + } + }, + "server_type": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "cores": { + "type": "integer" + }, + "memory": { + "type": "number" + }, + "disk": { + "type": "integer" + }, + "deprecated": { + "type": [ + "boolean", + "null" + ] + }, + "prices": { + "type": "array", + "items": { + "type": "object", + "properties": { + "location": { + "type": "string" + }, + "price_hourly": { + "type": "object", + "properties": { + "net": { + "type": "string" + }, + "gross": { + "type": "string" + } + }, + "required": [ + "gross", + "net" + ] + }, + "price_monthly": { + "type": "object", + "properties": { + "net": { + "type": "string" + }, + "gross": { + "type": "string" + } + }, + "required": [ + "gross", + "net" + ] + } + }, + "required": [ + "location", + "price_hourly", + "price_monthly" + ] + } + }, + "storage_type": { + "type": "string" + }, + "cpu_type": { + "type": "string" + } + }, + "required": [ + "cores", + "cpu_type", + "deprecated", + "description", + "disk", + "id", + "memory", + "name", + "prices", + "storage_type" + ] + }, + "datacenter": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "location": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "country": { + "type": "string" + }, + "city": { + "type": "string" + }, + "latitude": { + "type": "number" + }, + "longitude": { + "type": "number" + }, + "network_zone": { + "type": "string" + } + }, + "required": [ + "city", + "country", + "description", + "id", + "latitude", + "longitude", + "name", + "network_zone" + ] + }, + "server_types": { + "type": "object", + "properties": { + "supported": { + "type": "array", + "items": { + "type": "integer" + } + }, + "available": { + "type": "array", + "items": { + "type": "integer" + } + }, + "available_for_migration": { + "type": "array", + "items": { + "type": "integer" + } + } + }, + "required": [ + "available", + "available_for_migration", + "supported" + ] + } + }, + "required": [ + "description", + "id", + "location", + "name", + "server_types" + ] + }, + "image": { + "anyOf": [ + { + "type": "null" + }, + { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "type": { + "type": "string" + }, + "status": { + "type": "string" + }, + "name": { + "type": [ + "null", + "string" + ] + }, + "description": { + "type": "string" + }, + "image_size": { + "type": [ + "null", + "number" + ] + }, + "disk_size": { + "type": "integer" + }, + "created": { + "type": "string" + }, + "created_from": { + "anyOf": [ + { + "type": "null" + }, + { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + }, + "required": [ + "id", + "name" + ] + } + ] + }, + "bound_to": { + "type": "null" + }, + "os_flavor": { 
+ "type": "string" + }, + "os_version": { + "type": [ + "null", + "string" + ] + }, + "rapid_deploy": { + "type": "boolean" + }, + "protection": { + "type": "object", + "properties": { + "delete": { + "type": "boolean" + } + }, + "required": [ + "delete" + ] + }, + "deprecated": { + "type": "null" + }, + "labels": { + "type": "object" + }, + "deleted": { + "type": "null" + } + }, + "required": [ + "bound_to", + "created", + "created_from", + "deleted", + "deprecated", + "description", + "disk_size", + "id", + "image_size", + "labels", + "name", + "os_flavor", + "os_version", + "protection", + "rapid_deploy", + "status", + "type" + ] + } + ] + }, + "iso": { + "type": "null" + }, + "rescue_enabled": { + "type": "boolean" + }, + "locked": { + "type": "boolean" + }, + "backup_window": { + "type": "null" + }, + "outgoing_traffic": { + "type": "integer" + }, + "ingoing_traffic": { + "type": "integer" + }, + "included_traffic": { + "type": "integer" + }, + "protection": { + "type": "object", + "properties": { + "delete": { + "type": "boolean" + }, + "rebuild": { + "type": "boolean" + } + }, + "required": [ + "delete", + "rebuild" + ] + }, + "labels": { + "type": "object" + }, + "volumes": { + "type": "array", + "items": { + "type": "integer" + } + }, + "load_balancers": { + "type": "array", + "items": { + "type": "integer" + } + }, + "primary_disk_size": { + "type": "integer" + }, + "placement_group": { + "type": "null" + } + }, + "required": [ + "backup_window", + "created", + "datacenter", + "id", + "image", + "included_traffic", + "ingoing_traffic", + "iso", + "labels", + "load_balancers", + "locked", + "name", + "outgoing_traffic", + "placement_group", + "primary_disk_size", + "private_net", + "protection", + "public_net", + "rescue_enabled", + "server_type", + "status", + "volumes" + ] + } + }, + "meta": { + "type": "object", + "properties": { + "pagination": { + "type": "object", + "properties": { + "page": { + "type": "integer" + }, + "per_page": { + "type": "integer" + }, + "previous_page": { + "type": "null" + }, + "next_page": { + "type": "integer" + }, + "last_page": { + "type": "integer" + }, + "total_entries": { + "type": "integer" + } + }, + "required": [ + "last_page", + "next_page", + "page", + "per_page", + "previous_page", + "total_entries" + ] + } + }, + "required": [ + "pagination" + ] + } + }, + "required": [ + "meta", + "servers" + ] +} \ No newline at end of file diff --git a/registry/cmd/sr-cli/testdata/json/schema-7.json b/registry/cmd/sr-cli/testdata/json/schema-7.json new file mode 100644 index 0000000..d7778da --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-7.json @@ -0,0 +1,243 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": false, + "type": "object", + "properties": { + "effect_changes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "effect_entries": { + "type": "array", + "items": { + "type": "object", + "properties": { + "effect": { + "type": "string" + }, + "language": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "name", + "url" + ] + } + }, + "required": [ + "effect", + "language" + ] + } + }, + "version_group": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "name", + "url" + ] + } + }, + "required": [ + "effect_entries", + "version_group" + ] + } + }, + "effect_entries": { + "type": "array", + "items": { + "type": 
"object", + "properties": { + "effect": { + "type": "string" + }, + "language": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "name", + "url" + ] + }, + "short_effect": { + "type": "string" + } + }, + "required": [ + "effect", + "language", + "short_effect" + ] + } + }, + "flavor_text_entries": { + "type": "array", + "items": { + "type": "object", + "properties": { + "flavor_text": { + "type": "string" + }, + "language": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "name", + "url" + ] + }, + "version_group": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "name", + "url" + ] + } + }, + "required": [ + "flavor_text", + "language", + "version_group" + ] + } + }, + "generation": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "name", + "url" + ] + }, + "id": { + "type": "integer" + }, + "is_main_series": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "names": { + "type": "array", + "items": { + "type": "object", + "properties": { + "language": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "name", + "url" + ] + }, + "name": { + "type": "string" + } + }, + "required": [ + "language", + "name" + ] + } + }, + "pokemon": { + "type": "array", + "items": { + "type": "object", + "properties": { + "is_hidden": { + "type": "boolean" + }, + "pokemon": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "name", + "url" + ] + }, + "slot": { + "type": "integer" + } + }, + "required": [ + "is_hidden", + "pokemon", + "slot" + ] + } + } + }, + "required": [ + "effect_changes", + "effect_entries", + "flavor_text_entries", + "generation", + "id", + "is_main_series", + "name", + "names", + "pokemon" + ] +} \ No newline at end of file diff --git a/registry/cmd/sr-cli/testdata/json/schema-8.json b/registry/cmd/sr-cli/testdata/json/schema-8.json new file mode 100644 index 0000000..c82956c --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-8.json @@ -0,0 +1,57 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": false, + "type": "object", + "properties": { + "args": { + "type": "object" + }, + "headers": { + "type": "object", + "properties": { + "Accept": { + "type": "string" + }, + "Accept-Encoding": { + "type": "string" + }, + "Accept-Language": { + "type": "string" + }, + "Host": { + "type": "string" + }, + "Upgrade-Insecure-Requests": { + "type": "string" + }, + "User-Agent": { + "type": "string" + }, + "X-Amzn-Trace-Id": { + "type": "string" + } + }, + "required": [ + "Accept", + "Accept-Encoding", + "Accept-Language", + "Host", + "Upgrade-Insecure-Requests", + "User-Agent", + "X-Amzn-Trace-Id" + ] + }, + "origin": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": [ + "args", + "headers", + "origin", + "url" + ] +} \ No newline at end of file diff --git a/registry/cmd/sr-cli/testdata/json/schema-9.json b/registry/cmd/sr-cli/testdata/json/schema-9.json new file mode 100644 index 0000000..a58e95e --- /dev/null +++ b/registry/cmd/sr-cli/testdata/json/schema-9.json @@ -0,0 +1,127 @@ +{ + "$schema": 
"http://json-schema.org/schema#", + "additionalProperties": false, + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "features": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "id": { + "type": "string" + }, + "geometry": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "coordinates": { + "type": "array", + "items": { + "type": "number" + } + } + }, + "required": [ + "coordinates", + "type" + ] + }, + "geometry_name": { + "type": "string" + }, + "properties": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "streetaddress": { + "type": "string" + }, + "twitteraccount": { + "type": "string" + }, + "facebookaccount": { + "type": "string" + }, + "siteurl": { + "type": "string" + }, + "frequencyfinderurl": { + "type": "string" + } + }, + "required": [ + "facebookaccount", + "frequencyfinderurl", + "name", + "siteurl", + "streetaddress", + "twitteraccount" + ] + } + }, + "required": [ + "geometry", + "geometry_name", + "id", + "properties", + "type" + ] + } + }, + "totalFeatures": { + "type": "integer" + }, + "numberMatched": { + "type": "integer" + }, + "numberReturned": { + "type": "integer" + }, + "timeStamp": { + "type": "string" + }, + "crs": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "properties": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + }, + "required": [ + "name" + ] + } + }, + "required": [ + "properties", + "type" + ] + } + }, + "required": [ + "crs", + "features", + "numberMatched", + "numberReturned", + "timeStamp", + "totalFeatures", + "type" + ] +} \ No newline at end of file diff --git a/registry/compatibility/checker.go b/registry/compatibility/checker.go new file mode 100644 index 0000000..4e33d90 --- /dev/null +++ b/registry/compatibility/checker.go @@ -0,0 +1,11 @@ +package compatibility + +type Checker interface { + Check(schema string, history []string, mode string) (bool, error) +} + +type CheckerFunc func(schema string, history []string, mode string) (bool, error) + +func (f CheckerFunc) Check(schema string, history []string, mode string) (bool, error) { + return f(schema, history, mode) +} diff --git a/registry/compatibility/checker_test.go b/registry/compatibility/checker_test.go new file mode 100644 index 0000000..8ba4f4b --- /dev/null +++ b/registry/compatibility/checker_test.go @@ -0,0 +1,105 @@ +package compatibility + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "os" + "path/filepath" + "runtime" + "testing" + "time" +) + +func TestNew(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + v, err := New(ctx, "http://localhost:8088", 2*time.Second) + + fmt.Println(v.url) + + if err != nil { + t.Fatal(err) + } +} + +func TestCompatibilityChecker_Check(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) + defer cancel() + + checker, err := NewFromEnv(ctx) + if err != nil { + t.Fatal(err) + } + + type Data struct { + Id string `json:"id"` + Format string `json:"format"` + Schema string `json:"schema"` + } + + tt := []struct { + name string + schema string + history string + mode string + compatible bool + }{ + {"compatible-1", "backward_json_true/schema2.json", "backward_json_true/schema1.json", "BACKWARD", true}, + 
{"compatible-2", "backward_json_false/schema2.json", "backward_json_false/schema1.json", "BACKWARD", false}, + {"compatible-3", "backward_avro_true/schema2.json", "backward_avro_true/schema1.json", "BACKWARD", true}, + {"compatible-4", "backward_avro_false/schema2.json", "backward_avro_false/schema1.json", "BACKWARD", false}, + + {"compatible-5", "forward_json_true/schema2.json", "forward_json_true/schema1.json", "FORWARD", true}, + {"compatible-6", "forward_json_false/schema2.json", "forward_json_false/schema1.json", "FORWARD", false}, + {"compatible-7", "forward_avro_true/schema2.json", "forward_avro_true/schema1.json", "FORWARD", true}, + {"compatible-8", "forward_avro_false/schema2.json", "forward_avro_false/schema1.json", "FORWARD", false}, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + newSchema, err := os.ReadFile(filepath.Join(testdataDir, tc.schema)) + if err != nil { + t.Fatalf("newSchema read error: %s", err) + } + + var schemaHistory []string + previousSchema, err := os.ReadFile(filepath.Join(testdataDir, tc.history)) + if err != nil { + t.Fatalf("schemaHistory read error: %s", err) + } + var previousSchemaJson Data + if err := json.Unmarshal(previousSchema, &previousSchemaJson); err != nil { + t.Fatalf("couldn't unmarshall schema history") + } + + schemaHistory = append(schemaHistory, base64.StdEncoding.EncodeToString([]byte(previousSchemaJson.Schema))) + + compatible, err := checker.Check(string(newSchema), schemaHistory, tc.mode) + if err != nil { + t.Errorf("validator error: %s", err) + } + if compatible != tc.compatible { + if compatible { + t.Errorf("message compatible, incompatible expected") + } else { + t.Errorf("message incompatible, compatible expected") + } + } + }) + } +} diff --git a/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-common-2.3.1.Final.jar b/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-common-2.3.1.Final.jar new file mode 100644 index 0000000..747e7fb Binary files /dev/null and b/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-common-2.3.1.Final.jar differ diff --git a/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-json-2.3.1.Final.jar b/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-json-2.3.1.Final.jar new file mode 100644 index 0000000..2f05400 Binary files /dev/null and b/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-json-2.3.1.Final.jar differ diff --git a/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-protobuf-2.3.1.Final.jar b/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-protobuf-2.3.1.Final.jar new file mode 100644 index 0000000..6bbb2ec Binary files /dev/null and b/registry/compatibility/external/compatibility-checker/lib/apicurio-registry-schema-util-protobuf-2.3.1.Final.jar differ diff --git a/registry/compatibility/external/compatibility-checker/lib/kafka-schema-registry-client-7.2.1.jar b/registry/compatibility/external/compatibility-checker/lib/kafka-schema-registry-client-7.2.1.jar new file mode 100644 index 0000000..037753e Binary files /dev/null and b/registry/compatibility/external/compatibility-checker/lib/kafka-schema-registry-client-7.2.1.jar differ diff 
--git a/registry/compatibility/external/compatibility-checker/pom.xml b/registry/compatibility/external/compatibility-checker/pom.xml new file mode 100644 index 0000000..8490adb --- /dev/null +++ b/registry/compatibility/external/compatibility-checker/pom.xml @@ -0,0 +1,252 @@ + + + + Syntio + + 2017 + 4.0.0 + + org.springframework.boot + spring-boot-starter-parent + 2.4.4 + + + + org.example + Compatibility + 1.0-SNAPSHOT + + + 11 + 11 + 3.0.2 + 6.0.4 + 10.1.5 + + + + + io.confluent + io.confluent + https://packages.confluent.io/maven + + + io.apicurio + io.apicurio + https://repo1.maven.org/maven2 + + + central + Maven Central Repository + http://repo1.maven.org/maven2 + + + + + + + org.apache.tomcat.embed + tomcat-embed-core + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-el + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-websocket + ${tomcat.version} + + + org.apache.tomcat + tomcat-annotations-api + ${tomcat.version} + + + + com.fasterxml.jackson.core + jackson-core + 2.13.4 + + + com.fasterxml.jackson.core + jackson-annotations + 2.13.4 + + + com.fasterxml.jackson.core + jackson-databind + 2.13.4.1 + + + com.google.protobuf + protobuf-java + 3.21.9 + + + + org.springframework + spring-beans + ${springframework.version} + + + org.springframework + spring-core + ${springframework.version} + + + org.springframework + spring-webmvc + ${springframework.version} + + + org.springframework + spring-web + ${springframework.version} + + + org.springframework + spring-aop + ${springframework.version} + + + org.springframework + spring-context + ${springframework.version} + + + org.springframework + spring-expression + ${springframework.version} + + + org.springframework + spring-jcl + ${springframework.version} + + + org.springframework + spring-test + ${springframework.version} + + + + + org.springframework.boot + spring-boot-starter-test + test + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-web + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-tomcat + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-validation + ${springframework.boot.version} + + + org.springframework.boot + spring-boot + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-autoconfigure + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-json + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-logging + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-test + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-test-autoconfigure + ${springframework.boot.version} + + + + org.yaml + snakeyaml + 2.0 + + + org.projectlombok + lombok + 1.18.22 + + + + io.apicurio + apicurio-registry-schema-util-common + 2.3.1.Final + + + io.apicurio + apicurio-registry-schema-util-json + 2.3.1.Final + + + io.apicurio + apicurio-registry-schema-util-protobuf + 2.3.1.Final + + + io.confluent + kafka-schema-registry-client + 7.2.1 + + + org.json + json + 20230227 + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + org.apache.maven.plugins + maven-compiler-plugin + 3.10.1 + + 14 + 14 + + + + compatibility-checker + + diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/CheckerFactory.java 
b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/CheckerFactory.java new file mode 100644 index 0000000..1303bfc --- /dev/null +++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/CheckerFactory.java @@ -0,0 +1,17 @@ +package net.syntio.compatibility; + +import net.syntio.compatibility.checker.AvroChecker; +import net.syntio.compatibility.checker.CompatibilityChecker; +import net.syntio.compatibility.checker.JsonChecker; +import net.syntio.compatibility.checker.ProtobufChecker; + +public class CheckerFactory { + public static CompatibilityChecker createChecker(String format) throws Exception { + return switch (format) { + case FileTypes.JSON -> new JsonChecker(); + case FileTypes.PROTOBUF -> new ProtobufChecker(); + case FileTypes.AVRO -> new AvroChecker(); + default -> throw new Exception("Unknown format"); + }; + } +} diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/CompatibilityCheckerApplication.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/CompatibilityCheckerApplication.java new file mode 100644 index 0000000..2b8a20d --- /dev/null +++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/CompatibilityCheckerApplication.java @@ -0,0 +1,15 @@ +package net.syntio.compatibility; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +import java.util.Collections; + +@SpringBootApplication +public class CompatibilityCheckerApplication { + public static void main(String[] args) { + SpringApplication app = new SpringApplication(CompatibilityCheckerApplication.class); + app.setDefaultProperties(Collections.singletonMap("server.port", "8088")); + app.run(args); + } +} diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/FileTypes.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/FileTypes.java new file mode 100644 index 0000000..ade6de8 --- /dev/null +++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/FileTypes.java @@ -0,0 +1,7 @@ +package net.syntio.compatibility; + +public class FileTypes { + public static final String JSON = "json"; + public static final String AVRO = "avro"; + public static final String PROTOBUF = "protobuf"; +} diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/Message.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/Message.java new file mode 100644 index 0000000..a3831f0 --- /dev/null +++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/Message.java @@ -0,0 +1,26 @@ +package net.syntio.compatibility; + +public class Message { + private final String id; + private final String format; + private final String schema; + + public Message(String id, String format, String schema) { + this.id = id; + this.format = format; + this.schema = schema; + } + + + public String getSchema() { + return schema; + } + + public String getID() { + return id; + } + + public String getFormat() { + return format; + } +} diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/AvroChecker.java 
b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/AvroChecker.java
new file mode 100644
index 0000000..4310dc9
--- /dev/null
+++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/AvroChecker.java
@@ -0,0 +1,40 @@
+package net.syntio.compatibility.checker;
+
+import io.apicurio.registry.content.ContentHandle;
+import io.apicurio.registry.rules.compatibility.CompatibilityLevel;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import io.confluent.kafka.schemaregistry.CompatibilityChecker;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class AvroChecker implements net.syntio.compatibility.checker.CompatibilityChecker {
+    @Override
+    public boolean testCompatibility(CompatibilityLevel level, List<ContentHandle> history, ContentHandle currentSchema) {
+        io.confluent.kafka.schemaregistry.CompatibilityLevel avroCompatibilityLevel = switch (level) {
+            case NONE -> io.confluent.kafka.schemaregistry.CompatibilityLevel.NONE;
+            case BACKWARD -> io.confluent.kafka.schemaregistry.CompatibilityLevel.BACKWARD;
+            case BACKWARD_TRANSITIVE -> io.confluent.kafka.schemaregistry.CompatibilityLevel.BACKWARD_TRANSITIVE;
+            case FORWARD -> io.confluent.kafka.schemaregistry.CompatibilityLevel.FORWARD;
+            case FORWARD_TRANSITIVE -> io.confluent.kafka.schemaregistry.CompatibilityLevel.FORWARD_TRANSITIVE;
+            case FULL -> io.confluent.kafka.schemaregistry.CompatibilityLevel.FULL;
+            case FULL_TRANSITIVE -> io.confluent.kafka.schemaregistry.CompatibilityLevel.FULL_TRANSITIVE;
+        };
+        List<AvroSchema> newHistory = new ArrayList<>();
+        for (ContentHandle existingArtifact : history) {
+            newHistory.add(new AvroSchema(existingArtifact.content()));
+        }
+        AvroSchema newSchema = new AvroSchema(currentSchema.content());
+
+        List<String> issues = switch (avroCompatibilityLevel) {
+            case BACKWARD -> CompatibilityChecker.BACKWARD_CHECKER.isCompatible(newSchema, newHistory);
+            case BACKWARD_TRANSITIVE -> CompatibilityChecker.BACKWARD_TRANSITIVE_CHECKER.isCompatible(newSchema, newHistory);
+            case FORWARD -> CompatibilityChecker.FORWARD_CHECKER.isCompatible(newSchema, newHistory);
+            case FORWARD_TRANSITIVE -> CompatibilityChecker.FORWARD_TRANSITIVE_CHECKER.isCompatible(newSchema, newHistory);
+            case FULL -> CompatibilityChecker.FULL_CHECKER.isCompatible(newSchema, newHistory);
+            case FULL_TRANSITIVE -> CompatibilityChecker.FULL_TRANSITIVE_CHECKER.isCompatible(newSchema, newHistory);
+            case NONE -> CompatibilityChecker.NO_OP_CHECKER.isCompatible(newSchema, newHistory);
+        };
+        return issues.isEmpty();
+    }
+}
diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/Checker.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/Checker.java
new file mode 100644
index 0000000..b549a4e
--- /dev/null
+++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/Checker.java
@@ -0,0 +1,24 @@
+package net.syntio.compatibility.checker;
+
+import io.apicurio.registry.content.ContentHandle;
+import io.apicurio.registry.rules.compatibility.CompatibilityLevel;
+import net.syntio.compatibility.Message;
+import net.syntio.compatibility.CheckerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class Checker {
+    public static boolean checkCompatibility(Message msg, List<String> history, CompatibilityLevel mode) throws Exception {
+        ContentHandle schema = ContentHandle.create(msg.getSchema());
+        List<ContentHandle> schemaHistory = new ArrayList<>();
+
+        for (String s : history) {
+            ContentHandle ps = ContentHandle.create(s);
+            schemaHistory.add(ps);
+        }
+        CompatibilityChecker cc = CheckerFactory.createChecker(msg.getFormat());
+
+        return cc.testCompatibility(mode, schemaHistory, schema);
+    }
+}
diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/CompatibilityChecker.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/CompatibilityChecker.java
new file mode 100644
index 0000000..3b423d9
--- /dev/null
+++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/CompatibilityChecker.java
@@ -0,0 +1,10 @@
+package net.syntio.compatibility.checker;
+
+import io.apicurio.registry.content.ContentHandle;
+import io.apicurio.registry.rules.compatibility.CompatibilityLevel;
+
+import java.util.List;
+
+public interface CompatibilityChecker {
+    boolean testCompatibility(CompatibilityLevel level, List<ContentHandle> history, ContentHandle currentSchema);
+}
diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/JsonChecker.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/JsonChecker.java
new file mode 100644
index 0000000..d482e6d
--- /dev/null
+++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/JsonChecker.java
@@ -0,0 +1,15 @@
+package net.syntio.compatibility.checker;
+
+import io.apicurio.registry.content.ContentHandle;
+import io.apicurio.registry.rules.compatibility.CompatibilityLevel;
+import io.apicurio.registry.rules.compatibility.JsonSchemaCompatibilityChecker;
+
+import java.util.List;
+
+public class JsonChecker implements CompatibilityChecker {
+    @Override
+    public boolean testCompatibility(CompatibilityLevel level, List<ContentHandle> history, ContentHandle currentSchema) {
+        JsonSchemaCompatibilityChecker cc = new JsonSchemaCompatibilityChecker();
+        return cc.testCompatibility(level, history, currentSchema).isCompatible();
+    }
+}
diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/ProtobufChecker.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/ProtobufChecker.java
new file mode 100644
index 0000000..e162c32
--- /dev/null
+++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/checker/ProtobufChecker.java
@@ -0,0 +1,15 @@
+package net.syntio.compatibility.checker;
+
+import io.apicurio.registry.content.ContentHandle;
+import io.apicurio.registry.rules.compatibility.CompatibilityLevel;
+import io.apicurio.registry.rules.compatibility.ProtobufCompatibilityChecker;
+
+import java.util.List;
+
+public class ProtobufChecker implements CompatibilityChecker {
+    @Override
+    public boolean testCompatibility(CompatibilityLevel level, List<ContentHandle> history, ContentHandle currentSchema) {
+        ProtobufCompatibilityChecker cc = new ProtobufCompatibilityChecker();
+        return cc.testCompatibility(level, history, currentSchema).isCompatible();
+    }
+}
diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/controller/CheckerController.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/controller/CheckerController.java
new file mode 100644
index 0000000..2c9ca26
--- /dev/null +++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/controller/CheckerController.java @@ -0,0 +1,69 @@ +package net.syntio.compatibility.controller; + +import io.apicurio.registry.rules.compatibility.CompatibilityLevel; +import net.syntio.compatibility.Message; +import net.syntio.compatibility.checker.Checker; +import net.syntio.compatibility.dto.CheckRequestDto; +import net.syntio.compatibility.dto.CheckResponseDto; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RestController; + +import java.util.List; + +@RestController +public class CheckerController { + @PostMapping(value = "/") + public ResponseEntity check(@RequestBody CheckRequestDto req) { + Message latestSchema = req.getMessage(); + List schemaHistory = req.getHistory(); + try { + for (int i = 0; i < schemaHistory.size(); i++) { + schemaHistory.set(i, schemaHistory.get(i).replaceAll("\r\n", "\n")); + } + String mode = req.getMode(); + + CompatibilityLevel cl = getCompatibilityLevel(mode); + boolean result; + if (cl.equals(CompatibilityLevel.NONE)) { + result = true; + } else { + result = Checker.checkCompatibility(latestSchema, schemaHistory, cl); + } + + CheckResponseDto res = new CheckResponseDto(result); + if (result) { + res.setInfo("Schema is compatible"); + return ResponseEntity.ok(res); + } + res.setInfo("Schema is incompatible"); + return ResponseEntity.ok(res); + } catch (NullPointerException e) { + System.err.println("Schema history is null."); + return ResponseEntity.badRequest().build(); + } catch (Exception e) { + return ResponseEntity.badRequest().build(); + } + } + + @GetMapping(value = "/health") + public ResponseEntity healthCheck() { + return ResponseEntity.ok().build(); + } + + private CompatibilityLevel getCompatibilityLevel(String mode) throws Exception { + return switch (mode.toUpperCase()) { + case "BACKWARD" -> CompatibilityLevel.BACKWARD; + case "BACKWARD_TRANSITIVE" -> CompatibilityLevel.BACKWARD_TRANSITIVE; + case "FORWARD" -> CompatibilityLevel.FORWARD; + case "FORWARD_TRANSITIVE" -> CompatibilityLevel.FORWARD_TRANSITIVE; + case "FULL" -> CompatibilityLevel.FULL; + case "FULL_TRANSITIVE" -> CompatibilityLevel.FULL_TRANSITIVE; + case "NONE", "" -> CompatibilityLevel.NONE; + default -> throw new Exception("Unknown compatibility mode"); + }; + } + +} diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/dto/CheckRequestDto.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/dto/CheckRequestDto.java new file mode 100644 index 0000000..353751d --- /dev/null +++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/dto/CheckRequestDto.java @@ -0,0 +1,48 @@ +package net.syntio.compatibility.dto; + +import net.syntio.compatibility.Message; +import org.json.JSONException; +import org.json.JSONObject; + +import java.util.List; + +public class CheckRequestDto { + private Message message; + private final List history; + private final String mode; + + public CheckRequestDto(String payload, List history, String mode) { + try { + this.message = transformStringToMessage(payload); + } catch (Exception e) { + this.message = new Message("", "", ""); + System.err.println("Cannot read message"); + 
} + this.history = history; + this.mode = mode; + } + + public Message getMessage() { + return message; + } + + public String getSchema() { + return message.getSchema(); + } + + public List getHistory() { + return history; + } + + public String getMode() { + return mode; + } + + private static Message transformStringToMessage(String payload) throws JSONException { + JSONObject jsonObject = new JSONObject(payload); + String id = jsonObject.getString("id"); + String format = jsonObject.getString("format"); + String newSchema = jsonObject.getString("schema"); + return new Message(id, format, newSchema); + } +} diff --git a/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/dto/CheckResponseDto.java b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/dto/CheckResponseDto.java new file mode 100644 index 0000000..02440fd --- /dev/null +++ b/registry/compatibility/external/compatibility-checker/src/main/java/net/syntio/compatibility/dto/CheckResponseDto.java @@ -0,0 +1,22 @@ +package net.syntio.compatibility.dto; + +public class CheckResponseDto { + private final boolean result; + private String info; + + public CheckResponseDto(boolean result) { + this.result = result; + } + + public boolean getResult() { + return result; + } + + public String getInfo() { + return info; + } + + public void setInfo(String info) { + this.info = info; + } +} diff --git a/registry/compatibility/externalChecker.go b/registry/compatibility/externalChecker.go new file mode 100644 index 0000000..aa72cf0 --- /dev/null +++ b/registry/compatibility/externalChecker.go @@ -0,0 +1,131 @@ +package compatibility + +import ( + "context" + "encoding/base64" + "fmt" + "os" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/dataphos/aquarium-janitor-standalone-sr/compatibility/http" + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/errtemplates" + "github.com/dataphos/lib-httputil/pkg/httputil" + "github.com/dataphos/lib-retry/pkg/retry" +) + +const ( + urlEnvKey = "COMPATIBILITY_CHECKER_URL" + timeoutEnvKey = "COMPATIBILITY_CHECKER_TIMEOUT_BASE" + globalCompatibilityMode = "GLOBAL_COMPATIBILITY_MODE" +) + +const ( + DefaultTimeoutBase = 2 * time.Second + defaultGlobalCompatibilityMode = "BACKWARD" +) + +type ExternalChecker struct { + url string + TimeoutBase time.Duration +} + +// NewFromEnv loads the needed environment variables and calls New. +func NewFromEnv(ctx context.Context) (*ExternalChecker, error) { + url := os.Getenv(urlEnvKey) + if url == "" { + return nil, errtemplates.EnvVariableNotDefined(urlEnvKey) + } + + timeout := DefaultTimeoutBase + if timeoutStr := os.Getenv(timeoutEnvKey); timeoutStr != "" { + var err error + timeout, err = time.ParseDuration(timeoutStr) + if err != nil { + return nil, errors.Wrap(err, errtemplates.ParsingEnvVariableFailed(timeoutEnvKey)) + } + } + + return New(ctx, url, timeout) +} + +// New returns a new instance of ExternalChecker.
+func New(ctx context.Context, url string, timeoutBase time.Duration) (*ExternalChecker, error) { + if err := retry.Do(ctx, retry.WithJitter(retry.Constant(2*time.Second)), func(ctx context.Context) error { + return httputil.HealthCheck(ctx, url+"/health") + }); err != nil { + return nil, errors.Wrapf(err, "attempting to reach compatibility checker at %s failed", url) + } + + return &ExternalChecker{ + url: url, + TimeoutBase: timeoutBase, + }, nil +} + +func (c *ExternalChecker) Check(schemaInfo string, history []string, mode string) (bool, error) { + // Check if the compatibility mode is none; if it is, don't send an HTTP request to the Java checker. + if strings.ToLower(mode) == "none" { + return true, nil + } + size := calculateSizeInBytes(schemaInfo, history, mode) + ctx, cancel := context.WithTimeout(context.Background(), http.EstimateHTTPTimeout(size, c.TimeoutBase)) + defer cancel() + + decodedHistory, err := c.DecodeHistory(history) + if err != nil { + return false, err + } + return http.CheckOverHTTP(ctx, schemaInfo, decodedHistory, mode, c.url+"/") +} + +func (c *ExternalChecker) DecodeHistory(history []string) ([]string, error) { + var decodedHistory []string + for i := 0; i < len(history); i++ { + decoded, err := base64.StdEncoding.DecodeString(history[i]) + if err != nil { + fmt.Println(fmt.Errorf("could not decode").Error()) + return nil, err + } + decodedHistory = append(decodedHistory, string(decoded)) + } + return decodedHistory, nil +} + +func calculateSizeInBytes(schema string, history []string, mode string) int { + bytes := []byte(schema + mode) + for i := 0; i < len(history); i++ { + bytes = append(bytes, []byte(history[i])...) + } + return len(bytes) +} + +func InitCompatibilityChecker(ctx context.Context) (*ExternalChecker, string, error) { + compChecker, err := NewFromEnv(ctx) + if err != nil { + return nil, "", err + } + globalCompMode := os.Getenv(globalCompatibilityMode) + if globalCompMode == "" { + globalCompMode = defaultGlobalCompatibilityMode + } + if globalCompMode == "BACKWARD" || globalCompMode == "BACKWARD_TRANSITIVE" || + globalCompMode == "FORWARD" || globalCompMode == "FORWARD_TRANSITIVE" || + globalCompMode == "FULL" || globalCompMode == "FULL_TRANSITIVE" || globalCompMode == "NONE" { + return compChecker, globalCompMode, nil + } + return nil, "", errors.Errorf("unsupported compatibility mode") +} + +func CheckIfValidMode(mode *string) bool { + if *mode == "" { + *mode = defaultGlobalCompatibilityMode + } + lowerMode := strings.ToLower(*mode) + if lowerMode != "none" && lowerMode != "backward" && lowerMode != "backward_transitive" && lowerMode != "forward" && lowerMode != "forward_transitive" && lowerMode != "full" && lowerMode != "full_transitive" { + return false + } + return true +} diff --git a/registry/compatibility/http/http.go b/registry/compatibility/http/http.go new file mode 100644 index 0000000..a2fc3e1 --- /dev/null +++ b/registry/compatibility/http/http.go @@ -0,0 +1,92 @@ +package http + +import ( + "bytes" + "context" + "encoding/json" + "github.com/dataphos/lib-httputil/pkg/httputil" + "io" + "math" + "net/http" + "time" + + "github.com/pkg/errors" +) + +// checkRequest contains a new schema, a list of old schemas, and the compatibility mode which should be enforced. The structure represents an HTTP +// request body. +type checkRequest struct { + Payload string `json:"payload"` + History []string `json:"history"` + Mode string `json:"mode"` +} + +// checkResponse contains the compatibility result and an info message.
The structure represents an HTTP response body. +type checkResponse struct { + Result bool `json:"result"` + Info string `json:"info"` +} + +// HTTPTimeoutBytesUnit the base amount of bytes used by EstimateHTTPTimeout. +const HTTPTimeoutBytesUnit = 1024 * 100 + +// EstimateHTTPTimeout calculates the expected timeout, by dividing the size given in bytes with HTTPTimeoutBytesUnit, and then +// multiplying the coefficient with the given time duration. +// +// If the given size is less than HTTPTimeoutBytesUnit, base is returned, to avoid problems due to the http overhead which isn't fully linear. +func EstimateHTTPTimeout(size int, base time.Duration) time.Duration { + coef := int(math.Round(float64(size) / float64(HTTPTimeoutBytesUnit))) + if coef <= 1 { + return base + } + + return time.Duration(coef) * base +} + +// CheckOverHTTP requests a schema check over HTTP. +// Function returns false if schema isn't compatible. +func CheckOverHTTP(ctx context.Context, schema string, history []string, mode, url string) (bool, error) { + response, err := sendCheckRequest(ctx, schema, history, mode, url) + if err != nil { + return false, err + } + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + panic(errors.New("couldn't close response body")) + } + }(response.Body) + + body, err := io.ReadAll(response.Body) + if err != nil { + return false, err + } + + var parsedBody checkResponse + if err = json.Unmarshal(body, &parsedBody); err != nil { + return false, err + } + + compatible := parsedBody.Result + + switch response.StatusCode { + case http.StatusOK: + return compatible, nil + case http.StatusBadRequest: + return compatible, nil + default: + return compatible, errors.Errorf("error: status code [%v]", response.StatusCode) + } +} + +func sendCheckRequest(ctx context.Context, payload string, history []string, mode, url string) (*http.Response, error) { + // this can't generate an error, so it's safe to ignore + data, _ := json.Marshal(checkRequest{Payload: payload, History: history, Mode: mode}) + + request, err := httputil.Post(ctx, url, "application/json", bytes.NewBuffer(data)) + if err != nil { + return nil, err + } + + return http.DefaultClient.Do(request) +} diff --git a/registry/compatibility/testdata/backward_avro_false/schema1.json b/registry/compatibility/testdata/backward_avro_false/schema1.json new file mode 100644 index 0000000..ba32aa8 --- /dev/null +++ b/registry/compatibility/testdata/backward_avro_false/schema1.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "format": "avro", + "schema": "{\r\n \"namespace\": \"example.avro\",\r\n \"type\": \"record\",\r\n \"name\": \"user\",\r\n \"fields\": [\r\n {\"name\": \"name\", \"type\": \"string\"},\r\n {\"name\": \"favorite_animal\", \"type\": \"string\"},\r\n {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}\r\n ]\r\n}" +} diff --git a/registry/compatibility/testdata/backward_avro_false/schema2.json b/registry/compatibility/testdata/backward_avro_false/schema2.json new file mode 100644 index 0000000..f35e735 --- /dev/null +++ b/registry/compatibility/testdata/backward_avro_false/schema2.json @@ -0,0 +1,5 @@ +{ + "id": "2", + "format": "avro", + "schema": "{\r\n \"namespace\": \"example.avro\",\r\n \"type\": \"record\",\r\n \"name\": \"user\",\r\n \"fields\": [\r\n {\"name\": \"name\", \"type\": \"string\"},\r\n {\"name\": \"favorite_number\", \"type\": \"int\"},\r\n {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}\r\n ]\r\n}" +} diff --git 
a/registry/compatibility/testdata/backward_avro_true/schema1.json b/registry/compatibility/testdata/backward_avro_true/schema1.json new file mode 100644 index 0000000..f0502c3 --- /dev/null +++ b/registry/compatibility/testdata/backward_avro_true/schema1.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "format": "avro", + "schema": "{\r\n \"namespace\": \"example.avro\",\r\n \"type\": \"record\",\r\n \"name\": \"user\",\r\n \"fields\": [\r\n {\"name\": \"name\", \"type\": \"string\"},\r\n {\"name\": \"favorite_number\", \"type\": \"int\"},\r\n {\"name\": \"favorite_animal\", \"type\": \"string\"},\r\n {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}\r\n ]\r\n}" +} diff --git a/registry/compatibility/testdata/backward_avro_true/schema2.json b/registry/compatibility/testdata/backward_avro_true/schema2.json new file mode 100644 index 0000000..f35e735 --- /dev/null +++ b/registry/compatibility/testdata/backward_avro_true/schema2.json @@ -0,0 +1,5 @@ +{ + "id": "2", + "format": "avro", + "schema": "{\r\n \"namespace\": \"example.avro\",\r\n \"type\": \"record\",\r\n \"name\": \"user\",\r\n \"fields\": [\r\n {\"name\": \"name\", \"type\": \"string\"},\r\n {\"name\": \"favorite_number\", \"type\": \"int\"},\r\n {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}\r\n ]\r\n}" +} diff --git a/registry/compatibility/testdata/backward_json_false/schema1.json b/registry/compatibility/testdata/backward_json_false/schema1.json new file mode 100644 index 0000000..d8298d9 --- /dev/null +++ b/registry/compatibility/testdata/backward_json_false/schema1.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "format": "json", + "schema": "{\r\n \"$id\": \"https:\/\/example.com\/person.schema.json\",\r\n \"$schema\": \"https:\/\/json-schema.org\/draft-07\/schema\",\r\n \"title\": \"Person\",\r\n \"type\": \"object\",\r\n \"properties\": {\r\n \"firstName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's first name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"age\": {\r\n \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n \"type\": \"integer\",\r\n \"minimum\": 0\r\n }\r\n },\r\n \"additionalProperties\": true\r\n}" +} diff --git a/registry/compatibility/testdata/backward_json_false/schema2.json b/registry/compatibility/testdata/backward_json_false/schema2.json new file mode 100644 index 0000000..94d24a8 --- /dev/null +++ b/registry/compatibility/testdata/backward_json_false/schema2.json @@ -0,0 +1,5 @@ +{ + "id": "2", + "format": "json", + "schema": "{\r\n \"$id\": \"https:\/\/example.com\/person.schema.json\",\r\n \"$schema\": \"https:\/\/json-schema.org\/draft-07\/schema\",\r\n \"title\": \"Person\",\r\n \"type\": \"object\",\r\n \"properties\": {\r\n \"firstName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's first name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"age\": {\r\n \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n \"type\": \"integer\",\r\n \"minimum\": 0\r\n },\r\n \"phoneNumber\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's phone number.\"\r\n }\r\n },\r\n \"additionalProperties\": true\r\n}" +} diff --git a/registry/compatibility/testdata/backward_json_true/schema1.json b/registry/compatibility/testdata/backward_json_true/schema1.json new file mode 100644 index 0000000..0d4f3bb --- /dev/null +++ 
b/registry/compatibility/testdata/backward_json_true/schema1.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "format": "json", + "schema": "{\n \"$id\": \"https://example.com/person.schema.json\",\n \"$schema\": \"https://json-schema.org/draft-07/schema\",\n \"title\": \"Person\",\n \"type\": \"object\",\n \"properties\": {\n \"firstName\": {\n \"type\": \"string\",\n \"description\": \"The person's first name.\"\n },\n \"lastName\": {\n \"type\": \"string\",\n \"description\": \"The person's last name.\"\n },\n \"age\": {\n \"description\": \"Age in years which must be equal to or greater than zero.\",\n \"type\": \"integer\",\n \"minimum\": 0\n },\n \"phoneNumber\": {\n \"type\": \"string\",\n \"description\": \"The person's phone number.\"\n }\n },\n \"additionalProperties\": true\n}" +} diff --git a/registry/compatibility/testdata/backward_json_true/schema2.json b/registry/compatibility/testdata/backward_json_true/schema2.json new file mode 100644 index 0000000..19c6606 --- /dev/null +++ b/registry/compatibility/testdata/backward_json_true/schema2.json @@ -0,0 +1,5 @@ +{ + "id": "2", + "format": "json", + "schema": "{\r\n \"$id\": \"https:\/\/example.com\/person.schema.json\",\r\n \"$schema\": \"https:\/\/json-schema.org\/draft-07\/schema\",\r\n \"title\": \"Person\",\r\n \"type\": \"object\",\r\n \"properties\": {\r\n \"firstName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's first name.\"\r\n },\r\n \"lastName\": {\r\n \"type\": \"string\",\r\n \"description\": \"The person's last name.\"\r\n },\r\n \"age\": {\r\n \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n \"type\": \"integer\",\r\n \"minimum\": 0\r\n }\r\n },\r\n \"additionalProperties\": true\r\n}" +} diff --git a/registry/compatibility/testdata/compatible_avro_back1.json b/registry/compatibility/testdata/compatible_avro_back1.json new file mode 100644 index 0000000..9a214ec --- /dev/null +++ b/registry/compatibility/testdata/compatible_avro_back1.json @@ -0,0 +1,4 @@ +{ + "schema": "{\n \"type\" : \"record\",\n \"namespace\" : \"Tutorialspoint\",\n \"name\" : \"Employee\",\n \"fields\" : [\n { \"name\" : \"Name\" , \"type\" : \"string\" },\n { \"name\" : \"Age\" , \"type\" : \"int\" }\n ]\n}", + "history": ["{\n \"type\" : \"record\",\n \"namespace\" : \"Tutorialspoint\",\n \"name\" : \"Employee\",\n \"fields\" : [\n { \"name\" : \"Name\" , \"type\" : \"string\" },\n { \"name\" : \"Age\" , \"type\" : \"int\" }\n ]\n}"] +} diff --git a/registry/compatibility/testdata/compatible_json_back1.json b/registry/compatibility/testdata/compatible_json_back1.json new file mode 100644 index 0000000..b0cc46f --- /dev/null +++ b/registry/compatibility/testdata/compatible_json_back1.json @@ -0,0 +1,4 @@ +{ + "schema": "{\n \"$schema\": \"http://json-schema.org/draft-07/schema\",\n \"type\": \"object\",\n \"title222\": \"The Root Schema\",\n \"description\": \"The root schema comprises the entire JSON document.\",\n \"default\": {},\n \"additionalProperties\": true,\n \"required\": [\n \"phone\",\n \"room\"\n ],\n \"properties\": {\n \"phone\": {\n \"type\": \"integer\",\n \"title\": \"The Phone Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 23541\n ]\n }\n }\n}", + "history": ["{\n \"$schema\": \"http://json-schema.org/draft-07/schema\",\n \"type\": \"object\",\n \"title222\": \"The Root Schema\",\n \"description\": \"The root schema comprises the entire JSON document.\",\n \"default\": {},\n 
\"additionalProperties\": true,\n \"required\": [\n \"phone\",\n \"room\"\n ],\n \"properties\": {\n \"phone\": {\n \"type\": \"integer\",\n \"title\": \"The Phone Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 23541\n ]\n },\n \"room\": {\n \"type\": \"integer\",\n \"title\": \"The Room Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 18\n ]\n }\n }\n}"] +} diff --git a/registry/compatibility/testdata/forward_avro_false/schema1.json b/registry/compatibility/testdata/forward_avro_false/schema1.json new file mode 100644 index 0000000..97381a5 --- /dev/null +++ b/registry/compatibility/testdata/forward_avro_false/schema1.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "format": "avro", + "schema": "{\n \"namespace\": \"example.avro\",\n \"type\": \"record\",\n \"name\": \"user\",\n \"fields\": [\n {\"name\": \"name\", \"type\": \"string\"},\n {\"name\": \"favorite_number\", \"type\": \"int\"},\n {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}\n ]\n}" +} diff --git a/registry/compatibility/testdata/forward_avro_false/schema2.json b/registry/compatibility/testdata/forward_avro_false/schema2.json new file mode 100644 index 0000000..b878c70 --- /dev/null +++ b/registry/compatibility/testdata/forward_avro_false/schema2.json @@ -0,0 +1,5 @@ +{ + "id": "2", + "format": "avro", + "schema": "{\n \"namespace\": \"example.avro\",\n \"type\": \"record\",\n \"name\": \"user\",\n \"fields\": [\n {\"name\": \"name\", \"type\": \"string\"},\n {\"name\": \"favorite_animal\", \"type\": \"string\"},\n {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}\n ]\n}" +} diff --git a/registry/compatibility/testdata/forward_avro_true/schema1.json b/registry/compatibility/testdata/forward_avro_true/schema1.json new file mode 100644 index 0000000..97381a5 --- /dev/null +++ b/registry/compatibility/testdata/forward_avro_true/schema1.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "format": "avro", + "schema": "{\n \"namespace\": \"example.avro\",\n \"type\": \"record\",\n \"name\": \"user\",\n \"fields\": [\n {\"name\": \"name\", \"type\": \"string\"},\n {\"name\": \"favorite_number\", \"type\": \"int\"},\n {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}\n ]\n}" +} diff --git a/registry/compatibility/testdata/forward_avro_true/schema2.json b/registry/compatibility/testdata/forward_avro_true/schema2.json new file mode 100644 index 0000000..bc1850d --- /dev/null +++ b/registry/compatibility/testdata/forward_avro_true/schema2.json @@ -0,0 +1,5 @@ +{ + "id": "2", + "format": "avro", + "schema": "{\n \"namespace\": \"example.avro\",\n \"type\": \"record\",\n \"name\": \"user\",\n \"fields\": [\n {\"name\": \"name\", \"type\": \"string\"},\n {\"name\": \"favorite_number\", \"type\": \"int\"},\n {\"name\": \"favorite_animal\", \"type\": \"string\"},\n {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}\n ]\n}" +} diff --git a/registry/compatibility/testdata/forward_json_false/schema1.json b/registry/compatibility/testdata/forward_json_false/schema1.json new file mode 100644 index 0000000..0d4f3bb --- /dev/null +++ b/registry/compatibility/testdata/forward_json_false/schema1.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "format": "json", + "schema": "{\n \"$id\": \"https://example.com/person.schema.json\",\n \"$schema\": \"https://json-schema.org/draft-07/schema\",\n \"title\": \"Person\",\n 
\"type\": \"object\",\n \"properties\": {\n \"firstName\": {\n \"type\": \"string\",\n \"description\": \"The person's first name.\"\n },\n \"lastName\": {\n \"type\": \"string\",\n \"description\": \"The person's last name.\"\n },\n \"age\": {\n \"description\": \"Age in years which must be equal to or greater than zero.\",\n \"type\": \"integer\",\n \"minimum\": 0\n },\n \"phoneNumber\": {\n \"type\": \"string\",\n \"description\": \"The person's phone number.\"\n }\n },\n \"additionalProperties\": true\n}" +} diff --git a/registry/compatibility/testdata/forward_json_false/schema2.json b/registry/compatibility/testdata/forward_json_false/schema2.json new file mode 100644 index 0000000..69e7a56 --- /dev/null +++ b/registry/compatibility/testdata/forward_json_false/schema2.json @@ -0,0 +1,5 @@ +{ + "id": "2", + "format": "json", + "schema": "{\n \"$id\": \"https://example.com/person.schema.json\",\n \"$schema\": \"https://json-schema.org/draft-07/schema\",\n \"title\": \"Person\",\n \"type\": \"object\",\n \"properties\": {\n \"firstName\": {\n \"type\": \"string\",\n \"description\": \"The person's first name.\"\n },\n \"lastName\": {\n \"type\": \"string\",\n \"description\": \"The person's last name.\"\n },\n \"age\": {\n \"description\": \"Age in years which must be equal to or greater than zero.\",\n \"type\": \"integer\",\n \"minimum\": 0\n }\n },\n \"additionalProperties\": true\n}" +} diff --git a/registry/compatibility/testdata/forward_json_true/schema1.json b/registry/compatibility/testdata/forward_json_true/schema1.json new file mode 100644 index 0000000..2cfe071 --- /dev/null +++ b/registry/compatibility/testdata/forward_json_true/schema1.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "format": "json", + "schema": "{\n \"$id\": \"https://example.com/person.schema.json\",\n \"$schema\": \"https://json-schema.org/draft-07/schema\",\n \"title\": \"Person\",\n \"type\": \"object\",\n \"properties\": {\n \"firstName\": {\n \"type\": \"string\",\n \"description\": \"The person's first name.\"\n },\n \"lastName\": {\n \"type\": \"string\",\n \"description\": \"The person's last name.\"\n },\n \"age\": {\n \"description\": \"Age in years which must be equal to or greater than zero.\",\n \"type\": \"integer\",\n \"minimum\": 0\n }\n },\n \"additionalProperties\": true\n}" +} diff --git a/registry/compatibility/testdata/forward_json_true/schema2.json b/registry/compatibility/testdata/forward_json_true/schema2.json new file mode 100644 index 0000000..b6169d3 --- /dev/null +++ b/registry/compatibility/testdata/forward_json_true/schema2.json @@ -0,0 +1,5 @@ +{ + "id": "2", + "format": "json", + "schema": "{\n \"$id\": \"https://example.com/person.schema.json\",\n \"$schema\": \"https://json-schema.org/draft-07/schema\",\n \"title\": \"Person\",\n \"type\": \"object\",\n \"properties\": {\n \"firstName\": {\n \"type\": \"string\",\n \"description\": \"The person's first name.\"\n },\n \"lastName\": {\n \"type\": \"string\",\n \"description\": \"The person's last name.\"\n },\n \"age\": {\n \"description\": \"Age in years which must be equal to or greater than zero.\",\n \"type\": \"integer\",\n \"minimum\": 0\n },\n \"phoneNumber\": {\n \"type\": \"string\",\n \"description\": \"The person's phone number.\"\n }\n },\n \"additionalProperties\": true\n}" +} diff --git a/registry/compatibility/testdata/incompatible_avro_back1.json b/registry/compatibility/testdata/incompatible_avro_back1.json new file mode 100644 index 0000000..1e0001e --- /dev/null +++ 
b/registry/compatibility/testdata/incompatible_avro_back1.json @@ -0,0 +1,4 @@ +{ + "schema": "{\n \"type\" : \"record\",\n \"namespace\" : \"Tutorialspoint\",\n \"name\" : \"Employee\",\n \"fields\" : [\n { \"name\" : \"Name\" , \"type\" : \"string\" },\n { \"name\" : \"Age\" , \"type\" : \"int\" }\n ]\n}", + "history": ["{\n \"type\" : \"record\",\n \"namespace\" : \"Tutorialspoint\",\n \"name\" : \"Employee\",\n \"fields\" : [\n { \"name\" : \"Name\" , \"type\" : \"string\" }]\n}"] +} diff --git a/registry/compatibility/testdata/incompatible_json_back1.json b/registry/compatibility/testdata/incompatible_json_back1.json new file mode 100644 index 0000000..435896e --- /dev/null +++ b/registry/compatibility/testdata/incompatible_json_back1.json @@ -0,0 +1,4 @@ +{ + "schema": "{\n \"$schema\": \"http://json-schema.org/draft-07/schema\",\n \"type\": \"object\",\n \"title222\": \"The Root Schema\",\n \"description\": \"The root schema comprises the entire JSON document.\",\n \"default\": {},\n \"additionalProperties\": true,\n \"required\": [\n \"phone\",\n \"room\"\n ],\n \"properties\": {\n \"phone\": {\n \"type\": \"integer\",\n \"title\": \"The Phone Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 23541\n ]\n },\n \"room\": {\n \"type\": \"integer\",\n \"title\": \"The Room Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 18\n ]\n }\n }\n}", + "history": ["{\n \"$schema\": \"http://json-schema.org/draft-07/schema\",\n \"type\": \"object\",\n \"title222\": \"The Root Schema\",\n \"description\": \"The root schema comprises the entire JSON document.\",\n \"default\": {},\n \"additionalProperties\": true,\n \"required\": [\n \"phone\",\n \"room\"\n ],\n \"properties\": {\n \"phone\": {\n \"type\": \"integer\",\n \"title\": \"The Phone Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 23541\n ]\n }\n }\n}"] +} diff --git a/registry/docker/compatibility-checker/Dockerfile b/registry/docker/compatibility-checker/Dockerfile new file mode 100644 index 0000000..07fc765 --- /dev/null +++ b/registry/docker/compatibility-checker/Dockerfile @@ -0,0 +1,37 @@ +# References the base image for Java 18 and maven +FROM maven:3.8.7-openjdk-18-slim AS build + +# Maintainer Info +LABEL maintainer="Syntio Inc." 
+ +# Copy the source code to a new working directory +COPY registry/compatibility/external/compatibility-checker/src /home/app/src + +# Copy the pom.xml to the root of the project +COPY registry/compatibility/external/compatibility-checker/pom.xml /home/app + +COPY registry/licenses/compatibility-checker/LICENSE-3RD-PARTY.md /home/app/licenses/LICENSE-3RD-PARTY.md +COPY LICENSE /home/app/licenses/LICENSE + +# Download dependencies and build +RUN mvn -f /home/app/pom.xml clean package + +# References base image for Java 18 runtime +FROM openjdk:18-jdk-slim + +# Copy the binaries in a new working directory +COPY --from=build /home/app/target/compatibility-checker.jar /home/checker/compatibility-checker.jar +COPY --from=build /home/app/licenses/LICENSE-3RD-PARTY.md /home/checker/licenses/LICENSE-3RD-PARTY.md +COPY --from=build /home/app/licenses/LICENSE /home/checker/licenses/LICENSE + +# Expose port 8088 to the outside world +EXPOSE 8088 + +# change to a non-root user for security +RUN adduser --disabled-password --home /home/checker user +RUN chown -R user /home/checker +RUN chmod -R 500 /home/checker +USER user + +# Set entrypoint of command that will run when container is started +ENTRYPOINT ["java","-jar","/home/checker/compatibility-checker.jar"] diff --git a/registry/docker/initdb/Dockerfile b/registry/docker/initdb/Dockerfile new file mode 100644 index 0000000..7f85076 --- /dev/null +++ b/registry/docker/initdb/Dockerfile @@ -0,0 +1,36 @@ +FROM golang:alpine3.15 AS build + +LABEL maintainer="Syntio Inc." + +ENV GO111MODULE=on \ + GOOS=linux \ + GOARCH=amd64 \ + CGO_ENABLED=0 + +RUN apk add --no-cache git + +WORKDIR /src + +COPY ./registry/go.mod ./registry/go.sum ./ +RUN go mod download + +COPY ./registry . +COPY LICENSE ./licenses/ + +RUN go mod tidy + +RUN go build -buildvcs=false -o /app/initdb ./cmd/initdb + +FROM alpine:3.16 + +COPY --from=build /app/initdb /app/initdb +COPY --from=build /src/licenses/LICENSE-3RD-PARTY.md /app/licenses/ +COPY --from=build /src/licenses/LICENSE /app/licenses/ + +# change to a non-root user for security +RUN adduser -D -h /app user +RUN chown -R user /app +RUN chmod -R 700 /app +USER user + +CMD ["/app/initdb"] diff --git a/registry/docker/registry/Dockerfile b/registry/docker/registry/Dockerfile new file mode 100644 index 0000000..da40817 --- /dev/null +++ b/registry/docker/registry/Dockerfile @@ -0,0 +1,39 @@ +FROM golang:alpine3.15 AS build + +LABEL maintainer="Syntio Inc." + +ENV GO111MODULE=on \ + GOOS=linux \ + GOARCH=amd64 \ + CGO_ENABLED=0 + +RUN apk add --no-cache git + +WORKDIR /src + +COPY ./registry/go.mod ./registry/go.sum ./ +RUN go mod download + +COPY ./registry . +COPY LICENSE ./licenses/ + +RUN go mod tidy + +RUN go build -buildvcs=false -o /app/sr ./cmd/janitorsr + +FROM alpine:3.16 + +COPY --from=build /app/sr /app/sr +COPY --from=build /src/docs /app/docs +COPY --from=build /src/licenses/LICENSE-3RD-PARTY.md /app/licenses/ +COPY --from=build /src/licenses/LICENSE /app/licenses/ + +# change to a non-root user for security +RUN adduser -D -h /app user +RUN chown -R user /app +RUN chmod -R 700 /app +USER user + +EXPOSE 8080 + +CMD ["/app/sr"] diff --git a/registry/docker/validity-checker/Dockerfile b/registry/docker/validity-checker/Dockerfile new file mode 100644 index 0000000..61e31df --- /dev/null +++ b/registry/docker/validity-checker/Dockerfile @@ -0,0 +1,37 @@ +# References the base image for Java 18 and maven +FROM maven:3.8.7-openjdk-18-slim AS build + +# Maintainer Info +LABEL maintainer="Syntio Inc."
+ +# Copy the source code to a new working directory +COPY registry/validity/external/validity-checker/src /home/app/src + +# Copy the pom.xml to the root of the project +COPY registry/validity/external/validity-checker/pom.xml /home/app + +COPY registry/licenses/validity-checker/LICENSE-3RD-PARTY.md /home/app/licenses/LICENSE-3RD-PARTY.md +COPY LICENSE /home/app/licenses/LICENSE + +# Download dependecies and build +RUN mvn -f /home/app/pom.xml clean package + +# References base image for Java 18 runtime +FROM openjdk:18-jdk-slim + +# Copy the binaries in a new working directory +COPY --from=build /home/app/target/validity-checker.jar /home/checker/validity-checker.jar +COPY --from=build /home/app/licenses/LICENSE-3RD-PARTY.md /home/checker/licenses/LICENSE-3RD-PARTY.md +COPY --from=build /home/app/licenses/LICENSE /home/checker/licenses/LICENSE + +# Expose port 8089 to the outside world +EXPOSE 8089 + +# change to a non-root user for security +RUN adduser --disabled-password --home /home/checker user +RUN chown -R user /home/checker +RUN chmod -R 500 /home/checker +USER user + +# Set entrypoint of command that will run when container is started +ENTRYPOINT ["java","-jar","/home/checker/validity-checker.jar"] diff --git a/registry/docs/docs.go b/registry/docs/docs.go new file mode 100644 index 0000000..d781fda --- /dev/null +++ b/registry/docs/docs.go @@ -0,0 +1,491 @@ +// Code generated by swaggo/swag. DO NOT EDIT. + +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "contact": {}, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/schemas": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get all active schemas", + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + }, + "post": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "summary": "Post new schema", + "parameters": [ + { + "description": "schema registration request", + "name": "data", + "in": "body", + "schema": { + "$ref": "#/definitions/registry.SchemaRegistrationRequest" + } + } + ], + "responses": { + "201": { + "description": "Created" + }, + "400": { + "description": "Bad Request" + }, + "409": { + "description": "Conflict" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/all": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get all schemas", + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/search": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Search schemas", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "query" + }, + { + "type": "string", + "description": "schema version", + "name": "version", + "in": "query" + }, + { + "type": "string", + "description": "schema type", + "name": "type", + "in": "query" + }, + { + "type": "string", + "description": "schema name", + "name": "name", + "in": "query" + }, + { + "type": "string", + "description": "order by name, type, id or version", + "name": "orderBy", + "in": "query" + }, + { + "type": "string", + "description": "sort schemas either asc 
or desc", + "name": "sort", + "in": "query" + }, + { + "type": "string", + "description": "maximum number of retrieved schemas matching the criteria", + "name": "limit", + "in": "query" + }, + { + "type": "string", + "description": "schema attributes", + "name": "attributes", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK" + }, + "400": { + "description": "Bad Request" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/{id}": { + "put": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "summary": "Put new schema version", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "schema update request", + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/registry.SchemaUpdateRequest" + } + } + ], + "responses": { + "200": { + "description": "OK" + }, + "400": { + "description": "Bad Request" + }, + "404": { + "description": "Not Found" + }, + "409": { + "description": "Conflict" + }, + "500": { + "description": "Internal Server Error" + } + } + }, + "delete": { + "produces": [ + "application/json" + ], + "summary": "Delete schema by schema id", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "400": { + "description": "Bad Request" + }, + "404": { + "description": "Not Found" + } + } + } + }, + "/schemas/{id}/versions": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get all active schema versions by schema id", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/{id}/versions/all": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get schema by schema id", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/{id}/versions/latest": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get the latest schema version by schema id", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/{id}/versions/{version}": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get schema version by schema id and version", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "version", + "name": "version", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + }, + "delete": { + "consumes": [ + 
"application/json" + ], + "produces": [ + "application/json" + ], + "summary": "Delete schema version by schema id and version", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "version", + "name": "version", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "400": { + "description": "Bad Request" + }, + "404": { + "description": "Not Found" + } + } + } + }, + "/schemas/{id}/versions/{version}/spec": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get schema specification by schema id and version", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "version", + "name": "version", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + } + }, + "definitions": { + "registry.SchemaRegistrationRequest": { + "type": "object", + "properties": { + "attributes": { + "type": "string" + }, + "compatibility_mode": { + "type": "string" + }, + "description": { + "type": "string" + }, + "last_created": { + "type": "string" + }, + "name": { + "type": "string" + }, + "publisher_id": { + "type": "string" + }, + "schema_type": { + "type": "string" + }, + "specification": { + "type": "string" + }, + "validity_mode": { + "type": "string" + } + } + }, + "registry.SchemaUpdateRequest": { + "type": "object", + "properties": { + "attributes": { + "type": "string" + }, + "description": { + "type": "string" + }, + "specification": { + "type": "string" + } + } + } + } +}` + +// SwaggerInfo holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "1.0", + Host: "", + BasePath: "", + Schemes: []string{}, + Title: "Schema Registry API", + Description: "", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, + LeftDelim: "{{", + RightDelim: "}}", +} + +func init() { + swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) +} diff --git a/registry/docs/swagger.json b/registry/docs/swagger.json new file mode 100644 index 0000000..985bafe --- /dev/null +++ b/registry/docs/swagger.json @@ -0,0 +1,463 @@ +{ + "swagger": "2.0", + "info": { + "title": "Schema Registry API", + "contact": {}, + "version": "1.0" + }, + "paths": { + "/schemas": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get all active schemas", + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + }, + "post": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "summary": "Post new schema", + "parameters": [ + { + "description": "schema registration request", + "name": "data", + "in": "body", + "schema": { + "$ref": "#/definitions/registry.SchemaRegistrationRequest" + } + } + ], + "responses": { + "201": { + "description": "Created" + }, + "400": { + "description": "Bad Request" + }, + "409": { + "description": "Conflict" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/all": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get all schemas", + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + 
}, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/search": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Search schemas", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "query" + }, + { + "type": "string", + "description": "schema version", + "name": "version", + "in": "query" + }, + { + "type": "string", + "description": "schema type", + "name": "type", + "in": "query" + }, + { + "type": "string", + "description": "schema name", + "name": "name", + "in": "query" + }, + { + "type": "string", + "description": "order by name, type, id or version", + "name": "orderBy", + "in": "query" + }, + { + "type": "string", + "description": "sort schemas either asc or desc", + "name": "sort", + "in": "query" + }, + { + "type": "string", + "description": "maximum number of retrieved schemas matching the criteria", + "name": "limit", + "in": "query" + }, + { + "type": "string", + "description": "schema attributes", + "name": "attributes", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK" + }, + "400": { + "description": "Bad Request" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/{id}": { + "put": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "summary": "Put new schema version", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "schema update request", + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/registry.SchemaUpdateRequest" + } + } + ], + "responses": { + "200": { + "description": "OK" + }, + "400": { + "description": "Bad Request" + }, + "404": { + "description": "Not Found" + }, + "409": { + "description": "Conflict" + }, + "500": { + "description": "Internal Server Error" + } + } + }, + "delete": { + "produces": [ + "application/json" + ], + "summary": "Delete schema by schema id", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "400": { + "description": "Bad Request" + }, + "404": { + "description": "Not Found" + } + } + } + }, + "/schemas/{id}/versions": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get all active schema versions by schema id", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/{id}/versions/all": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get schema by schema id", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/{id}/versions/latest": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get the latest schema version by schema id", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + } + ], + 
"responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + }, + "/schemas/{id}/versions/{version}": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get schema version by schema id and version", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "version", + "name": "version", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + }, + "delete": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "summary": "Delete schema version by schema id and version", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "version", + "name": "version", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "400": { + "description": "Bad Request" + }, + "404": { + "description": "Not Found" + } + } + } + }, + "/schemas/{id}/versions/{version}/spec": { + "get": { + "produces": [ + "application/json" + ], + "summary": "Get schema specification by schema id and version", + "parameters": [ + { + "type": "string", + "description": "schema id", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "version", + "name": "version", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + }, + "404": { + "description": "Not Found" + }, + "500": { + "description": "Internal Server Error" + } + } + } + } + }, + "definitions": { + "registry.SchemaRegistrationRequest": { + "type": "object", + "properties": { + "attributes": { + "type": "string" + }, + "compatibility_mode": { + "type": "string" + }, + "description": { + "type": "string" + }, + "last_created": { + "type": "string" + }, + "name": { + "type": "string" + }, + "publisher_id": { + "type": "string" + }, + "schema_type": { + "type": "string" + }, + "specification": { + "type": "string" + }, + "validity_mode": { + "type": "string" + } + } + }, + "registry.SchemaUpdateRequest": { + "type": "object", + "properties": { + "attributes": { + "type": "string" + }, + "description": { + "type": "string" + }, + "specification": { + "type": "string" + } + } + } + } +} diff --git a/registry/docs/swagger.yaml b/registry/docs/swagger.yaml new file mode 100644 index 0000000..142f974 --- /dev/null +++ b/registry/docs/swagger.yaml @@ -0,0 +1,300 @@ +definitions: + registry.SchemaRegistrationRequest: + properties: + attributes: + type: string + compatibility_mode: + type: string + description: + type: string + last_created: + type: string + name: + type: string + publisher_id: + type: string + schema_type: + type: string + specification: + type: string + validity_mode: + type: string + type: object + registry.SchemaUpdateRequest: + properties: + attributes: + type: string + description: + type: string + specification: + type: string + type: object +info: + contact: {} + title: Schema Registry API + version: "1.0" +paths: + /schemas: + get: + produces: + - application/json + responses: + "200": + description: OK + "404": + description: Not Found + "500": + description: Internal Server Error + summary: Get all active 
schemas + post: + consumes: + - application/json + parameters: + - description: schema registration request + in: body + name: data + schema: + $ref: '#/definitions/registry.SchemaRegistrationRequest' + produces: + - application/json + responses: + "201": + description: Created + "400": + description: Bad Request + "409": + description: Conflict + "500": + description: Internal Server Error + summary: Post new schema + /schemas/{id}: + delete: + parameters: + - description: schema id + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + "400": + description: Bad Request + "404": + description: Not Found + summary: Delete schema by schema id + put: + consumes: + - application/json + parameters: + - description: schema id + in: path + name: id + required: true + type: string + - description: schema update request + in: body + name: data + required: true + schema: + $ref: '#/definitions/registry.SchemaUpdateRequest' + produces: + - application/json + responses: + "200": + description: OK + "400": + description: Bad Request + "404": + description: Not Found + "409": + description: Conflict + "500": + description: Internal Server Error + summary: Put new schema version + /schemas/{id}/versions: + get: + parameters: + - description: schema id + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + "404": + description: Not Found + "500": + description: Internal Server Error + summary: Get all active schema versions by schema id + /schemas/{id}/versions/{version}: + delete: + consumes: + - application/json + parameters: + - description: schema id + in: path + name: id + required: true + type: string + - description: version + in: path + name: version + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + "400": + description: Bad Request + "404": + description: Not Found + summary: Delete schema version by schema id and version + get: + parameters: + - description: schema id + in: path + name: id + required: true + type: string + - description: version + in: path + name: version + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + "404": + description: Not Found + "500": + description: Internal Server Error + summary: Get schema version by schema id and version + /schemas/{id}/versions/{version}/spec: + get: + parameters: + - description: schema id + in: path + name: id + required: true + type: string + - description: version + in: path + name: version + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + "404": + description: Not Found + "500": + description: Internal Server Error + summary: Get schema specification by schema id and version + /schemas/{id}/versions/all: + get: + parameters: + - description: schema id + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + "404": + description: Not Found + "500": + description: Internal Server Error + summary: Get schema by schema id + /schemas/{id}/versions/latest: + get: + parameters: + - description: schema id + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + "404": + description: Not Found + "500": + description: Internal Server Error + summary: Get the latest schema version by schema id + 
/schemas/all: + get: + produces: + - application/json + responses: + "200": + description: OK + "404": + description: Not Found + "500": + description: Internal Server Error + summary: Get all schemas + /schemas/search: + get: + parameters: + - description: schema id + in: query + name: id + type: string + - description: schema version + in: query + name: version + type: string + - description: schema type + in: query + name: type + type: string + - description: schema name + in: query + name: name + type: string + - description: order by name, type, id or version + in: query + name: orderBy + type: string + - description: sort schemas either asc or desc + in: query + name: sort + type: string + - description: maximum number of retrieved schemas matching the criteria + in: query + name: limit + type: string + - description: schema attributes + in: query + name: attributes + type: string + produces: + - application/json + responses: + "200": + description: OK + "400": + description: Bad Request + "404": + description: Not Found + "500": + description: Internal Server Error + summary: Search schemas +swagger: "2.0" diff --git a/registry/go.mod b/registry/go.mod new file mode 100644 index 0000000..826d661 --- /dev/null +++ b/registry/go.mod @@ -0,0 +1,65 @@ +module github.com/dataphos/aquarium-janitor-standalone-sr + +go 1.17 + +require ( + github.com/DATA-DOG/go-sqlmock v1.5.0 + github.com/dataphos/lib-logger v1.0.0 + github.com/go-chi/chi/v5 v5.0.10 + github.com/pkg/errors v0.9.1 + github.com/stretchr/testify v1.8.4 + golang.org/x/text v0.13.0 // indirect + gorm.io/driver/postgres v1.5.2 + gorm.io/gorm v1.25.4 +) + +require ( + github.com/cyberphone/json-canonicalization v0.0.0-20230710064741-aa7fe85c7dbd + github.com/dataphos/lib-httputil v1.0.0 + github.com/dataphos/lib-retry v1.0.0 + github.com/google/go-cmp v0.5.9 + github.com/hamba/avro/v2 v2.16.0 + github.com/hashicorp/golang-lru v1.0.2 + github.com/prometheus/client_golang v1.17.0 + github.com/spf13/cast v1.5.1 + github.com/swaggo/http-swagger v1.3.4 + github.com/swaggo/swag v1.16.2 + golang.org/x/sync v0.3.0 +) + +require ( + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/go-openapi/jsonpointer v0.20.0 // indirect + github.com/go-openapi/jsonreference v0.20.2 // indirect + github.com/go-openapi/spec v0.20.9 // indirect + github.com/go-openapi/swag v0.22.4 // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/jackc/pgx/v5 v5.4.3 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_model v0.5.0 // indirect + github.com/prometheus/common v0.44.0 // indirect + github.com/prometheus/procfs v0.12.0 // indirect + github.com/swaggo/files v1.0.1 // indirect + go.uber.org/multierr v1.11.0 // indirect + go.uber.org/zap 
v1.26.0 // indirect + golang.org/x/crypto v0.13.0 // indirect + golang.org/x/net v0.15.0 // indirect + golang.org/x/sys v0.12.0 // indirect + golang.org/x/tools v0.13.0 // indirect + google.golang.org/protobuf v1.31.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/registry/go.sum b/registry/go.sum new file mode 100644 index 0000000..27a7b5b --- /dev/null +++ b/registry/go.sum @@ -0,0 +1,716 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= 
+github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= +github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/agiledragon/gomonkey/v2 v2.3.1/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY= +github.com/alecthomas/kingpin/v2 v2.3.1/go.mod h1:oYL5vtsvEHZGHxU7DMp32Dvx+qL+ptGn6lWaot2vCNE= +github.com/alecthomas/kingpin/v2 v2.3.2/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyberphone/json-canonicalization v0.0.0-20230710064741-aa7fe85c7dbd h1:0av0vtcjA8Hqv5gyWj79CLCFVwOOyBNWPjrfUWceMNg= +github.com/cyberphone/json-canonicalization 
v0.0.0-20230710064741-aa7fe85c7dbd/go.mod h1:uzvlm1mxhHkdfqitSA92i7Se+S9ksOn3a3qmv/kyOCw= +github.com/dataphos/lib-httputil v1.0.0 h1:xfaZqHz+PXxifPJU0kS/FhbQG7dEVQEibBCz9MPBPgY= +github.com/dataphos/lib-httputil v1.0.0/go.mod h1:XlXMsNAj94vwBt0pc3G9reLln51G5puRX8Qv24zmmiI= +github.com/dataphos/lib-logger v1.0.0 h1:c6d1//cyVpXB0QvixUb79rMz9OuFzvGYtk2PE8WXqtE= +github.com/dataphos/lib-logger v1.0.0/go.mod h1:AJi106+YVssJ0ak0GrrMoqvtgA+0ido2ZlvxuKyxqUQ= +github.com/dataphos/lib-retry v1.0.0 h1:pvh00Esu34z9bWKliphkeT8DHO9paLOGAi9oQ3yVN4c= +github.com/dataphos/lib-retry v1.0.0/go.mod h1:0T0VfgdamSHvieGMVMBRThXqZGezx/E1bItanDHsmDM= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= +github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk= +github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= +github.com/go-openapi/jsonpointer v0.20.0 h1:ESKJdU9ASRfaPNOPRx12IUyA1vn3R9GiE3KYD14BXdQ= +github.com/go-openapi/jsonpointer v0.20.0/go.mod h1:6PGzBjjIIumbLYysB73Klnms1mwnU4G3YHOECG3CedA= +github.com/go-openapi/jsonreference v0.19.6/go.mod 
h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= +github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= +github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE= +github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= +github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I= +github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= +github.com/go-openapi/spec v0.20.9 h1:xnlYNQAwKd2VQRRfwTEI0DcK+2cbuvI/0c7jx3gA8/8= +github.com/go-openapi/spec v0.20.9/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU= +github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod 
h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/hamba/avro/v2 v2.16.0 h1:0XhyP65Hs8iMLtdSR0v7ZrwRjsbIZdvr7KzYgmx1Mbo= +github.com/hamba/avro/v2 v2.16.0/go.mod 
h1:Q9YK+qxAhtVrNqOhwlZTATLgLA8qxG2vtvkhK8fJ7Jo= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= +github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.3.1/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8= +github.com/jackc/pgx/v5 v5.4.3 h1:cxFyXhxlvAifxnkKKdlxv8XqUf59tDlYjnV5YYfsJJY= +github.com/jackc/pgx/v5 v5.4.3/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA= +github.com/jackc/puddle/v2 v2.2.0/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod 
h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= +github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U= +github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= +github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= +github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= +github.com/otiai10/mint v1.3.3/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 
h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y= +github.com/prometheus/client_golang v1.15.1/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt1N9XgF6zxWmaC0xOk= +github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= +github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= +github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/client_model v0.4.1-0.20230718164431-9a2bf3000d16/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= +github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= +github.com/prometheus/common v0.42.0/go.mod h1:xBwqVerjNdUDjgODMpudtOMwlOwf2SaTr1yjz4b7Zbc= +github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= +github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= +github.com/prometheus/procfs v0.9.0/go.mod 
h1:+pB4zwohETzFnmlpe6yd2lSc+0/46IYZRB/chUwxUZY= +github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= +github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE= +github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg= +github.com/swaggo/http-swagger v1.3.4 h1:q7t/XLx0n15H1Q9/tk3Y9L4n210XzJF5WtnDX64a5ww= +github.com/swaggo/http-swagger v1.3.4/go.mod h1:9dAh0unqMBAlbp1uE2Uc2mQTxNMU/ha4UbucIg1MFkQ= 
+github.com/swaggo/swag v1.8.1/go.mod h1:ugemnJsPZm/kRwFUnzBlbHRd0JY9zE1M4F+uy2pAaPQ= +github.com/swaggo/swag v1.16.2 h1:28Pp+8DkQoV+HLzLx8RGJZXNGKbFqnuvSbAAtoxiY04= +github.com/swaggo/swag v1.16.2/go.mod h1:6YzXnDcpr0767iOejs318CwYkCQqyGer6BizOg03f+E= +github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/xhit/go-str2duration v1.2.0/go.mod h1:3cPSlfZlUHVlneIVfePFWcJZsuwf+P1v2SRTV4cUmp4= +github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= +go.uber.org/goleak v1.2.0/go.mod h1:XJYK+MuIchqpmGmUSAzotztawfKvYLUIgg7guXrwVUo= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= +go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo= +go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE= +golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= +golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= 
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod 
v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= +golang.org/x/oauth2 v0.8.0/go.mod h1:yr7u4HXZRm1R1kBWqr/xKNqewf0plRYoB7sla+BCIXE= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sys 
v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= 
+golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools 
v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= +golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api 
v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto 
v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 
v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/postgres v1.5.2 h1:ytTDxxEv+MplXOfFe3Lzm7SjG09fcdb3Z/c056DTBx0= +gorm.io/driver/postgres v1.5.2/go.mod h1:fmpX0m2I1PKuR7mKZiEluwrP3hbs+ps7JIGMUBpCgl8= +gorm.io/gorm v1.25.0/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= +gorm.io/gorm v1.25.4 h1:iyNd8fNAe8W9dvtlgeRI5zSVZPsq3OpcTu37cYcpCmw= +gorm.io/gorm v1.25.4/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/registry/internal/config/logger.go b/registry/internal/config/logger.go new file mode 100644 index 0000000..50b273e --- /dev/null +++ b/registry/internal/config/logger.go @@ -0,0 +1,41 @@ +package config + +import ( + "fmt" + "os" + + "github.com/dataphos/lib-logger/logger" +) + +const ( + LogLevelEnvKey = 
"LOG_LEVEL_MINIMUM" +) + +const ( + InfoLevel = "info" + WarnLevel = "warn" + ErrorLevel = "error" + DefaultLevel = InfoLevel +) + +var levels = map[string]logger.Level{InfoLevel: logger.LevelInfo, WarnLevel: logger.LevelWarn, ErrorLevel: logger.LevelError} + +// GetLogLevel returns minimum log level based on environment variable. +// Possible levels are info, warn, and error. Defaults to info. +func GetLogLevel() (logger.Level, []string) { + warnings := make([]string, 0, 2) // warnings about log config to be logged after the logger is configured + + levelString := os.Getenv(LogLevelEnvKey) + if levelString == "" { + warnings = append(warnings, fmt.Sprintf("Value for '%s' not set! Using level %s.", LogLevelEnvKey, DefaultLevel)) + return levels[DefaultLevel], warnings + } + + level, supported := levels[levelString] + if supported { + return level, warnings + } else { + warnings = append(warnings, fmt.Sprintf("Value %v for %v is not supported, using level %v.", levelString, LogLevelEnvKey, DefaultLevel)) + return levels[DefaultLevel], warnings + } +} diff --git a/registry/internal/errcodes/errcodes.go b/registry/internal/errcodes/errcodes.go new file mode 100644 index 0000000..e71f615 --- /dev/null +++ b/registry/internal/errcodes/errcodes.go @@ -0,0 +1,24 @@ +package errcodes + +const ( + DatabaseConnectionInitialization = 100 + InvalidDatabaseState = 101 + DatabaseInitialization = 102 + ServerInitialization = 103 + ExternalCheckerInitialization = 104 + ServerShutdown = 200 + BadRequest = 400 + InternalServerError = 500 + Miscellaneous = 999 +) + +func FromHttpStatusCode(code int) uint64 { + switch { + case code >= 400 && code < 500: + return BadRequest + case code >= 500: + return InternalServerError + default: + return Miscellaneous + } +} diff --git a/registry/internal/errtemplates/errtemplates.go b/registry/internal/errtemplates/errtemplates.go new file mode 100644 index 0000000..1f31d72 --- /dev/null +++ b/registry/internal/errtemplates/errtemplates.go @@ -0,0 +1,27 @@ +package errtemplates + +import ( + "fmt" + "github.com/pkg/errors" +) + +const ( + envVariableNotDefinedTemplate = "env variable %s not defined" + expectedEnvVariableAsInt = "expected env variable %s as int, received %s instead" + parsingEnvVariableFailedTemplate = "parsing env variable %s failed" +) + +// EnvVariableNotDefined returns an error stating that the given env variable is not defined. +func EnvVariableNotDefined(name string) error { + return errors.Errorf(envVariableNotDefinedTemplate, name) +} + +// ExpectedInt returns an error stating that the given env variable was expected to be an int. +func ExpectedInt(name string, value string) error { + return errors.Errorf(expectedEnvVariableAsInt, name, value) +} + +// ParsingEnvVariableFailed returns a string stating that the given env variable couldn't be parsed properly. 
+func ParsingEnvVariableFailed(name string) string { + return fmt.Sprintf(parsingEnvVariableFailedTemplate, name) +} diff --git a/registry/internal/metrics/metrics.go b/registry/internal/metrics/metrics.go new file mode 100644 index 0000000..e0b175a --- /dev/null +++ b/registry/internal/metrics/metrics.go @@ -0,0 +1,49 @@ +package metrics + +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" +) + +var ( + schemaDeletedProm = promauto.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "schema_registry", + Name: "schema_deleted", + Help: "Indicates whether the schema has been deleted (1 = schema deleted)", + }, + []string{"id"}) + schemaRegisteredProm = promauto.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "schema_registry", + Name: "schema_registered", + Help: "Indicates whether new schema has been registered (1 = schema registered)", + }, + []string{"id", "version"}) + schemaUpdatedProm = promauto.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "schema_registry", + Name: "schema_updated", + Help: "Indicates whether the schema has been updated (1 = schema updated)", + }, + []string{"id", "version"}) + schemaVersionDeletedProm = promauto.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "schema_registry", + Name: "schema_version_deleted", + Help: "Indicates whether schema version has been deleted (1 = schema version deleted)", + }, + []string{"id", "version"}) +) + +func UpdateSchemaMetricUpdate(id string, ver string) { + schemaUpdatedProm.WithLabelValues(id, ver).Set(1) +} + +func AddedSchemaMetricUpdate(id string, ver string) { + schemaRegisteredProm.WithLabelValues(id, ver).Set(1) +} + +func DeletedSchemaMetricUpdate(id string) { + schemaDeletedProm.WithLabelValues(id).Set(1) +} + +func DeleteSchemaVersionMetricUpdate(id string, ver string) { + schemaVersionDeletedProm.WithLabelValues(id, ver).Set(1) +} diff --git a/registry/licenses/LICENSE-3RD-PARTY.md b/registry/licenses/LICENSE-3RD-PARTY.md new file mode 100644 index 0000000..5193a73 --- /dev/null +++ b/registry/licenses/LICENSE-3RD-PARTY.md @@ -0,0 +1,48 @@ +| Module | License | +|:--------------------------------------------------------------------------------|:-------------| +| github.com/KyleBanks/depth v1.2.1 (indirect) | MIT | +| github.com/beorn7/perks/quantile | MIT | +| github.com/cespare/xxhash/v2 v2.2.0 (indirect) | MIT | +| github.com/cyberphone/json-canonicalization/go/src/webpki.org/jsoncanonicalizer | Apache-2.0 | +| github.com/go-chi/chi/v5 v5.0.10 | MIT | +| github.com/go-openapi/jsonpointer v0.20.0 (indirect) | Apache-2.0 | +| github.com/go-openapi/jsonreference v0.20.2 (indirect) | Apache-2.0 | +| github.com/go-openapi/spec v0.20.9 (indirect) | Apache-2.0 | +| github.com/go-openapi/swag v0.22.4 (indirect) | Apache-2.0 | +| github.com/golang/protobuf/proto | BSD-3-Clause | +| github.com/hamba/avro/v2 v2.16.0 | MIT | +| github.com/hashicorp/golang-lru v1.0.2 | MPL-2.0 | +| github.com/jackc/pgpassfile v1.0.0 (indirect) | MIT | +| github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a (indirect) | MIT | +| github.com/jackc/pgx/v5 v5.4.3 (indirect) | MIT | +| github.com/jinzhu/inflection v1.0.0 (indirect) | MIT | +| github.com/jinzhu/now v1.1.5 (indirect) | MIT | +| github.com/josharian/intern v1.0.0 (indirect) | MIT | +| github.com/json-iterator/go v1.1.12 (indirect) | MIT | +| github.com/mailru/easyjson v0.7.7 (indirect) | MIT | +| github.com/matttproud/golang_protobuf_extensions/pbutil | Apache-2.0 | +| github.com/mitchellh/mapstructure v1.5.0 
(indirect) | MIT | +| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd (indirect) | Apache-2.0 | +| github.com/modern-go/reflect2 v1.0.2 (indirect) | Apache-2.0 | +| github.com/pkg/errors v0.9.1 | BSD-2-Clause | +| github.com/prometheus/client_golang/prometheus | Apache-2.0 | +| github.com/prometheus/client_model/go | Apache-2.0 | +| github.com/prometheus/common v0.44.0 (indirect) | Apache-2.0 | +| github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg | BSD-3-Clause | +| github.com/prometheus/procfs v0.12.0 (indirect) | Apache-2.0 | +| github.com/spf13/cast v1.5.1 | MIT | +| github.com/swaggo/files v1.0.1 (indirect) | MIT | +| github.com/swaggo/http-swagger v1.3.4 | MIT | +| github.com/swaggo/swag v1.16.2 | MIT | +| go.uber.org/multierr v1.11.0 (indirect) | MIT | +| go.uber.org/zap v1.26.0 (indirect) | MIT | +| golang.org/x/crypto/pbkdf2 | BSD-3-Clause | +| golang.org/x/net/webdav | BSD-3-Clause | +| golang.org/x/sync/singleflight | BSD-3-Clause | +| golang.org/x/sys v0.12.0 (indirect) | BSD-3-Clause | +| golang.org/x/text v0.13.0 (indirect) | BSD-3-Clause | +| golang.org/x/tools v0.13.0 (indirect) | BSD-3-Clause | +| google.golang.org/protobuf v1.31.0 (indirect) | BSD-3-Clause | +| gopkg.in/yaml.v3 v3.0.1 (indirect) | MIT | +| gorm.io/driver/postgres v1.5.2 | MIT | +| gorm.io/gorm v1.25.4 | MIT | \ No newline at end of file diff --git a/registry/licenses/compatibility-checker/LICENSE-3RD-PARTY.md b/registry/licenses/compatibility-checker/LICENSE-3RD-PARTY.md new file mode 100644 index 0000000..73ce6fa --- /dev/null +++ b/registry/licenses/compatibility-checker/LICENSE-3RD-PARTY.md @@ -0,0 +1,127 @@ +# Licenses list + +Dependencies sometimes change licenses between versions, please keep this up to date with every new library use. 
+ + (Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Classic Module (ch.qos.logback:logback-classic:1.2.3 - http://logback.qos.ch/logback-classic) + (Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Core Module (ch.qos.logback:logback-core:1.2.3 - http://logback.qos.ch/logback-core) + (The Apache License, Version 2.0) kaml (com.charleskorn.kaml:kaml:0.20.0 - https://github.com/charleskorn/kaml) + (The Apache Software License, Version 2.0) Handy URI Templates (com.damnhandy:handy-uri-templates:2.1.8 - https://github.com/damnhandy/Handy-URI-Templates) + (Apache License, Version 2.0) ClassMate (com.fasterxml:classmate:1.5.1 - https://github.com/FasterXML/java-classmate) + (The Apache Software License, Version 2.0) Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.13.4 - http://github.com/FasterXML/jackson) + (The Apache Software License, Version 2.0) Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core) + (The Apache Software License, Version 2.0) jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.1 - http://github.com/FasterXML/jackson) + (The Apache Software License, Version 2.0) Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) + (The Apache Software License, Version 2.0) Jackson datatype: org.json (com.fasterxml.jackson.datatype:jackson-datatype-json-org:2.11.4 - http://github.com/FasterXML/jackson-datatypes-misc) + (The Apache Software License, Version 2.0) Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + (The Apache Software License, Version 2.0) Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + (Apache License, Version 2.0) everit-org/json-schema (com.github.everit-org.json-schema:org.everit.json.schema:1.14.1 - https://github.com/everit-org/json-schema) + (BSD 2-Clause License) zstd-jni (com.github.luben:zstd-jni:1.4.4-7 - https://github.com/luben/zstd-jni) + (Apache-2.0) proto-google-common-protos (com.google.api.grpc:proto-google-common-protos:2.9.3 - https://github.com/googleapis/java-iam/proto-google-common-protos) + (Apache 2.0) error-prone annotations (com.google.errorprone:error_prone_annotations:2.5.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) + (The Apache Software License, Version 2.0) Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) + (Apache License, Version 2.0) Guava: Google Core Libraries for Java (com.google.guava:guava:30.1.1-jre - https://github.com/google/guava/guava) + (The Apache Software License, Version 2.0) J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) + (The Apache Software License, Version 2.0) Jimfs (com.google.jimfs:jimfs:1.1 - https://github.com/google/jimfs/jimfs) + (BSD-3-Clause) Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.21.9 - https://developers.google.com/protocol-buffers/protobuf-java/) + (Go License) RE2/J (com.google.re2j:re2j:1.6 - http://github.com/google/re2j) + (Unicode/ICU License) ICU4J (com.ibm.icu:icu4j:71.1 - 
https://icu.unicode.org/) + (The Apache Software License, Version 2.0) project ':json-path' (com.jayway.jsonpath:json-path:2.4.0 - https://github.com/jayway/JsonPath) + (Apache 2.0) JavaPoet (com.squareup:javapoet:1.13.0 - http://github.com/square/javapoet/) + (The Apache Software License, Version 2.0) KotlinPoet (com.squareup:kotlinpoet:1.7.2 - https://github.com/square/kotlinpoet) + (The Apache Software License, Version 2.0) Okio (com.squareup.okio:okio:2.8.0 - https://github.com/square/okio/) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-compiler:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-grpc-server-generator:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-java-generator:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-kotlin-generator:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-profiles:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire Multiplatform Runtime (Experimental) (com.squareup.wire:wire-runtime:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-schema:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-swift-generator:3.7.1 - https://github.com/square/wire) + (Apache License 2.0) JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) + (Apache License, Version 2.0) Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/) + (The Apache Software License, Version 2.0) Commons Digester (commons-digester:commons-digester:2.1 - http://commons.apache.org/digester/) + (The Apache Software License, Version 2.0) Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/) + (Apache License, Version 2.0) Apache Commons Validator (commons-validator:commons-validator:1.7 - http://commons.apache.org/proper/commons-validator/) + (Apache License Version 2.0) apicurio-common-app-components-logging (io.apicurio:apicurio-common-app-components-logging:0.1.13.Final - https://www.apicur.io/apicurio-common-app-components-logging/) + (Apache License Version 2.0) apicurio-registry-common (io.apicurio:apicurio-registry-common:2.3.1.Final - https://www.apicur.io/apicurio-registry-common/) + (Apache License Version 2.0) apicurio-registry-protobuf-schema-utilities (io.apicurio:apicurio-registry-protobuf-schema-utilities:2.3.1.Final - https://www.apicur.io/apicurio-registry-protobuf-schema-utilities/) + (Apache License Version 2.0) apicurio-registry-schema-util-common (io.apicurio:apicurio-registry-schema-util-common:2.3.1.Final - https://www.apicur.io/apicurio-registry-schema-util-common/) + (Apache License Version 2.0) apicurio-registry-schema-util-json (io.apicurio:apicurio-registry-schema-util-json:2.3.1.Final - https://www.apicur.io/apicurio-registry-schema-util-json/) + (Apache License Version 2.0) apicurio-registry-schema-util-protobuf (io.apicurio:apicurio-registry-schema-util-protobuf:2.3.1.Final - https://www.apicur.io/apicurio-registry-schema-util-protobuf/) + (Apache License 2.0) utils (io.confluent:common-utils:7.2.1 - http://confluent.io/common-utils) + 
(Apache License 2.0) kafka-schema-registry-client (io.confluent:kafka-schema-registry-client:7.2.1 - http://confluent.io/kafka-schema-registry-client) + (Apache License 2.0) Swift Poet (io.outfoxx:swiftpoet:1.0.0 - https://github.com/outfoxx/swiftpoet) + (BSD New) Sentry-Java client (io.sentry:sentry:1.7.30 - https://github.com/getsentry/sentry-java/sentry) + (Apache License 2.0) swagger-annotations (io.swagger.core.v3:swagger-annotations:2.1.10 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations) + (EDL 1.0) Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) + (EPL 2.0) (GPL2 w/ CPE) Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) + (Apache License 2.0) Jakarta Bean Validation API (jakarta.validation:jakarta.validation-api:2.0.2 - https://beanvalidation.org) + (Eclipse Distribution License - v 1.0) Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) + (Apache 2) Joda-Time (joda-time:joda-time:2.10.2 - https://www.joda.org/joda-time/) + (Apache License, Version 2.0) Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.10.22 - https://bytebuddy.net/byte-buddy) + (Apache License, Version 2.0) Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.10.22 - https://bytebuddy.net/byte-buddy-agent) + (The Apache Software License, Version 2.0) ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/) + (The Apache Software License, Version 2.0) JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/) + (Apache License, Version 2.0) Apache Avro (org.apache.avro:avro:1.11.0 - https://avro.apache.org) + (Apache License, Version 2.0) Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/) + (The Apache Software License, Version 2.0) Apache Kafka (org.apache.kafka:kafka-clients:2.6.0 - https://kafka.apache.org) + (Apache License, Version 2.0) Apache Log4j API (org.apache.logging.log4j:log4j-api:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-api/) + (Apache License, Version 2.0) Apache Log4j to SLF4J Adapter (org.apache.logging.log4j:log4j-to-slf4j:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-to-slf4j/) + (Apache License, Version 2.0) tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.44 - https://tomcat.apache.org/) + (Apache License, Version 2.0) tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.44 - https://tomcat.apache.org/) + (Apache License, Version 2.0) tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.44 - https://tomcat.apache.org/) + (The Apache License, Version 2.0) org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian) + (Apache License, Version 2.0) AssertJ fluent assertions (org.assertj:assertj-core:3.18.1 - https://assertj.github.io/doc/assertj-core/) + (The MIT License) Checker Qual (org.checkerframework:checker-qual:3.8.0 - https://checkerframework.org) + (BSD License 3) Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/) + (Apache License 2.0) Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.1.7.Final - http://hibernate.org/validator/hibernate-validator) + (Apache License, version 2.0) JBoss Logging 3 
(org.jboss.logging:jboss-logging:3.4.1.Final - http://www.jboss.org) + (EPL 2.0) (GPL2 w/ CPE) jboss-jakarta-jaxrs-api_spec (org.jboss.spec.javax.ws.rs:jboss-jaxrs-api_2.1_spec:2.0.1.Final - http://www.jboss.org/jboss-jaxrs-api_2.1_spec) + (The Apache Software License, Version 2.0) IntelliJ IDEA Annotations (org.jetbrains:annotations:13.0 - http://www.jetbrains.org) + (The Apache License, Version 2.0) Kotlin Reflect (org.jetbrains.kotlin:kotlin-reflect:1.4.31 - https://kotlinlang.org/) + (The Apache License, Version 2.0) Kotlin Stdlib (org.jetbrains.kotlin:kotlin-stdlib:1.4.31 - https://kotlinlang.org/) + (The Apache License, Version 2.0) Kotlin Stdlib Common (org.jetbrains.kotlin:kotlin-stdlib-common:1.4.31 - https://kotlinlang.org/) + (The Apache License, Version 2.0) Kotlin Stdlib Jdk7 (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.4.31 - https://kotlinlang.org/) + (The Apache License, Version 2.0) Kotlin Stdlib Jdk8 (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.4.31 - https://kotlinlang.org/) + (The Apache Software License, Version 2.0) kotlinx-serialization-core (org.jetbrains.kotlinx:kotlinx-serialization-core-jvm:1.0.1 - https://github.com/Kotlin/kotlinx.serialization) + (The JSON License) JSON in Java (org.json:json:20220320 - https://github.com/douglascrockford/JSON-java) + (Eclipse Public License v2.0) JUnit Jupiter (Aggregator) (org.junit.jupiter:junit-jupiter:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter API (org.junit.jupiter:junit-jupiter-api:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter Engine (org.junit.jupiter:junit-jupiter-engine:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter Params (org.junit.jupiter:junit-jupiter-params:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Platform Commons (org.junit.platform:junit-platform-commons:1.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Platform Engine API (org.junit.platform:junit-platform-engine:1.7.1 - https://junit.org/junit5/) + (The Apache Software License, Version 2.0) LZ4 and xxHash (org.lz4:lz4-java:1.7.1 - https://github.com/lz4/lz4-java) + (The MIT License) mockito-core (org.mockito:mockito-core:3.6.28 - https://github.com/mockito/mockito) + (The MIT License) mockito-junit-jupiter (org.mockito:mockito-junit-jupiter:3.6.28 - https://github.com/mockito/mockito) + (Apache License, Version 2.0) Objenesis (org.objenesis:objenesis:3.1 - http://objenesis.org) + (The Apache License, Version 2.0) org.opentest4j:opentest4j (org.opentest4j:opentest4j:1.2.0 - https://github.com/ota4j-team/opentest4j) + (BSD) ASM Core (org.ow2.asm:asm:5.0.4 - http://asm.objectweb.org/asm/) + (The MIT License) Project Lombok (org.projectlombok:lombok:1.18.22 - https://projectlombok.org) + (The Apache Software License, Version 2.0) JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) + (MIT License) JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.30 - http://www.slf4j.org) + (MIT License) SLF4J API Module (org.slf4j:slf4j-api:1.7.30 - http://www.slf4j.org) + (Apache License, Version 2.0) SnakeYAML Engine (org.snakeyaml:snakeyaml-engine:2.1 - http://www.snakeyaml.org) + (Apache License, Version 2.0) Spring AOP (org.springframework:spring-aop:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Beans (org.springframework:spring-beans:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) 
Spring Context (org.springframework:spring-context:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Core (org.springframework:spring-core:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring TestContext Framework (org.springframework:spring-test:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Web (org.springframework:spring-web:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Web MVC (org.springframework:spring-webmvc:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) spring-boot (org.springframework.boot:spring-boot:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter (org.springframework.boot:spring-boot-starter:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-logging (org.springframework.boot:spring-boot-starter-logging:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.7.5 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.7.5 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-validation (org.springframework.boot:spring-boot-starter-validation:2.7.5 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.7.5 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-test (org.springframework.boot:spring-boot-test:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.4.4 - https://spring.io/projects/spring-boot) + (The Apache Software License, Version 2.0) snappy-java (org.xerial.snappy:snappy-java:1.1.7.3 - https://github.com/xerial/snappy-java) + (The Apache Software License, Version 2.0) org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.7.0 - https://www.xmlunit.org/) + (Apache License, Version 2.0) SnakeYAML (org.yaml:snakeyaml:1.33 - https://bitbucket.org/snakeyaml/snakeyaml) diff --git a/registry/licenses/validity-checker/LICENSE-3RD-PARTY.md b/registry/licenses/validity-checker/LICENSE-3RD-PARTY.md new file mode 100644 index 0000000..5315892 --- /dev/null +++ b/registry/licenses/validity-checker/LICENSE-3RD-PARTY.md @@ -0,0 +1,128 @@ +# Licenses list + +Dependencies sometimes change licenses between versions, please keep this up to date with every new library use. 
+ + (Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Classic Module (ch.qos.logback:logback-classic:1.2.3 - http://logback.qos.ch/logback-classic) + (Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Core Module (ch.qos.logback:logback-core:1.2.3 - http://logback.qos.ch/logback-core) + (The Apache License, Version 2.0) kaml (com.charleskorn.kaml:kaml:0.20.0 - https://github.com/charleskorn/kaml) + (The Apache Software License, Version 2.0) Handy URI Templates (com.damnhandy:handy-uri-templates:2.1.8 - https://github.com/damnhandy/Handy-URI-Templates) + (Apache License, Version 2.0) ClassMate (com.fasterxml:classmate:1.5.1 - https://github.com/FasterXML/java-classmate) + (The Apache Software License, Version 2.0) Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.13.4 - http://github.com/FasterXML/jackson) + (The Apache Software License, Version 2.0) Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core) + (The Apache Software License, Version 2.0) jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.1 - http://github.com/FasterXML/jackson) + (The Apache Software License, Version 2.0) Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) + (The Apache Software License, Version 2.0) Jackson datatype: org.json (com.fasterxml.jackson.datatype:jackson-datatype-json-org:2.11.4 - http://github.com/FasterXML/jackson-datatypes-misc) + (The Apache Software License, Version 2.0) Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + (The Apache Software License, Version 2.0) Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + (The Apache License, Version 2.0) Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.8 - https://github.com/FasterXML/woodstox) + (Apache License, Version 2.0) everit-org/json-schema (com.github.everit-org.json-schema:org.everit.json.schema:1.14.1 - https://github.com/everit-org/json-schema) + (Apache-2.0) proto-google-common-protos (com.google.api.grpc:proto-google-common-protos:2.9.3 - https://github.com/googleapis/java-iam/proto-google-common-protos) + (Apache 2.0) error-prone annotations (com.google.errorprone:error_prone_annotations:2.5.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) + (The Apache Software License, Version 2.0) Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) + (Apache License, Version 2.0) Guava: Google Core Libraries for Java (com.google.guava:guava:30.1.1-jre - https://github.com/google/guava/guava) + (The Apache Software License, Version 2.0) J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) + (The Apache Software License, Version 2.0) Jimfs (com.google.jimfs:jimfs:1.1 - https://github.com/google/jimfs/jimfs) + (BSD-3-Clause) Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.21.9 - https://developers.google.com/protocol-buffers/protobuf-java/) + (Go License) RE2/J (com.google.re2j:re2j:1.6 - http://github.com/google/re2j) + (Unicode/ICU License) ICU4J 
(com.ibm.icu:icu4j:71.1 - https://icu.unicode.org/) + (The Apache Software License, Version 2.0) project ':json-path' (com.jayway.jsonpath:json-path:2.4.0 - https://github.com/jayway/JsonPath) + (Apache 2.0) JavaPoet (com.squareup:javapoet:1.13.0 - http://github.com/square/javapoet/) + (The Apache Software License, Version 2.0) KotlinPoet (com.squareup:kotlinpoet:1.7.2 - https://github.com/square/kotlinpoet) + (The Apache Software License, Version 2.0) Okio (com.squareup.okio:okio:2.8.0 - https://github.com/square/okio/) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-compiler:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-grpc-server-generator:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-java-generator:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-kotlin-generator:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-profiles:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire Multiplatform Runtime (Experimental) (com.squareup.wire:wire-runtime:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-schema:3.7.1 - https://github.com/square/wire) + (The Apache Software License, Version 2.0) Wire (com.squareup.wire:wire-swift-generator:3.7.1 - https://github.com/square/wire) + (Apache License 2.0) JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) + (Apache License, Version 2.0) Apache Commons Codec (commons-codec:commons-codec:1.15 - https://commons.apache.org/proper/commons-codec/) + (Apache License, Version 2.0) Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/) + (The Apache Software License, Version 2.0) Commons Digester (commons-digester:commons-digester:2.1 - http://commons.apache.org/digester/) + (The Apache Software License, Version 2.0) Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/) + (Apache License, Version 2.0) Apache Commons Validator (commons-validator:commons-validator:1.7 - http://commons.apache.org/proper/commons-validator/) + (Apache License Version 2.0) apicurio-common-app-components-logging (io.apicurio:apicurio-common-app-components-logging:0.1.13.Final - https://www.apicur.io/apicurio-common-app-components-logging/) + (Apache License Version 2.0) apicurio-registry-common (io.apicurio:apicurio-registry-common:2.3.1.Final - https://www.apicur.io/apicurio-registry-common/) + (Apache License Version 2.0) apicurio-registry-protobuf-schema-utilities (io.apicurio:apicurio-registry-protobuf-schema-utilities:2.3.1.Final - https://www.apicur.io/apicurio-registry-protobuf-schema-utilities/) + (Apache License Version 2.0) apicurio-registry-schema-util-avro (io.apicurio:apicurio-registry-schema-util-avro:2.3.1.Final - https://www.apicur.io/apicurio-registry-schema-util-avro/) + (Apache License Version 2.0) apicurio-registry-schema-util-common (io.apicurio:apicurio-registry-schema-util-common:2.3.1.Final - https://www.apicur.io/apicurio-registry-schema-util-common/) + (Apache License Version 2.0) apicurio-registry-schema-util-json (io.apicurio:apicurio-registry-schema-util-json:2.3.1.Final - 
https://www.apicur.io/apicurio-registry-schema-util-json/) + (Apache License Version 2.0) apicurio-registry-schema-util-protobuf (io.apicurio:apicurio-registry-schema-util-protobuf:2.3.1.Final - https://www.apicur.io/apicurio-registry-schema-util-protobuf/) + (Apache License Version 2.0) apicurio-registry-schema-util-xml (io.apicurio:apicurio-registry-schema-util-xml:2.3.1.Final - https://www.apicur.io/apicurio-registry-schema-util-xml/) + (Apache License Version 2.0) apicurio-registry-schema-util-xsd (io.apicurio:apicurio-registry-schema-util-xsd:2.3.1.Final - https://www.apicur.io/apicurio-registry-schema-util-xsd/) + (Apache License 2.0) Swift Poet (io.outfoxx:swiftpoet:1.0.0 - https://github.com/outfoxx/swiftpoet) + (BSD New) Sentry-Java client (io.sentry:sentry:1.7.30 - https://github.com/getsentry/sentry-java/sentry) + (EDL 1.0) Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) + (EPL 2.0) (GPL2 w/ CPE) Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) + (Apache License 2.0) Jakarta Bean Validation API (jakarta.validation:jakarta.validation-api:2.0.2 - https://beanvalidation.org) + (Eclipse Distribution License - v 1.0) Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) + (Apache 2) Joda-Time (joda-time:joda-time:2.10.2 - https://www.joda.org/joda-time/) + (Apache License, Version 2.0) Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.10.22 - https://bytebuddy.net/byte-buddy) + (Apache License, Version 2.0) Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.10.22 - https://bytebuddy.net/byte-buddy-agent) + (The Apache Software License, Version 2.0) ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/) + (The Apache Software License, Version 2.0) JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/) + (Apache License, Version 2.0) Apache Avro (org.apache.avro:avro:1.11.1 - https://avro.apache.org) + (Apache License, Version 2.0) Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/) + (Apache License, Version 2.0) Apache Log4j API (org.apache.logging.log4j:log4j-api:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-api/) + (Apache License, Version 2.0) Apache Log4j to SLF4J Adapter (org.apache.logging.log4j:log4j-to-slf4j:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-to-slf4j/) + (Apache License, Version 2.0) Apache XML Security for Java (org.apache.santuario:xmlsec:3.0.1 - https://santuario.apache.org/) + (Apache License, Version 2.0) tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.44 - https://tomcat.apache.org/) + (Apache License, Version 2.0) tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.44 - https://tomcat.apache.org/) + (Apache License, Version 2.0) tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.44 - https://tomcat.apache.org/) + (The Apache License, Version 2.0) org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian) + (Apache License, Version 2.0) AssertJ fluent assertions (org.assertj:assertj-core:3.18.1 - https://assertj.github.io/doc/assertj-core/) + (The MIT License) Checker Qual (org.checkerframework:checker-qual:3.8.0 - 
https://checkerframework.org) + (The BSD License) Stax2 API (org.codehaus.woodstox:stax2-api:4.2.1 - http://github.com/FasterXML/stax2-api) + (BSD License 3) Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/) + (Apache License 2.0) Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.1.7.Final - http://hibernate.org/validator/hibernate-validator) + (Apache License, version 2.0) JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.1.Final - http://www.jboss.org) + (EPL 2.0) (GPL2 w/ CPE) jboss-jakarta-jaxrs-api_spec (org.jboss.spec.javax.ws.rs:jboss-jaxrs-api_2.1_spec:2.0.1.Final - http://www.jboss.org/jboss-jaxrs-api_2.1_spec) + (The Apache Software License, Version 2.0) IntelliJ IDEA Annotations (org.jetbrains:annotations:13.0 - http://www.jetbrains.org) + (The Apache License, Version 2.0) Kotlin Reflect (org.jetbrains.kotlin:kotlin-reflect:1.4.31 - https://kotlinlang.org/) + (The Apache License, Version 2.0) Kotlin Stdlib (org.jetbrains.kotlin:kotlin-stdlib:1.4.31 - https://kotlinlang.org/) + (The Apache License, Version 2.0) Kotlin Stdlib Common (org.jetbrains.kotlin:kotlin-stdlib-common:1.4.31 - https://kotlinlang.org/) + (The Apache License, Version 2.0) Kotlin Stdlib Jdk7 (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.4.31 - https://kotlinlang.org/) + (The Apache License, Version 2.0) Kotlin Stdlib Jdk8 (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.4.31 - https://kotlinlang.org/) + (The Apache Software License, Version 2.0) kotlinx-serialization-core (org.jetbrains.kotlinx:kotlinx-serialization-core-jvm:1.0.1 - https://github.com/Kotlin/kotlinx.serialization) + (The JSON License) JSON in Java (org.json:json:20220320 - https://github.com/douglascrockford/JSON-java) + (Eclipse Public License v2.0) JUnit Jupiter (Aggregator) (org.junit.jupiter:junit-jupiter:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter API (org.junit.jupiter:junit-jupiter-api:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter Engine (org.junit.jupiter:junit-jupiter-engine:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter Params (org.junit.jupiter:junit-jupiter-params:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Platform Commons (org.junit.platform:junit-platform-commons:1.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Platform Engine API (org.junit.platform:junit-platform-engine:1.7.1 - https://junit.org/junit5/) + (The MIT License) mockito-core (org.mockito:mockito-core:3.6.28 - https://github.com/mockito/mockito) + (The MIT License) mockito-junit-jupiter (org.mockito:mockito-junit-jupiter:3.6.28 - https://github.com/mockito/mockito) + (Apache License, Version 2.0) Objenesis (org.objenesis:objenesis:3.1 - http://objenesis.org) + (The Apache License, Version 2.0) org.opentest4j:opentest4j (org.opentest4j:opentest4j:1.2.0 - https://github.com/ota4j-team/opentest4j) + (BSD) ASM Core (org.ow2.asm:asm:5.0.4 - http://asm.objectweb.org/asm/) + (The MIT License) Project Lombok (org.projectlombok:lombok:1.18.20 - https://projectlombok.org) + (The Apache Software License, Version 2.0) JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) + (MIT License) JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.30 - http://www.slf4j.org) + (MIT License) SLF4J API Module (org.slf4j:slf4j-api:1.7.30 - http://www.slf4j.org) + (Apache License, Version 2.0) SnakeYAML Engine (org.snakeyaml:snakeyaml-engine:2.1 - 
http://www.snakeyaml.org) + (Apache License, Version 2.0) Spring AOP (org.springframework:spring-aop:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Beans (org.springframework:spring-beans:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Context (org.springframework:spring-context:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Core (org.springframework:spring-core:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring TestContext Framework (org.springframework:spring-test:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Web (org.springframework:spring-web:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Web MVC (org.springframework:spring-webmvc:5.3.23 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) spring-boot (org.springframework.boot:spring-boot:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter (org.springframework.boot:spring-boot-starter:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-logging (org.springframework.boot:spring-boot-starter-logging:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.7.5 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.7.5 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-validation (org.springframework.boot:spring-boot-starter-validation:2.7.5 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.7.5 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-test (org.springframework.boot:spring-boot-test:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.4.4 - https://spring.io/projects/spring-boot) + (The Apache Software License, Version 2.0) org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.7.0 - https://www.xmlunit.org/) + (Apache License, Version 2.0) SnakeYAML (org.yaml:snakeyaml:1.33 - https://bitbucket.org/snakeyaml/snakeyaml) + diff --git a/registry/registry/cache.go b/registry/registry/cache.go new file mode 100644 index 0000000..580adca --- /dev/null +++ b/registry/registry/cache.go @@ -0,0 +1,92 @@ +package registry + +import ( + lru "github.com/hashicorp/golang-lru" + 
"github.com/spf13/cast" + "golang.org/x/sync/singleflight" +) + +// cached decorates Service with a lru cache. +type cached struct { + Repository + cache *lru.TwoQueueCache + group singleflight.Group +} + +// newCache returns a new cached. +func newCache(repository Repository, size int) (*cached, error) { + cache, err := lru.New2Q(size) + if err != nil { + return nil, err + } + + return &cached{ + Repository: repository, + cache: cache, + group: singleflight.Group{}, + }, nil +} + +// GetSchemaVersionByIdAndVersion overrides the Repository.GetSchemaVersionByIdAndVersion method, caching each call to the underlying Repository, while also +// making sure there's only one inflight request for the same key (if multiple goroutines request the same schema, +// only one request is actually sent down, the rest wait for the first one to share its result). +func (c *cached) GetSchemaVersionByIdAndVersion(id, version string) (VersionDetails, error) { + // this should be faster than string concatenation + arrKey := [2]string{id, version} + var err error + v, ok := c.cache.Get(arrKey) + if !ok { + // cache miss, we need a string version of the key to satisfy the singleflight.Group method signature + key := id + "_" + version + v, err, _ = c.group.Do(key, func() (interface{}, error) { + if v, err = c.Repository.GetSchemaVersionByIdAndVersion(id, version); err != nil { + return VersionDetails{}, err + } + c.cache.Add(arrKey, v) + return v, err + }) + } + return v.(VersionDetails), err +} + +// DeleteSchemaVersion overrides the Repository.DeleteSchemaVersion method, caching each call to the underlying Repository, while also +// making sure there's only one inflight request for the same key (if multiple goroutines request the deletion of the same +// Schema version, only one request is actually sent down, the rest wait for the first one to share its result). +func (c *cached) DeleteSchemaVersion(id, version string) (bool, error) { + key := id + "_" + version + bool, err, _ := c.group.Do(key, func() (interface{}, error) { + bool, err := c.Repository.DeleteSchemaVersion(id, version) + if err != nil { + return false, err + } + + arrKey := [2]string{id, version} + c.cache.Remove(arrKey) + return bool, err + }) + return cast.ToBool(bool), err +} + +// DeleteSchema overrides the Repository.DeleteSchema method, caching each call to the underlying Repository, while also +// making sure there's only one inflight request for the same key (if multiple goroutines request the deletion of the same +// Schema, only one request is actually sent down, the rest wait for the first one to share its result). 
+func (c *cached) DeleteSchema(id string) (bool, error) { + v, err := c.Repository.GetSchemaVersionsById(id) + if err == nil { + // Schema with the given ID exists, and it's not already deactivated + deleted, err, _ := c.group.Do(id, func() (interface{}, error) { + deleted, err := c.Repository.DeleteSchema(id) + if err != nil { + return false, err + } + // remove schema versions that are present in the cache + for _, v := range v.VersionDetails { + arrKey := [2]string{v.SchemaID, v.Version} + c.cache.Remove(arrKey) + } + return deleted, nil + }) + return cast.ToBool(deleted), err + } + return false, nil +} diff --git a/registry/registry/cache_test.go b/registry/registry/cache_test.go new file mode 100644 index 0000000..71f8d2c --- /dev/null +++ b/registry/registry/cache_test.go @@ -0,0 +1,82 @@ +package registry + +import ( + "strconv" + "testing" + + "github.com/google/go-cmp/cmp" +) + +func TestCacheGetSchemaVersionByIdAndVersion(t *testing.T) { + repo := NewMockRepository() + c, err := newCache(repo, 10) + if err != nil { + t.Error(err) + } + id, version := "1", "1" + var storedSchema, cachedSchema VersionDetails + // after the first call the schema should be stored in cache + if storedSchema, err = c.GetSchemaVersionByIdAndVersion(id, version); err != nil { + t.Error(err) + } + // second call returns schema from cache + if cachedSchema, err = c.GetSchemaVersionByIdAndVersion(id, version); err != nil { + t.Error(err) + } + + if c.cache.Len() == 0 { + t.Error("Schema was not stored in the cache.") + } + if !cmp.Equal(cachedSchema, storedSchema) { + t.Error("Cached schema differs from the stored schema") + } +} + +func TestCacheDeleteSchemaVersion(t *testing.T) { + repo := NewMockRepository() + c, err := newCache(repo, 10) + if err != nil { + t.Error(err) + } + + id, version := "1", "1" + arrKey := [2]string{id, version} + VersionDetails := MockVersionDetails(id, version) + c.cache.Add(arrKey, VersionDetails) + + if _, err = c.DeleteSchemaVersion(id, version); err != nil { + t.Error(err) + } + if _, ok := c.cache.Get(arrKey); ok { + t.Error("Schema is still stored in cache") + } +} + +func TestDeleteSchema(t *testing.T) { + repo := NewMockRepository() + c, err := newCache(repo, 10) + if err != nil { + t.Error(err) + } + id := "mocking" + schema := MockSchema(id) + + for i := 1; i <= 10; i++ { + k := strconv.Itoa(i) + VersionDetails := MockVersionDetails(k, k) + arrKey := [2]string{id, k} + c.cache.Add(arrKey, VersionDetails) + schema.VersionDetails = append(schema.VersionDetails, VersionDetails) + } + repo.SetGetSchemaVersionsByIdResponse(id, schema, nil) + if deleted, err := c.DeleteSchema(id); err != nil { + t.Error(err) + } else { + if c.cache.Len() != 0 { + t.Error("Some schemas are still stored in cache") + } else if !deleted { + t.Error("Schema does not exist") + } + } + +} diff --git a/registry/registry/internal/hashutils/hash.go b/registry/registry/internal/hashutils/hash.go new file mode 100644 index 0000000..b17b11d --- /dev/null +++ b/registry/registry/internal/hashutils/hash.go @@ -0,0 +1,13 @@ +package hashutils + +import ( + "crypto/sha256" + "encoding/hex" +) + +// SHA256 calculates the schema hash using the SHA-256 algorithm.
+func SHA256(data []byte) string { + hasher := sha256.New() + _, _ = hasher.Write(data) + return hex.EncodeToString(hasher.Sum(nil)) +} diff --git a/registry/registry/mock.go b/registry/registry/mock.go new file mode 100644 index 0000000..8fea124 --- /dev/null +++ b/registry/registry/mock.go @@ -0,0 +1,196 @@ +//nolint:staticcheck,unused + +package registry + +import ( + "time" +) + +type mockRepository struct { + createSchemaResponse map[string]mockCreateSchema + getSchemaByIdAndVersionResponse map[string]mockGetSchemaVersionByIdAndVersion + updateSchemaByIdResponse map[string]mockUpdateSchemaById + getSchemaVersionsResponse map[string]mockGetSchemaVersionsById + getAllSchemaVersionsResponse map[string]mockGetAllSchemaVersions + getLatestSchemaVersionResponse map[string]mockGetLatestSchemaVersion + deleteSchemaResponse map[string]mockDeleteSchema + deleteVersionResponse map[string]mockDeleteSchemaVersion + getSchemasResponse map[string]mockGetSchemas + getAllSchemasResponse map[string]mockGetAllSchemas +} + +type mockCompChecker struct { + // checkCompResponse map[string]mockCheckComp +} + +type mockValChecker struct { + // checkValResponse map[string]mockValComp +} + +//type mockCheckComp struct { +// ok bool +// err error +//} + +//type mockValComp struct { +// ok bool +// err error +//} + +type mockCreateSchema struct { + VersionDetails VersionDetails +} + +type mockGetSchemaVersionByIdAndVersion struct { + VersionDetails VersionDetails +} + +type mockUpdateSchemaById struct { + VersionDetails VersionDetails +} + +type mockGetSchemaVersionsById struct { + schema Schema + err error +} + +type mockGetAllSchemaVersions struct { +} + +type mockGetLatestSchemaVersion struct { +} + +type mockDeleteSchema struct { +} + +type mockDeleteSchemaVersion struct { +} + +type mockGetSchemas struct { +} + +type mockGetAllSchemas struct { +} + +func MockSchema(id string) Schema { + return Schema{ + SchemaID: id, + SchemaType: "mocking", + Name: "mocking", + VersionDetails: nil, + Description: "mocking", + LastCreated: "mocking", + PublisherID: "mocking", + CompatibilityMode: "none", + ValidityMode: "none", + } +} + +func MockVersionDetails(id, version string) VersionDetails { + return VersionDetails{ + VersionID: id, + Version: version, + SchemaID: "mocking", + Specification: "mocking", + Description: "mocking", + SchemaHash: "mocking", + CreatedAt: time.Time{}, + VersionDeactivated: false, + } +} + +func NewMockRepository() *mockRepository { + return &mockRepository{ + createSchemaResponse: map[string]mockCreateSchema{}, + getSchemaByIdAndVersionResponse: map[string]mockGetSchemaVersionByIdAndVersion{}, + updateSchemaByIdResponse: map[string]mockUpdateSchemaById{}, + getSchemaVersionsResponse: map[string]mockGetSchemaVersionsById{}, + getAllSchemaVersionsResponse: map[string]mockGetAllSchemaVersions{}, + getLatestSchemaVersionResponse: map[string]mockGetLatestSchemaVersion{}, + deleteSchemaResponse: map[string]mockDeleteSchema{}, + deleteVersionResponse: map[string]mockDeleteSchemaVersion{}, + getSchemasResponse: map[string]mockGetSchemas{}, + getAllSchemasResponse: map[string]mockGetAllSchemas{}, + } +} + +func (c *mockCompChecker) Check(_ string, _ []string, _ string) (bool, error) { + return true, nil +} + +func (c *mockValChecker) Check(_, _, _ string) (bool, error) { + return true, nil +} + +func (m *mockRepository) CheckCompatibility(_, _ string) (bool, error) { + return true, nil +} + +func (m *mockRepository) DeleteSchema(_ string) (bool, error) { + return true, nil +} + +func (m 
*mockRepository) DeleteSchemaVersion(_, _ string) (bool, error) { + return true, nil +} + +func (m *mockRepository) GetSchemas() ([]Schema, error) { + return []Schema{{ + SchemaID: "mocking", + SchemaType: "mocking", + Name: "mocking", + VersionDetails: nil, + Description: "mocking", + LastCreated: "mocking", + PublisherID: "mocking", + CompatibilityMode: "none", + ValidityMode: "none", + }}, nil +} + +func (m *mockRepository) GetAllSchemas() ([]Schema, error) { + return []Schema{{ + SchemaID: "mocking", + SchemaType: "mocking", + Name: "mocking", + VersionDetails: nil, + Description: "mocking", + LastCreated: "mocking", + PublisherID: "mocking", + CompatibilityMode: "none", + ValidityMode: "none", + }}, nil +} + +func (m *mockRepository) GetLatestSchemaVersion(_ string) (VersionDetails, error) { + return MockVersionDetails("mocking", "mocking"), nil +} + +func (m *mockRepository) CreateSchema(_ SchemaRegistrationRequest) (VersionDetails, bool, error) { + return MockVersionDetails("mocking", "mocking"), true, nil +} + +func (m *mockRepository) GetSchemaVersionByIdAndVersion(id string, version string) (VersionDetails, error) { + return MockVersionDetails(id, version), nil +} + +func (m *mockRepository) UpdateSchemaById(id string, _ SchemaUpdateRequest) (VersionDetails, bool, error) { + return MockVersionDetails(id, "mocking"), true, nil +} + +func (m *mockRepository) SetGetSchemaVersionsByIdResponse(id string, schema Schema, err error) { + m.getSchemaVersionsResponse[id] = mockGetSchemaVersionsById{ + schema: schema, + err: err, + } +} + +func (m *mockRepository) GetSchemaVersionsById(id string) (Schema, error) { + response := m.getSchemaVersionsResponse[id] + return response.schema, response.err +} + +func (m *mockRepository) GetAllSchemaVersions(id string) (Schema, error) { + response := m.getSchemaVersionsResponse[id] + return response.schema, response.err +} diff --git a/registry/registry/model.go b/registry/registry/model.go new file mode 100644 index 0000000..12d4108 --- /dev/null +++ b/registry/registry/model.go @@ -0,0 +1,65 @@ +package registry + +import ( + "time" +) + +// Schema is a structure that defines the parent entity in the schema registry +type Schema struct { + SchemaID string `json:"schema_id,omitempty"` + SchemaType string `json:"schema_type"` + Name string `json:"name"` + VersionDetails []VersionDetails `json:"schemas"` + Description string `json:"description"` + LastCreated string `json:"last_created"` + PublisherID string `json:"publisher_id"` + CompatibilityMode string `json:"compatibility_mode"` + ValidityMode string `json:"validity_mode"` +} + +// VersionDetails represent the child entity in the schema registry model. +// The schema (specification) and version with some other details is set here. +type VersionDetails struct { + VersionID string `json:"version_id,omitempty"` + Version string `json:"version"` + SchemaID string `json:"schema_id"` + Specification string `json:"specification"` + Description string `json:"description"` + SchemaHash string `json:"schema_hash"` + CreatedAt time.Time `json:"created_at"` + VersionDeactivated bool `json:"version_deactivated"` + Attributes string `json:"attributes"` +} + +// SchemaRegistrationRequest contains information needed to register a schema. 
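The registration request declared just below is what clients send when registering a schema. For illustration, a minimal sketch of how such a payload serializes; the local registrationRequest type mirrors a subset of the JSON tags of SchemaRegistrationRequest only so the sketch compiles on its own, and all field values are placeholders:

package main

import (
	"encoding/json"
	"fmt"
)

// registrationRequest mirrors a subset of the JSON tags declared on
// SchemaRegistrationRequest below; it is duplicated here purely for illustration.
type registrationRequest struct {
	Description       string `json:"description"`
	Specification     string `json:"specification"`
	Name              string `json:"name"`
	SchemaType        string `json:"schema_type"`
	PublisherID       string `json:"publisher_id"`
	CompatibilityMode string `json:"compatibility_mode"`
	ValidityMode      string `json:"validity_mode"`
}

func main() {
	req := registrationRequest{
		Description:       "user profile events",
		Specification:     `{"type":"object"}`,
		Name:              "user-profile",
		SchemaType:        "json",
		PublisherID:       "publisher-1",
		CompatibilityMode: "none",
		ValidityMode:      "syntax-only",
	}
	body, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(body))
}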
+type SchemaRegistrationRequest struct { + Description string `json:"description"` + Specification string `json:"specification"` + Name string `json:"name"` + SchemaType string `json:"schema_type"` + LastCreated string `json:"last_created"` + PublisherID string `json:"publisher_id"` + CompatibilityMode string `json:"compatibility_mode"` + ValidityMode string `json:"validity_mode"` + Attributes string `json:"attributes"` +} + +// SchemaUpdateRequest contains information needed to update a schema. +type SchemaUpdateRequest struct { + Description string `json:"description"` + Specification string `json:"specification"` + Attributes string `json:"attributes"` +} + +// SchemaCompatibilityRequest contains information needed to check compatibility of schemas +type SchemaCompatibilityRequest struct { + SchemaID string `json:"schema_id"` + NewSchema string `json:"new_schema"` +} + +// SchemaValidityRequest contains information needed to check validity of a schema +type SchemaValidityRequest struct { + NewSchema string `json:"new_schema"` + Format string `json:"format"` + Mode string `json:"mode"` +} diff --git a/registry/registry/repo.go b/registry/registry/repo.go new file mode 100644 index 0000000..58c450a --- /dev/null +++ b/registry/registry/repo.go @@ -0,0 +1,29 @@ +package registry + +import ( + "github.com/pkg/errors" +) + +var ErrNotFound = errors.New("not found") +var ErrUnknownComp = errors.New("unknown value for compatibility_mode") +var ErrUnknownVal = errors.New("unknown value for validity mode") +var ErrNotValid = errors.New("schema is not valid") +var ErrNotComp = errors.New("schemas are not compatible") + +type Repository interface { + CreateSchema(schemaRegisterRequest SchemaRegistrationRequest) (VersionDetails, bool, error) + GetSchemaVersionByIdAndVersion(id string, version string) (VersionDetails, error) + UpdateSchemaById(id string, schemaUpdateRequest SchemaUpdateRequest) (VersionDetails, bool, error) + GetSchemaVersionsById(id string) (Schema, error) + GetAllSchemaVersions(id string) (Schema, error) + GetLatestSchemaVersion(id string) (VersionDetails, error) + DeleteSchema(id string) (bool, error) + DeleteSchemaVersion(id, version string) (bool, error) + GetAllSchemas() ([]Schema, error) + GetSchemas() ([]Schema, error) +} + +// WithCache decorates the given Repository with an in-memory cache of the given size. 
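WithCache, defined below, returns the same Repository interface it receives, so callers stay unaware of the caching layer. A small illustrative sketch of that decorator pattern, assuming it sits in the same package as WithCache and the mock repository from mock.go; it is not part of this change:

package registry

import "fmt"

// exampleWithCache wraps the package's mock repository with the cache decorator
// and reads the same version twice, so the second read is served from the LRU cache.
func exampleWithCache() error {
	repo, err := WithCache(NewMockRepository(), 16)
	if err != nil {
		return err
	}
	for i := 0; i < 2; i++ {
		details, err := repo.GetSchemaVersionByIdAndVersion("1", "1")
		if err != nil {
			return err
		}
		fmt.Println(details.Version)
	}
	return nil
}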
+func WithCache(repository Repository, size int) (Repository, error) { + return newCache(repository, size) +} diff --git a/registry/registry/repository/postgres/env.go b/registry/registry/repository/postgres/env.go new file mode 100644 index 0000000..db13cfc --- /dev/null +++ b/registry/registry/repository/postgres/env.go @@ -0,0 +1,58 @@ +package postgres + +import ( + "os" + + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/errtemplates" +) + +type DatabaseConfig struct { + TablePrefix string + Host string + User string + Password string + DatabaseName string +} + +const ( + tablePrefixEnvKey = "SR_TABLE_PREFIX" + hostEnvKey = "SR_HOST" + userEnvKey = "SR_USER" + passwordEnvKey = "SR_PASSWORD" + databaseNameEnvKey = "SR_DBNAME" +) + +func LoadDatabaseConfigFromEnv() (DatabaseConfig, error) { + tablePrefix := os.Getenv(tablePrefixEnvKey) + if tablePrefix == "" { + return DatabaseConfig{}, errtemplates.EnvVariableNotDefined(tablePrefixEnvKey) + } + + host := os.Getenv(hostEnvKey) + if host == "" { + return DatabaseConfig{}, errtemplates.EnvVariableNotDefined(hostEnvKey) + } + + user := os.Getenv(userEnvKey) + if user == "" { + return DatabaseConfig{}, errtemplates.EnvVariableNotDefined(userEnvKey) + } + + password := os.Getenv(passwordEnvKey) + if password == "" { + return DatabaseConfig{}, errtemplates.EnvVariableNotDefined(passwordEnvKey) + } + + dbName := os.Getenv(databaseNameEnvKey) + if dbName == "" { + return DatabaseConfig{}, errtemplates.EnvVariableNotDefined(databaseNameEnvKey) + } + + return DatabaseConfig{ + TablePrefix: tablePrefix, + Host: host, + User: user, + Password: password, + DatabaseName: dbName, + }, nil +} diff --git a/registry/registry/repository/postgres/gorm.go b/registry/registry/repository/postgres/gorm.go new file mode 100644 index 0000000..cbd472b --- /dev/null +++ b/registry/registry/repository/postgres/gorm.go @@ -0,0 +1,38 @@ +package postgres + +import ( + "fmt" + "gorm.io/driver/postgres" + "gorm.io/gorm" + "gorm.io/gorm/schema" +) + +func InitializeGormFromEnv() (*gorm.DB, error) { + config, err := LoadDatabaseConfigFromEnv() + if err != nil { + return nil, err + } + + return InitializeGorm(config) +} + +func InitializeGorm(config DatabaseConfig) (*gorm.DB, error) { + connectionString := fmt.Sprintf( + "host=%s user=%s password=%s dbname=%s port=5432 sslmode=disable", + config.Host, config.User, config.Password, config.DatabaseName, + ) + dialector := postgres.Open(connectionString) + gcfg := &gorm.Config{ + NamingStrategy: schema.NamingStrategy{ + TablePrefix: config.TablePrefix, + SingularTable: true, + }, + } + + db, err := gorm.Open(dialector, gcfg) + if err != nil { + return nil, err + } + + return db, nil +} diff --git a/registry/registry/repository/postgres/initdb.go b/registry/registry/repository/postgres/initdb.go new file mode 100644 index 0000000..f33dc1f --- /dev/null +++ b/registry/registry/repository/postgres/initdb.go @@ -0,0 +1,21 @@ +package postgres + +import ( + "gorm.io/gorm" +) + +// Initdb initializes the schema registry database. +func Initdb(db *gorm.DB) error { + if err := db.Exec("create schema if not exists syntio_schema authorization postgres").Error; err != nil { + return err + } + return db.AutoMigrate(&Schema{}, &VersionDetails{}) +} + +// HealthCheck checks if the necessary tables exist. +// +// Note that this function returns false in case of network issues as well, acting like a health check of sorts. 
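The pieces above fit together as: LoadDatabaseConfigFromEnv reads the SR_* variables, InitializeGorm opens the GORM connection, Initdb creates the schema and tables, and HealthCheck (below) verifies they exist. For illustration, a sketch of that wiring; the import path is assumed from the module path used elsewhere in this diff, the table prefix "syntio_schema." is a guess matching the queries in postgres.go, and all other values are placeholders:

package main

import (
	"log"
	"os"

	// Import path assumed from the module path used elsewhere in this diff.
	"github.com/dataphos/aquarium-janitor-standalone-sr/registry/repository/postgres"
)

func main() {
	// The five variables read by LoadDatabaseConfigFromEnv; all values are placeholders.
	_ = os.Setenv("SR_TABLE_PREFIX", "syntio_schema.")
	_ = os.Setenv("SR_HOST", "localhost")
	_ = os.Setenv("SR_USER", "postgres")
	_ = os.Setenv("SR_PASSWORD", "postgres")
	_ = os.Setenv("SR_DBNAME", "schema_registry")

	db, err := postgres.InitializeGormFromEnv()
	if err != nil {
		log.Fatal(err)
	}
	if err := postgres.Initdb(db); err != nil {
		log.Fatal(err)
	}
	log.Println("required tables present:", postgres.HealthCheck(db))
}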
+func HealthCheck(db *gorm.DB) bool { + migrator := db.Migrator() + return migrator.HasTable(&Schema{}) && migrator.HasTable(&VersionDetails{}) +} diff --git a/registry/registry/repository/postgres/model.go b/registry/registry/repository/postgres/model.go new file mode 100644 index 0000000..2de65aa --- /dev/null +++ b/registry/registry/repository/postgres/model.go @@ -0,0 +1,69 @@ +package postgres + +import ( + "strconv" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-sr/registry" +) + +// Schema is a structure that defines the parent entity in the schema registry. +type Schema struct { + SchemaID uint `gorm:"primaryKey;column:schema_id;autoIncrement"` + SchemaType string `gorm:"column:schema_type;type:varchar(8)"` + Name string `gorm:"column:name;type:varchar(256)"` + Description string `gorm:"column:description;type:text"` + LastCreated string `gorm:"column:last_created;type:varchar(8)"` + PublisherID string `gorm:"column:publisher_id;type:varchar(256)"` + VersionDetails []VersionDetails `gorm:"foreignKey:schema_id"` + CompatibilityMode string `gorm:"column:compatibility_mode;type:varchar(256)"` + ValidityMode string `gorm:"column:validity_mode;type:varchar(256)"` +} + +// VersionDetails represents the child entity in the schema registry model. +type VersionDetails struct { + VersionID uint `gorm:"primaryKey;column:version_id;autoIncrement"` + Version string `gorm:"column:version;type:int;index:idver_idx"` + SchemaID uint `gorm:"column:schema_id;index:idver_idx"` + Description string `gorm:"column:description;type:text"` + Specification string `gorm:"column:specification;type:text"` + SchemaHash string `gorm:"column:schema_hash;type:varchar(256)"` + CreatedAt time.Time `gorm:"column:created_at"` + VersionDeactivated bool `gorm:"column:version_deactivated;type:boolean"` + Attributes string `gorm:"column:attributes;type:text"` +} + +// intoRegistrySchema maps Schema from repository to service layer. +func intoRegistrySchema(schema Schema) registry.Schema { + var registryVersionDetails []registry.VersionDetails + for _, versionDetails := range schema.VersionDetails { + registryVersionDetails = append(registryVersionDetails, intoRegistryVersionDetails(versionDetails)) + } + + return registry.Schema{ + SchemaID: strconv.Itoa(int(schema.SchemaID)), + SchemaType: schema.SchemaType, + Name: schema.Name, + VersionDetails: registryVersionDetails, + Description: schema.Description, + LastCreated: schema.LastCreated, + PublisherID: schema.PublisherID, + CompatibilityMode: schema.CompatibilityMode, + ValidityMode: schema.ValidityMode, + } +} + +// intoRegistryVersionDetails maps VersionDetails from repository to service layer. 
+func intoRegistryVersionDetails(VersionDetails VersionDetails) registry.VersionDetails { + return registry.VersionDetails{ + VersionID: strconv.Itoa(int(VersionDetails.VersionID)), + Version: VersionDetails.Version, + SchemaID: strconv.Itoa(int(VersionDetails.SchemaID)), + Specification: VersionDetails.Specification, + Description: VersionDetails.Description, + SchemaHash: VersionDetails.SchemaHash, + CreatedAt: VersionDetails.CreatedAt, + VersionDeactivated: VersionDetails.VersionDeactivated, + Attributes: VersionDetails.Attributes, + } +} diff --git a/registry/registry/repository/postgres/postgres.go b/registry/registry/repository/postgres/postgres.go new file mode 100644 index 0000000..2ad9a38 --- /dev/null +++ b/registry/registry/repository/postgres/postgres.go @@ -0,0 +1,291 @@ +package postgres + +import ( + "encoding/base64" + "gorm.io/gorm" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/dataphos/aquarium-janitor-standalone-sr/registry" + "github.com/dataphos/aquarium-janitor-standalone-sr/registry/internal/hashutils" +) + +type Repository struct { + db *gorm.DB +} + +// New returns a new instance of Repository. +func New(db *gorm.DB) *Repository { + return &Repository{ + db: db, + } +} + +// GetSchemaVersionByIdAndVersion retrieves a schema version by its id and version. +// Returns registry.ErrNotFound in case there's no schema under the given id and version. +func (r *Repository) GetSchemaVersionByIdAndVersion(id, version string) (registry.VersionDetails, error) { + var details VersionDetails + if err := r.db.Where("schema_id = ? and version = ? and version_deactivated = ?", id, version, false).Take(&details).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return registry.VersionDetails{}, registry.ErrNotFound + } + return registry.VersionDetails{}, err + } + return intoRegistryVersionDetails(details), nil +} + +// GetSchemaVersionsById returns a Schema with all active versions. +// Returns registry.ErrNotFound in case there's no schema under the given id. +func (r *Repository) GetSchemaVersionsById(id string) (registry.Schema, error) { + var schema Schema + err := r.db.Preload("VersionDetails", "version_deactivated = ?", false).Take(&schema, id).Error + if errors.Is(err, gorm.ErrRecordNotFound) || len(schema.VersionDetails) == 0 { + return registry.Schema{}, registry.ErrNotFound + } + if err != nil { + return registry.Schema{}, err + } + return intoRegistrySchema(schema), nil +} + +// GetAllSchemaVersions returns a Schema with all versions. +// Returns registry.ErrNotFound in case there's no schema under the given id. +func (r *Repository) GetAllSchemaVersions(id string) (registry.Schema, error) { + var schema Schema + if err := r.db.Preload("VersionDetails").Take(&schema, id).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return registry.Schema{}, registry.ErrNotFound + } + return registry.Schema{}, err + } + return intoRegistrySchema(schema), nil +} + +// GetLatestSchemaVersion returns the latest active version of selected schema. +// Returns registry.ErrNotFound in case there's no schema under the given id. +func (r *Repository) GetLatestSchemaVersion(id string) (registry.VersionDetails, error) { + var details VersionDetails + if err := r.db.Where("schema_id = ? 
and version_deactivated = ?", id, false).Last(&details).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return registry.VersionDetails{}, registry.ErrNotFound + } + return registry.VersionDetails{}, err + } + return intoRegistryVersionDetails(details), nil +} + +// GetSchemas returns all active Schema instances. +// Returns registry.ErrNotFound in case there's no schemas. +func (r *Repository) GetSchemas() ([]registry.Schema, error) { + var schemaList []Schema + // This query examines if there is at least one active version of the schema and based on that, it determines whether to retrieve the schema. + tx := r.db.Preload("VersionDetails", "version_deactivated = ?", false).Where("EXISTS (SELECT 1 FROM syntio_schema.version_details WHERE syntio_schema.version_details.schema_id = syntio_schema.schema.schema_id AND syntio_schema.version_details.version_deactivated = 'false')").Find(&schemaList) + if tx.Error != nil { + return nil, tx.Error + } + if tx.RowsAffected == 0 { + return nil, registry.ErrNotFound + } + + var registrySchemaList []registry.Schema + for _, schema := range schemaList { + registrySchemaList = append(registrySchemaList, intoRegistrySchema(schema)) + } + return registrySchemaList, nil +} + +// GetAllSchemas returns all Schema instances. +// Returns registry.ErrNotFound in case there's no schemas. +func (r *Repository) GetAllSchemas() ([]registry.Schema, error) { + var schemaList []Schema + tx := r.db.Preload("VersionDetails").Find(&schemaList) + if tx.Error != nil { + return nil, tx.Error + } + if tx.RowsAffected == 0 { + return nil, registry.ErrNotFound + } + + registrySchemaList := make([]registry.Schema, len(schemaList)) + for i, schema := range schemaList { + registrySchemaList[i] = intoRegistrySchema(schema) + } + return registrySchemaList, nil +} + +// CreateSchema inserts a new Schema structure. +// Returns a new VersionDetails structure and a bool flag indicating if a new version of schema was added or if it already existed. +func (r *Repository) CreateSchema(schemaRegisterRequest registry.SchemaRegistrationRequest) (registry.VersionDetails, bool, error) { + specification := []byte(schemaRegisterRequest.Specification) + hash := hashutils.SHA256(specification) + + // Prior to saving the schema in the database we must verify the distinctness of the schema hash and publisher ID. + // To accomplish this, we must join the "VersionDetails" and "Schema" tables on the columns that contain the schema ID, + // while also filtering the schemas with the specified schema hash and publisher ID. If the query does not return a schema, + // it means that a schema with the given criteria does not exist in the database and a new one needs to be created. + var schema Schema + if err := r.db.Table("syntio_schema.schema").Preload("VersionDetails", "schema_hash = ? and version_deactivated = ?", hash, false).Joins("JOIN syntio_schema.version_details ON syntio_schema.version_details.schema_id = syntio_schema.schema.schema_id AND syntio_schema.version_details.schema_hash = ? 
and syntio_schema.version_details.version_deactivated = ?", hash, false).Where("syntio_schema.schema.publisher_id = ?", schemaRegisterRequest.PublisherID).Take(&schema).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + schema := Schema{ + SchemaType: strings.ToLower(schemaRegisterRequest.SchemaType), + Name: schemaRegisterRequest.Name, + Description: schemaRegisterRequest.Description, + PublisherID: schemaRegisterRequest.PublisherID, + LastCreated: "1", + CompatibilityMode: schemaRegisterRequest.CompatibilityMode, + ValidityMode: schemaRegisterRequest.ValidityMode, + VersionDetails: []VersionDetails{ + { + Version: "1", + Specification: base64.StdEncoding.EncodeToString(specification), + Description: schemaRegisterRequest.Description, + SchemaHash: hash, + CreatedAt: time.Now(), + VersionDeactivated: false, + Attributes: schemaRegisterRequest.Attributes, + }, + }, + } + if err := r.db.Create(&schema).Error; err != nil { + return registry.VersionDetails{}, false, err + } + return intoRegistryVersionDetails(schema.VersionDetails[0]), true, nil + } + return registry.VersionDetails{}, false, err + } + + return intoRegistryVersionDetails(schema.VersionDetails[0]), false, nil +} + +// UpdateSchemaById updates the schema specification and description if sent. +// Returns the new VersionDetails and a flag indicating if a new version of schema was added. +func (r *Repository) UpdateSchemaById(id string, schemaUpdateRequest registry.SchemaUpdateRequest) (registry.VersionDetails, bool, error) { + schemaId, err := strconv.Atoi(id) + if err != nil { + return registry.VersionDetails{}, false, errors.Wrap(err, "wrong type of schemaID") + } + + specification := []byte(schemaUpdateRequest.Specification) + hash := hashutils.SHA256(specification) + + var details VersionDetails + if err = r.db.Where("schema_hash = ? 
and schema_id = ?", hash, id).Take(&details).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + updated := VersionDetails{} + err = r.db.Transaction(func(tx *gorm.DB) error { + + schema := &Schema{SchemaID: uint(schemaId)} + if err := tx.Select("last_created").Take(&schema).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return registry.ErrNotFound + } + return err + } + + lastCreated, err := strconv.Atoi(schema.LastCreated) + if err != nil { + return errors.Wrap(err, "wrong type of latest version") + } + incrementedLastCreated := strconv.Itoa(lastCreated + 1) + + updated = VersionDetails{ + Version: incrementedLastCreated, + Specification: base64.StdEncoding.EncodeToString(specification), + SchemaHash: hash, + Description: schemaUpdateRequest.Description, + Attributes: schemaUpdateRequest.Attributes, + } + + // append the new version to the VersionDetails array + if err = tx.Model(&schema).Association("VersionDetails").Append(&updated); err != nil { + return errors.Wrap(err, "could not update version details") + } + + // updating description and last_created values in schema table + if err = tx.Model(&schema).Updates(Schema{Description: schemaUpdateRequest.Description, LastCreated: incrementedLastCreated}).Error; err != nil { + return errors.Wrap(err, "could not update schema") + } + + return nil + }) + if err != nil { + return registry.VersionDetails{}, false, err + } + return intoRegistryVersionDetails(updated), true, nil + } + return registry.VersionDetails{}, false, err + } + + if details.VersionDeactivated { + //Activates already existing Schema + var schema Schema + if err := r.db.Take(&schema, id).Error; err != nil { + return registry.VersionDetails{}, false, err + } + lastCreated, err := strconv.Atoi(schema.LastCreated) + if err != nil { + return registry.VersionDetails{}, false, errors.Wrap(err, "wrong type of latest version") + } + incrementedLastCreated := strconv.Itoa(lastCreated + 1) + + // updating description and last_created values in schema table + if err = r.db.Model(&Schema{SchemaID: uint(schemaId)}).Updates(Schema{Description: schemaUpdateRequest.Description, LastCreated: incrementedLastCreated}).Error; err != nil { + return registry.VersionDetails{}, false, errors.Wrap(err, "could not update schema") + } + + // activating the schema version with a new creation time and version number + if err = r.db.Model(&details).Updates(map[string]interface{}{ + "created_at": time.Now(), + "version_deactivated": false, + "version": incrementedLastCreated, + }).Error; err != nil { + return registry.VersionDetails{}, false, errors.Wrap(err, "could not update version details") + } + + details.VersionDeactivated = false + return intoRegistryVersionDetails(details), true, nil + } + return intoRegistryVersionDetails(details), false, nil +} + +// DeleteSchema deactivates a schema. +// Returns a boolean flag indicating if a schema with the given id existed before this call. 
+func (r *Repository) DeleteSchema(id string) (bool, error) { + var schema Schema + if err := r.db.Preload("VersionDetails", "version_deactivated = ?", false).Take(&schema, id).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return false, nil + } + return false, err + } + if len(schema.VersionDetails) == 0 { + return false, nil + } + // deactivation of all active versions + tx := r.db.Model(&schema.VersionDetails).Update("version_deactivated", true) + if tx.Error != nil { + return false, tx.Error + } + return tx.RowsAffected > 0, nil +} + +// DeleteSchemaVersion deactivates the specified schema version. +// Returns a boolean flag indicating if a schema with the given id and version existed before this call. +func (r *Repository) DeleteSchemaVersion(id, version string) (bool, error) { + var details VersionDetails + if err := r.db.Where("schema_id = ? and version = ? and version_deactivated = ?", id, version, false).Take(&details).Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return false, nil + } + return false, err + } + + tx := r.db.Model(&details).Update("version_deactivated", true) + if tx.Error != nil { + return false, tx.Error + } + return tx.RowsAffected > 0, nil +} diff --git a/registry/registry/repository/postgres/postgres_test.go b/registry/registry/repository/postgres/postgres_test.go new file mode 100644 index 0000000..26b956d --- /dev/null +++ b/registry/registry/repository/postgres/postgres_test.go @@ -0,0 +1,54 @@ +package postgres + +import ( + "gorm.io/driver/postgres" + "gorm.io/gorm" + "testing" + "time" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/stretchr/testify/assert" + + "github.com/dataphos/aquarium-janitor-standalone-sr/registry/internal/hashutils" +) + +func TestGetSchemaVersionByIdAndVersion(t *testing.T) { + // skip this test until it is not remodeled + t.Skip() + db, mock, err := sqlmock.New() + if err != nil { + t.Fatalf("failed to create new sqlmock: %s", err) + } + + dialector := postgres.New(postgres.Config{ + DriverName: "postgres", + DSN: "sqlmock_db_1", + PreferSimpleProtocol: true, + Conn: db, + }) + gcfg := &gorm.Config{} + dbInstance, err := gorm.Open(dialector, gcfg) + if err != nil { + t.Fatal(err) + } + defer db.Close() + + pdb := Repository{ + db: dbInstance, + } + resultRow := sqlmock.NewRows([]string{"version_id", "version", "schema_id", "specification", "description", "schema_hash", "created_at", "version_deactivated"}). + AddRow(1, "1", 1, "test_spec", "a description", "9f8f1a88fdc11bf262095a82a607a61086641ad8da16ab4b6e104dd32920d20f", time.Now(), false) + + mock.ExpectQuery(`SELECT * FROM "version_details" WHERE schema_id = $1 and version = $2 and version_deactivated = $3 LIMIT 1`). + WithArgs(1, "1", false). 
+ WillReturnRows(resultRow) + + sd, err := pdb.GetSchemaVersionByIdAndVersion("1", "1") + if err != nil { + t.Fatal(err) + } + + assert.Equal(t, "1", sd.Version) + assert.Equal(t, "1", sd.SchemaID) + assert.Equal(t, hashutils.SHA256([]byte("test_spec")), sd.SchemaHash) +} diff --git a/registry/registry/schema.go b/registry/registry/schema.go new file mode 100644 index 0000000..01da7a2 --- /dev/null +++ b/registry/registry/schema.go @@ -0,0 +1,448 @@ +package registry + +import ( + "encoding/json" + "log" + "os" + "reflect" + "sort" + "strconv" + "strings" + + "github.com/cyberphone/json-canonicalization/go/src/webpki.org/jsoncanonicalizer" + "github.com/hamba/avro/v2" + "github.com/pkg/errors" + + "github.com/dataphos/aquarium-janitor-standalone-sr/compatibility" + "github.com/dataphos/aquarium-janitor-standalone-sr/validity" +) + +type Service struct { + Repository Repository + CompChecker compatibility.Checker + ValChecker validity.Checker + GlobalCompMode string + GlobalValMode string +} + +// Attribute search depth limit to prevent infinite recursion +const attSearchDepth = 10 + +const cacheSizeEnv = "CACHE_SIZE" +const defaultCacheSizeEnv = 0 + +type QueryParams struct { + Id string + Version string + SchemaType string + Name string + OrderBy string + Sort string + Limit int + Attributes []string +} + +func New(Repository Repository, CompChecker compatibility.Checker, ValChecker validity.Checker, GlobalCompMode, GlobalValMode string) *Service { + var size int + var err error + cacheSize := os.Getenv(cacheSizeEnv) + if cacheSize == "" { + size = defaultCacheSizeEnv + } else { + size, err = strconv.Atoi(cacheSize) + if err != nil { + log.Println("Cache size can not hold non-numeric data.") + return &Service{} + } + } + + if size > 0 { + log.Println("Using in-memory cache for repository") + Repository, err = WithCache(Repository, size) + if err != nil { + log.Println("Encountered error while trying to create cache.") + return &Service{} + } + } + + return &Service{ + Repository: Repository, + CompChecker: CompChecker, + ValChecker: ValChecker, + GlobalCompMode: GlobalCompMode, + GlobalValMode: GlobalValMode, + } +} + +// GetSchemaVersion gets the schema version with the specific id and version. +func (service *Service) GetSchemaVersion(id, version string) (VersionDetails, error) { + return service.Repository.GetSchemaVersionByIdAndVersion(id, version) +} + +// ListSchemaVersions lists all active schema versions of a specific schema. +func (service *Service) ListSchemaVersions(id string) (Schema, error) { + return service.Repository.GetSchemaVersionsById(id) +} + +// ListAllSchemaVersions lists all schema versions of a specific schema. +func (service *Service) ListAllSchemaVersions(id string) (Schema, error) { + return service.Repository.GetAllSchemaVersions(id) +} + +// GetLatestSchemaVersion gets the latest version of a certain schema. +func (service *Service) GetLatestSchemaVersion(id string) (VersionDetails, error) { + return service.Repository.GetLatestSchemaVersion(id) +} + +// GetSchemas gets all active schemas. +func (service *Service) GetSchemas() ([]Schema, error) { + return service.Repository.GetSchemas() +} + +// GetAllSchemas gets all schemas. +func (service *Service) GetAllSchemas() ([]Schema, error) { + return service.Repository.GetAllSchemas() +} + +// SearchSchemas gets filtered schemas. 
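New, defined above, decides at construction time whether to wrap the repository in the LRU cache based on the CACHE_SIZE environment variable. An illustrative in-package sketch of that wiring, built around the package's own mocks (not part of this change):

package registry

import (
	"fmt"
	"os"
)

// exampleNewService builds a Service around the package mocks and opts into the
// repository cache through the CACHE_SIZE variable read by New above.
func exampleNewService() {
	_ = os.Setenv("CACHE_SIZE", "100")
	svc := New(NewMockRepository(), &mockCompChecker{}, &mockValChecker{}, "none", "none")

	details, err := svc.GetSchemaVersion("1", "1")
	if err != nil {
		fmt.Println("lookup failed:", err)
		return
	}
	fmt.Println(details.Version)
}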
+func (service *Service) SearchSchemas(params QueryParams) ([]Schema, error) { + schemas, err := service.Repository.GetSchemas() + if err != nil { + return schemas, errors.Wrap(err, "couldn't retrieve schemas") + } + + var filteredSchemas []Schema + for _, schema := range schemas { + if params.Id != "" && schema.SchemaID != params.Id { + continue + } + if params.Name != "" && !strings.Contains(schema.Name, params.Name) { + continue + } + if params.SchemaType != "" && schema.SchemaType != params.SchemaType { + continue + } + + filteredVersions := Schema{ + SchemaID: schema.SchemaID, + SchemaType: schema.SchemaType, + Name: schema.Name, + Description: schema.Description, + LastCreated: schema.LastCreated, + PublisherID: schema.PublisherID, + CompatibilityMode: schema.CompatibilityMode, + ValidityMode: schema.ValidityMode, + } + for _, detail := range schema.VersionDetails { + if params.Version != "" && detail.Version != params.Version { + continue + } + if !containsAttributes(detail, params.Attributes) { + continue + } + filteredVersions.VersionDetails = append(filteredVersions.VersionDetails, detail) + } + if len(filteredVersions.VersionDetails) > 0 { + if params.OrderBy == "version" && len(filteredVersions.VersionDetails) > 1 { + if params.Sort == "asc" { + sort.Slice(filteredVersions.VersionDetails, func(i, j int) bool { + return filteredVersions.VersionDetails[i].Version < filteredVersions.VersionDetails[j].Version + }) + } else if params.Sort == "desc" { + sort.Slice(filteredVersions.VersionDetails, func(i, j int) bool { + return filteredVersions.VersionDetails[i].Version > filteredVersions.VersionDetails[j].Version + }) + } + } + filteredSchemas = append(filteredSchemas, filteredVersions) + } + } + + switch params.OrderBy { + case "name": + if params.Sort == "asc" { + sort.Slice(filteredSchemas, func(i, j int) bool { + return filteredSchemas[i].Name < filteredSchemas[j].Name + }) + } else if params.Sort == "desc" { + sort.Slice(filteredSchemas, func(i, j int) bool { + return filteredSchemas[i].Name > filteredSchemas[j].Name + }) + } + case "id": + if params.Sort == "asc" { + sort.Slice(filteredSchemas, func(i, j int) bool { + l1, l2 := len(filteredSchemas[i].SchemaID), len(filteredSchemas[j].SchemaID) + if l1 != l2 { + return l1 < l2 + } + return filteredSchemas[i].SchemaID < filteredSchemas[j].SchemaID + }) + } else if params.Sort == "desc" { + sort.Slice(filteredSchemas, func(i, j int) bool { + l1, l2 := len(filteredSchemas[i].SchemaID), len(filteredSchemas[j].SchemaID) + if l1 != l2 { + return l1 > l2 + } + return filteredSchemas[i].SchemaID > filteredSchemas[j].SchemaID + }) + } + case "type": + if params.Sort == "asc" { + sort.Slice(filteredSchemas, func(i, j int) bool { + return filteredSchemas[i].SchemaType < filteredSchemas[j].SchemaType + }) + } else if params.Sort == "desc" { + sort.Slice(filteredSchemas, func(i, j int) bool { + return filteredSchemas[i].SchemaType > filteredSchemas[j].SchemaType + }) + } + } + + if params.Limit > 0 && params.Limit < len(filteredSchemas) { + filteredSchemas = filteredSchemas[:params.Limit] + } + return filteredSchemas, nil +} + +func containsAttributes(details VersionDetails, attributes []string) bool { + numMatched := 0 + for i, filterAtt := range attributes { + for _, att := range strings.FieldsFunc(details.Attributes, func(r rune) bool { return r == '/' || r == ',' }) { + if filterAtt == att { + numMatched += 1 + break + } + } + if numMatched != i+1 { + return false + } + } + return true +} + +// CreateSchema creates a new schema. 
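SearchSchemas above accepts filter, ordering and limit knobs through QueryParams. A short in-package sketch of a caller (not part of this change); the mock repository stands in for a real one, so the result set here is simply empty:

package registry

import "fmt"

// exampleSearch shows the QueryParams fields accepted by SearchSchemas above.
func exampleSearch() {
	svc := New(NewMockRepository(), &mockCompChecker{}, &mockValChecker{}, "none", "none")

	params := QueryParams{
		Name:    "user",    // substring match on schema name
		OrderBy: "version", // one of: name, id, type, version
		Sort:    "desc",    // asc or desc
		Limit:   10,        // cap on the number of returned schemas
	}
	schemas, err := svc.SearchSchemas(params)
	if err != nil {
		fmt.Println("search failed:", err)
		return
	}
	fmt.Println("matches:", len(schemas))
}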
+func (service *Service) CreateSchema(schemaRegisterRequest SchemaRegistrationRequest) (VersionDetails, bool, error) { + if !compatibility.CheckIfValidMode(&schemaRegisterRequest.CompatibilityMode) { + return VersionDetails{}, false, ErrUnknownComp + } + if !validity.CheckIfValidMode(&schemaRegisterRequest.ValidityMode) { + return VersionDetails{}, false, ErrUnknownVal + } + valid, err := service.CheckValidity(schemaRegisterRequest.SchemaType, schemaRegisterRequest.Specification, schemaRegisterRequest.ValidityMode) + if err != nil { + return VersionDetails{}, false, err + } + if !valid { + return VersionDetails{}, false, ErrNotValid + } + //cannot canonicalize schema that is invalid + if strings.ToLower(schemaRegisterRequest.ValidityMode) == "syntax-only" || strings.ToLower(schemaRegisterRequest.ValidityMode) == "full" { + canonicalSpec, err := canonicalizeSchema([]byte(schemaRegisterRequest.Specification), strings.ToLower(schemaRegisterRequest.SchemaType)) + if err != nil { + return VersionDetails{}, false, err + } + schemaRegisterRequest.Specification = canonicalSpec + } + + attributes, err := extractAttributes(schemaRegisterRequest.Specification, strings.ToLower(schemaRegisterRequest.SchemaType), attSearchDepth) + if err != nil { + return VersionDetails{}, false, errors.Wrap(err, "unable to extract attributes") + } + schemaRegisterRequest.Attributes = attributes + + return service.Repository.CreateSchema(schemaRegisterRequest) +} + +// canonicalizeSchema converts the given schema to its canonical form +func canonicalizeSchema(specification []byte, schemaType string) (string, error) { + switch schemaType { + case "json": + var canonicalSpec []byte + var schema map[string]interface{} + err := json.Unmarshal(specification, &schema) + if err != nil { + return "", err + } + + required, ok := schema["required"].([]interface{}) + if ok { + sort.Slice(required, func(i, j int) bool { + return required[i].(string) < required[j].(string) + }) + schema["required"] = required + specification, err = json.Marshal(schema) + if err != nil { + return "", err + } + } + + canonicalSpec, err = jsoncanonicalizer.Transform(specification) + if err != nil { + return "", err + } + return strings.TrimSpace(string(canonicalSpec)), nil + case "avro": + schema, err := avro.Parse(string(specification)) + if err != nil { + return "", err + } + return strings.TrimSpace(schema.String()), nil + default: + return strings.TrimSpace(string(specification)), nil + } +} + +func extractAttributes(specification string, schemaType string, maxDepth int) (string, error) { + switch schemaType { + case "json": + var schema map[string]interface{} + err := json.Unmarshal([]byte(specification), &schema) + if err != nil { + return "", errors.Wrap(err, "couldn't unmarshal schema") + } + + flatSchema := make(map[string]interface{}) + err = flattenJSON("", schema, flatSchema, 0, maxDepth, "/") + if err != nil { + return "", errors.Wrap(err, "unable to flatten json schema") + } + + var allAttributes string + for _, att := range reflect.ValueOf(flatSchema).MapKeys() { + allAttributes += att.String() + "," + } + + if len(allAttributes) > 0 { + return allAttributes[:len(allAttributes)-1], nil + } else { + return "", nil + } + default: + return "", nil + } +} + +func flattenJSON(prefix string, nested interface{}, flat map[string]interface{}, currentDepth int, maxDepth int, delimiter string) error { + if currentDepth >= maxDepth { + flat[prefix] = nested + return nil + } + + switch nested.(type) { //nolint:gosimple // fine here + case 
map[string]interface{}: + for k, v := range nested.(map[string]interface{}) { //nolint:gosimple // fine here + newKey := k + if currentDepth == 0 && strings.ToLower(newKey) != "properties" { + continue + } + if currentDepth != 0 { + newKey = prefix + delimiter + newKey + } + err := flattenJSON(newKey, v, flat, currentDepth+1, maxDepth, delimiter) + if err != nil { + return err + } + } + case []interface{}: + for i, v := range nested.([]interface{}) { //nolint:gosimple // fine here + newKey := strconv.Itoa(i) + if currentDepth != 0 { + newKey = prefix + delimiter + newKey + } + err := flattenJSON(newKey, v, flat, currentDepth+1, maxDepth, delimiter) + if err != nil { + return err + } + } + default: + flat[prefix] = nested + } + return nil +} + +// UpdateSchema updates the schemas by assigning a new version to it. +func (service *Service) UpdateSchema(id string, schemaUpdateRequest SchemaUpdateRequest) (VersionDetails, bool, error) { + schemas, err := service.ListSchemaVersions(id) + if err != nil { + return VersionDetails{}, false, err + } + + valid, err := service.CheckValidity(schemas.SchemaType, schemaUpdateRequest.Specification, schemas.ValidityMode) + if err != nil { + return VersionDetails{}, false, err + } + if !valid { + return VersionDetails{}, false, ErrNotValid + } + + compatible, err := service.CheckCompatibility(schemaUpdateRequest.Specification, id) + if err != nil { + return VersionDetails{}, false, err + } + if !compatible { + return VersionDetails{}, false, ErrNotComp + } + if strings.ToLower(schemas.ValidityMode) == "syntax-only" || strings.ToLower(schemas.ValidityMode) == "full" { + canonicalSpec, err := canonicalizeSchema([]byte(schemaUpdateRequest.Specification), strings.ToLower(schemas.SchemaType)) + if err != nil { + return VersionDetails{}, false, err + } + schemaUpdateRequest.Specification = canonicalSpec + + } + + attributes, err := extractAttributes(schemaUpdateRequest.Specification, schemas.SchemaType, attSearchDepth) + if err != nil { + return VersionDetails{}, false, errors.Wrap(err, "unable to extract attributes") + } + schemaUpdateRequest.Attributes = attributes + + return service.Repository.UpdateSchemaById(id, schemaUpdateRequest) +} + +// DeleteSchema deletes the schema and its versions. +func (service *Service) DeleteSchema(id string) (bool, error) { + return service.Repository.DeleteSchema(id) +} + +// DeleteSchemaVersion deletes a specific version of a schema. 
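CreateSchema and UpdateSchema canonicalize JSON specifications through canonicalizeSchema above before hashing, so trivially different spellings of the same schema collapse to one version. For illustration, a standalone sketch of what the jsoncanonicalizer dependency does with two equivalent inputs (the inputs are placeholders):

package main

import (
	"fmt"

	"github.com/cyberphone/json-canonicalization/go/src/webpki.org/jsoncanonicalizer"
)

func main() {
	// Two specifications that differ only in key order and whitespace.
	a := []byte(`{"b": 2, "a": 1}`)
	b := []byte(`{"a":1,"b":2}`)

	ca, err := jsoncanonicalizer.Transform(a)
	if err != nil {
		panic(err)
	}
	cb, err := jsoncanonicalizer.Transform(b)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(ca) == string(cb)) // true: both canonicalize to {"a":1,"b":2}
}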
+func (service *Service) DeleteSchemaVersion(id, version string) (bool, error) { + return service.Repository.DeleteSchemaVersion(id, version) +} + +// CheckCompatibility checks if schemas are compatible +func (service *Service) CheckCompatibility(newSchema, id string) (bool, error) { + schemas, err := service.ListSchemaVersions(id) + if err != nil { + return false, err + } + + jsonAttrs := make(map[string]string) + jsonAttrs["id"] = id + jsonAttrs["format"] = schemas.SchemaType + jsonAttrs["schema"] = newSchema + jsonMessage, err := json.Marshal(jsonAttrs) + if err != nil { + return false, err + } + + var stringHistory []string + for _, el := range schemas.VersionDetails { + stringHistory = append(stringHistory, el.Specification) + } + mode := schemas.CompatibilityMode + if schemas.CompatibilityMode == "" { + mode = service.GlobalCompMode + } + + return service.CompChecker.Check(string(jsonMessage), stringHistory, mode) +} + +// CheckValidity checks if a schema is valid +func (service *Service) CheckValidity(schemaType, newSchema, mode string) (bool, error) { + if mode == "" { + mode = service.GlobalValMode + } + return service.ValChecker.Check(newSchema, schemaType, mode) +} diff --git a/registry/registry/schema_test.go b/registry/registry/schema_test.go new file mode 100644 index 0000000..ed68e62 --- /dev/null +++ b/registry/registry/schema_test.go @@ -0,0 +1,116 @@ +package registry + +import ( + "testing" +) + +func Test_DeleteSchema(t *testing.T) { + repo := NewMockRepository() + repo.SetGetSchemaVersionsByIdResponse("mocking", MockSchema("mocking"), nil) + deleted, err := (*Service).DeleteSchema(New(repo, &mockCompChecker{}, &mockValChecker{}, "none", "none"), "mocking") + if err != nil { + t.Errorf("returned error") + } + if !deleted { + t.Errorf("deleted returned false") + } +} + +func Test_DeleteSchemaVersion(t *testing.T) { + deleted, err := (*Service).DeleteSchemaVersion(New(&mockRepository{}, &mockCompChecker{}, &mockValChecker{}, "none", "none"), "mocking", "mocking") + if err != nil { + t.Errorf("returned error") + } + if !deleted { + t.Errorf("deleted returned false") + } +} + +func Test_GetAllSchemas(t *testing.T) { + schemas, _ := (*Service).GetAllSchemas(New(&mockRepository{}, &mockCompChecker{}, &mockValChecker{}, "none", "none")) + if schemas[0].SchemaID != "mocking" { + t.Errorf("wrong schemaId returned") + } +} + +func Test_GetSchemas(t *testing.T) { + schemas, _ := (*Service).GetAllSchemas(New(&mockRepository{}, &mockCompChecker{}, &mockValChecker{}, "none", "none")) + if schemas[0].SchemaID != "mocking" { + t.Errorf("wrong schemaId returned") + } +} + +func Test_GetLatestSchemaVersion(t *testing.T) { + VersionDetails, _ := (*Service).GetLatestSchemaVersion(New(&mockRepository{}, &mockCompChecker{}, &mockValChecker{}, "none", "none"), "mocking") + if VersionDetails.VersionID != "mocking" { + t.Errorf("wrong schemaId returned") + } +} + +func Test_CreateSchema(t *testing.T) { + sdto := SchemaRegistrationRequest{ + Description: "mocking", + Specification: "mocking", + Name: "mocking", + SchemaType: "mocking", + PublisherID: "mocking", + ValidityMode: "none", + CompatibilityMode: "none", + } + VersionDetails, added, err := (*Service).CreateSchema(New(&mockRepository{}, &mockCompChecker{}, &mockValChecker{}, "none", "none"), sdto) + if err != nil { + t.Errorf("returned error") + } + + if !added { + t.Errorf("could not add schema") + } + + if VersionDetails.SchemaID != "mocking" { + t.Errorf("wrong schemaId returned") + } +} + +func Test_GetSchemaVersion(t *testing.T) { + 
VersionDetails, _ := (*Service).GetSchemaVersion(New(&mockRepository{}, &mockCompChecker{}, &mockValChecker{}, "none", "none"), "mocking", "mocking") + if VersionDetails.SchemaID != "mocking" { + t.Errorf("wrong schemaId returned") + } +} + +func Test_UpdateSchema(t *testing.T) { + sdto := SchemaUpdateRequest{ + Description: "mocking", + Specification: "mocking", + } + VersionDetails, added, err := (*Service).UpdateSchema(New(&mockRepository{}, &mockCompChecker{}, &mockValChecker{}, "none", "none"), "mocking", sdto) + if err != nil { + t.Errorf("returned error") + } + + if !added { + t.Errorf("could not add schema") + } + + if VersionDetails.SchemaID != "mocking" { + t.Errorf("wrong schemaId returned") + } +} + +func Test_GetSchemaVersionsById(t *testing.T) { + repo := NewMockRepository() + repo.SetGetSchemaVersionsByIdResponse("mocking", MockSchema("mocking"), nil) + schema, _ := (*Service).ListSchemaVersions(New(repo, &mockCompChecker{}, &mockValChecker{}, "none", "none"), "mocking") + if schema.SchemaID != "mocking" { + t.Errorf("wrong schema ID returned") + } +} + +func Test_GetAllSchemaVersions(t *testing.T) { + repo := NewMockRepository() + repo.SetGetSchemaVersionsByIdResponse("mocking", MockSchema("mocking"), nil) + schema, _ := (*Service).ListAllSchemaVersions(New(repo, &mockCompChecker{}, &mockValChecker{}, "none", "none"), "mocking") + if schema.SchemaID != "mocking" { + t.Errorf("wrong schema ID returned") + } +} diff --git a/registry/registry/testdata/avro/canonical-schema.avsc b/registry/registry/testdata/avro/canonical-schema.avsc new file mode 100644 index 0000000..82a6082 --- /dev/null +++ b/registry/registry/testdata/avro/canonical-schema.avsc @@ -0,0 +1 @@ +{"name":"my.example.userInfo","type":"record","fields":[{"name":"username","type":"string"},{"name":"age","type":"int"},{"name":"phone","type":"string"},{"name":"housenum","type":"string"},{"name":"address","type":{"name":"my.example.mailing_address","type":"record","fields":[{"name":"street","type":"string"},{"name":"city","type":"string"},{"name":"state_prov","type":"string"},{"name":"country","type":"string"},{"name":"zip","type":"string"}]}}]} diff --git a/registry/registry/testdata/avro/schema-1.avsc b/registry/registry/testdata/avro/schema-1.avsc new file mode 100644 index 0000000..aa47589 --- /dev/null +++ b/registry/registry/testdata/avro/schema-1.avsc @@ -0,0 +1,48 @@ +{ + "namespace" : "my.example", + "name" : "userInfo", + "type" : "record", + "fields" : [{"name" : "username", + "type" : "string", + "default" : "NONE"}, + + {"name" : "age", + "type" : "int", + "default" : -1}, + + {"name" : "phone", + "type" : "string", + "default" : "NONE"}, + + {"name" : "housenum", + "type" : "string", + "default" : "NONE"}, + + {"name" : "address", + "type" : { + "type" : "record", + "name" : "mailing_address", + "fields" : [ + {"name" : "street", + "type" : "string", + "default" : "NONE"}, + + {"name" : "city", + "type" : "string", + "default" : "NONE"}, + + {"name" : "state_prov", + "type" : "string", + "default" : "NONE"}, + + {"name" : "country", + "type" : "string", + "default" : "NONE"}, + + {"name" : "zip", + "type" : "string", + "default" : "NONE"} + ]} + } + ] +} diff --git a/registry/registry/testdata/avro/schema-2.avsc b/registry/registry/testdata/avro/schema-2.avsc new file mode 100644 index 0000000..9815163 --- /dev/null +++ b/registry/registry/testdata/avro/schema-2.avsc @@ -0,0 +1,39 @@ +{ + "name" : "my.example.userInfo", + "type" : "record", + "fields" : [{"name" : "username", + "type" : "string"}, + + 
{"name" : "age", + "type" : "int", + "default" : -1}, + + {"name" : "phone", + "type" : "string"}, + + {"name" : "housenum", + "type" : "string"}, + + {"name" : "address", + "type" : { + "type" : "record", + "name" : "my.example.mailing_address", + "fields" : [ + {"name" : "street", + "type" : "string"}, + + {"name" : "city", + "type" : "string"}, + + {"name" : "state_prov", + "type" : "string"}, + + {"name" : "country", + "type" : "string"}, + + {"name" : "zip", + "type" : "string"} + ]} + } + ] +} diff --git a/registry/registry/testdata/json/canonical-schema.json b/registry/registry/testdata/json/canonical-schema.json new file mode 100644 index 0000000..19a6998 --- /dev/null +++ b/registry/registry/testdata/json/canonical-schema.json @@ -0,0 +1 @@ +{"$schema":"http://json-schema.org/draft-080/schema","additionalProperties":true,"default":{},"description":"The root schema comprises the entire JSON document.","properties":{"phone":{"default":"","description":"An explanation about the purpose of this instance.","examples":[23541],"title":"The Phone Schema","type":"integer"},"room":{"default":"","description":"An explanation about the purpose of this instance.","examples":[18],"title":"The Room Schema","type":"integer"}},"required":["phone","room"],"title222":"The Root Schema","type":"object"} diff --git a/registry/registry/testdata/json/schema-1.json b/registry/registry/testdata/json/schema-1.json new file mode 100644 index 0000000..76baca2 --- /dev/null +++ b/registry/registry/testdata/json/schema-1.json @@ -0,0 +1,32 @@ +{ + "$schema": "http://json-schema.org/draft-080/schema", + "type": "object", + "title222": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": true, + "required": [ + "phone", + "room" + ], + "properties": { + "phone": { + "type": "integer", + "title": "The Phone Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + 23541 + ] + }, + "room": { + "type": "integer", + "title": "The Room Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + 18 + ] + } + } +} diff --git a/registry/registry/testdata/json/schema-2.json b/registry/registry/testdata/json/schema-2.json new file mode 100644 index 0000000..58e18ee --- /dev/null +++ b/registry/registry/testdata/json/schema-2.json @@ -0,0 +1,32 @@ +{ + "$schema": "http://json-schema.org/draft-080/schema", + "type": "object", + "title222": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": true, + "required": [ + "room", + "phone" + ], + "properties": { + "phone": { + "type": "integer", + "title": "The Phone Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + 23541 + ] + }, + "room": { + "type": "integer", + "title": "The Room Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + 18 + ] + } + } +} diff --git a/registry/server/handler.go b/registry/server/handler.go new file mode 100644 index 0000000..f4b0ef6 --- /dev/null +++ b/registry/server/handler.go @@ -0,0 +1,887 @@ +package server + +import ( + "encoding/base64" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "strconv" + "strings" + + "github.com/go-chi/chi/v5" + "github.com/pkg/errors" + + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/metrics" + 
"github.com/dataphos/aquarium-janitor-standalone-sr/registry" + "github.com/dataphos/lib-logger/logger" +) + +type Handler struct { + Service *registry.Service + log logger.Log +} + +// report is a simple wrapper of the system's message for the user. +type report struct { + Message string `json:"message"` +} + +// insertInfo represents a schema registry/evolution response for methods other than GET. +type insertInfo struct { + Id string `json:"identification"` + Version string `json:"version"` + Message string `json:"message"` +} + +// NewHandler is a convenience function which returns a new instance of Handler. +func NewHandler(Service *registry.Service, log logger.Log) *Handler { + return &Handler{ + Service: Service, + log: log, + } +} + +// GetSchemaVersionByIdAndVersion is a GET method that expects parameters "id" and "version" for +// retrieving the schema version from the underlying repository. +// +// It currently writes back either: +// - status 200 with a schema version in JSON format, if the schema is registered and active +// - status 404 with error message, if the schema version is not registered or registered but deactivated +// - status 500 with error message, if an internal server error occurred +// +// @Title Get schema version by schema id and version +// @Summary Get schema version by schema id and version +// @Produce json +// @Param id path string true "schema id" +// @Param version path string true "version" +// @Success 200 +// @Failure 404 +// @Failure 500 +// @Router /schemas/{id}/versions/{version} [get] +func (h Handler) GetSchemaVersionByIdAndVersion(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + version := chi.URLParam(r, "version") + + details, err := h.Service.GetSchemaVersion(id, version) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + body, _ := json.Marshal(report{ + Message: fmt.Sprintf("Schema with id=%v and version=%v is not registered", id, version), + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusNotFound, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + body, _ := json.Marshal(details) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +// GetSpecificationByIdAndVersion is a GET method that expects parameters "id" and "version" for +// retrieving the specification of schema version from the underlying repository. 
+// +// It currently writes back either: +// - status 200 with a schema in JSON format, if the schema version is registered and active +// - status 404 with error message, if the schema version is not registered or registered but deactivated +// - status 500 with error message, if an internal server error occurred +// +// @Title Get schema specification by schema id and version +// @Summary Get schema specification by schema id and version +// @Produce json +// @Param id path string true "schema id" +// @Param version path string true "version" +// @Success 200 +// @Failure 404 +// @Failure 500 +// @Router /schemas/{id}/versions/{version}/spec [get] +func (h Handler) GetSpecificationByIdAndVersion(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + version := chi.URLParam(r, "version") + + details, err := h.Service.GetSchemaVersion(id, version) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + body, _ := json.Marshal(report{ + Message: fmt.Sprintf("Schema with id=%v and version=%v is not registered", id, version), + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusNotFound, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + specification, err := base64.StdEncoding.DecodeString(details.Specification) + if err != nil { + log.Println(err) + } + + body, _ := json.Marshal(specification) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +// GetSchemaVersionsById is a GET method that expects "id" of the wanted schema and returns all active versions of the schema +// +// It currently gives the following responses: +// - status 200 for a successful invocation along with an instance of the schema structure +// - status 404 if there is no registered or active schema version under the given id +// - status 500 with error message, if an internal server error occurred +// +// @Title Get all active schema versions by schema id +// @Summary Get all active schema versions by schema id +// @Produce json +// @Param id path string true "schema id" +// @Success 200 +// @Failure 404 +// @Failure 500 +// @Router /schemas/{id}/versions [get] +func (h Handler) GetSchemaVersionsById(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + + schemas, err := h.Service.ListSchemaVersions(id) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusNotFound)), + Code: http.StatusNotFound, + }) + return + } + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + body, _ := json.Marshal(schemas) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +// GetAllSchemaVersionsById is a GET method that expects "id" of the wanted schema and returns all versions of the schema +// +// It currently gives the following responses: +// - status 200 for a successful invocation along with an instance of the schema structure +// - status 404 if there is no registered schema version under the given id +// - status 500 with error message, if an internal server error occurred +// +// @Title Get schema by schema id +// @Summary Get schema by schema id +// @Produce json +// @Param id path string true "schema id" +// 
@Success 200 +// @Failure 404 +// @Failure 500 +// @Router /schemas/{id}/versions/all [get] +func (h Handler) GetAllSchemaVersionsById(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + + schemas, err := h.Service.ListAllSchemaVersions(id) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + body, _ := json.Marshal(report{ + Message: fmt.Sprintf("Schema with id=%v is not registered", id), + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusNotFound, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + body, _ := json.Marshal(schemas) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +// GetLatestSchemaVersionById is a GET method that expects "id" of the wanted schema and returns +// the latest versions of the schema +// +// It currently gives the following responses: +// - status 200 with the latest schema version in JSON format, if the schema is registered +// - status 404 if there is no registered or active schema under the given id +// - status 500 with error message, if an internal server error occurred +// +// @Title Get the latest schema version by schema id +// @Summary Get the latest schema version by schema id +// @Produce json +// @Param id path string true "schema id" +// @Success 200 +// @Failure 404 +// @Failure 500 +// @Router /schemas/{id}/versions/latest [get] +func (h Handler) GetLatestSchemaVersionById(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + + details, err := h.Service.GetLatestSchemaVersion(id) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + body, _ := json.Marshal(report{ + Message: fmt.Sprintf("Schema with id=%v is not registered", id), + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusNotFound, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + body, _ := json.Marshal(details) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +// GetAllSchemas is a GET method that retrieves all schemas +// +// It currently writes back either: +// - status 200 with all schemas in JSON format +// - status 404 with error message if there are no registered schemas +// - status 500 with error message, if an internal server error occurred +// +// @Title Get all schemas +// @Summary Get all schemas +// @Produce json +// @Success 200 +// @Failure 404 +// @Failure 500 +// @Router /schemas/all [get] +func (h Handler) GetAllSchemas(w http.ResponseWriter, _ *http.Request) { + schemas, err := h.Service.GetAllSchemas() + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + body, _ := json.Marshal(report{ + Message: "There are no schemas in Registry", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusNotFound, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + body, _ := json.Marshal(schemas) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +// GetSchemas is a GET method that retrieves all active schemas +// +// It currently writes back either: +// - status 200 with active 
schemas in JSON format +// - status 404 with error message if there are no active schemas +// - status 500 with error message, if an internal server error occurred +// +// @Title Get all active schemas +// @Summary Get all active schemas +// @Produce json +// @Success 200 +// @Failure 404 +// @Failure 500 +// @Router /schemas [get] +func (h Handler) GetSchemas(w http.ResponseWriter, _ *http.Request) { + schemas, err := h.Service.GetSchemas() + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusNotFound)), + Code: http.StatusNotFound, + }) + return + } + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + body, _ := json.Marshal(schemas) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +// SearchSchemas is a GET method that expects one of the following parameters: id, version, type, name, orderBy, +// sort, limit and gets schemas that match given filter criteria +// +// It currently writes back either: +// - status 200 with filtered schemas in JSON format +// - status 400 with error message, if a bad search query was given +// - status 404 with error message, if there is no schema that matches the given search criteria +// - status 500 with error message, if an internal server error occurred +// +// @Title Search schemas +// @Summary Search schemas +// @Produce json +// @Param id query string false "schema id" +// @Param version query string false "schema version" +// @Param type query string false "schema type" +// @Param name query string false "schema name" +// @Param orderBy query string false "order by name, type, id or version" +// @Param sort query string false "sort schemas either asc or desc" +// @Param limit query string false "maximum number of retrieved schemas matching the criteria" +// @Param attributes query string false "schema attributes" +// @Success 200 +// @Failure 400 +// @Failure 404 +// @Failure 500 +// @Router /schemas/search [get] +func (h Handler) SearchSchemas(w http.ResponseWriter, r *http.Request) { + id := r.URL.Query().Get("id") + version := r.URL.Query().Get("version") + schemaType := r.URL.Query().Get("type") + name := r.URL.Query().Get("name") + orderBy := r.URL.Query().Get("orderBy") + sort := r.URL.Query().Get("sort") + + if orderBy == "" && sort != "" { + orderBy = "id" + } else if orderBy != "" && orderBy != "name" && orderBy != "id" && orderBy != "type" && orderBy != "version" { + body, _ := json.Marshal(report{ + Message: "Bad request: unknown value for orderBy", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusBadRequest, + }) + return + } + + if sort == "" && orderBy != "" { + sort = "asc" + } else if sort != "" && sort != "asc" && sort != "desc" { + body, _ := json.Marshal(report{ + Message: "Bad request: unknown value for sort", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusBadRequest, + }) + return + } + + limitStr := r.URL.Query().Get("limit") + limit := 0 + var err error + if limitStr != "" { + limit, err = strconv.Atoi(limitStr) + if err != nil { + body, _ := json.Marshal(report{ + Message: "Bad request: limit must be integer", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusBadRequest, + }) + return + } + } + + var attributes []string + if r.URL.Query().Get("attributes") != "" { + 
attributes = strings.Split(r.URL.Query().Get("attributes"), ",") + } + + queryParams := registry.QueryParams{ + Id: id, + Version: version, + SchemaType: schemaType, + Name: name, + OrderBy: orderBy, + Sort: sort, + Limit: limit, + Attributes: attributes, + } + + schemas, err := h.Service.SearchSchemas(queryParams) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusNotFound)), + Code: http.StatusNotFound, + }) + return + } + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + if schemas == nil { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusNotFound)), + Code: http.StatusNotFound, + }) + return + } + + body, _ := json.Marshal(schemas) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +// PostSchema is a POST function that registers the received schema to the underlying repository. +// +// The expected input schema JSON should contain following fields: +// - Description string +// - Specification string +// - Name string +// - SchemaType string +// - LastCreated string +// - PublisherID string +// - CompatibilityMode string +// - ValidityMode string +// +// It currently writes back either: +// - status 201 with newly created version details in JSON format +// - status 400 with error message, if the schema isn't valid or the values for validity and/or compatibility mode are missing +// - status 409 with error message, if the schema already exists +// - status 500 with error message, if an internal server error occurred +// +// In case of correct invocation the function writes back a JSON with fields: +// - Identification int64 +// - Version int32 +// - Message string +// +// @Title Post new schema +// @Summary Post new schema +// @Accept json +// @Produce json +// @Param data body registry.SchemaRegistrationRequest false "schema registration request" +// @Success 201 +// @Failure 400 +// @Failure 409 +// @Failure 500 +// @Router /schemas [post] +func (h Handler) PostSchema(w http.ResponseWriter, r *http.Request) { + registerRequest, err := readSchemaRegisterRequest(r.Body) + if err != nil { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusBadRequest)), + Code: http.StatusBadRequest, + }) + return + } + + details, added, err := h.Service.CreateSchema(registerRequest) + if err != nil { + if errors.Is(err, registry.ErrUnknownComp) { + body, _ := json.Marshal(report{ + Message: "Bad request: unknown value for compatibility_mode", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusBadRequest, + }) + return + } + + if errors.Is(err, registry.ErrUnknownVal) { + body, _ := json.Marshal(report{ + Message: "Bad request: unknown value for validity_mode", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusBadRequest, + }) + return + } + + if errors.Is(err, registry.ErrNotValid) { + body, _ := json.Marshal(report{ + Message: "Schema is not valid", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusBadRequest, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + if !added { + body, _ := json.Marshal(insertInfo{ + 
Id: details.SchemaID, + Version: details.Version, + Message: fmt.Sprintf("Schema already exists at id=%v", details.SchemaID), + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusConflict, + }) + return + } + + body, _ := json.Marshal(insertInfo{ + Id: details.SchemaID, + Version: details.Version, + Message: "Schema successfully created", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusCreated, + }) + metrics.AddedSchemaMetricUpdate(details.SchemaID, details.Version) +} + +// PutSchema registers a new schema version in the Schema Registry. The new version is connected to other schemas +// by schema id from the request URL. +// The expected input schema JSON should contain the following field: +// - Specification string +// The input can also include the following field: +// - Description string +// +// It currently writes back either: +// - status 200 with updated version details in JSON format +// - status 400 with error message, if the schemas aren't compatible +// - status 404 if there is no registered or active schema version under the given id +// - status 409 with error message, if the schema already exists +// - status 500 with error message, if an internal server error occurred +// +// In case of correct invocation the function writes back a JSON with fields: +// - Identification int64 +// - Version int32 +// - Message string +// +// In case of a bad invocation, it only returns the message. +// @Title Put new schema version +// @Summary Put new schema version +// @Accept json +// @Produce json +// @Param id path string true "schema id" +// @Param data body registry.SchemaUpdateRequest true "schema update request" +// @Success 200 +// @Failure 400 +// @Failure 404 +// @Failure 409 +// @Failure 500 +// @Router /schemas/{id} [put] +func (h Handler) PutSchema(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + + updateRequest, err := readSchemaUpdateRequest(r.Body) + if err != nil { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusBadRequest)), + Code: http.StatusBadRequest, + }) + return + } + + details, updated, err := h.Service.UpdateSchema(id, updateRequest) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + body, _ := json.Marshal(report{ + Message: fmt.Sprintf("Schema with id=%v doesn't exist", id), + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusNotFound, + }) + return + } + + if errors.Is(err, registry.ErrNotComp) { + body, _ := json.Marshal(report{ + Message: "Schemas are not compatible", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusBadRequest, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + if !updated { + body, _ := json.Marshal(insertInfo{ + Id: details.SchemaID, + Version: details.Version, + Message: fmt.Sprintf("Schema already exists at id=%s", details.SchemaID), + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusConflict, + }) + return + } + + body, _ := json.Marshal(insertInfo{ + Id: details.SchemaID, + Version: details.Version, + Message: "Schema successfully updated", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) + metrics.UpdateSchemaMetricUpdate(details.SchemaID, details.Version) +} + +// DeleteSchema is a DELETE method that deactivates a schema. 
+// It expects the "id" of the wanted schema +// +// It currently gives the following responses: +// - status 200 for a successful invocation along with an instance of the schema structure +// - status 400 if the deletion caused an error +// - status 404 if the schema does not exist or is already deactivated +// +// @Title Delete schema by schema id +// @Summary Delete schema by schema id +// @Produce json +// @Param id path string true "schema id" +// @Success 200 +// @Failure 400 +// @Failure 404 +// @Router /schemas/{id} [delete] +func (h Handler) DeleteSchema(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + + deleted, err := h.Service.DeleteSchema(id) + if err != nil { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusBadRequest)), + Code: http.StatusBadRequest, + }) + return + } + + if !deleted { + body, _ := json.Marshal(report{Message: fmt.Sprintf("Schema with id=%s doesn't exist", id)}) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusNotFound, + }) + return + } + + body, _ := json.Marshal(report{Message: fmt.Sprintf("Schema with id=%s successfully deleted", id)}) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) + metrics.DeletedSchemaMetricUpdate(id) +} + +// DeleteSchemaVersion is a DELETE method that deletes a schema version. +// It expects the "id" and "version" of the wanted schema +// +// It currently gives the following responses: +// - status 200 for a successful invocation along with an instance of the schema structure +// - status 400 if the deletion caused an error +// - status 404 if the schema version does not exist or is already deactivated +// +// @Title Delete schema version by schema id and version +// @Summary Delete schema version by schema id and version +// @Accept json +// @Produce json +// @Param id path string true "schema id" +// @Param version path string true "version" +// @Success 200 +// @Failure 400 +// @Failure 404 +// @Router /schemas/{id}/versions/{version} [delete] +func (h Handler) DeleteSchemaVersion(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + version := chi.URLParam(r, "version") + + deleted, err := h.Service.DeleteSchemaVersion(id, version) + if err != nil { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusBadRequest)), + Code: http.StatusBadRequest, + }) + return + } + + if !deleted { + body, _ := json.Marshal(report{Message: fmt.Sprintf("Schema with id=%s and version=%s doesn't exist", id, version)}) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusNotFound, + }) + return + } + + body, _ := json.Marshal(report{Message: fmt.Sprintf("Schema with id=%s and version=%s successfully deleted", id, version)}) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) + metrics.DeleteSchemaVersionMetricUpdate(id, version) +} + +// HealthCheck is a GET method that gives the response status 200 to signalize +// that the Schema Registry component is up and running. 
+func (h Handler) HealthCheck(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) +} + +func (h Handler) SchemaCompatibility(w http.ResponseWriter, r *http.Request) { + compRequest, err := readSchemaCompatibilityRequest(r.Body) + + if err != nil { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusBadRequest)), + Code: http.StatusBadRequest, + }) + return + } + + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + log.Println(fmt.Errorf("couldn't close request body")) + return + } + }(r.Body) + + compatible, err := h.Service.CheckCompatibility(compRequest.SchemaID, compRequest.NewSchema) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusNotFound)), + Code: http.StatusNotFound, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + if !compatible { + body, _ := json.Marshal(insertInfo{ + Message: "Schemas are not compatible", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusConflict, + }) + return + } + + body, _ := json.Marshal(compatible) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} + +func (h Handler) SchemaValidity(w http.ResponseWriter, r *http.Request) { + valRequest, err := readSchemaValidityRequest(r.Body) + if err != nil { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusBadRequest)), + Code: http.StatusBadRequest, + }) + return + } + + valid, err := h.Service.CheckValidity(valRequest.Format, valRequest.NewSchema, valRequest.Mode) + if err != nil { + if errors.Is(err, registry.ErrNotFound) { + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusNotFound)), + Code: http.StatusNotFound, + }) + return + } + + writeResponse(w, responseBodyAndCode{ + Body: serializeErrorMessage(http.StatusText(http.StatusInternalServerError)), + Code: http.StatusInternalServerError, + }) + return + } + + if !valid { + body, _ := json.Marshal(insertInfo{ + Message: "Schema is not valid", + }) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusConflict, + }) + return + } + + body, _ := json.Marshal(valid) + writeResponse(w, responseBodyAndCode{ + Body: body, + Code: http.StatusOK, + }) +} diff --git a/registry/server/log.go b/registry/server/log.go new file mode 100644 index 0000000..157f95c --- /dev/null +++ b/registry/server/log.go @@ -0,0 +1,44 @@ +package server + +import ( + "net/http" + "strconv" + "time" + + "github.com/go-chi/chi/v5/middleware" + + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/errcodes" + "github.com/dataphos/lib-logger/logger" +) + +func RequestLogger(log logger.Log) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + fn := func(w http.ResponseWriter, r *http.Request) { + ww := middleware.NewWrapResponseWriter(w, r.ProtoMajor) + + t1 := time.Now() + defer func() { + fields := logger.F{ + "method": r.Method, + "path": r.URL.Path, + "remote_adrr": r.RemoteAddr, + "status": strconv.Itoa(ww.Status()), + "content_length": strconv.FormatInt(r.ContentLength, 10), + "bytes": strconv.Itoa(ww.BytesWritten()), + "response_time": strconv.FormatInt(time.Since(t1).Milliseconds(), 10), + } + + status := ww.Status() + if 
status >= 100 && status < 400 { + log.Infow("request completed", fields) + } else { + log.Errorw("request not completed successfully", errcodes.FromHttpStatusCode(status), fields) + } + }() + + next.ServeHTTP(ww, r) + } + + return http.HandlerFunc(fn) + } +} diff --git a/registry/server/server.go b/registry/server/server.go new file mode 100644 index 0000000..f1d3133 --- /dev/null +++ b/registry/server/server.go @@ -0,0 +1,66 @@ +// Package server contains the Schema registry REST Server configuration and start-up functions. +package server + +import ( + httpSwagger "github.com/swaggo/http-swagger" + "net/http" + "time" + + _ "github.com/dataphos/aquarium-janitor-standalone-sr/docs" + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" +) + +// New sets up the schema registry endpoints. +func New(h *Handler) http.Handler { + router := chi.NewRouter() + + router.Use(middleware.StripSlashes) + router.Use(middleware.RealIP) + router.Use(middleware.Recoverer) + router.Use(middleware.Timeout(30 * time.Second)) + + router.Use(RequestLogger(h.log)) + + router.Route("/schemas", func(router chi.Router) { + router.Get("/", h.GetSchemas) + router.Post("/", h.PostSchema) + router.Get("/all", h.GetAllSchemas) + + router.Route("/{id}", func(router chi.Router) { + router.Delete("/", h.DeleteSchema) + router.Put("/", h.PutSchema) + + router.Route("/versions", func(router chi.Router) { + router.Get("/", h.GetSchemaVersionsById) + router.Get("/latest", h.GetLatestSchemaVersionById) + router.Get("/all", h.GetAllSchemaVersionsById) + + router.Route("/{version}", func(router chi.Router) { + router.Get("/", h.GetSchemaVersionByIdAndVersion) + router.Delete("/", h.DeleteSchemaVersion) + + router.Route("/spec", func(router chi.Router) { + router.Get("/", h.GetSpecificationByIdAndVersion) + }) + }) + }) + }) + + router.Get("/search", h.SearchSchemas) + }) + + router.Get("/health", h.HealthCheck) + + router.Post("/check/compatibility", h.SchemaCompatibility) + router.Get("/check/compatibility/health", h.HealthCheck) + + router.Post("/check/validity", h.SchemaValidity) + router.Get("/check/validity/health", h.HealthCheck) + + router.Get("/swagger/*", httpSwagger.Handler( + httpSwagger.URL("http://localhost:8080/swagger/doc.json"), //The url pointing to API definition + )) + + return router +} diff --git a/registry/server/util.go b/registry/server/util.go new file mode 100644 index 0000000..c441c46 --- /dev/null +++ b/registry/server/util.go @@ -0,0 +1,81 @@ +package server + +import ( + "encoding/json" + "io" + "net/http" + + "github.com/dataphos/aquarium-janitor-standalone-sr/registry" +) + +type responseBodyAndCode struct { + Body []byte + Code int +} + +func writeResponse(w http.ResponseWriter, response responseBodyAndCode) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(response.Code) + _, _ = w.Write(response.Body) +} + +func serializeErrorMessage(message string) []byte { + encoded, _ := json.Marshal(report{Message: message}) + return encoded +} + +func readSchemaRegisterRequest(body io.ReadCloser) (registry.SchemaRegistrationRequest, error) { + encoded, err := io.ReadAll(body) + if err != nil { + return registry.SchemaRegistrationRequest{}, err + } + + var schemaRegisterRequest registry.SchemaRegistrationRequest + if err = json.Unmarshal(encoded, &schemaRegisterRequest); err != nil { + return registry.SchemaRegistrationRequest{}, err + } + + return schemaRegisterRequest, nil +} + +func readSchemaUpdateRequest(body io.ReadCloser) (registry.SchemaUpdateRequest, error) { + 
encoded, err := io.ReadAll(body) + if err != nil { + return registry.SchemaUpdateRequest{}, err + } + + var schemaUpdateRequest registry.SchemaUpdateRequest + if err = json.Unmarshal(encoded, &schemaUpdateRequest); err != nil { + return registry.SchemaUpdateRequest{}, err + } + + return schemaUpdateRequest, nil +} + +func readSchemaCompatibilityRequest(body io.ReadCloser) (registry.SchemaCompatibilityRequest, error) { + encoded, err := io.ReadAll(body) + if err != nil { + return registry.SchemaCompatibilityRequest{}, err + } + + var schemaCompatibilityRequest registry.SchemaCompatibilityRequest + if err = json.Unmarshal(encoded, &schemaCompatibilityRequest); err != nil { + return registry.SchemaCompatibilityRequest{}, err + } + + return schemaCompatibilityRequest, nil +} + +func readSchemaValidityRequest(body io.ReadCloser) (registry.SchemaValidityRequest, error) { + encoded, err := io.ReadAll(body) + if err != nil { + return registry.SchemaValidityRequest{}, err + } + + var schemaValidityRequest registry.SchemaValidityRequest + if err = json.Unmarshal(encoded, &schemaValidityRequest); err != nil { + return registry.SchemaValidityRequest{}, err + } + + return schemaValidityRequest, nil +} diff --git a/registry/validity/checker.go b/registry/validity/checker.go new file mode 100644 index 0000000..35a400b --- /dev/null +++ b/registry/validity/checker.go @@ -0,0 +1,11 @@ +package validity + +type Checker interface { + Check(schema, schemaType, mode string) (bool, error) +} + +type CheckerFunc func(schema, schemaType, mode string) (bool, error) + +func (f CheckerFunc) Check(schema, schemaType, mode string) (bool, error) { + return f(schema, schemaType, mode) +} diff --git a/registry/validity/checker_test.go b/registry/validity/checker_test.go new file mode 100644 index 0000000..82f6842 --- /dev/null +++ b/registry/validity/checker_test.go @@ -0,0 +1,89 @@ +package validity + +import ( + "context" + "encoding/json" + "os" + "path/filepath" + "runtime" + "testing" + "time" +) + +func TestNewExternalChecker(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + _, err := NewExternalChecker(ctx, "http://localhost:8089", 1*time.Second) + + if err != nil { + t.Fatal(err) + } +} + +func TestValidityChecker_Check(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + checker, err := NewExternalCheckerFromEnv(ctx) + if err != nil { + t.Fatal(err) + } + + type Data struct { + Schema string + } + + tt := []struct { + name string + schemaFilename string + schemaType string + validity string + valid bool + }{ + {"valid_json_syntax1", "valid_json_syntax1.json", "json", "syntax-only", true}, + {"invalid_json_syntax1", "invalid_json_syntax1.json", "json", "syntax-only", false}, + {"invalid_json_full1", "invalid_json_full1.json", "json", "full", false}, + {"valid_avro_full1", "valid_avro_full1.json", "avro", "full", true}, + {"invalid_avro_full1", "invalid_avro_full1.json", "avro", "full", false}, + {"invalid_avro_syntax1", "invalid_avro_syntax1.json", "avro", "syntax-only", false}, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + content, err := os.ReadFile(filepath.Join(testdataDir, tc.schemaFilename)) + if err != nil { + t.Error(err) + } + + var payload Data + err = 
json.Unmarshal(content, &payload) + if err != nil { + t.Error(err) + } + + newSchema := payload.Schema + + valid, err := checker.Check(newSchema, tc.schemaType, tc.validity) + if err != nil { + t.Errorf("validity error: %s", err) + } + if valid != tc.valid { + if valid { + t.Errorf("message valid, invalid expected") + } else { + t.Errorf("message invalid, valid expected") + } + } + }) + } +} diff --git a/registry/validity/external/validity-checker/lib/apicurio-registry-common-2.2.5.Final.jar b/registry/validity/external/validity-checker/lib/apicurio-registry-common-2.2.5.Final.jar new file mode 100644 index 0000000..f30d04c Binary files /dev/null and b/registry/validity/external/validity-checker/lib/apicurio-registry-common-2.2.5.Final.jar differ diff --git a/registry/validity/external/validity-checker/lib/apicurio-registry-protobuf-schema-utilities-2.2.5.Final.jar b/registry/validity/external/validity-checker/lib/apicurio-registry-protobuf-schema-utilities-2.2.5.Final.jar new file mode 100644 index 0000000..45aee9f Binary files /dev/null and b/registry/validity/external/validity-checker/lib/apicurio-registry-protobuf-schema-utilities-2.2.5.Final.jar differ diff --git a/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-avro-2.2.5.Final.jar b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-avro-2.2.5.Final.jar new file mode 100644 index 0000000..ac51f0c Binary files /dev/null and b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-avro-2.2.5.Final.jar differ diff --git a/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-common-2.2.5.Final.jar b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-common-2.2.5.Final.jar new file mode 100644 index 0000000..afdc31b Binary files /dev/null and b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-common-2.2.5.Final.jar differ diff --git a/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-json-2.2.5.Final.jar b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-json-2.2.5.Final.jar new file mode 100644 index 0000000..a2a60e9 Binary files /dev/null and b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-json-2.2.5.Final.jar differ diff --git a/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-protobuf-2.2.5.Final.jar b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-protobuf-2.2.5.Final.jar new file mode 100644 index 0000000..9c77d52 Binary files /dev/null and b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-protobuf-2.2.5.Final.jar differ diff --git a/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-xml-2.2.5.Final.jar b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-xml-2.2.5.Final.jar new file mode 100644 index 0000000..be20d45 Binary files /dev/null and b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-xml-2.2.5.Final.jar differ diff --git a/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-xsd-2.2.5.Final.jar b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-xsd-2.2.5.Final.jar new file mode 100644 index 0000000..c7920e0 Binary files /dev/null and b/registry/validity/external/validity-checker/lib/apicurio-registry-schema-util-xsd-2.2.5.Final.jar differ diff --git 
a/registry/validity/external/validity-checker/pom.xml b/registry/validity/external/validity-checker/pom.xml new file mode 100644 index 0000000..b31b76f --- /dev/null +++ b/registry/validity/external/validity-checker/pom.xml @@ -0,0 +1,250 @@ + + + 4.0.0 + + org.example + Validity + 1.0-SNAPSHOT + + + org.springframework.boot + spring-boot-starter-parent + 2.4.4 + + + + + 11 + 11 + 3.0.2 + 6.0.4 + 10.1.5 + + + + + apicurio + apicurio + https://mvnrepository.com/artifact/io.apicurio + + + + + + + org.apache.tomcat.embed + tomcat-embed-core + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-el + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-websocket + ${tomcat.version} + + + org.apache.tomcat + tomcat-annotations-api + ${tomcat.version} + + + + org.projectlombok + lombok + 1.18.20 + + + com.fasterxml.jackson.core + jackson-core + 2.13.4 + + + com.fasterxml.jackson.core + jackson-annotations + 2.13.4 + + + com.fasterxml.jackson.core + jackson-databind + 2.13.4.1 + + + + org.springframework + spring-beans + ${springframework.version} + + + org.springframework + spring-core + ${springframework.version} + + + org.springframework + spring-webmvc + ${springframework.version} + + + org.springframework + spring-web + ${springframework.version} + + + org.springframework + spring-aop + ${springframework.version} + + + org.springframework + spring-context + ${springframework.version} + + + org.springframework + spring-expression + ${springframework.version} + + + org.springframework + spring-jcl + ${springframework.version} + + + org.springframework + spring-test + ${springframework.version} + + + + com.fasterxml.woodstox + woodstox-core + 6.4.0 + + + + + org.springframework.boot + spring-boot-starter-test + test + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-web + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-tomcat + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-validation + ${springframework.boot.version} + + + org.springframework.boot + spring-boot + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-autoconfigure + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-json + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-logging + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-test + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-test-autoconfigure + ${springframework.boot.version} + + + + org.yaml + snakeyaml + 2.0 + + + + com.google.protobuf + protobuf-java + 3.21.9 + + + + io.apicurio + apicurio-registry-schema-util-common + 2.3.1.Final + + + io.apicurio + apicurio-registry-schema-util-protobuf + 2.3.1.Final + + + io.apicurio + apicurio-registry-schema-util-avro + 2.3.1.Final + + + io.apicurio + apicurio-registry-schema-util-json + 2.3.1.Final + + + io.apicurio + apicurio-registry-schema-util-xsd + 2.3.1.Final + + + org.json + json + 20230227 + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + org.apache.maven.plugins + maven-compiler-plugin + + 14 + 14 + + + + validity-checker + + diff --git a/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/Message.java b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/Message.java new file mode 
100644 index 0000000..c9d473e --- /dev/null +++ b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/Message.java @@ -0,0 +1,25 @@ +package net.syntio.validity; + +public class Message { + private final String schemaType; + private final String schema; + private final String validityLevel; + + public Message(String schemaType, String schema, String validityLevel) { + this.schemaType = schemaType; + this.schema = schema; + this.validityLevel = validityLevel; + } + + public String getSchemaType() { + return schemaType; + } + + public String getSchema() { + return schema; + } + + public String getValidityLevel() { + return validityLevel; + } +} diff --git a/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/SchemaTypes.java b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/SchemaTypes.java new file mode 100644 index 0000000..df2ee12 --- /dev/null +++ b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/SchemaTypes.java @@ -0,0 +1,9 @@ +package net.syntio.validity; + +public class SchemaTypes { + public static final String JSON = "json"; + public static final String AVRO = "avro"; + public static final String PROTOBUF = "protobuf"; + public static final String XML = "xml"; + +} diff --git a/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/ValidatorFactory.java b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/ValidatorFactory.java new file mode 100644 index 0000000..626847d --- /dev/null +++ b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/ValidatorFactory.java @@ -0,0 +1,20 @@ +package net.syntio.validity; + +import io.apicurio.registry.rules.validity.AvroContentValidator; +import io.apicurio.registry.rules.validity.ContentValidator; +import io.apicurio.registry.rules.validity.JsonSchemaContentValidator; +import io.apicurio.registry.rules.validity.ProtobufContentValidator; +import io.apicurio.registry.rules.validity.XsdContentValidator; + +public class ValidatorFactory { + public static ContentValidator createValidator(String schema) { + return switch (schema) { + case SchemaTypes.JSON -> new JsonSchemaContentValidator(); + case SchemaTypes.AVRO -> new AvroContentValidator(); + case SchemaTypes.PROTOBUF -> new ProtobufContentValidator(); + case SchemaTypes.XML -> new XsdContentValidator(); + default -> null; + }; + } + +} diff --git a/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/ValidityCheckerApplication.java b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/ValidityCheckerApplication.java new file mode 100644 index 0000000..aa3eb01 --- /dev/null +++ b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/ValidityCheckerApplication.java @@ -0,0 +1,16 @@ +package net.syntio.validity; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +import java.util.Collections; + +@SpringBootApplication +public class ValidityCheckerApplication { + public static void main(String[] args) { + SpringApplication app = new SpringApplication(ValidityCheckerApplication.class); + app.setDefaultProperties(Collections.singletonMap("server.port", "8089")); + app.run(args); + } + +} diff --git a/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/checker/Checker.java 
b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/checker/Checker.java new file mode 100644 index 0000000..27baf0c --- /dev/null +++ b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/checker/Checker.java @@ -0,0 +1,30 @@ +package net.syntio.validity.checker; + +import io.apicurio.registry.content.ContentHandle; +import io.apicurio.registry.rules.RuleViolationException; +import io.apicurio.registry.rules.validity.ContentValidator; +import io.apicurio.registry.rules.validity.ValidityLevel; + +import net.syntio.validity.ValidatorFactory; + +import java.util.Collections; + +public class Checker { + public static boolean checkValidity(String schemaType, String schema, String mode) { + ValidityLevel valLevel = switch (mode.toLowerCase()) { + case "syntax-only" -> ValidityLevel.SYNTAX_ONLY; + case "full" -> ValidityLevel.FULL; + default -> ValidityLevel.NONE; + }; + + ContentValidator validator = ValidatorFactory.createValidator(schemaType); + ContentHandle contentHandle = ContentHandle.create(schema); + try { + validator.validate(valLevel, contentHandle, Collections.emptyMap()); + return true; + } catch (RuleViolationException e) { + return false; + } + } + +} diff --git a/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/controller/CheckerController.java b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/controller/CheckerController.java new file mode 100644 index 0000000..1b76ae6 --- /dev/null +++ b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/controller/CheckerController.java @@ -0,0 +1,44 @@ +package net.syntio.validity.controller; + +import net.syntio.validity.Message; +import net.syntio.validity.checker.Checker; +import net.syntio.validity.dto.CheckRequestDto; +import net.syntio.validity.dto.CheckResponseDto; + +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RestController; + +@RestController +public class CheckerController { + @PostMapping(value = "/") + public ResponseEntity check(@RequestBody CheckRequestDto req) { + Message payload = req.getMessage(); + + try { + String schemaType = payload.getSchemaType(); + String schema = payload.getSchema(); + String mode = payload.getValidityLevel(); + + boolean result = Checker.checkValidity(schemaType, schema, mode); + CheckResponseDto res = new CheckResponseDto(result); + if (result) { + res.setInfo("Schema is valid"); + return ResponseEntity.ok(res); + } + res.setInfo("Schema is invalid"); + return ResponseEntity.ok(res); + + } catch (Exception e) { + return ResponseEntity.badRequest().build(); + } + } + + @GetMapping(value = "/health") + public ResponseEntity healthCheck() { + return ResponseEntity.ok().build(); + } + +} diff --git a/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/dto/CheckRequestDto.java b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/dto/CheckRequestDto.java new file mode 100644 index 0000000..8819bf6 --- /dev/null +++ b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/dto/CheckRequestDto.java @@ -0,0 +1,16 @@ +package net.syntio.validity.dto; + +import net.syntio.validity.Message; + +public class CheckRequestDto { + private final Message message; + + public 
CheckRequestDto(String schema, String format, String mode) { + this.message = new Message(format, schema, mode); + } + + public Message getMessage() { + return this.message; + } + +} diff --git a/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/dto/CheckResponseDto.java b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/dto/CheckResponseDto.java new file mode 100644 index 0000000..143415d --- /dev/null +++ b/registry/validity/external/validity-checker/src/main/java/net/syntio/validity/dto/CheckResponseDto.java @@ -0,0 +1,22 @@ +package net.syntio.validity.dto; + +public class CheckResponseDto { + private final boolean result; + private String info; + + public CheckResponseDto(boolean result) { + this.result = result; + } + + public boolean getResult() { + return result; + } + + public String getInfo() { + return info; + } + + public void setInfo(String info) { + this.info = info; + } +} diff --git a/registry/validity/externalChecker.go b/registry/validity/externalChecker.go new file mode 100644 index 0000000..3ff55d2 --- /dev/null +++ b/registry/validity/externalChecker.go @@ -0,0 +1,139 @@ +package validity + +import ( + "context" + "encoding/json" + "encoding/xml" + "fmt" + "io" + "os" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/dataphos/aquarium-janitor-standalone-sr/internal/errtemplates" + "github.com/dataphos/aquarium-janitor-standalone-sr/validity/http" + "github.com/dataphos/lib-httputil/pkg/httputil" + "github.com/dataphos/lib-retry/pkg/retry" +) + +const ( + urlEnvKey = "VALIDITY_CHECKER_URL" + timeoutEnvKey = "VALIDITY_CHECKER_TIMEOUT_BASE" + globalValidityMode = "GLOBAL_VALIDITY_MODE" +) + +const ( + DefaultTimeoutBase = 2 * time.Second + defaultGlobalValidityMode = "FULL" +) + +type ExternalChecker struct { + url string + TimeoutBase time.Duration +} + +// NewExternalCheckerFromEnv loads the needed environment variables and calls NewExternalChecker. +func NewExternalCheckerFromEnv(ctx context.Context) (*ExternalChecker, error) { + url := os.Getenv(urlEnvKey) + if url == "" { + return nil, errtemplates.EnvVariableNotDefined(urlEnvKey) + } + timeout := DefaultTimeoutBase + if timeoutStr := os.Getenv(timeoutEnvKey); timeoutStr != "" { + var err error + timeout, err = time.ParseDuration(timeoutStr) + if err != nil { + return nil, errors.Wrap(err, errtemplates.ParsingEnvVariableFailed(timeoutEnvKey)) + } + } + + return NewExternalChecker(ctx, url, timeout) +} + +// NewExternalChecker returns a new instance of ExternalChecker. 
+func NewExternalChecker(ctx context.Context, url string, timeoutBase time.Duration) (*ExternalChecker, error) { + if err := retry.Do(ctx, retry.WithJitter(retry.Constant(2*time.Second)), func(ctx context.Context) error { + return httputil.HealthCheck(ctx, url+"/health") + }); err != nil { + return nil, errors.Wrapf(err, "attempting to reach validity checker at %s failed", url) + } + + return &ExternalChecker{ + url: url, + TimeoutBase: timeoutBase, + }, nil +} + +func (c *ExternalChecker) Check(schema, schemaType, mode string) (bool, error) { + //check if validity mode is none, if it is, don't send HTTP request to java code + if strings.ToLower(mode) == "none" { + return true, nil + } + if strings.ToLower(mode) == "syntax-only" || strings.ToLower(mode) == "full" { + internalCheck, err := internalCheck(schema, schemaType) + if err != nil { + return false, err + } + if !internalCheck { + return false, nil + } + } + + size := []byte(schema + schemaType + mode) + + ctx, cancel := context.WithTimeout(context.Background(), http.EstimateHTTPTimeout(len(size), c.TimeoutBase)) + defer cancel() + + return http.CheckOverHTTP(ctx, schemaType, schema, mode, c.url+"/") +} + +func InitExternalValidityChecker(ctx context.Context) (*ExternalChecker, string, error) { + valChecker, err := NewExternalCheckerFromEnv(ctx) + if err != nil { + return nil, "", err + } + globalValMode := os.Getenv(globalValidityMode) + if globalValMode == "" { + globalValMode = defaultGlobalValidityMode + } + if globalValMode == "SYNTAX-ONLY" || globalValMode == "FULL" || globalValMode == "NONE" { + return valChecker, globalValMode, nil + } + return nil, "", errors.Errorf("unsupported validity mode") +} + +func internalCheck(schema, schemaType string) (bool, error) { + switch schemaType { + case "json", "avro": + return json.Valid([]byte(schema)), nil + case "xml": + return IsValidXML(schema), nil + case "protobuf": //since there is no builtin protobuf validator, we assume schema is valid and propagate validation to external checker + return true, nil + default: + return false, fmt.Errorf("the schemaType is unavailiable") + } +} + +func IsValidXML(input string) bool { + decoder := xml.NewDecoder(strings.NewReader(input)) + for { + err := decoder.Decode(new(interface{})) + if err != nil { + return err == io.EOF + } + } +} + +func CheckIfValidMode(mode *string) bool { + if *mode == "" { + *mode = defaultGlobalValidityMode + } + lowerMode := strings.ToLower(*mode) + if lowerMode != "none" && lowerMode != "syntax-only" && lowerMode != "full" { + return false + } + return true +} diff --git a/registry/validity/http/http.go b/registry/validity/http/http.go new file mode 100644 index 0000000..0979287 --- /dev/null +++ b/registry/validity/http/http.go @@ -0,0 +1,91 @@ +package http + +import ( + "bytes" + "context" + "encoding/json" + "github.com/dataphos/lib-httputil/pkg/httputil" + "io" + "math" + "net/http" + "time" + + "github.com/pkg/errors" +) + +// checkRequest contains a new schema, its type and a validity mode which should be enforced. The structure represents an HTTP request body. +type checkRequest struct { + Schema string `json:"schema"` + Format string `json:"format"` + Mode string `json:"mode"` +} + +// checkResponse contains the validity result and an info message. The structure represents an HTTP response body. +type checkResponse struct { + Result bool `json:"result"` + Info string `json:"info"` +} + +// HTTPTimeoutBytesUnit the base amount of bytes used by EstimateHTTPTimeout. 
+const HTTPTimeoutBytesUnit = 1024 * 100 + +// EstimateHTTPTimeout calculates the expected timeout, by dividing the size given in bytes with HTTPTimeoutBytesUnit, and then +// multiplying the coefficient with the given time duration. +// +// If the given size is less than HTTPTimeoutBytesUnit, base is returned, to avoid problems due to the http overhead which isn't fully linear. +func EstimateHTTPTimeout(size int, base time.Duration) time.Duration { + coef := int(math.Round(float64(size) / float64(HTTPTimeoutBytesUnit))) + if coef <= 1 { + return base + } + + return time.Duration(coef) * base +} + +// CheckOverHTTP requests a schema check over HTTP. +// Function returns false if schema isn't valid. +func CheckOverHTTP(ctx context.Context, schemaType, schema, mode, url string) (bool, error) { + response, err := sendCheckRequest(ctx, schemaType, schema, mode, url) + if err != nil { + return false, err + } + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + panic(errors.New("couldn't close response body")) + } + }(response.Body) + + body, err := io.ReadAll(response.Body) + if err != nil { + return false, err + } + + var parsedBody checkResponse + if err = json.Unmarshal(body, &parsedBody); err != nil { + return false, err + } + + valid := parsedBody.Result + + switch response.StatusCode { + case http.StatusOK: + return valid, nil + case http.StatusBadRequest: + return valid, nil + default: + return valid, errors.Errorf("error: status code [%v]", response.StatusCode) + } +} + +func sendCheckRequest(ctx context.Context, schemaType, schema, mode, url string) (*http.Response, error) { + // this can't generate an error, so it's safe to ignore + data, _ := json.Marshal(checkRequest{Schema: schema, Format: schemaType, Mode: mode}) + + request, err := httputil.Post(ctx, url, "application/json", bytes.NewBuffer(data)) + if err != nil { + return nil, err + } + + return http.DefaultClient.Do(request) +} diff --git a/registry/validity/testdata/invalid_avro_full1.json b/registry/validity/testdata/invalid_avro_full1.json new file mode 100644 index 0000000..8e9758b --- /dev/null +++ b/registry/validity/testdata/invalid_avro_full1.json @@ -0,0 +1,3 @@ +{ + "schema": "{\n \"type\" : \"enu\",\n \"name\" : \"Numbers\", \n \"namspace\": \"data\", \n \"symbols\" : [ \"ONE\", \"TWO\", \"THREE\", \"FOUR\" ]\n}" +} diff --git a/registry/validity/testdata/invalid_avro_syntax1.json b/registry/validity/testdata/invalid_avro_syntax1.json new file mode 100644 index 0000000..a110035 --- /dev/null +++ b/registry/validity/testdata/invalid_avro_syntax1.json @@ -0,0 +1,3 @@ +{ + "schema": "\n \"type\" : \"enum\",\n \"name\" : \"Numbers\", \n \"namspace\": \"data\", \n \"symbols\" : [ \"ONE\", \"TWO\", \"THREE\", \"FOUR\" ]\n" +} diff --git a/registry/validity/testdata/invalid_json_full1.json b/registry/validity/testdata/invalid_json_full1.json new file mode 100644 index 0000000..e7a4e8d --- /dev/null +++ b/registry/validity/testdata/invalid_json_full1.json @@ -0,0 +1,3 @@ +{ + "schema": "{\n \"$schema\": \"http://json-schema.org/draft-07/schema\",\n \"type\": \"object\",\n \"title222\": \"The Root Schema\",\n \"description\": \"The root schema comprises the entire JSON document.\",\n \"default\": {},\n \"additionalProperties\": \"1\",\n \"required\": [\n \"phone\",\n \"room\"\n ],\n \"properties\": {\n \"phone\": {\n \"type\": \"integer\",\n \"title\": \"The Phone Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 23541\n ]\n 
}\n }\n}" +} diff --git a/registry/validity/testdata/invalid_json_syntax1.json b/registry/validity/testdata/invalid_json_syntax1.json new file mode 100644 index 0000000..2c9acb2 --- /dev/null +++ b/registry/validity/testdata/invalid_json_syntax1.json @@ -0,0 +1,3 @@ +{ + "schema": "\n \"$schema\": \"http://json-schema.org/draft-07/schema\",\n \"type\": \"object\",\n \"title222\": \"The Root Schema\",\n \"description\": \"The root schema comprises the entire JSON document.\",\n \"default\": {},\n \"additionalProperties\": true,\n \"required\": [\n \"phone\",\n \"room\"\n ],\n \"properties\": {\n \"phone\": {\n \"type\": \"integer\",\n \"title\": \"The Phone Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 23541\n ]\n }\n " +} diff --git a/registry/validity/testdata/valid_avro_full1.json b/registry/validity/testdata/valid_avro_full1.json new file mode 100644 index 0000000..02aeef2 --- /dev/null +++ b/registry/validity/testdata/valid_avro_full1.json @@ -0,0 +1,3 @@ +{ + "schema": "{\n \"type\" : \"enum\",\n \"name\" : \"Numbers\", \n \"namespace\": \"data\", \n \"symbols\" : [ \"ONE\", \"TWO\", \"THREE\", \"FOUR\" ]\n}" +} diff --git a/registry/validity/testdata/valid_json_syntax1.json b/registry/validity/testdata/valid_json_syntax1.json new file mode 100644 index 0000000..c561478 --- /dev/null +++ b/registry/validity/testdata/valid_json_syntax1.json @@ -0,0 +1,3 @@ +{ + "schema": "{\n \"$schema\": \"http://json-schema.org/draft-07/schema\",\n \"type\": \"object\",\n \"title222\": \"The Root Schema\",\n \"description\": \"The root schema comprises the entire JSON document.\",\n \"default\": {},\n \"additionalProperties\": true,\n \"required\": [\n \"phone\",\n \"room\"\n ],\n \"properties\": {\n \"phone\": {\n \"type\": \"integer\",\n \"title\": \"The Phone Schema\",\n \"description\": \"An explanation about the purpose of this instance.\",\n \"default\": \"\",\n \"examples\": [\n 23541\n ]\n }\n }\n}" +} diff --git a/validator/README.md b/validator/README.md new file mode 100644 index 0000000..55cf4f2 --- /dev/null +++ b/validator/README.md @@ -0,0 +1,270 @@ +# Dataphos Schema Registry - Worker component + +Repository of the Dataphos Schema Registry Worker. + +## Worker + +The Worker is deployed as a deployment on a Kubernetes cluster and performs the following: + +- Message schema retrieval (and caching) from the Registry using message metadata +- Input message validation using the retrieved schema +- Input message transmission depending on its validation result + +Before the producer starts sending messages their schema needs to be registered in the database, whether it is an +entirely new schema or a new version of an existing one. Each of the messages being sent to the input topic needs to +have its metadata enriched with the schema information, which includes the ID, version and the message format. + +The role of the Worker component is to filter the messages being pushed from the input topic based on the metadata +attributes and route them to their destination. It does so with the help of the Registry component. + +If the schema is registered in the database, the request sent to the Registry will return the schema specification and +the message can be successfully validated and routed to a topic for valid messages. In case of validation failure, the +message will be routed to a topic for dead letter messages. 
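+
+For illustration, the snippet below is a minimal sketch of a producer that enriches a GCP Pub/Sub message with the
+schema metadata before publishing it to the input topic. The project ID, topic name, schema id/version and payload are
+placeholders; the attribute names follow the Message format section below.
+
+```go
+package main
+
+import (
+    "context"
+    "log"
+
+    "cloud.google.com/go/pubsub"
+)
+
+func main() {
+    ctx := context.Background()
+
+    // Placeholder project and topic names.
+    client, err := pubsub.NewClient(ctx, "dataphos-project")
+    if err != nil {
+        log.Fatal(err)
+    }
+    defer client.Close()
+
+    // The Worker reads schemaId, versionId and format from the message attributes,
+    // fetches the matching schema from the Registry and validates the payload.
+    result := client.Topic("input-topic").Publish(ctx, &pubsub.Message{
+        Data: []byte(`{"phone": 23541, "room": "A113"}`),
+        Attributes: map[string]string{
+            "schemaId":  "1",
+            "versionId": "1",
+            "format":    "json",
+        },
+    })
+    if _, err := result.Get(ctx); err != nil {
+        log.Fatal(err)
+    }
+}
+```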
+
+Message brokers supported with the Worker component are:
+
+- GCP Pub/Sub
+- Azure ServiceBus
+- Azure Event Hubs
+- Apache Kafka
+- Apache Pulsar
+- NATS JetStream
+
+The Schema Registry also allows producers and consumers to use different protocols, which effectively enables
+protocol conversion. For example, a producer can publish messages using the Kafka protocol while a consumer
+consumes them using the Pub/Sub protocol.
+
+Given a data schema and the data itself, the validators can determine whether the data conforms to the schema. Supported
+data types are:
+
+- JSON
+- AVRO
+- Protocol Buffers
+- XML
+- CSV
+
+Instead of logging metrics to standard output, the Worker component has Prometheus support for monitoring and alerting.
+
+
+## Getting Started
+### Prerequisites
+
+Schema Registry components run in a Kubernetes environment. This quickstart guide assumes that you have
+the ```kubectl``` tool installed, a running Kubernetes cluster on one of the major cloud providers (GCP, Azure) and a
+connection to the cluster. The Kubernetes cluster node(s) should have at least 8 GB of available RAM.
+
+Schema Registry has multiple message broker options. This quickstart guide assumes that the publishing message
+broker and the consuming message broker will be either GCP Pub/Sub, Azure ServiceBus or Kafka, and that you have
+created:
+
+- (in case of GCP Pub/Sub) a service account JSON key with the appropriate roles (Pub/Sub Publisher, Pub/Sub Subscriber)
+  - [link to create a service account](https://cloud.google.com/iam/docs/service-accounts-create#iam-service-accounts-create-console)
+  - [link to create a JSON key](https://cloud.google.com/iam/docs/keys-create-delete)
+- (in case of Azure ServiceBus) a ServiceBus connection string
+- (in case of Kafka) a Kafka broker
+  - [link to create a Kafka broker on Kubernetes](https://strimzi.io/docs/operators/0.30.0/quickstart.html)
+- An input topic and subscription[^1] (The input topic refers to the topic that contains the data in its original
+format)
+- Valid topic and subscription[^1] (The valid topic refers to the topic where the data is stored after being validated
+and serialized using a specific schema)
+- Dead-letter topic and subscription[^1] (The dead-letter topic refers to the topic where messages that could not be processed
+by a consumer are stored for troubleshooting and analysis purposes)
+- (optional) Prometheus server for gathering the metrics and monitoring the logs
+  - can be deployed using the ```./scripts/prometheus.sh``` command from the content root
+
+[^1]: In case of Kafka, no subscription is required.
+
+> **_NOTE:_** All the deployment scripts are located in the ```./scripts``` folder from the content root.
+
+---
+
+#### Namespace
+Before deploying the Schema Registry, the namespace where the components will be deployed should be created if it
+doesn't already exist.
+
+---
+Open a command line tool of your choice and connect to your cluster. Create the namespace where the Schema Registry will be
+deployed. We will use the namespace "dataphos" in this quickstart guide.
+
+```bash
+kubectl create namespace dataphos
+```
+
+### Quick Start
+
+Deploy the Schema Registry Worker component using the following script.
The required arguments are:

- the namespace
- Schema History Postgres password

#### GCP Deployment

The required arguments are:

- the namespace
- Producer Pub/Sub valid topic ID
- Producer Pub/Sub dead-letter topic ID
- name of the message type used by this worker (json, avro, protobuf, csv, xml)
- Consumer GCP Project ID
- Consumer Pub/Sub Subscription ID (created beforehand)
- Producer GCP Project ID

The script is located in the ```./scripts/sr-worker/``` folder from the content root. To run the script, run the following command:

```
# "dataphos" is an example of the namespace name
# "valid-topic" is an example of the valid topic name
# "dead-letter-topic" is an example of the dead-letter topic name
# "json" is an example of the message format name (needs to be either "json", "avro", "csv", "xml", "protobuf")
# "dataphos-project" is an example of the consumer GCP project ID
# "input-topic-sub" is an example of the input topic subscription name
# "dataphos-project" is an example of the producer GCP project ID

./sr-worker-pubsub.sh "dataphos" "valid-topic" "dead-letter-topic" "json" "dataphos-project" "input-topic-sub" "dataphos-project"
```

#### Azure (ServiceBus) Deployment

The required arguments are:

- the namespace
- Producer ServiceBus valid topic ID
- Producer ServiceBus dead-letter topic ID
- name of the message type used by this worker (json, avro, protobuf, csv, xml)
- Consumer ServiceBus Connection String
- Consumer ServiceBus Topic
- Consumer ServiceBus Subscription
- Producer ServiceBus Connection String

The script is located in the ```./scripts/sr-worker/``` folder from the content root. To run the script, run the following command:

```
# "dataphos" is an example of the namespace name
# "valid-topic" is an example of the valid topic name
# "dead-letter-topic" is an example of the dead-letter topic name
# "json" is an example of the message format name (needs to be either "json", "avro", "csv", "xml", "protobuf")
# "Endpoint=sb://foo.servicebus.windows.net/;SharedAccessKeyName=someKeyName;SharedAccessKey=someKeyValue" is an example of the consumer ServiceBus connection string (https://azurelessons.com/azure-service-bus-connection-string/)
# "input-topic" is an example of the input topic name
# "input-topic-sub" is an example of the input topic subscription name
# "Endpoint=sb://foo.servicebus.windows.net/;SharedAccessKeyName=someKeyName;SharedAccessKey=someKeyValue" is an example of the producer ServiceBus connection string (https://azurelessons.com/azure-service-bus-connection-string/)

./sr-worker-servicebus.sh "dataphos" "valid-topic" "dead-letter-topic" "json" "Endpoint=sb://foo.servicebus.windows.net/;SharedAccessKeyName=someKeyName;SharedAccessKey=someKeyValue" "input-topic" "input-topic-sub" "Endpoint=sb://foo.servicebus.windows.net/;SharedAccessKeyName=someKeyName;SharedAccessKey=someKeyValue"
```

#### Kafka Deployment (Platform agnostic)

The required arguments are:

- the namespace
- Producer Kafka valid topic ID
- Producer Kafka dead-letter topic ID
- name of the message type used by this worker (json, avro, protobuf, csv, xml)
- Consumer Kafka bootstrap server address
- Consumer Kafka Topic
- Consumer Kafka Group ID
- Producer Kafka bootstrap server address

The script is located in the ```./scripts/sr-worker/``` folder from the content root.
To run the script, run the following command:

```
# "dataphos" is an example of the namespace name
# "valid-topic" is an example of the valid topic name
# "dead-letter-topic" is an example of the dead-letter topic name
# "json" is an example of the message format name (needs to be either "json", "avro", "csv", "xml", "protobuf")
# "127.0.0.1:9092" is an example of the consumer Kafka bootstrap server address
# "input-topic" is an example of the input topic name
# "group01" is an example of the input topic group ID
# "127.0.0.1:9092" is an example of the producer Kafka bootstrap server address

./sr-worker-kafka.sh "dataphos" "valid-topic" "dead-letter-topic" "json" "127.0.0.1:9092" "input-topic" "group01" "127.0.0.1:9092"
```

## Usage

### Message format

Depending on the technology your producer uses, the way you shape the message may differ, and the part of the message that contains the metadata might therefore be called ```attributes```, ```metadata```, etc.

Besides the data field, which contains the message data, it is important to add the fields ```schemaId```, ```versionId``` and ```format``` inside the attributes (or metadata) structure, since the Worker component relies on them. Any additional attributes are not lost; the Worker forwards them to the destination topic.

### GCP Pub/Sub

```
{
    "ID": string,
    "Data": string,
    "Attributes": {
        schemaId: string,
        versionId: string,
        format: string,
        ...
    },
    "PublishTime": time,
}
```

| Field | Description |
|------------|-------------|
| Data | **string** (bytes format)<br><br>The message data field. If this field is empty, the message must contain at least one attribute.<br><br>A base64-encoded string. |
| Attributes | **map** (key: string, value: string)<br><br>Attributes for this message. If this field is empty, the message must contain non-empty data. This can be used to filter messages on the subscription.<br><br>An object containing a list of "key": value pairs. Example: { "schemaId": "1", "versionId": "2", "format": "json" }. |
| PublishTime | **time** (time.Time format)<br><br>PublishTime is the time at which the message was published. This is populated by the server for Messages obtained from a subscription. |

### Azure ServiceBus

```
{
    "MessageID": string,
    "Body": string,
    "PartitionKey": string,
    "ApplicationProperties": {
        schemaId: string,
        versionId: string,
        format: string,
        ...
    },
    EnqueuedTime: time
}
```

| Field | Description |
|------------|-------------|
| Body | **string** (bytes format)<br><br>The message data field. If this field is empty, the message must contain at least one application property. |
| ApplicationProperties | **map** (key: string, value: string)<br><br>Attributes for this message. ApplicationProperties can be used to store custom metadata for a message.<br><br>An object containing a list of "key": value pairs. Example: { "schemaId": "1", "versionId": "2", "format": "json" }. |
| PartitionKey | **string**<br><br>PartitionKey is used with a partitioned entity and enables assigning related messages to the same internal partition, so that the submission sequence order is correctly recorded. The partition is chosen by a hash function in Service Bus and cannot be chosen directly. |
| EnqueuedTime | **time** (time.Time format)<br><br>EnqueuedTime is the UTC time when the message was accepted and stored by Service Bus. |

### Kafka

```
{
    "Key": string,
    "Value": string,
    "Offset": int64,
    "Partition": int32,
    "Headers": {
        schemaId: string,
        versionId: string,
        format: string,
        ...
    },
    Timestamp: time
}
```

| Field | Description |
|------------|-------------|
| Key | **string** (bytes format)<br><br>Key is an optional field that can be used for partition assignment. |
| Value | **string** (bytes format)<br><br>Value is the blob of data to write to Kafka. |
| Offset | **int64**<br><br>Offset is the offset at which the record is written. |
| Partition | **int32**<br><br>Partition is the partition that a record is written to. |
| Headers | **map** (key: string, value: string)<br><br>Headers are optional key/value pairs that are passed along with records.<br><br>Example: { "schemaId": "1", "versionId": "2", "format": "json" }.<br><br>These are purely for producers and consumers; Kafka does not look at this field and only writes it to disk. |
| Timestamp| **time** (time.Time format)

Timestamp is the timestamp that will be used for this record. Record batches are always written with "CreateTime", meaning that timestamps are generated by clients rather than brokers.| + diff --git a/validator/cmd/centralconsumer/main.go b/validator/cmd/centralconsumer/main.go new file mode 100644 index 0000000..6d00957 --- /dev/null +++ b/validator/cmd/centralconsumer/main.go @@ -0,0 +1,13 @@ +package main + +import ( + "flag" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitorctl" +) + +func main() { + configFile := flag.String("f", "", "toml file containing configuration of working environment") + flag.Parse() + + janitorctl.RunCentralConsumer(*configFile) +} diff --git a/validator/cmd/pullercleaner/main.go b/validator/cmd/pullercleaner/main.go new file mode 100644 index 0000000..92e3d35 --- /dev/null +++ b/validator/cmd/pullercleaner/main.go @@ -0,0 +1,13 @@ +package main + +import ( + "flag" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitorctl" +) + +func main() { + configFile := flag.String("f", "", "toml file containing configuration of working environment") + flag.Parse() + + janitorctl.RunPullerCleaner(*configFile) +} diff --git a/validator/config/validator.toml b/validator/config/validator.toml new file mode 100644 index 0000000..98128f5 --- /dev/null +++ b/validator/config/validator.toml @@ -0,0 +1,60 @@ +# configuration for central consumer +mode = "" +schema_id = "" +schema_version = "" +schema_type = "" + +[consumer] +type = "" # insert "kafka", "pubsub", "servicebus" or "jetstream" +encryption_key = "" + +[consumer.kafka] +address = "" # insert +topic = "" # insert +group_id = "" # insert + +[consumer.pubsub] +project_id = "" # insert +subscription_id = "" # insert + +[consumer.servicebus] +connection_string = "" # insert +topic = "" # insert +subscription = "" # insert + +[consumer.jetstream] +url = "" # insert +subject = "" # insert +consumer_name = "" # insert + +[producer] +type = "" # insert "kafka", "eventhubs", "pubsub", "servicebus" or "jetstream" + +[producer.kafka] +address = "" # insert + +[producer.pubsub] +project_id = "" # insert + +[producer.servicebus] +connection_string = "" # insert + +[producer.jetstream] +url = "" # insert + +[topics] +valid = "" # insert +dead_letter = "" # insert + +[registry] +url = "" +type = "janitor" # insert "janitor" or "apicurio" +groupID = "default" + +[validators] +enable_json = "true" +enable_csv = "false" +enable_xml = "false" +enable_protobuf = "false" +csv_url = "http://csv-validator-svc:8080" +xml_url = "http://xml-validator-svc:8081" diff --git a/validator/docker/csv-validator/Dockerfile b/validator/docker/csv-validator/Dockerfile new file mode 100644 index 0000000..a6a051d --- /dev/null +++ b/validator/docker/csv-validator/Dockerfile @@ -0,0 +1,41 @@ +# References the base image for Java 11 and maven +FROM maven:3.8.4-openjdk-17-slim AS build + +# Maintainer Info +LABEL maintainer="Syntio Inc." 
+ +# Copy the source code to a new working directory +COPY validator/internal/validator/external/csv-validator/src /home/app/src + +# Copy the pom.xml to the root of the project +COPY validator/internal/validator/external/csv-validator/pom.xml /home/app + +COPY validator/licenses/csv-validator/LICENSE-3RD-PARTY.md /home/app/licenses/LICENSE-3RD-PARTY.md +COPY LICENSE /home/app/licenses/LICENSE + +# Download dependecies and build +RUN mvn -f /home/app/pom.xml clean package + +# References base image for Java 11 runtime +FROM openjdk:22-ea-17-jdk-slim + +# Copy the binaries in a new working directory +COPY --from=build /home/app/target/csv-validator-0.0.1-SNAPSHOT.jar /home/csv/validator.jar +COPY --from=build /home/app/licenses/LICENSE-3RD-PARTY.md /home/csv/licenses/LICENSE-3RD-PARTY.md +COPY --from=build /home/app/licenses/LICENSE /home/csv/licenses/LICENSE + +# Expose port 8080 to the outside world +EXPOSE 8080 + +# change to a non-root user for security +RUN adduser --disabled-password --home /home/csv user +RUN chown -R user /home/csv +RUN chmod -R 555 /home/csv +USER user + +# Set entrypoint of command that will run when container is started +ENTRYPOINT ["java","-jar","/home/csv/validator.jar"] + +# From the helper-functions directory: +# sudo docker build -t centralconsumer-csv-val . +# sudo docker run centralconsumer-csv-val diff --git a/validator/docker/validator/Dockerfile b/validator/docker/validator/Dockerfile new file mode 100644 index 0000000..8245b12 --- /dev/null +++ b/validator/docker/validator/Dockerfile @@ -0,0 +1,37 @@ +FROM golang:1.21-alpine3.18 as build + +LABEL maintainer="Syntio Inc." + +ENV GO111MODULE=on \ + GOOS=linux \ + GOARCH=amd64 \ + CGO_ENABLED=0 + +RUN apk add --no-cache git + +WORKDIR /src + +COPY ./validator/go.mod ./validator/go.sum ./ +RUN go mod download + +COPY ./validator . +COPY LICENSE ./licenses/ + +RUN go mod tidy + +RUN go build -buildvcs=false -o /app/validator ./cmd/centralconsumer + +FROM alpine:3.16 + +COPY --from=build /app/validator /app/validator +COPY --from=build /src/config/validator.toml app/config/validator.toml +COPY --from=build /src/licenses/LICENSE-3RD-PARTY.md /app/licenses/ +COPY --from=build /src/licenses/LICENSE /app/licenses/ + +# change to a non-root user for security +RUN adduser -D -h /app user +RUN chown -R user /app +RUN chmod -R 777 /app +USER user + +ENTRYPOINT ["/app/validator", "-f", "/app/config/validator.toml" ] diff --git a/validator/docker/xml-validator/Dockerfile b/validator/docker/xml-validator/Dockerfile new file mode 100644 index 0000000..7532c8e --- /dev/null +++ b/validator/docker/xml-validator/Dockerfile @@ -0,0 +1,28 @@ +# References the base image for running Python applications +FROM python:3.7-alpine + +# Maintainer Info +LABEL maintainer="Syntio Inc." + +# Set the current working directory inside the container +WORKDIR /code + +# Copy the dependencies file to the working directory inside the container +COPY validator/internal/validator/external/xml-validator/requirements.txt . + +# Install dependencies from the file +RUN pip install --no-cache-dir -r requirements.txt + +# Copy source (content of the local src directory) to the working directory inside the container +COPY validator/internal/validator/external/xml-validator . 
+COPY validator/licenses/xml-validator/LICENSE-3RD-PARTY.md app/licenses/LICENSE-3RD-PARTY.md +COPY LICENSE /app/licenses/LICENSE + +# change to a non-root user for security +RUN adduser -D -h /code user +RUN chown -R user /code +RUN chmod -R 555 /code +USER user + +EXPOSE 8081 +CMD [ "python", "./main.py" ] diff --git a/validator/go.mod b/validator/go.mod new file mode 100644 index 0000000..3e52662 --- /dev/null +++ b/validator/go.mod @@ -0,0 +1,113 @@ +module github.com/dataphos/aquarium-janitor-standalone-internal + +go 1.21 + +require ( + github.com/dataphos/lib-batchproc v1.0.0 + github.com/dataphos/lib-brokers v1.0.0 + github.com/dataphos/lib-httputil v1.0.0 + github.com/dataphos/lib-logger v1.0.0 + github.com/dataphos/lib-retry v1.0.0 + github.com/dataphos/lib-shutdown v1.0.0 + github.com/dataphos/lib-streamproc v1.0.0 + github.com/go-playground/validator/v10 v10.11.1 + github.com/hamba/avro v1.8.0 + github.com/hashicorp/golang-lru v0.5.4 + github.com/jhump/protoreflect v1.12.0 + github.com/kkyr/fig v0.3.0 + github.com/pkg/errors v0.9.1 + github.com/prometheus/client_golang v1.15.1 + github.com/santhosh-tekuri/jsonschema/v5 v5.0.2 + github.com/xeipuuv/gojsonschema v1.2.0 + go.uber.org/multierr v1.9.0 + go.uber.org/ratelimit v0.2.0 + golang.org/x/net v0.23.0 + golang.org/x/sync v0.3.0 + google.golang.org/protobuf v1.33.0 +) + +require ( + cloud.google.com/go v0.110.8 // indirect + cloud.google.com/go/compute v1.23.0 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + cloud.google.com/go/iam v1.1.2 // indirect + cloud.google.com/go/pubsub v1.33.0 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.2 // indirect + github.com/AthenZ/athenz v1.11.29 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.3.0 // indirect + github.com/Azure/go-amqp v1.0.0 // indirect + github.com/DataDog/zstd v1.5.5 // indirect + github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 // indirect + github.com/apache/pulsar-client-go v0.14.0 // indirect + github.com/ardielle/ardielle-go v1.5.2 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bits-and-blooms/bitset v1.7.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/danieljoos/wincred v1.2.0 // indirect + github.com/dvsekhvalnov/jose2go v1.6.0 // indirect + github.com/frankban/quicktest v1.14.6 // indirect + github.com/go-playground/locales v0.14.0 // indirect + github.com/go-playground/universal-translator v0.18.0 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/golang-jwt/jwt/v5 v5.2.1 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/s2a-go v0.1.4 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.2.4 // indirect + github.com/googleapis/gax-go/v2 v2.12.0 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/hamba/avro/v2 v2.22.2-0.20240625062549-66aad10411d9 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-uuid v1.0.3 // indirect + github.com/jcmturner/aescts/v2 v2.0.0 // indirect + github.com/jcmturner/dnsutils/v2 v2.0.0 // 
indirect + github.com/jcmturner/gofork v1.7.6 // indirect + github.com/jcmturner/gokrb5/v8 v8.4.4 // indirect + github.com/jcmturner/rpc/v2 v2.0.3 // indirect + github.com/joho/godotenv v1.4.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.17.9 // indirect + github.com/leodido/go-urn v1.2.1 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/nats-io/nats.go v1.25.0 // indirect + github.com/nats-io/nkeys v0.4.4 // indirect + github.com/nats-io/nuid v1.0.1 // indirect + github.com/pelletier/go-toml v1.9.3 // indirect + github.com/pierrec/lz4 v2.6.1+incompatible // indirect + github.com/pierrec/lz4/v4 v4.1.17 // indirect + github.com/prometheus/client_model v0.4.0 // indirect + github.com/prometheus/common v0.43.0 // indirect + github.com/prometheus/procfs v0.9.0 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/spaolacci/murmur3 v1.1.0 // indirect + github.com/twmb/franz-go v1.13.3 // indirect + github.com/twmb/franz-go/pkg/kmsg v1.5.0 // indirect + github.com/twmb/franz-go/pkg/sasl/kerberos v1.1.0 // indirect + github.com/twmb/franz-go/plugin/kprom v1.0.0 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + go.opencensus.io v0.24.0 // indirect + go.uber.org/atomic v1.11.0 // indirect + go.uber.org/zap v1.23.0 // indirect + golang.org/x/crypto v0.22.0 // indirect + golang.org/x/mod v0.18.0 // indirect + golang.org/x/oauth2 v0.11.0 // indirect + golang.org/x/sys v0.19.0 // indirect + golang.org/x/term v0.19.0 // indirect + golang.org/x/text v0.14.0 // indirect + google.golang.org/api v0.128.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b // indirect + google.golang.org/grpc v1.59.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect +) diff --git a/validator/go.sum b/validator/go.sum new file mode 100644 index 0000000..a537da2 --- /dev/null +++ b/validator/go.sum @@ -0,0 +1,918 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= 
+cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.110.8 h1:tyNdfIxjzaWctIiLYOTalaLKZ17SI44SKFW26QbOhME= +cloud.google.com/go v0.110.8/go.mod h1:Iz8AkXJf1qmxC3Oxoep8R1T36w8B92yU29PcBhHO5fk= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute v1.23.0 h1:tP41Zoavr8ptEqaW6j+LQOnyBBhO7OkOMAGrgLopTwY= +cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/iam v1.1.2 h1:gacbrBdWcoVmGLozRuStX45YKvJtzIjJdAolzUs1sm4= +cloud.google.com/go/iam v1.1.2/go.mod h1:A5avdyVL2tCppe4unb0951eI9jreack+RJ0/d+KUZOU= +cloud.google.com/go/kms v1.15.2 h1:lh6qra6oC4AyWe5fUUUBe/S27k12OHAleOOOw6KakdE= +cloud.google.com/go/kms v1.15.2/go.mod h1:3hopT4+7ooWRCjc2DxgnpESFxhIraaI2IpAVUEhbT/w= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/pubsub v1.33.0 h1:6SPCPvWav64tj0sVX/+npCBKhUi/UjJehy9op/V3p2g= +cloud.google.com/go/pubsub v1.33.0/go.mod h1:f+w71I33OMyxf9VpMVcZbnG5KSUkCOUHYpFd5U1GdRc= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= +github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0= +github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk= 
+github.com/AthenZ/athenz v1.11.29 h1:lVo3kz17gXMagqpN7rzTwEym5jsfhthvfLUKo5JXU5o= +github.com/AthenZ/athenz v1.11.29/go.mod h1:hz8WrHkj4KOOaejllzTJIoXBCtptWV279CtEAUDuxis= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 h1:8kDqDngH+DmVBiCtIjCFTGa7MBnsIOkF9IccInFEbjk= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0 h1:Yoicul8bnVdQrhDMTHxdEckRGX01XvwXDHUT9zYZ3k0= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0/go.mod h1:+6sju8gk8FRmSajX3Oz4G5Gm7P+mbqE9FVaXXFYTkCM= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= +github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.3.0 h1:eLqmA+3GXbOOLJVTSqkrFudTbHkfOp5HIy+iShCNM7A= +github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.3.0/go.mod h1:pXDkeh10bAqElvd+S5Ppncj+DCKvJGXNa8rRT2R7rIw= +github.com/Azure/go-amqp v1.0.0 h1:QfCugi1M+4F2JDTRgVnRw7PYXLXZ9hmqk3+9+oJh3OA= +github.com/Azure/go-amqp v1.0.0/go.mod h1:+bg0x3ce5+Q3ahCEXnCsGG3ETpDQe3MEVnOuT2ywPwc= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/AzureAD/microsoft-authentication-library-for-go v0.4.0 h1:WVsrXCnHlDDX8ls+tootqRE87/hL9S/g4ewig9RsD/c= +github.com/AzureAD/microsoft-authentication-library-for-go v0.4.0/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ= +github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim v0.11.5 h1:haEcLNpj9Ka1gd3B3tAEs9CpE0c+1IhoL59w/exYU38= +github.com/Microsoft/hcsshim v0.11.5/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA09d4bExKcU= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 h1:MzBOUgng9orim59UnfUTLRjMpd09C5uEVQ6RPGeCaVI= +github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg= +github.com/antihax/optional v1.0.0/go.mod 
h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apache/pulsar-client-go v0.14.0 h1:P7yfAQhQ52OCAu8yVmtdbNQ81vV8bF54S2MLmCPJC9w= +github.com/apache/pulsar-client-go v0.14.0/go.mod h1:PNUE29x9G1EHMvm41Bs2vcqwgv7N8AEjeej+nEVYbX8= +github.com/ardielle/ardielle-go v1.5.2 h1:TilHTpHIQJ27R1Tl/iITBzMwiUGSlVfiVhwDNGM3Zj4= +github.com/ardielle/ardielle-go v1.5.2/go.mod h1:I4hy1n795cUhaVt/ojz83SNVCYIGsAFAONtv2Dr7HUI= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bits-and-blooms/bitset v1.7.0 h1:YjAGVd3XmtK9ktAbX8Zg2g2PwLIMjGREZJHlV4j7NEo= +github.com/bits-and-blooms/bitset v1.7.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= +github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao= +github.com/containerd/containerd v1.7.18/go.mod h1:IYEk9/IO6wAPUz2bCMVUbsfXjzw5UNP5fLz4PsUygQ4= +github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= +github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM= +github.com/containerd/errdefs v0.1.0 h1:m0wCRBiu1WJT/Fr+iOoQHMQS/eP5myQ8lCv4Dz5ZURM= +github.com/containerd/errdefs v0.1.0/go.mod 
h1:YgWiiHtLmSeBrvpw+UfPijzbLaB77mEG1WwJTDETIV0= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE= +github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec= +github.com/dataphos/lib-batchproc v1.0.0 h1:5rZo080k+3wmzg7vTcccJgyX0cnbEhczOhfL1N6arQw= +github.com/dataphos/lib-batchproc v1.0.0/go.mod h1:ZdReLkmcDK9r+qvjRZRNqqNn+OKOTcKpgx+J4cdasMc= +github.com/dataphos/lib-brokers v1.0.0 h1:8/uu2iX5iaddDBIp9iVugPOHWWLnN0My3eIpnnnZmME= +github.com/dataphos/lib-brokers v1.0.0/go.mod h1:6pOEBPGA4GUbTQPAEt8XwsamRHE7hmkXNb/9sRhn05A= +github.com/dataphos/lib-httputil v1.0.0 h1:xfaZqHz+PXxifPJU0kS/FhbQG7dEVQEibBCz9MPBPgY= +github.com/dataphos/lib-httputil v1.0.0/go.mod h1:XlXMsNAj94vwBt0pc3G9reLln51G5puRX8Qv24zmmiI= +github.com/dataphos/lib-logger v1.0.0 h1:c6d1//cyVpXB0QvixUb79rMz9OuFzvGYtk2PE8WXqtE= +github.com/dataphos/lib-logger v1.0.0/go.mod h1:AJi106+YVssJ0ak0GrrMoqvtgA+0ido2ZlvxuKyxqUQ= +github.com/dataphos/lib-retry v1.0.0 h1:pvh00Esu34z9bWKliphkeT8DHO9paLOGAi9oQ3yVN4c= +github.com/dataphos/lib-retry v1.0.0/go.mod h1:0T0VfgdamSHvieGMVMBRThXqZGezx/E1bItanDHsmDM= +github.com/dataphos/lib-shutdown v1.0.0 h1:RDFwxWH6UpEvQvpi9ubbMP7ZPu9zqTB22vz9i/or9Nc= +github.com/dataphos/lib-shutdown v1.0.0/go.mod h1:mQP+k4FYte4EOwbHGjiOOqE0PVyECKaWrLa9fVw3h/Y= +github.com/dataphos/lib-streamproc v1.0.0 h1:3t9tDlkOm4atsglnAhdZB+0o6h/yTFqeXvqfB79wZv0= +github.com/dataphos/lib-streamproc v1.0.0/go.mod h1:UNiH7T+macu2tHWjnI1C/2P6a1luYX7X6t43TN/cWjo= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dimfeld/httptreemux v5.0.1+incompatible h1:Qj3gVcDNoOthBAqftuD596rm4wg/adLLz5xh5CmpiCA= +github.com/dimfeld/httptreemux v5.0.1+incompatible/go.mod h1:rbUlSV+CCpv/SuqUTP/8Bk2O3LyUV436/yaRGkhP6Z0= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/cli v23.0.0+incompatible h1:bcM4syaQ+EM/iczJTimMOGzvnzJBFPFEf4acS7sZ+RM= +github.com/docker/cli v23.0.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/docker v27.1.1+incompatible h1:hO/M4MtV36kzKldqnA37IWhebRA+LnqqcqDja6kVaKY= +github.com/docker/docker v27.1.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY= +github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= 
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= +github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= +github.com/go-playground/universal-translator 
v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ= +github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-zookeeper/zk v1.0.3 h1:7M2kwOsc//9VeeFiPtf+uSJlVpU66x9Ba5+8XK7/TDg= +github.com/go-zookeeper/zk v1.0.3/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf 
v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= 
+github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= +github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.2.4 h1:uGy6JWR/uMIILU8wbf+OkstIrNiMjGpEIyhx8f6W7s4= +github.com/googleapis/enterprise-certificate-proxy v0.2.4/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= +github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hamba/avro v1.8.0 h1:eCVrLX7UYThA3R3yBZ+rpmafA5qTc3ZjpTz6gYJoVGU= +github.com/hamba/avro v1.8.0/go.mod h1:NiGUcrLLT+CKfGu5REWQtD9OVPPYUGMVFiC+DE0lQfY= +github.com/hamba/avro/v2 v2.22.2-0.20240625062549-66aad10411d9 h1:NEoabXt33PDWK4fXryK4e+XX+fSKDmmu9vg3yb9YI2M= +github.com/hamba/avro/v2 v2.22.2-0.20240625062549-66aad10411d9/go.mod h1:fQVdB2mFZBhPW1D5Abej41LMvrErARGrrdjOnKbm5yw= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk= +github.com/imdario/mergo v0.3.13/go.mod 
h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg= +github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= +github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= +github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= +github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg= +github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.2/go.mod h1:sb+Xq/fTY5yktf/VxLsE3wlfPqQjp0aWNYyvBVK62bc= +github.com/jcmturner/gokrb5/v8 v8.4.3/go.mod h1:dqRwJGXznQrzw6cWmyo6kH+E7jksEQG/CyVWsJEsJO0= +github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8= +github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= +github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jhump/gopoet v0.0.0-20190322174617-17282ff210b3/go.mod h1:me9yfT6IJSlOL3FCfrg+L6yzUEZ+5jW6WHt4Sk+UPUI= +github.com/jhump/gopoet v0.1.0/go.mod h1:me9yfT6IJSlOL3FCfrg+L6yzUEZ+5jW6WHt4Sk+UPUI= +github.com/jhump/goprotoc v0.5.0/go.mod h1:VrbvcYrQOrTi3i0Vf+m+oqQWk9l72mjkJCYo7UvLHRQ= +github.com/jhump/protoreflect v1.11.0/go.mod h1:U7aMIjN0NWq9swDP7xDdoMfRHb35uiuTd3Z9nFXJf5E= +github.com/jhump/protoreflect v1.12.0 h1:1NQ4FpWMgn3by/n1X0fbeKEUxP1wBt7+Oitpv01HR10= +github.com/jhump/protoreflect v1.12.0/go.mod h1:JytZfP5d0r8pVNLZvai7U/MCuTWITgrI4tTg7puQFKI= +github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg= +github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kkyr/fig v0.3.0 h1:5bd1amYKp/gsK2bGEUJYzcCrQPKOZp6HZD9K21v9Guo= +github.com/kkyr/fig v0.3.0/go.mod h1:fEnrLjwg/iwSr8ksJF4DxrDmCUir5CaVMLORGYMcz30= +github.com/klauspost/compress v1.13.5/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= 
+github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= +github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= +github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g= +github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= +github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= +github.com/moby/term 
v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= +github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nats-io/jwt/v2 v2.3.0 h1:z2mA1a7tIf5ShggOFlR1oBPgd6hGqcDYsISxZByUzdI= +github.com/nats-io/jwt/v2 v2.3.0/go.mod h1:0tqz9Hlu6bCBFLWAASKhE5vUA4c24L9KPUUgvwumE/k= +github.com/nats-io/nats-server/v2 v2.9.14 h1:n2GscWVgXpA14vQSRP/MM1SGi4wyazR9l19/gWxqgXQ= +github.com/nats-io/nats-server/v2 v2.9.14/go.mod h1:40ZwFm4npKdFBhOdY7rkh3YyI1oI91FzLvlYyB7HfzM= +github.com/nats-io/nats.go v1.25.0 h1:t5/wCPGciR7X3Mu8QOi4jiJaXaWM8qtkLu4lzGZvYHE= +github.com/nats-io/nats.go v1.25.0/go.mod h1:D2WALIhz7V8M0pH8Scx8JZXlg6Oqz5VG+nQkK8nJdvg= +github.com/nats-io/nkeys v0.4.4 h1:xvBJ8d69TznjcQl9t6//Q5xXuVhyYiSos6RPtvQNTwA= +github.com/nats-io/nkeys v0.4.4/go.mod h1:XUkxdLPTufzlihbamfzQ7mw/VGx6ObUs+0bN5sNvt64= +github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw= +github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/opencontainers/runc v1.1.5 h1:L44KXEpKmfWDcS02aeGm8QNTFXTo2D+8MYGDIJ/GDEs= +github.com/opencontainers/runc v1.1.5/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= 
+github.com/ory/dockertest/v3 v3.9.1 h1:v4dkG+dlu76goxMiTT2j8zV7s4oPPEppKT8K8p2f1kY= +github.com/ory/dockertest/v3 v3.9.1/go.mod h1:42Ir9hmvaAPm0Mgibk6mBPi7SFvTXxEcnztDYOJ//uM= +github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM= +github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc= +github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4 h1:Qj1ukM4GlMWXNdMBuXcXfz/Kw9s1qm0CLY32QxuSImI= +github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4/go.mod h1:N6UoU20jOqggOuDwUaBQpluzLNDqif3kq9z2wpdYEfQ= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.15.1 h1:8tXpTmJbyH5lydzFPoxSIJ0J46jdh3tylbvM1xCv0LI= +github.com/prometheus/client_golang v1.15.1/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt1N9XgF6zxWmaC0xOk= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY= +github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common 
v0.30.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.43.0 h1:iq+BVjvYLei5f27wiuNiB1DN6DYQkp1c8Bx0Vykh5us= +github.com/prometheus/common v0.43.0/go.mod h1:NCvr5cQIh3Y/gy73/RdVtC9r8xxrxwJnB+2lB3BxrFc= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.9.0 h1:wzCHvIvM5SxWqYvwgVL7yJY8Lz3PKn49KQtpgMYJfhI= +github.com/prometheus/procfs v0.9.0/go.mod h1:+pB4zwohETzFnmlpe6yd2lSc+0/46IYZRB/chUwxUZY= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/santhosh-tekuri/jsonschema/v5 v5.0.2 h1:zOYFITq/5SO7YOv39/Taw8s1skb0Py39K5V2XvCEP48= +github.com/santhosh-tekuri/jsonschema/v5 v5.0.2/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= 
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/testcontainers/testcontainers-go v0.32.0 h1:ug1aK08L3gCHdhknlTTwWjPHPS+/alvLJU/DRxTD/ME= +github.com/testcontainers/testcontainers-go v0.32.0/go.mod h1:CRHrzHLQhlXUsa5gXjTOfqIEJcrK5+xMDmBr/WMI88E= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/twmb/franz-go v1.0.0/go.mod h1:cdFLk8d/5/ox88y38xgiDKP3Yo338OO0t5QbTEM2K6I= +github.com/twmb/franz-go v1.7.0/go.mod h1:PMze0jNfNghhih2XHbkmTFykbMF5sJqmNJB31DOOzro= +github.com/twmb/franz-go v1.13.3 h1:AO0HcPu7hNMi+ue+jz3CnV+VpuAizaazQuqTo1SvLr4= +github.com/twmb/franz-go v1.13.3/go.mod h1:jm/FtYxmhxDTN0gNSb26XaJY0irdSVcsckLiR5tQNMk= +github.com/twmb/franz-go/pkg/kadm v1.7.0 h1:TAgcS+t5q+9jnm8INCD2OJ1MD9y4Ij6pD5CYfZ3tkbg= +github.com/twmb/franz-go/pkg/kadm v1.7.0/go.mod h1:sI9BjVkpjyYssIlVa+WIwseaUjJqPsR/8gmJi6aDyEk= +github.com/twmb/franz-go/pkg/kmsg v0.0.0-20210901051457-3c197a133ddd/go.mod h1:SxG/xJKhgPu25SamAq0rrucfp7lbzCpEXOC+vH/ELrY= +github.com/twmb/franz-go/pkg/kmsg v0.0.0-20210914042331-106aef61b693/go.mod h1:SxG/xJKhgPu25SamAq0rrucfp7lbzCpEXOC+vH/ELrY= +github.com/twmb/franz-go/pkg/kmsg v1.2.0/go.mod h1:SxG/xJKhgPu25SamAq0rrucfp7lbzCpEXOC+vH/ELrY= +github.com/twmb/franz-go/pkg/kmsg v1.5.0 h1:eqVJquFQLdBNLrRMWX03pPDPpngn6PTjGZLlZnagouk= +github.com/twmb/franz-go/pkg/kmsg v1.5.0/go.mod h1:se9Mjdt0Nwzc9lnjJ0HyDtLyBnaBDAd7pCje47OhSyw= +github.com/twmb/franz-go/pkg/sasl/kerberos v1.1.0 h1:alKdbddkPw3rDh+AwmUEwh6HNYgTvDSFIe/GWYRR9RM= +github.com/twmb/franz-go/pkg/sasl/kerberos v1.1.0/go.mod h1:k8BoBjyUbFj34f0rRbn+Ky12sZFAPbmShrg0karAIMo= +github.com/twmb/franz-go/plugin/kprom v1.0.0 h1:VcWVmtYnsTBhhes33xHp0FHJBLZRa7hslUfdFNPmJwQ= +github.com/twmb/franz-go/plugin/kprom v1.0.0/go.mod h1:DFcybS1MpHf9POwG2BYvViS4u4y12t8dyCp7Hlc4g9I= +github.com/twmb/go-rbtree v1.0.0/go.mod h1:UlIAI8gu3KRPkXSobZnmJfVwCJgEhD/liWzT5ppzIyc= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= 
+github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= +go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= +go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= +go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= +go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= +go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= +go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= +go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= +go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= +go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= +go.uber.org/ratelimit v0.2.0 h1:UQE2Bgi7p2B85uP5dC2bbRtig0C+OeNRnNEafLjsLPA= +go.uber.org/ratelimit v0.2.0/go.mod h1:YYBV4e4naJvhpitQrWJu1vCpgB7CboMe0qhltKt6mUg= +go.uber.org/zap v1.23.0 h1:OjGQ5KQDEUawVHxNwQgPpiypGHOxo2mNZsOqTak4fFY= +go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220817201139-bc19a97f63c8/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0= +golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod 
h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220725212005-46097bf591d3/go.mod h1:AaygXjzTFtRAg2ttMY5RMuhpJ3cNnI0XpyFJD1iQRSM= +golang.org/x/net v0.0.0-20220812174116-3211cb980234/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.11.0 h1:vPL4xzxBM4niKCW6g9whtaWVXTJf1U5e4aZxxFx/gbU= +golang.org/x/oauth2 v0.11.0/go.mod h1:LdF7O/8bLR/qWK9DrpXmbHLTouvRHK0SgJl0GmDBchk= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210910150752-751e447fb3d0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= +golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools 
v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.128.0 h1:RjPESny5CnQRn9V6siglged+DZCgfu9l6mO9dkX9VOg= +google.golang.org/api v0.128.0/go.mod h1:Y611qgqaE92On/7g65MQgxYul3c0rEB894kniWLY750= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 
h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a h1:fwgW9j3vHirt4ObdHoYNwuO24BEZjSzbh+zPaNWoiY8= +google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:EMfReVxb80Dq1hhioy0sOsY9jCE46YDgHlJ7fWVUWRE= +google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97 h1:W18sezcAYs+3tDZX4F80yctqa12jcP1PUS2gQu1zTPU= +google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97/go.mod h1:iargEX0SFPm3xcfMI0d1domjg0ZF4Aa0p2awqyxhvF0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b h1:ZlWIi1wSK56/8hn4QcBp/j9M7Gt3U/3hZw3mC7vDICo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b/go.mod h1:swOH3j0KzcDDgGUWr+SNpyTen5YrXjS3eyPzFYKc6lc= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= +google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf 
v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +nhooyr.io/websocket v1.8.7 h1:usjR2uOr/zjjkVMy0lW+PPohFok7PCow5sDjLgX4P4g= +nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +rsc.io/binaryregexp v0.2.0/go.mod 
h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/validator/internal/centralconsumer/centralconsumer.go b/validator/internal/centralconsumer/centralconsumer.go new file mode 100644 index 0000000..c0dc041 --- /dev/null +++ b/validator/internal/centralconsumer/centralconsumer.go @@ -0,0 +1,463 @@ +package centralconsumer + +import ( + "context" + "encoding/json" + "strconv" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/lib-brokers/pkg/broker" + "github.com/dataphos/lib-logger/logger" + "github.com/pkg/errors" +) + +// Mode is the way Central consumer works; if the mode is Default, one CC will be deployed, and it will validate multiple +// different schemas. If it's OneCCPerTopic, there will be one CC for each topic, and it will validate only one schema. +type Mode int + +const ( + Default Mode = iota + OneCCPerTopic +) + +type SchemaMetadata struct { + ID string + Version string + Format string +} + +type Schema struct { + SchemaMetadata SchemaMetadata + Specification []byte +} + +type SchemaDefinition struct { + ID string `json:"schema_id,omitempty"` + Type string `json:"schema_type"` + Name string `json:"name"` + Description string `json:"description"` + LastCreated string `json:"last_created"` + Versions []VersionDetails `json:"schemas"` +} + +type VersionDetails struct { + Version string `json:"version"` + Specification []byte `json:"specification"` +} + +// CentralConsumer models the central consumer process. +type CentralConsumer struct { + Registry registry.SchemaRegistry + Validators janitor.Validators + Router janitor.Router + Publisher broker.Publisher + topicIDs Topics + topics map[string]broker.Topic + registrySem chan struct{} + validatorsSem chan struct{} + log logger.Log + mode Mode + schema Schema + encryptionKey string +} + +// Settings holds settings concerning the concurrency limits for various stages of the central consumer pipeline. +type Settings struct { + // NumSchemaCollectors defines the maximum amount of inflight requests to the schema registry. + NumSchemaCollectors int + + // NumInferrers defines the maximum amount of inflight destination topic inference jobs (validation and routing). + NumInferrers int +} + +// Topics defines the standard destination topics, based on validation results. +type Topics struct { + Valid string + InvalidCSV string + InvalidJSON string + Deadletter string +} + +// RouterFlags defines logging levels for logging each routing decision. +type RouterFlags struct { + MissingSchema bool + Valid bool + Invalid bool + Deadletter bool +} + +// New is a convenience function which returns a new instance of CentralConsumer. 
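// Editorial sketch, not part of this patch: a minimal example of how a caller
// might wire up a CentralConsumer in OneCCPerTopic mode. The identifiers reg,
// pub, validators and logInstance are hypothetical values obtained elsewhere;
// only the exported types and the New signature below come from this file.
//
//	func newExampleConsumer(reg registry.SchemaRegistry, pub broker.Publisher, validators janitor.Validators, logInstance logger.Log) (*CentralConsumer, error) {
//		topics := Topics{Valid: "valid-topic", InvalidJSON: "invalid-json", InvalidCSV: "invalid-csv", Deadletter: "deadletter"}
//		settings := Settings{NumSchemaCollectors: 10, NumInferrers: 10}
//		meta := SchemaMetadata{ID: "1", Version: "2", Format: "json"}
//		return New(reg, pub, validators, topics, settings, logInstance, RouterFlags{Deadletter: true}, OneCCPerTopic, meta, "")
//	}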
+func New(registry registry.SchemaRegistry, publisher broker.Publisher, validators janitor.Validators, topicIds Topics, settings Settings, log logger.Log, routerFlags RouterFlags, mode Mode, schemaMetadata SchemaMetadata, encryptionKey string) (*CentralConsumer, error) { + var ( + schemaVersion VersionDetails + format string + ) + + topics, err := idsIntoTopics(topicIds, publisher) + if err != nil { + return nil, errors.Wrap(err, errtemplates.LoadingTopicsFailed) + } + + var registrySem chan struct{} + if settings.NumSchemaCollectors > 0 { + registrySem = make(chan struct{}, settings.NumSchemaCollectors) + } + var validatorsSem chan struct{} + if settings.NumInferrers > 0 { + validatorsSem = make(chan struct{}, settings.NumInferrers) + } + + var schemaReturned []byte + if mode == OneCCPerTopic { + if schemaMetadata.ID != "" { + if schemaMetadata.Version != "" { + schemaSpecReturned, err := registry.Get(context.Background(), schemaMetadata.ID, schemaMetadata.Version) + if err != nil { + return &CentralConsumer{}, err + } + schemaVersion.Version = schemaMetadata.Version + schemaVersion.Specification = schemaSpecReturned + } else { + schemaReturned, err = registry.GetLatest(context.Background(), schemaMetadata.ID) + if err != nil { + return &CentralConsumer{}, err + } + if err = json.Unmarshal(schemaReturned, &schemaVersion); err != nil { + return &CentralConsumer{}, errors.Wrap(err, errtemplates.UnmarshallingJSONFailed) + } + } + if schemaMetadata.Format != "" { + format = schemaMetadata.Format + } else { + return &CentralConsumer{}, errors.New("schema format not specified") + } + } else { + return &CentralConsumer{}, errors.New("schema ID not specified") + } + } + + return &CentralConsumer{ + Registry: registry, + Validators: validators, + Router: setupRoutingFunc(topicIds, routerFlags, log), + topicIDs: topicIds, + Publisher: publisher, + topics: topics, + registrySem: registrySem, + validatorsSem: validatorsSem, + log: log, + mode: mode, + schema: Schema{ + SchemaMetadata: SchemaMetadata{ + ID: schemaMetadata.ID, + Version: schemaVersion.Version, + Format: format, + }, + Specification: schemaVersion.Specification, + }, + encryptionKey: encryptionKey, + }, nil +} + +// idsIntoTopics maps Topics into instances of broker.Topic. 
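// Editorial sketch, not part of this patch: for illustration, given a
// hypothetical broker.Publisher named pub, the helper below should return a
// map keyed by the non-empty topic IDs, with each value being the broker.Topic
// created via pub.Topic for that ID; empty IDs are skipped.
//
//	topics, err := idsIntoTopics(Topics{Valid: "valid", Deadletter: "dlq"}, pub)
//	if err != nil {
//		// handle the wrapped "creating topic instance failed" error
//	}
//	validTopic := topics["valid"] // broker.Topic for the "valid" topic ID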
+func idsIntoTopics(topicIds Topics, publisher broker.Publisher) (map[string]broker.Topic, error) { + topics := make(map[string]broker.Topic) + + if topicIds.Valid != "" { + topic, err := publisher.Topic(topicIds.Valid) + if err != nil { + return nil, errors.Wrap(err, errtemplates.CreatingTopicInstanceFailed(topicIds.Valid)) + } + topics[topicIds.Valid] = topic + } + + if topicIds.InvalidJSON != "" { + topic, err := publisher.Topic(topicIds.InvalidJSON) + if err != nil { + return nil, errors.Wrap(err, errtemplates.CreatingTopicInstanceFailed(topicIds.InvalidJSON)) + } + topics[topicIds.InvalidJSON] = topic + } + + if topicIds.InvalidCSV != "" { + topic, err := publisher.Topic(topicIds.InvalidCSV) + if err != nil { + return nil, errors.Wrap(err, errtemplates.CreatingTopicInstanceFailed(topicIds.InvalidCSV)) + } + topics[topicIds.InvalidCSV] = topic + } + + if topicIds.Deadletter != "" { + topic, err := publisher.Topic(topicIds.Deadletter) + if err != nil { + return nil, errors.Wrap(err, errtemplates.CreatingTopicInstanceFailed(topicIds.Deadletter)) + } + topics[topicIds.Deadletter] = topic + } + return topics, nil +} + +// setupRoutingFunc sets up the janitor.LoggingRouter, by first checking if there's a need for logging any of the routing +// decisions (if any logging level flag is set). If none of the flags are set, standard intoRouter is used, +// wrapping it with logging middleware otherwise. +func setupRoutingFunc(topics Topics, routerFlags RouterFlags, log logger.Log) janitor.Router { + next := intoRouter(topics) + + if routerFlags.MissingSchema || routerFlags.Valid || routerFlags.Invalid || routerFlags.Deadletter { + return janitor.LoggingRouter( + log, + janitor.RouterFlags{ + MissingSchema: routerFlags.MissingSchema, + Valid: routerFlags.Valid, + Invalid: routerFlags.Invalid, + Deadletter: routerFlags.Deadletter, + }, + next, + ) + } + + return next +} + +const ( + avroFormat = "avro" + csvFormat = "csv" + jsonFormat = "json" + protobufFormat = "protobuf" + xmlFormat = "xml" +) + +// intoRouter maps the given Topics into a janitor.LoggingRouter. +// +// All janitor.Valid messages are routed to Topics.Valid. +// +// All janitor.Deadletter messages are routed to Topics.Deadletter. +// +// If the result is janitor.MissingSchema, +// CSV and JSON formats are routed to Topics.InvalidCSV and Topics.InvalidJSON, respectively, +// while all other formats are routed to Topics.Deadletter. +// +// If the result is janitor.Invalid, +// CSV and JSON formats are routed to Topics.InvalidCSV and Topics.InvalidJSON, respectively, +// while all other formats are routed to Topics.Deadletter. 
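+//
+// For example, with hypothetical topic names:
+//
+//	router := intoRouter(Topics{Valid: "valid", InvalidJSON: "invalid-json", Deadletter: "deadletter"})
+//	router.Route(janitor.Valid, janitor.Message{Format: jsonFormat})   // "valid"
+//	router.Route(janitor.Invalid, janitor.Message{Format: jsonFormat}) // "invalid-json"
+//	router.Route(janitor.Invalid, janitor.Message{Format: xmlFormat})  // "deadletter"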
+func intoRouter(topics Topics) janitor.Router { + return janitor.RoutingFunc(func(result janitor.Result, message janitor.Message) string { + format := message.Format + + switch result { + case janitor.Valid: + return topics.Valid + case janitor.Deadletter: + return topics.Deadletter + case janitor.MissingSchema, janitor.Invalid: + switch format { + case csvFormat: + return topics.InvalidCSV + case jsonFormat: + return topics.InvalidJSON + default: + return topics.Deadletter + } + default: + return topics.Deadletter + } + }) +} + +func (cc *CentralConsumer) AsProcessor() *janitor.Processor { + return janitor.NewProcessor(cc, cc.topics, cc.topicIDs.Deadletter, cc.log) +} + +func (cc *CentralConsumer) Handle(ctx context.Context, message janitor.Message) (janitor.MessageTopicPair, error) { + var ( + messageSchemaPair janitor.MessageSchemaPair + messageTopicPair janitor.MessageTopicPair + specificSchemaVersion VersionDetails + err error + encryptedMessageData []byte + ) + + if cc.mode == Default { + acquireIfSet(cc.registrySem) + messageSchemaPair, err = janitor.CollectSchema(ctx, message, cc.Registry) + if err != nil { + setMessageRawAttributes(message, err, "Wrong compile") + releaseIfSet(cc.registrySem) + return janitor.MessageTopicPair{Message: message, Topic: cc.Router.Route(janitor.Deadletter, message)}, err + } + releaseIfSet(cc.registrySem) + + messageTopicPair, err = cc.getMessageTopicPair(messageSchemaPair, encryptedMessageData) + if err != nil { + return messageTopicPair, err + } + return messageTopicPair, nil + + } else if cc.mode == OneCCPerTopic { + if message.Version == "" { // Version not set in message + messageTopicPair, err = cc.getMessageTopicPair(janitor.MessageSchemaPair{ + Message: message, + Schema: cc.schema.Specification, + }, encryptedMessageData) + if err != nil { + return messageTopicPair, err + } + if messageTopicPair.Topic == cc.topicIDs.Deadletter { + // if message is invalid against latest schema saved in CC, then fetch latest from SR and revalidate + messageTopicPair, err = cc.revalidatedAgainstLatest(ctx, specificSchemaVersion, message, encryptedMessageData) + if err != nil { + return messageTopicPair, err + } + } + return messageTopicPair, nil + } else { + if message.Version == cc.schema.SchemaMetadata.Version { + messageTopicPair, err = cc.getMessageTopicPair(janitor.MessageSchemaPair{ + Message: message, + Schema: cc.schema.Specification, + }, encryptedMessageData) + if err != nil { + return messageTopicPair, err + } + return messageTopicPair, nil + } else { + acquireIfSet(cc.registrySem) + specificSchemaVersionSpec, err := cc.Registry.Get(ctx, cc.schema.SchemaMetadata.ID, message.Version) + if err != nil { + setMessageRawAttributes(message, err, "Wrong compile") + releaseIfSet(cc.registrySem) + return janitor.MessageTopicPair{Message: message, Topic: cc.Router.Route(janitor.Deadletter, message)}, err + } + releaseIfSet(cc.registrySem) + + err = cc.updateIfNewer(VersionDetails{ + Version: message.Version, + Specification: specificSchemaVersionSpec, + }) + if err != nil { + setMessageRawAttributes(message, err, "Non number version") + return janitor.MessageTopicPair{Message: message, Topic: cc.Router.Route(janitor.Deadletter, message)}, err + } + + messageTopicPair, err = cc.getMessageTopicPair(janitor.MessageSchemaPair{ + Message: message, + Schema: specificSchemaVersionSpec, + }, encryptedMessageData) + if err != nil { + return messageTopicPair, err + } + return messageTopicPair, nil + } + } + } else { + err = errors.New("unknown CC mode") + 
setMessageRawAttributes(message, err, "Unknown CC mode") + return janitor.MessageTopicPair{Message: message, Topic: cc.Router.Route(janitor.Deadletter, message)}, err + } +} + +func (cc *CentralConsumer) getMessageTopicPair(messageSchemaPair janitor.MessageSchemaPair, encryptedMessageData []byte) (janitor.MessageTopicPair, error) { + acquireIfSet(cc.validatorsSem) + var err error + if cc.encryptionKey != "" { + encryptedMessageData = messageSchemaPair.Message.Payload //nolint:ineffassign,staticcheck // fine for now + messageSchemaPair.Message.Payload, err = janitor.Decrypt(messageSchemaPair.Message.Payload, cc.encryptionKey) + if err != nil { + messageSchemaPair.Message.RawAttributes["deadLetterErrorCategory"] = "Failure to decrypt" + messageSchemaPair.Message.RawAttributes["deadLetterErrorReason"] = err.Error() + return janitor.MessageTopicPair{Message: messageSchemaPair.Message, Topic: cc.Router.Route(janitor.Deadletter, messageSchemaPair.Message)}, err + } + } + messageTopicPair, err := janitor.InferDestinationTopic(messageSchemaPair, cc.Validators, cc.Router) + if err != nil { + releaseIfSet(cc.validatorsSem) + return messageTopicPair, err + } + releaseIfSet(cc.validatorsSem) + return messageTopicPair, nil +} + +func (cc *CentralConsumer) updateVersion(vd VersionDetails) { + cc.schema.SchemaMetadata.Version = vd.Version + cc.schema.Specification = vd.Specification +} + +// checkIfNewer checks if v2 is newer than v1 +func checkIfNewer(v1, v2 string) (bool, error) { + v1Int, err := strconv.Atoi(v1) + if err != nil { + return false, err + } + v2Int, err := strconv.Atoi(v2) + if err != nil { + return false, err + } + if v2Int > v1Int { + return true, nil + } + return false, nil +} + +// revalidatedAgainstLatest fetches latest version of schema from Schema Registry and validates the message against it +func (cc *CentralConsumer) revalidatedAgainstLatest(ctx context.Context, specificSchemaVersion VersionDetails, message janitor.Message, encryptedMessageData []byte) (janitor.MessageTopicPair, error) { + var messageTopicPair janitor.MessageTopicPair + + acquireIfSet(cc.registrySem) + specificSchemaVersionBytes, err := cc.Registry.GetLatest(ctx, cc.schema.SchemaMetadata.ID) + if err != nil { + setMessageRawAttributes(message, err, "Wrong compile") + releaseIfSet(cc.registrySem) + return janitor.MessageTopicPair{Message: message, Topic: cc.Router.Route(janitor.Deadletter, message)}, err + } + if err = json.Unmarshal(specificSchemaVersionBytes, &specificSchemaVersion); err != nil { + setMessageRawAttributes(message, err, "Broken message") + releaseIfSet(cc.registrySem) + return janitor.MessageTopicPair{Message: message, Topic: cc.Router.Route(janitor.Deadletter, message)}, errors.Wrap(err, errtemplates.UnmarshallingJSONFailed) + } + releaseIfSet(cc.registrySem) + + err = cc.updateIfNewer(specificSchemaVersion) + if err != nil { + setMessageRawAttributes(message, err, "Non number version") + return janitor.MessageTopicPair{Message: message, Topic: cc.Router.Route(janitor.Deadletter, message)}, err + } + + messageTopicPair, err = cc.getMessageTopicPair(janitor.MessageSchemaPair{ + Message: message, + Schema: cc.schema.Specification, + }, encryptedMessageData) + if err != nil { + return messageTopicPair, err + } + return messageTopicPair, nil +} + +func (cc *CentralConsumer) updateIfNewer(versionDetails VersionDetails) error { + newer, err := checkIfNewer(cc.schema.SchemaMetadata.Version, versionDetails.Version) + if err != nil { + return err + } + if newer { + cc.updateVersion(versionDetails) + } + 
return nil +} + +func setMessageRawAttributes(message janitor.Message, err error, errMessage string) { + message.RawAttributes["deadLetterErrorCategory"] = errMessage + message.RawAttributes["deadLetterErrorReason"] = err.Error() +} + +func acquireIfSet(sem chan struct{}) { + if sem != nil { + sem <- struct{}{} + } +} + +func releaseIfSet(sem chan struct{}) { + if sem != nil { + <-sem + } +} diff --git a/validator/internal/centralconsumer/centralconsumer_test.go b/validator/internal/centralconsumer/centralconsumer_test.go new file mode 100644 index 0000000..908aab9 --- /dev/null +++ b/validator/internal/centralconsumer/centralconsumer_test.go @@ -0,0 +1,185 @@ +package centralconsumer + +import ( + "golang.org/x/net/context" + "os" + "path/filepath" + "runtime" + "testing" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/publisher" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + localjson "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator/json" +) + +func TestTopicsIntoRoutingFunc(t *testing.T) { + topics := Topics{ + Valid: "valid-topic", + InvalidCSV: "deadletter", + InvalidJSON: "deadletter", + Deadletter: "deadletter", + } + + tt := []struct { + name string + isValid janitor.Result + format string + destination string + }{ + {"valid avro", janitor.Valid, avroFormat, topics.Valid}, + {"invalid avro", janitor.Invalid, avroFormat, topics.Deadletter}, + {"deadletter avro", janitor.Deadletter, avroFormat, topics.Deadletter}, + {"missing schema avro", janitor.MissingSchema, avroFormat, topics.Deadletter}, + {"valid protobuf", janitor.Valid, protobufFormat, topics.Valid}, + {"invalid protobuf", janitor.Invalid, protobufFormat, topics.Deadletter}, + {"deadletter protobuf", janitor.Deadletter, protobufFormat, topics.Deadletter}, + {"missing schema protobuf", janitor.MissingSchema, protobufFormat, topics.Deadletter}, + {"valid xml", janitor.Valid, xmlFormat, topics.Valid}, + {"invalid xml", janitor.Invalid, xmlFormat, topics.Deadletter}, + {"deadletter xml", janitor.Deadletter, xmlFormat, topics.Deadletter}, + {"missing schema xml", janitor.MissingSchema, xmlFormat, topics.Deadletter}, + {"valid json", janitor.Valid, jsonFormat, topics.Valid}, + {"invalid json", janitor.Invalid, jsonFormat, topics.InvalidJSON}, + {"deadletter json", janitor.Deadletter, jsonFormat, topics.Deadletter}, + {"missing schema json", janitor.MissingSchema, jsonFormat, topics.InvalidJSON}, + {"valid csv", janitor.Valid, csvFormat, topics.Valid}, + {"invalid csv", janitor.Invalid, csvFormat, topics.InvalidCSV}, + {"deadletter csv", janitor.Deadletter, csvFormat, topics.Deadletter}, + {"missing schema csv", janitor.MissingSchema, csvFormat, topics.InvalidCSV}, + } + + routingFunc := intoRouter(topics) + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + destination := routingFunc.Route(tc.isValid, janitor.Message{Format: tc.format}) + if destination != tc.destination { + t.Errorf("expected and actual destination not the same (%s != %s)", tc.destination, destination) + } + }) + } +} + +func TestOneCCPerTopic(t *testing.T) { + ctx := context.Background() + topics := Topics{ + Valid: "valid", + InvalidCSV: "deadletter", + InvalidJSON: "deadletter", + Deadletter: "deadletter", + } + + _, b, _, _ := runtime.Caller(0) + dir := filepath.Dir(b) + testdataDir := 
filepath.Join(dir, "testdata") + + data1, err := os.ReadFile(filepath.Join(testdataDir, "data-1.json")) + if err != nil { + t.Fatal(err) + } + data2, err := os.ReadFile(filepath.Join(testdataDir, "data-2.json")) + if err != nil { + t.Fatal(err) + } + data3, err := os.ReadFile(filepath.Join(testdataDir, "data-3.json")) + if err != nil { + t.Fatal(err) + } + schemaSpec1, err := os.ReadFile(filepath.Join(testdataDir, "schema-1.json")) + if err != nil { + t.Fatal(err) + } + schemaSpec2, err := os.ReadFile(filepath.Join(testdataDir, "schema-2.json")) + if err != nil { + t.Fatal(err) + } + schemaSpec3, err := os.ReadFile(filepath.Join(testdataDir, "schema-3.json")) + if err != nil { + t.Fatal(err) + } + + schemaRegistry := registry.NewMock() + schemaRegistry.SetGetResponse("1", "1", schemaSpec1, nil) + schemaRegistry.SetGetResponse("1", "2", schemaSpec2, nil) + schemaRegistry.SetGetResponse("1", "3", schemaSpec3, nil) + + validators := make(map[string]validator.Validator) + validators["json"] = localjson.New() + encryptionKey := "" + + cc, err := New(schemaRegistry, &publisher.MockPublisher{}, validators, topics, Settings{}, nil, RouterFlags{}, Mode(1), + SchemaMetadata{ + ID: "1", + Version: "1", + Format: "json", + }, + encryptionKey) + if err != nil { + t.Fatal(err) + } + + message1 := janitor.Message{ + ID: "", + Key: "", + RawAttributes: map[string]interface{}{}, + Payload: data1, + IngestionTime: time.Time{}, + SchemaID: "1", + Version: "1", + Format: "json", + } + message2 := janitor.Message{ + ID: "", + Key: "", + RawAttributes: map[string]interface{}{}, + Payload: data2, + IngestionTime: time.Time{}, + SchemaID: "1", + Version: "2", + Format: "json", + } + message3 := janitor.Message{ + ID: "", + Key: "", + RawAttributes: map[string]interface{}{}, + Payload: data3, + IngestionTime: time.Time{}, + SchemaID: "1", + Version: "3", + Format: "json", + } + + tt := []struct { + name string + expectedTopic string + version string + message janitor.Message + }{ + {"valid data1 unspecified", "valid", "", message1}, + {"valid data2 specified", "valid", "2", message2}, + //{"invalid data1 unspecified", "deadletter", "", message1}, + {"valid data3 specified", "valid", "3", message3}, + //{"invalid data2 unspecified", "deadletter", "", message2}, + {"invalid data 1 against v2", "deadletter", "2", message1}, + {"invalid data 1 against v3", "deadletter", "3", message1}, + {"invalid data 2 against v3", "deadletter", "3", message2}, + } + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + tc.message.Version = tc.version + messageTopicPair, err := cc.Handle(ctx, tc.message) + if err != nil { + t.Fatal(err) + } + if messageTopicPair.Topic != tc.expectedTopic { + t.Errorf("expected and actual destination not the same (%s != %s)", tc.expectedTopic, messageTopicPair.Topic) + } + }) + } +} diff --git a/validator/internal/centralconsumer/testdata/data-1.json b/validator/internal/centralconsumer/testdata/data-1.json new file mode 100644 index 0000000..c8719b2 --- /dev/null +++ b/validator/internal/centralconsumer/testdata/data-1.json @@ -0,0 +1,3 @@ +{ + "firstName": "John" +} diff --git a/validator/internal/centralconsumer/testdata/data-2.json b/validator/internal/centralconsumer/testdata/data-2.json new file mode 100644 index 0000000..b1227e4 --- /dev/null +++ b/validator/internal/centralconsumer/testdata/data-2.json @@ -0,0 +1,4 @@ +{ + "firstName": "John", + "lastName": "Doe" +} diff --git a/validator/internal/centralconsumer/testdata/data-3.json 
b/validator/internal/centralconsumer/testdata/data-3.json new file mode 100644 index 0000000..3fe0812 --- /dev/null +++ b/validator/internal/centralconsumer/testdata/data-3.json @@ -0,0 +1,5 @@ +{ + "firstName": "John", + "lastName": "Doe", + "age": 21 +} diff --git a/validator/internal/centralconsumer/testdata/schema-1.json b/validator/internal/centralconsumer/testdata/schema-1.json new file mode 100644 index 0000000..687ae74 --- /dev/null +++ b/validator/internal/centralconsumer/testdata/schema-1.json @@ -0,0 +1,21 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Person", + "type": "object", + "properties": { + "firstName": { + "type": "string", + "description": "The person's first name." + }, + "lastName": { + "type": "string", + "description": "The person's last name." + }, + "age": { + "description": "Age in years which must be equal to or greater than zero.", + "type": "integer", + "minimum": 0 + } + } +} diff --git a/validator/internal/centralconsumer/testdata/schema-2.json b/validator/internal/centralconsumer/testdata/schema-2.json new file mode 100644 index 0000000..cdb7cfd --- /dev/null +++ b/validator/internal/centralconsumer/testdata/schema-2.json @@ -0,0 +1,22 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Person", + "type": "object", + "required": ["lastName"], + "properties": { + "firstName": { + "type": "string", + "description": "The person's first name." + }, + "lastName": { + "type": "string", + "description": "The person's last name." + }, + "age": { + "description": "Age in years which must be equal to or greater than zero.", + "type": "integer", + "minimum": 0 + } + } +} diff --git a/validator/internal/centralconsumer/testdata/schema-3.json b/validator/internal/centralconsumer/testdata/schema-3.json new file mode 100644 index 0000000..4ff6aa2 --- /dev/null +++ b/validator/internal/centralconsumer/testdata/schema-3.json @@ -0,0 +1,22 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Person", + "type": "object", + "required": ["lastName", "age"], + "properties": { + "firstName": { + "type": "string", + "description": "The person's first name." + }, + "lastName": { + "type": "string", + "description": "The person's last name." + }, + "age": { + "description": "Age in years which must be equal to or greater than zero.", + "type": "integer", + "minimum": 0 + } + } +} diff --git a/validator/internal/config/centralconsumer.go b/validator/internal/config/centralconsumer.go new file mode 100644 index 0000000..10a3fd4 --- /dev/null +++ b/validator/internal/config/centralconsumer.go @@ -0,0 +1,67 @@ +package config + +import ( + "time" + + "github.com/kkyr/fig" +) + +// CentralConsumer represents all required configuration to run an instance of central consumer. 
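+//
+// A hypothetical excerpt of the corresponding TOML file (keys follow the struct tags below;
+// the values are illustrative only):
+//
+//	mode = 1
+//	schema_id = "1"
+//	schema_version = "1"
+//	schema_type = "json"
+//
+//	[topics]
+//	valid = "valid-topic"
+//	dead_letter = "deadletter"
+//
+//	[validators]
+//	enable_json = true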
+type CentralConsumer struct { + Producer Producer `toml:"producer"` + Consumer Consumer `toml:"consumer"` + Registry Registry `toml:"registry"` + Topics CentralConsumerTopics `toml:"topics"` + Validators CentralConsumerValidators `toml:"validators"` + ShouldLog CentralConsumerShouldLog `toml:"should_log"` + NumSchemaCollectors int `toml:"num_schema_collectors" default:"-1"` + NumInferrers int `toml:"num_inferrers" default:"-1"` + MetricsLoggingInterval time.Duration `toml:"metrics_logging_interval" default:"5s"` + RunOptions RunOptions `toml:"run_option"` + Mode int `toml:"mode"` + SchemaID string `toml:"schema_id"` + SchemaVersion string `toml:"schema_version"` + SchemaType string `toml:"schema_type"` + Encryption Encryption `toml:"encryption"` +} + +type Encryption struct { + EncryptionKey string `toml:"encryption_key"` +} + +type CentralConsumerTopics struct { + Valid string `toml:"valid" val:"required"` + DeadLetter string `toml:"dead_letter" val:"required"` +} + +type CentralConsumerValidators struct { + EnableAvro bool `toml:"enable_avro"` + EnableCsv bool `toml:"enable_csv"` + EnableJson bool `toml:"enable_json"` + EnableProtobuf bool `toml:"enable_protobuf"` + EnableXml bool `toml:"enable_xml"` + CsvUrl string `toml:"csv_url" val:"required_if=EnableCsv true,omitempty,url"` + CsvTimeoutBase time.Duration `toml:"csv_timeout_base" default:"2s"` + JsonUseAltBackend bool `toml:"json_use_alt_backend"` + JsonCacheSize int `toml:"json_cache_size" default:"100"` + ProtobufFilePath string `toml:"protobuf_file_path" default:"/app/.schemas"` + ProtobufCacheSize int `toml:"protobuf_cache_size" default:"100"` + XmlUrl string `toml:"xml_url" val:"required_if=EnableXml true,omitempty,url"` + XmlTimeoutBase time.Duration `toml:"xml_timeout_base" default:"3s"` +} + +type CentralConsumerShouldLog struct { + MissingSchema bool `toml:"missing_schema"` + Valid bool `toml:"valid"` + DeadLetter bool `toml:"dead_letter"` +} + +// Read loads parameters from configuration file into CentralConsumer struct. +func (cfg *CentralConsumer) Read(filename string) error { + return fig.Load(cfg, fig.File(filename), fig.Tag("toml"), fig.UseEnv("")) +} + +// Validate validates CentralConsumer struct. 
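+//
+// Typically called right after Read, e.g. (sketch; the file name is an example):
+//
+//	var cfg CentralConsumer
+//	if err := cfg.Read("central-consumer.toml"); err != nil {
+//		// handle read failure
+//	}
+//	if err := cfg.Validate(); err != nil {
+//		// handle invalid configuration
+//	}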
+func (cfg *CentralConsumer) Validate() error { + return validate(cfg, "CentralConsumer.") +} diff --git a/validator/internal/config/config.go b/validator/internal/config/config.go new file mode 100644 index 0000000..9643bfd --- /dev/null +++ b/validator/internal/config/config.go @@ -0,0 +1,406 @@ +package config + +import ( + "reflect" + "strings" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/go-playground/validator/v10" + "go.uber.org/multierr" +) + +type Producer struct { + Type string `toml:"type" val:"oneof=kafka eventhubs pubsub servicebus jetstream pulsar"` + EncryptionKey string `toml:"encryption_key"` + Kafka KafkaPublisherConfig `toml:"kafka"` + Eventhubs EventhubsPublisherConfig `toml:"eventhubs"` + Pubsub PubsubPublisherConfig `toml:"pubsub"` + Servicebus ServicebusPublisherConfig `toml:"servicebus"` + Jetstream JetstreamPublisherConfig `toml:"jetstream"` + Pulsar PulsarPublisherConfig `toml:"pulsar"` +} + +type KafkaPublisherConfig struct { + Address string `toml:"address"` + TlsConfig TlsConfig `toml:"tls_config"` + KrbConfig KrbConfig `toml:"krb_config"` + SaslConfig SaslConfig `toml:"sasl_config"` + Settings KafkaPublisherSettings `toml:"settings"` +} + +type EventhubsPublisherConfig struct { + Address string `toml:"address"` + TlsConfig TlsConfig `toml:"tls_config"` + SaslConfig SaslConfig `toml:"sasl_config"` + Settings EventhubsPublisherSettings `toml:"settings"` +} + +type TlsConfig struct { + Enabled bool `toml:"enabled"` + ClientCertFile string `toml:"client_cert_file" val:"required_if=Enabled true,omitempty,file"` + ClientKeyFile string `toml:"client_key_file" val:"required_if=Enabled true,omitempty,file"` + CaCertFile string `toml:"ca_cert_file" val:"required_if=Enabled true,omitempty,file"` + InsecureSkipVerify bool `toml:"insecure_skip_verify"` +} + +type KrbConfig struct { + Enabled bool `toml:"enabled"` + KrbConfigPath string `toml:"krb_config_path"` + KrbKeyTabPath string `toml:"krb_keytab_path"` + KrbRealm string `toml:"krb_realm"` + KrbServiceName string `toml:"krb_service_name"` + KrbUsername string `toml:"krb_username"` +} + +type SaslConfig struct { + User string `toml:"user"` + Password string `toml:"password"` +} + +type KafkaPublisherSettings struct { + BatchSize int `toml:"batch_size" default:"40"` + BatchBytes int64 `toml:"batch_bytes" default:"5242880"` + Linger time.Duration `toml:"linger" default:"10ms"` +} + +type EventhubsPublisherSettings struct { + BatchSize int `toml:"batch_size" default:"40"` + BatchBytes int64 `toml:"batch_bytes" default:"5242880"` + Linger time.Duration `toml:"linger" default:"10ms"` +} + +type PubsubPublisherConfig struct { + ProjectId string `toml:"project_id"` + Settings PubsubPublisherSettings `toml:"settings"` +} + +type PubsubPublisherSettings struct { + DelayThreshold time.Duration `toml:"delay_threshold" default:"50ms"` + CountThreshold int `toml:"count_threshold" default:"50"` + ByteThreshold int `toml:"byte_threshold" default:"52428800"` + NumGoroutines int `toml:"num_goroutines" default:"5"` + Timeout time.Duration `toml:"timeout" default:"15s"` + MaxOutstandingMessages int `toml:"max_outstanding_messages" default:"800"` + MaxOutstandingBytes int `toml:"max_outstanding_bytes" default:"1048576000"` + EnableMessageOrdering bool `toml:"enable_message_ordering"` +} + +type ServicebusPublisherConfig struct { + ConnectionString string `toml:"connection_string"` +} + +type JetstreamPublisherConfig struct { + Url string `toml:"url"` + Settings 
JetstreamPublisherSettings `toml:"settings"` +} + +type JetstreamPublisherSettings struct { + MaxInflightPending int `toml:"max_inflight_pending" default:"512"` +} + +type PulsarPublisherConfig struct { + ServiceUrl string `toml:"service_url"` + TlsConfig TlsConfig `toml:"tls_config"` +} + +type Consumer struct { + Type string `toml:"type" val:"oneof=kafka eventhubs pubsub servicebus jetstream pulsar"` + Kafka KafkaConsumerConfig `toml:"kafka"` + Eventhubs EventhubsConsumerConfig `toml:"eventhubs"` + Pubsub PubsubConsumerConfig `toml:"pubsub"` + Servicebus ServicebusConsumerConfig `toml:"servicebus"` + Jetstream JetstreamConsumerConfig `toml:"jetstream"` + Pulsar PulsarConsumerConfig `toml:"pulsar"` +} + +type KafkaConsumerConfig struct { + Address string `toml:"address"` + TlsConfig TlsConfig `toml:"tls_config"` + KrbConfig KrbConfig `toml:"krb_config"` + Topic string `toml:"topic"` + GroupId string `toml:"group_id"` + Settings KafkaConsumerSettings `toml:"settings"` +} + +type EventhubsConsumerConfig struct { + Address string `toml:"address"` + TlsConfig TlsConfig `toml:"tls_config"` + SaslConfig SaslConfig `toml:"sasl_config"` + Topic string `toml:"topic"` + GroupId string `toml:"group_id"` + Settings EventhubsConsumerSettings `toml:"settings"` +} + +type KafkaConsumerSettings struct { + MinBytes int `toml:"min_bytes" default:"100"` + MaxWait time.Duration `toml:"max_wait" default:"5s"` + MaxBytes int `toml:"max_bytes" default:"10485760"` + MaxConcurrentFetches int `toml:"max_concurrent_fetches" default:"3"` + MaxPollRecords int `toml:"max_poll_records" default:"100"` +} + +type EventhubsConsumerSettings struct { + MinBytes int `toml:"min_bytes" default:"100"` + MaxWait time.Duration `toml:"max_wait" default:"5s"` + MaxBytes int `toml:"max_bytes" default:"10485760"` + MaxConcurrentFetches int `toml:"max_concurrent_fetches" default:"3"` + MaxPollRecords int `toml:"max_poll_records" default:"100"` +} + +type PubsubConsumerConfig struct { + ProjectId string `toml:"project_id"` + SubscriptionId string `toml:"subscription_id"` + Settings PubsubConsumerSettings `toml:"settings"` +} + +type PubsubConsumerSettings struct { + MaxExtension time.Duration `toml:"max_extension" default:"30m"` + MaxExtensionPeriod time.Duration `toml:"max_extension_period" default:"3m"` + MaxOutstandingMessages int `toml:"max_outstanding_messages" default:"1000"` + MaxOutstandingBytes int `toml:"max_outstanding_bytes" default:"419430400"` + NumGoroutines int `toml:"num_goroutines" default:"10"` +} + +type ServicebusConsumerConfig struct { + ConnectionString string `toml:"connection_string"` + Topic string `toml:"topic"` + Subscription string `toml:"subscription"` + Settings ServicebusConsumerSettings `toml:"settings"` +} + +type ServicebusConsumerSettings struct { + BatchSize int `toml:"batch_size" default:"100"` +} + +type JetstreamConsumerConfig struct { + Url string `toml:"url"` + Subject string `toml:"subject"` + ConsumerName string `toml:"consumer_name"` + Settings JetstreamConsumerSettings `toml:"settings"` +} + +type JetstreamConsumerSettings struct { + BatchSize int `toml:"batch_size" default:"100"` +} + +type PulsarConsumerConfig struct { + ServiceUrl string `toml:"service_url"` + Topic string `toml:"topic"` + Subscription string `toml:"subscription"` + TlsConfig TlsConfig `toml:"tls_config"` +} + +type Registry struct { + URL string `toml:"url" val:"url"` + Type string `toml:"type" default:"janitor" val:"oneof=janitor apicurio"` + GroupID string `toml:"groupID"` + GetTimeout time.Duration 
`toml:"get_timeout" default:"4s"` + RegisterTimeout time.Duration `toml:"register_timeout" default:"10s"` + UpdateTimeout time.Duration `toml:"update_timeout" default:"10s"` + InmemCacheSize int `toml:"inmem_cache_size" default:"100"` +} + +type RunOptions struct { + ErrThreshold int64 `toml:"err_threshold" default:"50"` + ErrInterval time.Duration `toml:"err_interval" default:"1m"` + NumRetries int `toml:"num_retries" default:"0"` +} + +// validate validates CentralConsumer or PullerCleaner struct. +func validate(cfg interface{}, prefix string) error { + validate := validator.New() + validate.SetTagName("val") + + validate.RegisterTagNameFunc(func(fld reflect.StructField) string { + name := strings.SplitN(fld.Tag.Get("toml"), ",", 2)[0] + if name == "-" { + return "" + } + return name + }) + + validate.RegisterStructValidation( + ProducerStructLevelValidation, + KafkaPublisherConfig{}, + EventhubsPublisherConfig{}, + PubsubPublisherConfig{}, + ServicebusPublisherConfig{}, + JetstreamPublisherConfig{}, + ) + validate.RegisterStructValidation( + ConsumerStructLevelValidation, + KafkaConsumerConfig{}, + EventhubsConsumerConfig{}, + PubsubConsumerConfig{}, + ServicebusConsumerConfig{}, + JetstreamConsumerConfig{}, + ) + + if err := validate.Struct(cfg); err != nil { + if _, ok := err.(*validator.InvalidValidationError); ok { + return err + } + + var errCombined error + for _, err := range err.(validator.ValidationErrors) { + // Trims prefix ("CentralConsumer." or "PullerCleaner.") + // in order to correspond to TOML key path. + fieldName := strings.TrimPrefix(err.Namespace(), prefix) + + switch err.Tag() { + case "required": + errCombined = multierr.Append(errCombined, errtemplates.RequiredTagFail(fieldName)) + case "required_if": + errCombined = multierr.Append(errCombined, errtemplates.RequiredTagFail(fieldName)) + case "file": + errCombined = multierr.Append(errCombined, errtemplates.FileTagFail(fieldName, err.Value())) + case "url": + errCombined = multierr.Append(errCombined, errtemplates.UrlTagFail(fieldName, err.Value())) + case "oneof": + errCombined = multierr.Append(errCombined, errtemplates.OneofTagFail(fieldName, err.Value())) + case "hostname_port": + errCombined = multierr.Append(errCombined, errtemplates.HostnamePortTagFail(fieldName, err.Value())) + default: + errCombined = multierr.Append(errCombined, err) + } + } + return errCombined + } + return nil +} + +// ProducerStructLevelValidation is a custom validator which validates broker +// structure depending on which type of producer is required. 
+func ProducerStructLevelValidation(sl validator.StructLevel) { + source := sl.Parent().Interface().(Producer) + + validate := validator.New() + + switch producer := sl.Current().Interface().(type) { + case KafkaPublisherConfig: + if source.Type == "kafka" { + validateMultipleHostnames(validate, sl, producer.Address) + } + case EventhubsPublisherConfig: + if source.Type == "eventhubs" { + validateMultipleHostnames(validate, sl, producer.Address) + if err := validate.Var(producer.SaslConfig, "required"); err != nil { + sl.ReportValidationErrors("sasl_config", "", err.(validator.ValidationErrors)) + } + } + case PubsubPublisherConfig: + if source.Type == "pubsub" { + if err := validate.Var(producer.ProjectId, "required"); err != nil { + sl.ReportValidationErrors("project_id", "", err.(validator.ValidationErrors)) + } + } + case ServicebusPublisherConfig: + if source.Type == "servicebus" { + if err := validate.Var(producer.ConnectionString, "required"); err != nil { + sl.ReportValidationErrors("connection_string", "", err.(validator.ValidationErrors)) + } + } + case JetstreamPublisherConfig: + if source.Type == "jetstream" { + if err := validate.Var(producer.Url, "url"); err != nil { + sl.ReportValidationErrors("url", "", err.(validator.ValidationErrors)) + } + } + case PulsarPublisherConfig: + if source.Type == "pulsar" { + if err := validate.Var(producer.ServiceUrl, "required"); err != nil { + sl.ReportValidationErrors("service_url", "", err.(validator.ValidationErrors)) + } + } + } +} + +// ConsumerStructLevelValidation is a custom validator which validates broker +// structure depending on which type of consumer is required. +func ConsumerStructLevelValidation(sl validator.StructLevel) { + source := sl.Parent().Interface().(Consumer) + + validate := validator.New() + + switch consumer := sl.Current().Interface().(type) { + case KafkaConsumerConfig: + if source.Type == "kafka" { + validateMultipleHostnames(validate, sl, consumer.Address) + if err := validate.Var(consumer.Topic, "required"); err != nil { + sl.ReportValidationErrors("topic", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.GroupId, "required"); err != nil { + sl.ReportValidationErrors("group_id", "", err.(validator.ValidationErrors)) + } + } + case EventhubsConsumerConfig: + if source.Type == "eventhubs" { + validateMultipleHostnames(validate, sl, consumer.Address) + if err := validate.Var(consumer.Topic, "required"); err != nil { + sl.ReportValidationErrors("topic", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.GroupId, "required"); err != nil { + sl.ReportValidationErrors("group_id", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.SaslConfig, "required"); err != nil { + sl.ReportValidationErrors("sasl_config", "", err.(validator.ValidationErrors)) + } + } + case PubsubConsumerConfig: + if source.Type == "pubsub" { + if err := validate.Var(consumer.ProjectId, "required"); err != nil { + sl.ReportValidationErrors("project_id", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.SubscriptionId, "required"); err != nil { + sl.ReportValidationErrors("subscription_id", "", err.(validator.ValidationErrors)) + } + } + case ServicebusConsumerConfig: + if source.Type == "servicebus" { + if err := validate.Var(consumer.ConnectionString, "required"); err != nil { + sl.ReportValidationErrors("connection_string", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.Topic, "required"); err != nil { + 
sl.ReportValidationErrors("topic", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.Subscription, "required"); err != nil { + sl.ReportValidationErrors("subscription", "", err.(validator.ValidationErrors)) + } + } + case JetstreamConsumerConfig: + if source.Type == "jetstream" { + if err := validate.Var(consumer.Url, "url"); err != nil { + sl.ReportValidationErrors("url", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.Subject, "required"); err != nil { + sl.ReportValidationErrors("subject", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.ConsumerName, "required"); err != nil { + sl.ReportValidationErrors("consumer_name", "", err.(validator.ValidationErrors)) + } + } + case PulsarConsumerConfig: + if source.Type == "pulsar" { + if err := validate.Var(consumer.ServiceUrl, "required"); err != nil { + sl.ReportValidationErrors("service_url", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.Topic, "required"); err != nil { + sl.ReportValidationErrors("topic", "", err.(validator.ValidationErrors)) + } + if err := validate.Var(consumer.Subscription, "required"); err != nil { + sl.ReportValidationErrors("subscription", "", err.(validator.ValidationErrors)) + } + } + } +} + +func validateMultipleHostnames(validate *validator.Validate, sl validator.StructLevel, addresses string) { + splitted := strings.Split(addresses, ",") + + for _, address := range splitted { + if err := validate.Var(strings.Trim(address, " "), "hostname_port"); err != nil { + sl.ReportValidationErrors("address", "", err.(validator.ValidationErrors)) + } + } +} diff --git a/validator/internal/config/logger.go b/validator/internal/config/logger.go new file mode 100644 index 0000000..60a6336 --- /dev/null +++ b/validator/internal/config/logger.go @@ -0,0 +1,40 @@ +package config + +import ( + "fmt" + "github.com/dataphos/lib-logger/logger" + "os" +) + +const ( + LogLevelEnvKey = "LOG_LEVEL_MINIMUM" +) + +const ( + InfoLevel = "info" + WarnLevel = "warn" + ErrorLevel = "error" + DefaultLevel = InfoLevel +) + +var levels = map[string]logger.Level{InfoLevel: logger.LevelInfo, WarnLevel: logger.LevelWarn, ErrorLevel: logger.LevelError} + +// GetLogLevel returns minimum log level based on environment variable. +// Possible levels are info, warn, and error. Defaults to info. +func GetLogLevel() (logger.Level, []string) { + warnings := make([]string, 0, 2) // warnings about log config to be logged after the logger is configured + + levelString := os.Getenv(LogLevelEnvKey) + if levelString == "" { + warnings = append(warnings, fmt.Sprintf("Value for '%s' not set! Using level %s.", LogLevelEnvKey, DefaultLevel)) + return levels[DefaultLevel], warnings + } + + level, supported := levels[levelString] + if supported { + return level, warnings + } else { + warnings = append(warnings, fmt.Sprintf("Value %v for %v is not supported, using level %v.", levelString, LogLevelEnvKey, DefaultLevel)) + return levels[DefaultLevel], warnings + } +} diff --git a/validator/internal/config/pullercleaner.go b/validator/internal/config/pullercleaner.go new file mode 100644 index 0000000..2eef28d --- /dev/null +++ b/validator/internal/config/pullercleaner.go @@ -0,0 +1,50 @@ +package config + +import ( + "time" + + "github.com/kkyr/fig" +) + +// PullerCleaner represents all required configuration to run an instance of puller cleaner. 
+type PullerCleaner struct { + Producer Producer `toml:"producer"` + Consumer Consumer `toml:"consumer"` + Registry Registry `toml:"registry"` + Topics PullerCleanerTopics `toml:"topics"` + Validators PullerCleanerValidators `toml:"validators"` + ShouldLog PullerCleanerShouldLog `toml:"should_log"` + NumCleaners int `toml:"num_cleaners" default:"10"` + MetricsLoggingInterval time.Duration `toml:"metrics_logging_interval" default:"5s"` + RunOptions RunOptions `toml:"run_options"` +} + +type PullerCleanerTopics struct { + Valid string `toml:"valid" val:"required"` + DeadLetter string `toml:"dead_letter" val:"required"` +} + +type PullerCleanerValidators struct { + EnableCsv bool `toml:"enable_csv"` + EnableJson bool `toml:"enable_json"` + CsvUrl string `toml:"csv_url" val:"required_if=EnableCsv true,omitempty,url"` + CsvTimeoutBase time.Duration `toml:"csv_timeout_base" default:"2s"` + JsonUseAltBackend bool `toml:"json_use_alt_backend"` + JsonCacheSize int `toml:"json_cache_size" default:"100"` + JsonSchemaGenScript string `toml:"json_schema_gen_script" val:"required_if=EnableJson true,omitempty,file"` +} + +type PullerCleanerShouldLog struct { + Valid bool `toml:"valid"` + DeadLetter bool `toml:"dead_letter"` +} + +// Read loads parameters from configuration file into PullerCleaner struct. +func (cfg *PullerCleaner) Read(filename string) error { + return fig.Load(cfg, fig.File(filename), fig.Tag("toml"), fig.UseEnv("")) +} + +// Validate validates PullerCleaner struct. +func (cfg *PullerCleaner) Validate() error { + return validate(cfg, "PullerCleaner.") +} diff --git a/validator/internal/errcodes/errcodes.go b/validator/internal/errcodes/errcodes.go new file mode 100644 index 0000000..53255b6 --- /dev/null +++ b/validator/internal/errcodes/errcodes.go @@ -0,0 +1,68 @@ +// Package errcodes contains all the error codes used by janitor subcomponents. +package errcodes + +const ( + // RegistryInitialization marks unsuccessful initialization of the schema registry dependency. + RegistryInitialization = 100 + + // RegistryUnresponsive marks an unsuccessful attempt at a schema registry operation due to the registry being unresponsive. + RegistryUnresponsive = 101 + + // BrokerInitialization marks unsuccessful initialization of the message broker related external dependencies. + BrokerInitialization = 300 + + // PullingFailure marks failures which occur while pulling messages from some source. + PullingFailure = 301 + + // PublishingFailure marks an unsuccessful attempt at message publishing. + PublishingFailure = 302 + + // BrokerConnClosed marks unsuccessful closing of the connection to the message broker. + BrokerConnClosed = 303 + + // TLSInitialization marks an unsuccessful initialization of a TLS configuration. + TLSInitialization = 304 + + // MetricsServerFailure marks failure of an HTTP server for metrics. + MetricsServerFailure = 305 + + // MetricsServerShutdownFailure marks an unsuccessful shutdown of an HTTP server for metrics. + MetricsServerShutdownFailure = 306 + + // ReadConfigFailure marks unsuccessful read of .yaml file into janitorctl structure. + ReadConfigFailure = 400 + + // ValidateConfigFailure marks unsuccessful validation of janitorctl's exposed fields. + ValidateConfigFailure = 401 + + // ValidationFailure marks an unsuccessful attempt at message validation. + ValidationFailure = 500 + + // InvalidMessage marks messages which were inferred to be invalid. + InvalidMessage = 501 + + // DeadletterMessage marks messages which were inferred to be deadletter. 
+ DeadletterMessage = 502 + + // SchemaGeneration marks an unsuccessful attempt at schema generation. + SchemaGeneration = 600 + + // Initialization is used for general initialization failure of internal structures only, + // initialization failure of external dependencies is marked through other, more descriptive error codes, + Initialization = 900 + + // ParsingMessage marks an unsuccessful attempt at mapping a broker message structure into the one used for processing. + ParsingMessage = 901 + + // UnrecoverableErrorEncountered declares that the system encountered an unrecoverable error. + UnrecoverableErrorEncountered = 902 + + // ErrorThresholdReached declares that the system encountered at least the threshold amount of errors. + ErrorThresholdReached = 903 + + // CompletedWithErrors marks that the process has completed but errors occurred. + CompletedWithErrors = 904 + + // Miscellaneous is used when no other available error code is fitting. + Miscellaneous = 999 +) diff --git a/validator/internal/errtemplates/errtemplates.go b/validator/internal/errtemplates/errtemplates.go new file mode 100644 index 0000000..6efa8ab --- /dev/null +++ b/validator/internal/errtemplates/errtemplates.go @@ -0,0 +1,88 @@ +// Package errtemplates offers convenience functions to standardize error messages and simplify proper error wrapping. +package errtemplates + +import ( + "fmt" + + "github.com/pkg/errors" +) + +const ( + envVariableNotDefinedTemplate = "env variable %s not defined" + parsingEnvVariableFailedTemplate = "parsing env variable %s failed" + unsupportedBrokerTypeTemplate = "unsupported broker type %s" + unsupportedRegistryTypeTemplate = "unsupported registry type %s" + failedTopicInitializationTemplate = "creating reference to %s failed" + attributeNotAStringTemplate = "%s attribute is not a string" + missingAttributeTemplate = "%s attribute is not defined" + mustNotBeEmptyTemplate = "%s must not be empty" + unsupportedMessageFormatTemplate = "unsupported message format: %s" + badHttpStatusCodeTemplate = "bad status code: %d" + httpRequestToUrlFailedTemplate = "%s request to %s failed" +) + +const ( + // ReadingResponseBodyFailed is an error message stating that reading the response body failed. + ReadingResponseBodyFailed = "reading response body failed" + + // UnmarshallingJSONFailed is an error message stating that unmarshalling json failed. + UnmarshallingJSONFailed = "unmarshalling json failed" + + // LoadingTopicsFailed is an error message stating that the target topics couldn't be loaded. + LoadingTopicsFailed = "loading topics failed" +) + +// EnvVariableNotDefined returns an error stating that the given env variable is not defined. +func EnvVariableNotDefined(name string) error { + return errors.Errorf(envVariableNotDefinedTemplate, name) +} + +// ParsingEnvVariableFailed returns a string stating that the given env variable couldn't be parsed properly. +func ParsingEnvVariableFailed(name string) string { + return fmt.Sprintf(parsingEnvVariableFailedTemplate, name) +} + +// UnsupportedBrokerType returns an error stating that the given broker type is not supported. +func UnsupportedBrokerType(name string) error { + return errors.Errorf(unsupportedBrokerTypeTemplate, name) +} + +// UnsupportedRegistryType returns an error stating that the given broker type is not supported. +func UnsupportedRegistryType(name string) error { + return errors.Errorf(unsupportedRegistryTypeTemplate, name) +} + +// CreatingTopicInstanceFailed returns an error stating that topic creation failed. 
+func CreatingTopicInstanceFailed(name string) string { + return fmt.Sprintf(failedTopicInitializationTemplate, name) +} + +// AttributeNotAString returns an error stating that the given attribute is not a string. +func AttributeNotAString(name string) error { + return errors.Errorf(attributeNotAStringTemplate, name) +} + +// AttributeNotDefined returns an error stating that the given attribute is not defined. +func AttributeNotDefined(name string) error { + return errors.Errorf(missingAttributeTemplate, name) +} + +// MustNotBeEmpty returns an error stating that the given variable must not be empty. +func MustNotBeEmpty(name string) error { + return errors.Errorf(mustNotBeEmptyTemplate, name) +} + +// UnsupportedMessageFormat returns an error stating that the given message format is not supported. +func UnsupportedMessageFormat(format string) error { + return errors.Errorf(unsupportedMessageFormatTemplate, format) +} + +// BadHttpStatusCode returns an error stating that the given status code wasn't expected. +func BadHttpStatusCode(code int) error { + return errors.Errorf(badHttpStatusCodeTemplate, code) +} + +// HttpRequestToUrlFailed returns a string stating that an HTTP request to the given URL has failed. +func HttpRequestToUrlFailed(methodName, url string) string { + return fmt.Sprintf(httpRequestToUrlFailedTemplate, methodName, url) +} diff --git a/validator/internal/errtemplates/valtemplates.go b/validator/internal/errtemplates/valtemplates.go new file mode 100644 index 0000000..b5647a3 --- /dev/null +++ b/validator/internal/errtemplates/valtemplates.go @@ -0,0 +1,31 @@ +package errtemplates + +import "github.com/pkg/errors" + +const ( + requiredTagFailTemplate = "Validation for '%s' failed: can not be blank" + fileTagFailTemplate = "Validation for '%s' failed: '%s' does not exist" + urlTagFailTemplate = "Validation for '%s' failed: '%s' incorrect url" + oneofTagFailTemplate = "Validation for '%s' failed: '%s' is not one of the options" + hostnamePortTagFailTemplate = "Validation for '%s' failed: '%s' incorrect hostname and port" +) + +func RequiredTagFail(cause string) error { + return errors.Errorf(requiredTagFailTemplate, cause) +} + +func FileTagFail(cause string, value interface{}) error { + return errors.Errorf(fileTagFailTemplate, cause, value) +} + +func UrlTagFail(cause string, value interface{}) error { + return errors.Errorf(urlTagFailTemplate, cause, value) +} + +func OneofTagFail(cause string, value interface{}) error { + return errors.Errorf(oneofTagFailTemplate, cause, value) +} + +func HostnamePortTagFail(cause string, value interface{}) error { + return errors.Errorf(hostnamePortTagFailTemplate, cause, value) +} diff --git a/validator/internal/janitor/cleaner.go b/validator/internal/janitor/cleaner.go new file mode 100644 index 0000000..3fde83a --- /dev/null +++ b/validator/internal/janitor/cleaner.go @@ -0,0 +1,140 @@ +package janitor + +import ( + "context" + "sync" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errcodes" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + + "github.com/pkg/errors" +) + +// Cleaner models a process which attempts to find the correct schema metadata for the given Message. +type Cleaner interface { + // Clean takes a Message and tries to generate a new schema for the message, returning a copy of the original Message, + // with the updated AttributeSchemaID and AttributeSchemaVersion properties.
+ // + // The returned errors are instances of OpError in case the caller needs additional information, like testing OpError.Deadletter. + Clean(context.Context, Message) (Message, error) +} + +// CachingCleaner combines the functionalities of SchemaGenerators, Validators, registry.SchemaRegistry in order +// to perform operations relating to the registration of newly generated schemas. +// +// This implementation of Cleaner tries to validate each new message against the last schema generated for that format. +// This is a heuristic chosen because invalid messages tend to come in bursts, +// so it's sane to assume multiple messages in sequence will have the same schema. +type CachingCleaner struct { + Generators SchemaGenerators + Validators Validators + Registry registry.SchemaRegistry + LastGenerated map[string]SchemaInfo + mu sync.RWMutex +} + +// SchemaInfo holds the schema and the schema registry related info. +type SchemaInfo struct { + Schema []byte + ID string + Version string +} + +// NewCachingCleaner returns a new CachingCleaner. +// +// Because of the caching mechanisms, CachingCleaner is NOT intended for concurrent use. +func NewCachingCleaner(generators SchemaGenerators, validators Validators, schemaRegistry registry.SchemaRegistry) *CachingCleaner { + return &CachingCleaner{ + Generators: generators, + Validators: validators, + Registry: schemaRegistry, + LastGenerated: map[string]SchemaInfo{}, + mu: sync.RWMutex{}, + } +} + +// Clean implements Cleaner. +func (c *CachingCleaner) Clean(ctx context.Context, message Message) (Message, error) { + c.mu.RLock() + lastGeneratedOfFormat, ok := c.LastGenerated[message.Format] + c.mu.RUnlock() + if ok { + isValid, err := c.Validators.Validate(message, lastGeneratedOfFormat.Schema) + if err != nil { + return Message{}, intoOpErr(message.ID, errcodes.ValidationFailure, err) + } + if isValid { + return overwriteSchemaInfo(message, lastGeneratedOfFormat), nil + } + } + + schema, err := c.Generators.Generate(message) + if err != nil { + return Message{}, intoOpErr(message.ID, errcodes.SchemaGeneration, err) + } + id, version, err := c.registerOrUpdateSchema(ctx, schema, message.SchemaID, message.Format) + if err != nil { + return Message{}, intoOpErr(message.ID, errcodes.RegistryUnresponsive, err) + } + + lastGeneratedOfFormat = SchemaInfo{ + Schema: schema, + ID: id, + Version: version, + } + c.mu.Lock() + c.LastGenerated[message.Format] = lastGeneratedOfFormat + c.mu.Unlock() + + return overwriteSchemaInfo(message, lastGeneratedOfFormat), nil +} + +// registerOrUpdateSchema registers the schema if the given id is empty, or updates the schema +// under the given id. +func (c *CachingCleaner) registerOrUpdateSchema(ctx context.Context, schema []byte, id string, schemaType string) (string, string, error) { + if id == "" { + return c.Registry.Register(ctx, schema, schemaType, "none", "none") + } + + version, err := c.Registry.Update(ctx, id, schema) + if err != nil { + return "", "", err + } + + return id, version, nil +} + +// overwriteSchemaInfo returns a new Message, identical to the one given, expect for the fields +// concerning the schema id and version. +func overwriteSchemaInfo(message Message, schemaInfo SchemaInfo) Message { + message.RawAttributes[AttributeSchemaID] = schemaInfo.ID + message.RawAttributes[AttributeSchemaVersion] = schemaInfo.Version + + message.SchemaID = schemaInfo.ID + message.Version = schemaInfo.Version + + return message +} + +// CleanerRouter wraps the Cleaner functionality with a Router. 
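+//
+// A usage sketch (cleaner and router are assumed to be initialized elsewhere):
+//
+//	cr := CleanerRouter{Cleaner: cleaner, Router: router}
+//	pair, err := cr.CleanAndReroute(ctx, message)
+//	if err != nil {
+//		return err // non-deadletter errors are propagated as-is
+//	}
+//	// pair.Topic is either the Valid or the Deadletter destination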
+type CleanerRouter struct { + Cleaner Cleaner + Router Router +} + +// CleanAndReroute attempts to clean the given Message, marking all successfully cleaned messages as Valid. +// +// In case the error returned by Cleaner.Clean evaluates OpError.Deadletter to true, the message is marked as Deadletter. +// All other errors are propagated as is. +func (cr CleanerRouter) CleanAndReroute(ctx context.Context, message Message) (MessageTopicPair, error) { + cleaned, err := cr.Cleaner.Clean(ctx, message) + if err != nil { + var opError *OpError + if errors.As(err, &opError) && opError.Deadletter() { + return MessageTopicPair{Message: message, Topic: cr.Router.Route(Deadletter, message)}, nil + } + return MessageTopicPair{}, err + } + + return MessageTopicPair{Message: cleaned, Topic: cr.Router.Route(Valid, message)}, nil +} diff --git a/validator/internal/janitor/encryption.go b/validator/internal/janitor/encryption.go new file mode 100644 index 0000000..5de9f00 --- /dev/null +++ b/validator/internal/janitor/encryption.go @@ -0,0 +1,58 @@ +package janitor + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "io" +) + +func Encrypt(plaintext []byte, encryptionKey string) ([]byte, error) { + // NewCipher returns a new cipher.Block in dependence to the key which has to be either 16, 24 or 32 characters long + block, err := aes.NewCipher([]byte(encryptionKey)) + if err != nil { + return nil, err + } + + // GCM instance generation in dependence to the given cipher block + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + // initialization of the nonce size + nonce := make([]byte, gcm.NonceSize()) + // io.ReadFull ensures that the nonce buffer is filled exactly with the specified number of random bytes + if _, err = io.ReadFull(rand.Reader, nonce); err != nil { + return nil, err + } + + // return nonce with encrypted message appended + return gcm.Seal(nonce, nonce, plaintext, nil), nil +} + +func Decrypt(encrypted []byte, encryptionKey string) ([]byte, error) { + // NewCipher returns a new cipher.Block in dependence to the key which has to be either 16, 24 or 32 characters long + block, err := aes.NewCipher([]byte(encryptionKey)) + if err != nil { + return nil, err + } + + // gcm instance generation in dependence to the given cipher block + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + // reading the nonce and encrypted + nonceSize := gcm.NonceSize() + var nonce []byte + nonce, encrypted = encrypted[:nonceSize], encrypted[nonceSize:] + + // returns decrypted text except if the key is invalid or the nonce is too short for the cipher + decrypted, err := gcm.Open(nil, nonce, encrypted, nil) + if err != nil { + return nil, err + } + return decrypted, err +} diff --git a/validator/internal/janitor/encryption_test.go b/validator/internal/janitor/encryption_test.go new file mode 100644 index 0000000..3e728a0 --- /dev/null +++ b/validator/internal/janitor/encryption_test.go @@ -0,0 +1,24 @@ +package janitor + +import ( + "bytes" + "testing" +) + +func TestEncryption(t *testing.T) { + message := []byte("Hello, world!") + encryptionKey := "1Pw1EPV7bx8sk0ugotIkRg==" + + encrypted, err := Encrypt(message, encryptionKey) + if err != nil { + t.Fatal(err) + } + decrypted, err := Decrypt(encrypted, encryptionKey) + if err != nil { + t.Fatal(err) + } + + if !bytes.Equal(message, decrypted) { + t.Fatal("expected and actual message not the same") + } +} diff --git a/validator/internal/janitor/error.go b/validator/internal/janitor/error.go new file mode 
100644 index 0000000..dd7d021 --- /dev/null +++ b/validator/internal/janitor/error.go @@ -0,0 +1,51 @@ +package janitor + +import ( + "github.com/pkg/errors" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" +) + +type OpError struct { + MessageID string + Code int + Err error +} + +func (e *OpError) Error() string { + return "processing " + e.MessageID + " failed:" + e.Err.Error() +} + +// Unwrap implements the optional Unwrap error method, which allows for proper usage of errors.Is and errors.As. +func (e *OpError) Unwrap() error { + return e.Err +} + +// Temporary implements the optional Temporary error method, to ensure we don't hide the temporariness of the underlying +// error (in case code checking if this error is temporary doesn't use errors.As but just converts directly). +func (e *OpError) Temporary() bool { + var temporary interface { + Temporary() bool + } + + // errors.As stops at the first error down the chain which implements temporary + // this is important because an unrecoverable error could wrap a recoverable one, so we need the "latest" of the two + if errors.As(e.Err, &temporary) { + return temporary.Temporary() + } + return true +} + +// Deadletter evaluates whether the instance is a Deadletter-type error. +func (e *OpError) Deadletter() bool { + return errors.Is(e.Err, validator.ErrDeadletter) || errors.Is(e.Err, schemagen.ErrDeadletter) +} + +func intoOpErr(messageId string, code int, err error) error { + return &OpError{ + MessageID: messageId, + Code: code, + Err: err, + } +} diff --git a/validator/internal/janitor/error_test.go b/validator/internal/janitor/error_test.go new file mode 100644 index 0000000..ef594bf --- /dev/null +++ b/validator/internal/janitor/error_test.go @@ -0,0 +1,45 @@ +package janitor + +import ( + "syscall" + "testing" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + + "github.com/pkg/errors" +) + +func TestDeadletter(t *testing.T) { + opError := OpError{ + Err: validator.ErrDeadletter, + } + if !opError.Deadletter() { + t.Fatal("expected Deadletter") + } + + opError.Err = errors.New("oops") + if opError.Deadletter() { + t.Fatal("shouldn't be Deadletter") + } + + opError.Err = schemagen.ErrDeadletter + if !opError.Deadletter() { + t.Fatal("should be Deadletter") + } +} + +func TestTemporary(t *testing.T) { + opError := OpError{ + Err: registry.ErrNotFound, + } + if !opError.Temporary() { + t.Fatal("expected temporary") + } + + opError.Err = syscall.ECONNREFUSED + if opError.Temporary() { + t.Fatal("expected not temporary") + } +} diff --git a/validator/internal/janitor/janitor.go b/validator/internal/janitor/janitor.go new file mode 100644 index 0000000..54c6d97 --- /dev/null +++ b/validator/internal/janitor/janitor.go @@ -0,0 +1,202 @@ +// Package janitor offers a suite of functions for collecting message schemas, +// validating the messages based on the collected schemas, and publishing them to a destination topic. 
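+//
+// A rough sketch of the typical flow, assuming an already-configured schema registry,
+// validator set and router (error handling omitted):
+//
+//	pair, _ := janitor.CollectSchema(ctx, msg, schemaRegistry)
+//	mtp, _ := janitor.InferDestinationTopic(pair, validators, router)
+//	// mtp.Topic now names the destination topic for mtp.Message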
+package janitor
+
+import (
+	"context"
+	"strings"
+	"time"
+
+	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/errcodes"
+	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates"
+	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry"
+	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen"
+	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator"
+
+	"github.com/pkg/errors"
+
+	"github.com/dataphos/lib-brokers/pkg/broker"
+)
+
+// Message defines a Message used for processing broker messages.
+// Essentially, Message decorates broker messages with additional, extracted information.
+type Message struct {
+	ID            string
+	Key           string
+	RawAttributes map[string]interface{}
+	Payload       []byte
+	IngestionTime time.Time
+	SchemaID      string
+	Version       string
+	Format        string
+}
+
+const (
+	// AttributeSchemaID is one of the keys expected to be found in the attributes field of the message.
+	// It holds the schema id information concerning the data field of the message.
+	AttributeSchemaID = "schemaId"
+
+	// AttributeSchemaVersion is one of the keys expected to be found in the attributes field of the message.
+	// It holds the schema version information concerning the data field of the message.
+	AttributeSchemaVersion = "versionId"
+
+	// AttributeFormat is one of the keys expected to be found in the attributes field of the message.
+	// It holds the format of the data field of the message.
+	AttributeFormat = "format"
+)
+
+// MessageSchemaPair wraps a Message with the Schema relating to this Message.
+type MessageSchemaPair struct {
+	Message Message
+	Schema  []byte
+}
+
+// CollectSchema retrieves the schema of the given Message from registry.SchemaRegistry.
+//
+// If schema retrieval results in registry.ErrNotFound, or Message.SchemaID or Message.Version is an empty string,
+// the returned MessageSchemaPair has MessageSchemaPair.Schema set to nil.
+//
+// The returned error is an instance of OpError for improved error handling (so that the source of this error is identifiable
+// even if combined with other errors).
+func CollectSchema(ctx context.Context, message Message, schemaRegistry registry.SchemaRegistry) (MessageSchemaPair, error) {
+	if message.SchemaID == "" || message.Version == "" {
+		return MessageSchemaPair{Message: message, Schema: nil}, nil
+	}
+
+	schema, err := schemaRegistry.Get(ctx, message.SchemaID, message.Version)
+	if err != nil {
+		if errors.Is(err, registry.ErrNotFound) {
+			return MessageSchemaPair{Message: message, Schema: nil}, nil
+		}
+		return MessageSchemaPair{}, intoOpErr(message.ID, errcodes.RegistryUnresponsive, err)
+	}
+
+	return MessageSchemaPair{Message: message, Schema: schema}, nil
+}
+
+// Validators is a convenience type for a map containing validator.Validator instances for available message formats.
+type Validators map[string]validator.Validator
+
+// Validate wraps the same function of validator.Validator, by first selecting the proper validator, and then using that
+// validator to determine the validity of the given Message.Payload under this schema.
+//
+// Returns an error if Validators doesn't contain a validator instance for the message format.
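For illustration, a minimal sketch of wiring a stub validator into a Validators map and calling Validate. The stub, the example name, and all values are made up; because these packages live under internal/, such a snippet would have to sit inside this module (e.g. as an external test file of the janitor package).

```go
package janitor_test

import (
	"fmt"

	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor"
	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator"
)

func ExampleValidators_Validate() {
	// A stub validator that accepts every payload; a real one would check the payload against the schema.
	validators := janitor.Validators{
		"json": validator.Func(func(payload, schema []byte, id, version string) (bool, error) {
			return true, nil
		}),
	}

	message := janitor.Message{
		ID:       "1",
		SchemaID: "1",
		Version:  "1",
		Format:   "JSON", // the validator lookup lowercases the format, so "JSON" resolves to the "json" entry
		Payload:  []byte(`{"foo":"bar"}`),
	}

	isValid, err := validators.Validate(message, []byte(`{"type":"object"}`))
	fmt.Println(isValid, err)
	// Output: true <nil>
}
```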
+func (vs Validators) Validate(message Message, schema []byte) (bool, error) { + v, ok := vs[strings.ToLower(message.Format)] + if !ok { + return false, errtemplates.UnsupportedMessageFormat(message.Format) + } + return v.Validate(message.Payload, schema, message.SchemaID, message.Version) +} + +// SchemaGenerators is a convenience type for a map containing schemagen.Generator instances for available message formats. +type SchemaGenerators map[string]schemagen.Generator + +// Generate wraps the same function of schemagen.Generator, by first selecting the proper generator, and then using that +// generator to construct a schema from the given Parsed instance. +// +// Returns an error if SchemaGenerators doesn't contain a generator instance for the MessageFormat of the given Parsed instance. +func (gs SchemaGenerators) Generate(message Message) ([]byte, error) { + generator, ok := gs[message.Format] + if !ok { + return nil, errtemplates.UnsupportedMessageFormat(message.Format) + } + return generator.Generate(message.Payload) +} + +// Router determines where should the messages be sent to. +type Router interface { + Route(Result, Message) string +} + +// RoutingFunc convenience type to allow functions to implement Router directly. +type RoutingFunc func(Result, Message) string + +func (f RoutingFunc) Route(result Result, message Message) string { + return f(result, message) +} + +// Result holds the four possible outcomes concerning with routing messages to some destination topic: Valid, Invalid, Deadletter and MissingSchema. +// Valid, Invalid and Deadletter are possible outcomes of message validation, while MissingSchema occurs if there is no record +// of the Schema in the Schema Registry. +type Result int + +const ( + Valid Result = iota + Invalid + Deadletter + MissingSchema +) + +// MessageTopicPair wraps a Message with the Topic the Message is supposed to be sent to. +type MessageTopicPair struct { + Message Message + Topic string +} + +// InferDestinationTopic infers the destination topic for the given MessageSchemaPair. +// +// In case MessageSchemaPair.Schema is empty, MissingSchema is passed onto the given Router to +// infer the destination topic. +// +// If the schema exists, the message is validated against it, and the Result is passed onto the Router +// to infer the destination topic. In case validation returns validator.ErrDeadletter, Deadletter is passed onto the Router. +// +// The returned error is an instance of OpError for improved error handling (so that the source of this error is identifiable +// even if combined with other errors). 
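As a sketch of how these pieces compose, the snippet below routes a valid message using InferDestinationTopic with a RoutingFunc-based Router and the same kind of stub validator as above. Topic names and payloads are made up.

```go
package main

import (
	"fmt"

	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor"
	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator"
)

func main() {
	// Keep valid messages on their own topic and send everything else to a dead-letter topic.
	router := janitor.RoutingFunc(func(result janitor.Result, _ janitor.Message) string {
		if result == janitor.Valid {
			return "valid-topic"
		}
		return "deadletter-topic"
	})

	// Stub validator that accepts every payload.
	validators := janitor.Validators{
		"json": validator.Func(func(payload, schema []byte, id, version string) (bool, error) {
			return true, nil
		}),
	}

	pair := janitor.MessageSchemaPair{
		Message: janitor.Message{
			ID:            "1",
			SchemaID:      "1",
			Version:       "1",
			Format:        "json",
			Payload:       []byte(`{"foo":"bar"}`),
			RawAttributes: map[string]interface{}{},
		},
		Schema: []byte(`{"type":"object"}`),
	}

	messageTopicPair, err := janitor.InferDestinationTopic(pair, validators, router)
	fmt.Println(messageTopicPair.Topic, err) // valid-topic <nil>
}
```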
+func InferDestinationTopic(messageSchemaPair MessageSchemaPair, validators Validators, router Router) (MessageTopicPair, error) { + message, schema := messageSchemaPair.Message, messageSchemaPair.Schema + + if len(schema) == 0 { + errMissingSchema := errors.WithMessage(validator.ErrMissingSchema, "") + message.RawAttributes["deadLetterErrorCategory"] = "Schema error" + message.RawAttributes["deadLetterErrorReason"] = errMissingSchema.Error() + return MessageTopicPair{Message: message, Topic: router.Route(MissingSchema, message)}, nil + } + + isValid, err := validators.Validate(message, schema) + if err != nil { + if errors.Is(err, validator.ErrBrokenMessage) { + message.RawAttributes["deadLetterErrorCategory"] = "Broken message" + message.RawAttributes["deadLetterErrorReason"] = err.Error() + return MessageTopicPair{Message: message, Topic: router.Route(Deadletter, message)}, nil + } + if errors.Is(err, validator.ErrWrongCompile) { + message.RawAttributes["deadLetterErrorCategory"] = "Wrong compile" + message.RawAttributes["deadLetterErrorReason"] = err.Error() + return MessageTopicPair{Message: message, Topic: router.Route(Deadletter, message)}, nil + } + if errors.Is(err, validator.ErrFailedValidation) { + message.RawAttributes["deadLetterErrorCategory"] = "Validation error" + message.RawAttributes["deadLetterErrorReason"] = err.Error() + return MessageTopicPair{Message: message, Topic: router.Route(Deadletter, message)}, nil + } + if errors.Is(err, validator.ErrDeadletter) { + return MessageTopicPair{Message: message, Topic: router.Route(Deadletter, message)}, nil + } + return MessageTopicPair{}, intoOpErr(message.ID, errcodes.ValidationFailure, err) + } + + var result Result + if isValid { + result = Valid + } else { + result = Invalid + } + return MessageTopicPair{Message: message, Topic: router.Route(result, message)}, nil +} + +// PublishToTopic publishes a Message to a broker.Topic, returning the relevant OpError in case of failure. +// +// If publishing is successful, the ack func of the underlying broker.Message is called, and the global Metrics are updated. 
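The dead-letter branches of InferDestinationTopic above record a category and a reason in the message's RawAttributes. A hypothetical downstream consumer of the dead-letter topic could read them back like this (key names are taken from the code above; the values are illustrative):

```go
package main

import "fmt"

func main() {
	// RawAttributes as they might look after a message has been dead-lettered.
	attrs := map[string]interface{}{
		"deadLetterErrorCategory": "Validation error",
		"deadLetterErrorReason":   "payload does not conform to schema 1, version 1",
	}

	if category, ok := attrs["deadLetterErrorCategory"].(string); ok {
		reason, _ := attrs["deadLetterErrorReason"].(string)
		fmt.Printf("dead-lettered: %s (%s)\n", category, reason)
	}
}
```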
+func PublishToTopic(ctx context.Context, message Message, topic broker.Topic) error { + if err := topic.Publish(ctx, broker.OutboundMessage{ + Key: message.Key, + Data: message.Payload, + Attributes: message.RawAttributes, + }); err != nil { + return intoOpErr(message.ID, errcodes.PublishingFailure, err) + } + + return nil +} diff --git a/validator/internal/janitor/janitor_test.go b/validator/internal/janitor/janitor_test.go new file mode 100644 index 0000000..288e2e3 --- /dev/null +++ b/validator/internal/janitor/janitor_test.go @@ -0,0 +1,167 @@ +package janitor + +import ( + "reflect" + "strconv" + "testing" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + "github.com/dataphos/lib-streamproc/pkg/streamproc" + + "github.com/pkg/errors" +) + +const ( + AvroFormat = "avro" + CSVFormat = "csv" + JSONFormat = "json" + ProtobufFormat = "protobuf" + XMLFormat = "xml" +) + +func TestParse(t *testing.T) { + brokerMessage := streamproc.Message{ + Data: []byte("this is supposed to be some json data"), + Attributes: map[string]interface{}{ + AttributeSchemaID: "1", + AttributeSchemaVersion: "1", + AttributeFormat: "json", + }, + } + + expected := Message{ + RawAttributes: brokerMessage.Attributes, + Payload: brokerMessage.Data, + SchemaID: "1", + Version: "1", + Format: "json", + } + + actual, err := ParseMessage(brokerMessage) + if err != nil { + t.Fatal(err) + } + + if !reflect.DeepEqual(expected, actual) { + t.Fatal("expected and actual message not the same") + } +} + +func TestParseError(t *testing.T) { + brokerMessages := []streamproc.Message{ + { + Data: []byte("this is supposed to be some json data"), + Attributes: map[string]interface{}{ + AttributeSchemaID: "1", + AttributeSchemaVersion: "1", + }, + }, + { + Data: []byte("this is supposed to be some json data"), + Attributes: map[string]interface{}{ + AttributeSchemaID: "1", + AttributeSchemaVersion: 1, + "format": "json", + }, + }, + { + Data: []byte("this is supposed to be some json data"), + Attributes: map[string]interface{}{ + AttributeSchemaID: 1, + AttributeSchemaVersion: "1", + "format": "json", + }, + }, + } + + for i, brokerMessage := range brokerMessages { + brokerMessage := brokerMessage + t.Run("parsing failure number "+strconv.Itoa(i), func(t *testing.T) { + _, err := ParseMessage(brokerMessage) + if err == nil { + t.Error("expected error") + } + }) + } +} + +func TestValidatorsValidate(t *testing.T) { + tt := []struct { + name string + message Message + isValid bool + shouldReturnError bool + }{ + { + name: "is valid", + message: Message{ + SchemaID: "1", + Version: "1", + Format: JSONFormat, + Payload: []byte("this is some data to be validates against a Schema, not important for this unit test"), + }, + isValid: true, + shouldReturnError: false, + }, + { + name: "is not valid", + message: Message{ + SchemaID: "1", + Version: "1", + Format: JSONFormat, + Payload: []byte("this is some data to be validates against a Schema, not important for this unit test"), + }, + isValid: false, + shouldReturnError: false, + }, + { + name: "valid, but the format is not supported", + message: Message{ + SchemaID: "1", + Version: "1", + Format: JSONFormat, + Payload: []byte("this is some data to be validates against a Schema, not important for this unit test"), + }, + isValid: true, + shouldReturnError: false, + }, + { + name: "throws error", + message: Message{ + SchemaID: "1", + Version: "1", + Format: JSONFormat, + Payload: []byte("this is some data to be validates against a schema, not important for 
this unit test"), + }, + isValid: false, + shouldReturnError: true, + }, + } + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + schema := []byte("schema for message validation, not important for this unit test") + validators := Validators(map[string]validator.Validator{ + JSONFormat: validator.Func(func(message, schema []byte, id string, version string) (bool, error) { + if tc.shouldReturnError { + return false, errors.New("test error") + } + return tc.isValid, nil + }), + }) + + isValid, err := validators.Validate(tc.message, schema) + if err != nil { + if !tc.shouldReturnError { + if tc.message.Format == JSONFormat { + t.Error("error occurred but was not expected", err) + } + } + } + if isValid && !tc.isValid { + t.Error("message set to valid but not valid expected") + } + }) + } +} diff --git a/validator/internal/janitor/logging.go b/validator/internal/janitor/logging.go new file mode 100644 index 0000000..ac7b0ac --- /dev/null +++ b/validator/internal/janitor/logging.go @@ -0,0 +1,102 @@ +package janitor + +import ( + "github.com/pkg/errors" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errcodes" + "github.com/dataphos/lib-logger/logger" + "github.com/dataphos/lib-streamproc/pkg/streamproc" +) + +// RouterFlags defines the logging level of each call to the LoggingRouter. +// +// Intended to be used in LoggingRouter. +type RouterFlags struct { + MissingSchema bool + Valid bool + Invalid bool + Deadletter bool +} + +// LoggingRouter wraps the given LoggingRouter with logging middleware. +func LoggingRouter(log logger.Log, routerFlags RouterFlags, next Router) Router { + return RoutingFunc(func(result Result, message Message) string { + switch { + case routerFlags.MissingSchema && result == MissingSchema: + log.Warnw("message is missing the schema", logger.F{ + "status": "missing schema", + "id": message.ID, + "format": message.Format, + }) + case routerFlags.Valid && result == Valid: + log.Infow("message is classified as valid", logger.F{ + "status": "valid", + "id": message.ID, + "schema_id": message.SchemaID, + "schema_version": message.Version, + "format": message.Format, + }) + case routerFlags.Invalid && result == Invalid: + log.Errorw("message is classified as invalid", errcodes.InvalidMessage, logger.F{ + "status": "invalid", + "id": message.ID, + "schema_id": message.SchemaID, + "schema_version": message.Version, + "format": message.Format, + }) + case routerFlags.Deadletter && result == Deadletter: + log.Errorw("message is classified as Deadletter", errcodes.DeadletterMessage, logger.F{ + "status": "Deadletter", + "id": message.ID, + "schema_id": message.SchemaID, + "schema_version": message.Version, + "format": message.Format, + }) + } + + return next.Route(result, message) + }) +} + +type ShouldReturnFlowControl struct { + OnPullErr streamproc.FlowControl + OnProcessErr streamproc.FlowControl + OnUnrecoverable streamproc.FlowControl + OnThresholdReached streamproc.FlowControl +} + +// LoggingCallbacks returns a slice of streamproc.RunOptions, configuring streamproc.RunOptions to log all events with the agreed error codes. 
+func LoggingCallbacks(log logger.Log, control ShouldReturnFlowControl) []streamproc.RunOption { + onPullErr := func(err error) streamproc.FlowControl { + log.Error(err.Error(), errcodes.PullingFailure) + return control.OnPullErr + } + + OnProcessErr := func(err error) streamproc.FlowControl { + code := errcodes.Miscellaneous + opError := &OpError{} + if errors.As(err, &opError) { + code = opError.Code + } + log.Error(err.Error(), uint64(code)) + + return control.OnProcessErr + } + + onUnrecoverable := func(err error) streamproc.FlowControl { + log.Error(errors.Wrap(err, "unrecoverable error encountered").Error(), errcodes.UnrecoverableErrorEncountered) + return control.OnUnrecoverable + } + + onThresholdReached := func(err error, count, threshold int64) streamproc.FlowControl { + log.Error(errors.Errorf("error threshold reached (%d >= %d)", count, threshold).Error(), errcodes.ErrorThresholdReached) + return control.OnThresholdReached + } + + return []streamproc.RunOption{ + streamproc.OnPullErr(onPullErr), + streamproc.OnProcessErr(OnProcessErr), + streamproc.OnUnrecoverable(onUnrecoverable), + streamproc.OnThresholdReached(onThresholdReached), + } +} diff --git a/validator/internal/janitor/logging_test.go b/validator/internal/janitor/logging_test.go new file mode 100644 index 0000000..af8d838 --- /dev/null +++ b/validator/internal/janitor/logging_test.go @@ -0,0 +1,80 @@ +package janitor + +import ( + "testing" + + "github.com/dataphos/lib-logger/logger" + "github.com/dataphos/lib-logger/standardlogger" +) + +func TestRoutingFunc(t *testing.T) { + tt := []struct { + name string + routerFlags RouterFlags + result Result + }{ + { + "valid is propagated", + RouterFlags{ + MissingSchema: true, + Valid: true, + Deadletter: true, + }, + Valid, + }, + { + "invalid is propagated", + RouterFlags{ + MissingSchema: true, + Valid: true, + Deadletter: true, + }, + Invalid, + }, + { + "invalid is propagated", + RouterFlags{ + MissingSchema: true, + Valid: true, + Deadletter: true, + }, + MissingSchema, + }, + { + "Deadletter is propagated", + RouterFlags{ + MissingSchema: true, + Valid: true, + Deadletter: true, + }, + Deadletter, + }, + } + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + destination := "some topic" + var called int + next := RoutingFunc(func(result Result, message Message) string { + if result != tc.result { + t.Fatal("wrong result propagated") + } + called++ + + return destination + }) + + r := LoggingRouter(standardlogger.New(logger.L{}), tc.routerFlags, next) + + actual := r.Route(tc.result, Message{}) + if actual != destination { + t.Error("expected and actual not the same") + } + + if called != 1 { + t.Error("not propagated correctly") + } + }) + } +} diff --git a/validator/internal/janitor/metrics.go b/validator/internal/janitor/metrics.go new file mode 100644 index 0000000..788830c --- /dev/null +++ b/validator/internal/janitor/metrics.go @@ -0,0 +1,104 @@ +package janitor + +import ( + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" +) + +var ( + publishCountProm = promauto.NewCounter(prometheus.CounterOpts{ + Namespace: "schema_registry", + Name: "published_messages_total", + Help: "The total number of published messages", + }) + bytesProcessedProm = promauto.NewCounter(prometheus.CounterOpts{ + Namespace: "schema_registry", + Name: "processed_bytes_total", + Help: "The total number of processed bytes", + }) + processingTimesProm = promauto.NewSummary(prometheus.SummaryOpts{ 
+ Namespace: "schema_registry", + Name: "processing_times_milliseconds", + Help: "Processing times of published messages in milliseconds", + MaxAge: 5 * time.Minute, + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }) +) + +// UpdateSuccessMetrics updates Prometheus metrics: publishCountProm, bytesProcessedProm, and processingTimesProm. +func UpdateSuccessMetrics(messages ...Message) { + publishCountProm.Add(float64(len(messages))) + + for _, message := range messages { + messageProcessingTime := time.Since(message.IngestionTime).Milliseconds() + processingTimesProm.Observe(float64(messageProcessingTime)) + + bytesProcessedProm.Add(float64(len(message.Payload))) + } +} + +var ( + publishDLCountProm = promauto.NewCounter(prometheus.CounterOpts{ + Namespace: "schema_registry", + Name: "published_dead_letter_messages_total", + Help: "The total number of published dead letter messages", + }) + bytesDLProcessedProm = promauto.NewCounter(prometheus.CounterOpts{ + Namespace: "schema_registry", + Name: "processed_dead_letter_bytes_total", + Help: "The total number of processed dead letter bytes", + }) + processingDLTimesProm = promauto.NewSummary(prometheus.SummaryOpts{ + Namespace: "schema_registry", + Name: "processing_dead_letter_times_milliseconds", + Help: "Processing times of published dead letter messages in milliseconds", + MaxAge: 5 * time.Minute, + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }) +) + +// UpdateSuccessDLMetrics updates Prometheus metrics: publishDLCountProm, bytesDLProcessedProm, and processingDLTimesProm. +func UpdateSuccessDLMetrics(messages ...Message) { + publishDLCountProm.Add(float64(len(messages))) + + for _, message := range messages { + messageProcessingTime := time.Since(message.IngestionTime).Milliseconds() + processingDLTimesProm.Observe(float64(messageProcessingTime)) + + bytesDLProcessedProm.Add(float64(len(message.Payload))) + } +} + +var ( + nackCountProm = promauto.NewCounter(prometheus.CounterOpts{ + Namespace: "schema_registry", + Name: "nack_messages_total", + Help: "The total number of nack messages", + }) + nackBytesProcessedProm = promauto.NewCounter(prometheus.CounterOpts{ + Namespace: "schema_registry", + Name: "nack_processed_bytes_total", + Help: "The total number of nack processed bytes", + }) + nackProcessingTimesProm = promauto.NewSummary(prometheus.SummaryOpts{ + Namespace: "schema_registry", + Name: "nack_processing_times_milliseconds", + Help: "Processing times of nack messages in milliseconds", + MaxAge: 5 * time.Minute, + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }) +) + +// UpdateFailureMetrics updates Prometheus metrics: nackCountProm, nackBytesProcessedProm, and nackProcessingTimesProm. 
+func UpdateFailureMetrics(messages ...Message) { + nackCountProm.Add(float64(len(messages))) + + for _, message := range messages { + msgNackProcessingTime := time.Since(message.IngestionTime).Milliseconds() + nackProcessingTimesProm.Observe(float64(msgNackProcessingTime)) + + nackBytesProcessedProm.Add(float64(len(message.Payload))) + } +} diff --git a/validator/internal/janitor/parse.go b/validator/internal/janitor/parse.go new file mode 100644 index 0000000..bee5665 --- /dev/null +++ b/validator/internal/janitor/parse.go @@ -0,0 +1,75 @@ +package janitor + +import ( + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/dataphos/lib-streamproc/pkg/streamproc" +) + +// ParseMessage parses a given broker.Message into Message, by setting Message.Payload to the value of the data field of the given +// broker.Message, and extracting the other fields from the attributes field. +// +// ParseMessage checks if the attributes field contains the following keys: AttributeSchemaID, AttributeSchemaVersion and AttributeFormat. +// If AttributeSchemaID or AttributeSchemaVersion are present, then it is assumed they are strings, returning an error otherwise. +// The AttributeFormat key must be present and must be a non-empty string. +func ParseMessage(message streamproc.Message) (Message, error) { + parsed := Message{ + ID: message.ID, + Key: message.Key, + Payload: message.Data, + RawAttributes: message.Attributes, + IngestionTime: message.IngestionTime, + } + + attributes, err := ExtractAttributes(message.Attributes) + if err != nil { + return parsed, err + } + parsed.SchemaID = attributes.SchemaId + parsed.Version = attributes.SchemaVersion + parsed.Format = attributes.Format + + return parsed, nil +} + +type Attributes struct { + SchemaId string + SchemaVersion string + Format string +} + +func ExtractAttributes(raw map[string]interface{}) (Attributes, error) { + var schemaIDStr, versionStr, formatStr string + + schemaID, ok := raw[AttributeSchemaID] + if ok { + schemaIDStr, ok = schemaID.(string) + if !ok { + return Attributes{}, errtemplates.AttributeNotAString(AttributeSchemaID) + } + } + + version, ok := raw[AttributeSchemaVersion] + if ok { + versionStr, ok = version.(string) + if !ok { + return Attributes{}, errtemplates.AttributeNotAString(AttributeSchemaVersion) + } + } + format, ok := raw[AttributeFormat] + if !ok { + return Attributes{}, errtemplates.AttributeNotDefined(AttributeFormat) + } + formatStr, ok = format.(string) + if !ok { + return Attributes{}, errtemplates.AttributeNotAString(AttributeFormat) + } + if formatStr == "" { + return Attributes{}, errtemplates.MustNotBeEmpty(AttributeFormat) + } + + return Attributes{ + SchemaId: schemaIDStr, + SchemaVersion: versionStr, + Format: formatStr, + }, nil +} diff --git a/validator/internal/janitor/processor.go b/validator/internal/janitor/processor.go new file mode 100644 index 0000000..a5d54ac --- /dev/null +++ b/validator/internal/janitor/processor.go @@ -0,0 +1,225 @@ +package janitor + +import ( + "context" + + "golang.org/x/sync/errgroup" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errcodes" + "github.com/dataphos/lib-batchproc/pkg/batchproc" + "github.com/dataphos/lib-brokers/pkg/broker" + "github.com/dataphos/lib-logger/logger" + "github.com/dataphos/lib-streamproc/pkg/streamproc" +) + +type Processor struct { + Handler Handler + Topics map[string]broker.Topic + Deadletter string + log logger.Log +} + +type Handler interface { + Handle(context.Context, Message) 
(MessageTopicPair, error) +} + +func NewProcessor(handler Handler, topics map[string]broker.Topic, deadletter string, log logger.Log) *Processor { + return &Processor{ + Handler: handler, + Topics: topics, + Deadletter: deadletter, + log: log, + } +} + +// HandleMessage processes the given streamproc.Message by first attempting to parse it, and then calling the +// underlying Handler. +func (p *Processor) HandleMessage(ctx context.Context, message streamproc.Message) error { + // The received ctx isn't used because the whole process is assumed to take up very little time. + // Because of this, it's preferred to exit "cleanly" instead of stopping mid-process, which might + // have side effects. + ctx = context.Background() //nolint:staticcheck (ignored the rule SA4009 in .golangci.yaml) + + parsed, ok, err := p.parseOrSendToDeadletter(ctx, message) + if err != nil { + UpdateFailureMetrics(parsed) + return err + } + if ok { + messageTopicPair, err := p.Handler.Handle(ctx, parsed) + if err != nil { + UpdateFailureMetrics(parsed) + return err + } + if err = PublishToTopic(ctx, messageTopicPair.Message, p.Topics[messageTopicPair.Topic]); err != nil { + UpdateFailureMetrics(messageTopicPair.Message) + return err + } + // if message is invalid (sent to DL), update DL metrics + if messageTopicPair.Topic == p.Deadletter { + UpdateSuccessDLMetrics(parsed) + } + } + UpdateSuccessMetrics(parsed) + return nil +} + +func (p *Processor) parseOrSendToDeadletter(ctx context.Context, message streamproc.Message) (Message, bool, error) { + parsed, err := ParseMessage(message) + if err != nil { + p.log.Errorw(err.Error(), errcodes.ParsingMessage, logger.F{ + "id": message.ID, + }) + parsed.RawAttributes["deadLetterErrorCategory"] = "Parsing error" + parsed.RawAttributes["deadLetterErrorReason"] = err.Error() + if err = PublishToTopic(ctx, parsed, p.Topics[p.Deadletter]); err != nil { + return Message{}, false, err + } + UpdateSuccessDLMetrics(parsed) + return Message{}, false, nil + } + return parsed, true, nil +} + +// HandleBatch processes the given slice of streamproc.Message instances, by calling the underlying Handler, +// on each streamproc.Message concurrently. +func (p *Processor) HandleBatch(ctx context.Context, batch []streamproc.Message) error { + // The received ctx isn't used because the whole process is assumed to take up very little time. + // Because of this, it's preferred to exit "cleanly" instead of stopping mid-process, which might + // have side effects. + ctx = context.Background() //nolint:staticcheck + + batchSize := len(batch) + messageTopicPairs := make([]*MessageTopicPair, batchSize) + failed := make([]bool, batchSize) + + // The batch is processed in chunks (and not one per goroutine), since the Handler is assumed to be mostly CPU bound. + if err := batchproc.Parallel(ctx, batchSize, func(ctx context.Context, lb int, ub int) error { + for i := lb; i < ub; i++ { + parsed, ok, err := p.parseOrSendToDeadletter(ctx, batch[i]) + if err != nil { + UpdateFailureMetrics(parsed) + failed[i] = true + return err + } + if ok { + messageTopicPair, err := p.Handler.Handle(ctx, parsed) + if err != nil { + UpdateFailureMetrics(parsed) + failed[i] = true + return err + } + messageTopicPairs[i] = &messageTopicPair + } + } + return nil + }); err != nil { + return &streamproc.PartiallyProcessedBatchError{ + Failed: indicesWhereTrue(failed), + Err: err, + } + } + + // Publish order needs to be preserved if messages have the same key, so we partition the processed messages before publishing them. 
+ // That way, we can still utilize concurrency in the general case (it's unlikely for multiple messages in a batch to actually share the key), + // but still ensure ordering on the target topics when it matters. + partitions := groupByKey(messageTopicPairs) + // The empty string key implies no key is defined, so we don't care about order for this partition. + keyless := partitions[""] + delete(partitions, "") + + eg, ctx := errgroup.WithContext(ctx) + + // Required extra check for the number of keyless messages to avoid division by zero in batchproc.Process(). + if numKeyless := len(keyless); numKeyless != 0 { + eg.Go(func() error { + return batchproc.Process(ctx, numKeyless, numKeyless, func(ctx context.Context, i int, _ int) error { + messageTopicPair := messageTopicPairs[keyless[i]] + if err := PublishToTopic(ctx, messageTopicPair.Message, p.Topics[messageTopicPair.Topic]); err != nil { + UpdateFailureMetrics(messageTopicPair.Message) + failed[keyless[i]] = true + return err + } + // if message is invalid (sent to DL), update DL metrics + if messageTopicPair.Topic == p.Deadletter { + UpdateSuccessDLMetrics(messageTopicPair.Message) + } + UpdateSuccessMetrics(messageTopicPair.Message) + return nil + }) + }) + } + + for _, partition := range partitions { + partition := partition + eg.Go(func() error { + // Publishing needs to be sequential on a per-partition basis. + for _, index := range partition { + messageTopicPair := messageTopicPairs[index] + if err := PublishToTopic(ctx, messageTopicPair.Message, p.Topics[messageTopicPair.Topic]); err != nil { + UpdateFailureMetrics(messageTopicPair.Message) + failed[index] = true + return err + } + // if message is invalid (sent to DL), update DL metrics + if messageTopicPair.Topic == p.Deadletter { + UpdateSuccessDLMetrics(messageTopicPair.Message) + } + UpdateSuccessMetrics(messageTopicPair.Message) + } + return nil + }) + } + + if err := eg.Wait(); err != nil { + return &streamproc.PartiallyProcessedBatchError{ + Failed: indicesWhereTrue(failed), + Err: err, + } + } + return nil +} + +// indicesWhereTrue collects the indices of those elements in the bool slice which are set to true. +func indicesWhereTrue(bools []bool) []int { + var indices []int + for i, isFailed := range bools { + if isFailed { + indices = append(indices, i) + } + } + return indices +} + +// groupByKey groups the given slice of *MessageTopicPair by the contents of the Message.Key, returning +// a map which for every unique key, holds the indices of all elements which share that key. +func groupByKey(messageTopicPairs []*MessageTopicPair) map[string][]int { + groups := make(map[string][]int) + for i, messageTopicPair := range messageTopicPairs { + if messageTopicPair == nil { + continue + } + groups[messageTopicPair.Message.Key] = append(groups[messageTopicPair.Message.Key], i) + } + return groups +} + +// AsReceiverExecutor returns a new streamproc.ReceiverExecutor, referencing this instance. +func (p *Processor) AsReceiverExecutor() *streamproc.ReceiverExecutor { + return streamproc.NewReceiverExecutor(p) +} + +// AsBatchedReceiverExecutor returns a new streamproc.BatchedReceiverExecutor, referencing this instance. +func (p *Processor) AsBatchedReceiverExecutor() *streamproc.BatchedReceiverExecutor { + return streamproc.NewBatchedReceiverExecutor(p) +} + +// AsRecordExecutor returns a new streamproc.RecordExecutor, referencing this instance. 
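A sketch of wiring a Processor together and turning it into one of the executor adapters defined just below. The handler stub, topic names, and nil topic placeholders are made up; real broker.Topic values would come from the configured publisher.

```go
package main

import (
	"context"

	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor"
	"github.com/dataphos/lib-brokers/pkg/broker"
	"github.com/dataphos/lib-logger/logger"
	"github.com/dataphos/lib-logger/standardlogger"
)

// passthroughHandler is a stub Handler that routes every message to a fixed topic.
type passthroughHandler struct{}

func (passthroughHandler) Handle(_ context.Context, message janitor.Message) (janitor.MessageTopicPair, error) {
	return janitor.MessageTopicPair{Message: message, Topic: "valid-topic"}, nil
}

func main() {
	log := standardlogger.New(logger.L{})

	// nil entries keep the sketch compilable; actual topics come from the broker.Publisher (not shown here).
	topics := map[string]broker.Topic{
		"valid-topic":      nil,
		"deadletter-topic": nil,
	}

	processor := janitor.NewProcessor(passthroughHandler{}, topics, "deadletter-topic", log)

	// The processor can then be plugged into a source system,
	// e.g. processor.AsBatchExecutor().Run(ctx, iterator, opts...).
	_ = processor.AsBatchExecutor()
}
```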
+func (p *Processor) AsRecordExecutor() *streamproc.RecordExecutor { + return streamproc.NewRecordExecutor(p) +} + +// AsBatchExecutor returns a new streamproc.BatchExecutor, referencing this instance. +func (p *Processor) AsBatchExecutor() *streamproc.BatchExecutor { + return streamproc.NewBatchExecutor(p) +} diff --git a/validator/internal/janitorctl/init.go b/validator/internal/janitorctl/init.go new file mode 100644 index 0000000..d804a86 --- /dev/null +++ b/validator/internal/janitorctl/init.go @@ -0,0 +1,747 @@ +package janitorctl + +import ( + "context" + "crypto/tls" + "net/http" + "runtime" + "time" + + "github.com/pkg/errors" + "github.com/prometheus/client_golang/prometheus" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/config" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errcodes" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry/apicuriosr" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry/janitorsr" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen" + csvgen "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen/csv" + jsongen "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen/json" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator/avro" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator/csv" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator/json" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator/protobuf" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator/xml" + "github.com/dataphos/lib-brokers/pkg/broker" + "github.com/dataphos/lib-brokers/pkg/broker/jetstream" + "github.com/dataphos/lib-brokers/pkg/broker/kafka" + "github.com/dataphos/lib-brokers/pkg/broker/pubsub" + "github.com/dataphos/lib-brokers/pkg/broker/pulsar" + "github.com/dataphos/lib-brokers/pkg/broker/servicebus" + "github.com/dataphos/lib-brokers/pkg/brokerutil" + "github.com/dataphos/lib-httputil/pkg/httputil" + "github.com/dataphos/lib-logger/logger" + "github.com/dataphos/lib-streamproc/pkg/streamproc" +) + +// initializeSchemaRegistry gets the janitor implementation of a schema registry, optionally decorating it with an in-memory lru cache +// if the appropriate env variable is set. 
+func initializeSchemaRegistry(ctx context.Context, log logger.Log, cfg *config.Registry) (registry.SchemaRegistry, error) { + ctx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + var sr registry.SchemaRegistry + var err error + switch cfg.Type { + case "apicurio": + sr, err = apicuriosr.New( + ctx, + cfg.URL, + apicuriosr.TimeoutSettings{ + GetTimeout: cfg.GetTimeout, + RegisterTimeout: cfg.RegisterTimeout, + UpdateTimeout: cfg.UpdateTimeout, + }, + cfg.GroupID, + ) + case "janitor": + sr, err = janitorsr.New( + ctx, + cfg.URL, + janitorsr.TimeoutSettings{ + GetTimeout: cfg.GetTimeout, + RegisterTimeout: cfg.RegisterTimeout, + UpdateTimeout: cfg.UpdateTimeout, + }, + cfg.GroupID, + ) + default: + sr, err = nil, errtemplates.UnsupportedRegistryType(cfg.Type) + } + if err != nil { + return nil, err + } + + cacheSize := cfg.InmemCacheSize + if cacheSize > 0 { + log.Infow("using in-memory cache for schema registry", logger.F{ + "cache_size": cacheSize, + }) + return registry.WithCache(sr, cacheSize) + } + + return sr, err +} + +// initKafkaPublisher initializes an instance of Kafka Publisher +func initKafkaPublisher(ctx context.Context, cfg *config.Producer) (broker.Publisher, error) { + var tlsConfig *tls.Config + var krbConfig *kafka.KerberosConfig + if cfg.Kafka.TlsConfig.Enabled { + var err error + tlsConfig, err = httputil.NewTLSConfig(cfg.Kafka.TlsConfig.ClientCertFile, cfg.Kafka.TlsConfig.ClientKeyFile, cfg.Kafka.TlsConfig.CaCertFile) + tlsConfig.InsecureSkipVerify = cfg.Kafka.TlsConfig.InsecureSkipVerify + if err != nil { + return nil, err + } + } + + if cfg.Kafka.KrbConfig.Enabled { + krbConfig = &kafka.KerberosConfig{ + KeyTabPath: cfg.Kafka.KrbConfig.KrbKeyTabPath, + ConfigPath: cfg.Kafka.KrbConfig.KrbConfigPath, + Realm: cfg.Kafka.KrbConfig.KrbRealm, + Service: cfg.Kafka.KrbConfig.KrbServiceName, + Username: cfg.Kafka.KrbConfig.KrbUsername, + } + } + + return kafka.NewPublisher( + ctx, + kafka.ProducerConfig{ + BrokerAddr: cfg.Kafka.Address, + TLS: tlsConfig, + Kerberos: krbConfig, + Prometheus: &kafka.PrometheusConfig{ + Namespace: "publisher", + Registerer: prometheus.DefaultRegisterer, + Gatherer: prometheus.DefaultGatherer, + }, + }, kafka.ProducerSettings{ + BatchSize: cfg.Kafka.Settings.BatchSize, + BatchBytes: cfg.Kafka.Settings.BatchBytes, + Linger: cfg.Kafka.Settings.Linger, + }, + ) +} + +// initEventHubsPublisher initializes an instance of Kafka Publisher +func initEventHubsPublisher(ctx context.Context, cfg *config.Producer) (broker.Publisher, error) { + var tlsConfig *tls.Config + saslConfig := &kafka.PlainSASLConfig{ + User: cfg.Eventhubs.SaslConfig.User, + Pass: cfg.Eventhubs.SaslConfig.Password, + } + + if cfg.Eventhubs.TlsConfig.Enabled { + var err error + tlsConfig, err = httputil.NewTLSConfig(cfg.Eventhubs.TlsConfig.ClientCertFile, cfg.Eventhubs.TlsConfig.ClientKeyFile, cfg.Eventhubs.TlsConfig.CaCertFile) + tlsConfig.InsecureSkipVerify = cfg.Eventhubs.TlsConfig.InsecureSkipVerify + if err != nil { + return nil, err + } + } else { + // TLS has to be set when using eventhubs. 
In case it is not set, we need to skip certificate verification + tlsConfig = &tls.Config{ + InsecureSkipVerify: cfg.Eventhubs.TlsConfig.InsecureSkipVerify, + } + } + return kafka.NewPublisher( + ctx, + kafka.ProducerConfig{ + BrokerAddr: cfg.Eventhubs.Address, + Prometheus: &kafka.PrometheusConfig{ + Namespace: "publisher", + Registerer: prometheus.DefaultRegisterer, + Gatherer: prometheus.DefaultGatherer, + }, + TLS: tlsConfig, + PlainSASL: saslConfig, + DisableCompression: true, + }, kafka.ProducerSettings{ + BatchSize: cfg.Eventhubs.Settings.BatchSize, + BatchBytes: cfg.Eventhubs.Settings.BatchBytes, + Linger: cfg.Eventhubs.Settings.Linger, + }, + ) +} + +// initPubSubPublisher initializes an instance of PubSub Publisher +func initPubSubPublisher(ctx context.Context, cfg *config.Producer) (broker.Publisher, error) { + return pubsub.NewPublisher( + ctx, + pubsub.PublisherConfig{ + ProjectID: cfg.Pubsub.ProjectId, + }, + pubsub.PublishSettings{ + DelayThreshold: cfg.Pubsub.Settings.DelayThreshold, + CountThreshold: cfg.Pubsub.Settings.CountThreshold, + ByteThreshold: cfg.Pubsub.Settings.ByteThreshold, + NumGoroutines: cfg.Pubsub.Settings.NumGoroutines, + Timeout: cfg.Pubsub.Settings.Timeout, + MaxOutstandingMessages: cfg.Pubsub.Settings.MaxOutstandingMessages, + MaxOutstandingBytes: cfg.Pubsub.Settings.MaxOutstandingBytes, + EnableMessageOrdering: cfg.Pubsub.Settings.EnableMessageOrdering, + }, + ) +} + +// initServiceBusPublisher initializes an instance of ServiceBus Publisher +func initServiceBusPublisher(cfg *config.Producer) (broker.Publisher, error) { + return servicebus.NewPublisher(cfg.Servicebus.ConnectionString) +} + +// initJetStreamPublisher initializes an instance of JetStream Publisher +func initJetStreamPublisher(ctx context.Context, cfg *config.Producer) (broker.Publisher, error) { + return jetstream.NewPublisher( + ctx, + cfg.Jetstream.Url, + jetstream.PublisherSettings{ + MaxPending: cfg.Jetstream.Settings.MaxInflightPending, + }, + ) +} + +// initPulsarPublisher initializes an instance of Pulsar Publisher +func initPulsarPublisher(cfg *config.Producer) (broker.Publisher, error) { + var tlsConfig *tls.Config + if cfg.Kafka.TlsConfig.Enabled { + var err error + tlsConfig, err = httputil.NewTLSConfig(cfg.Pulsar.TlsConfig.ClientCertFile, cfg.Pulsar.TlsConfig.ClientKeyFile, cfg.Pulsar.TlsConfig.CaCertFile) + if err != nil { + return nil, err + } + } + + return pulsar.NewPublisher( + pulsar.PublisherConfig{ + ServiceURL: cfg.Pulsar.ServiceUrl, + TLSConfig: tlsConfig, + }, + pulsar.DefaultPublisherSettings, + ) +} + +// initializePublisher selects and initializes an instance of broker.Publisher, depending on the value based through the appropriate +// environment variable. +func initializePublisher(ctx context.Context, cfg *config.Producer) (broker.Publisher, error) { + switch cfg.Type { + case "kafka": + return initKafkaPublisher(ctx, cfg) + case "eventhubs": + return initEventHubsPublisher(ctx, cfg) + case "pubsub": + return initPubSubPublisher(ctx, cfg) + case "servicebus": + return initServiceBusPublisher(cfg) + case "jetstream": + return initJetStreamPublisher(ctx, cfg) + case "pulsar": + return initPulsarPublisher(cfg) + + default: + return nil, errtemplates.UnsupportedBrokerType(cfg.Type) + } +} + +// initializeValidators initializes a map of validator.Validator, +// depending on which validators are enabled. 
+func initializeValidatorsForCentralConsumer(ctx context.Context, cfg *config.CentralConsumer) (map[string]validator.Validator, error) { + validators := make(map[string]validator.Validator) + + if cfg.Validators.EnableAvro { + validators["avro"] = avro.New() + } + + if cfg.Validators.EnableCsv { + csvValidator, err := csv.New(ctx, cfg.Validators.CsvUrl, cfg.Validators.CsvTimeoutBase) + if err != nil { + return nil, errors.Wrap(err, "couldn't initialize csv validator") + } + validators["csv"] = csvValidator + } + + if cfg.Validators.EnableJson { + if cfg.Validators.JsonCacheSize > 0 { + if cfg.Validators.JsonUseAltBackend { + validators["json"] = json.NewCachedGoJsonSchemaValidator(cfg.Validators.JsonCacheSize) + } else { + validators["json"] = json.NewCached(cfg.Validators.JsonCacheSize) + } + } else { + if cfg.Validators.JsonUseAltBackend { + validators["json"] = json.NewGoJsonSchemaValidator() + } else { + validators["json"] = json.New() + } + } + } + + if cfg.Validators.EnableProtobuf { + protobufValidator, err := protobuf.New(cfg.Validators.ProtobufFilePath, cfg.Validators.ProtobufCacheSize) + if err != nil { + return nil, errors.Wrap(err, "couldn't initialize protobuf validator") + } + validators["protobuf"] = protobufValidator + } + + if cfg.Validators.EnableXml { + xmlValidator, err := xml.New(ctx, cfg.Validators.XmlUrl, cfg.Validators.XmlTimeoutBase) + if err != nil { + return nil, errors.Wrap(err, "couldn't initialize xml validator") + } + validators["xml"] = xmlValidator + } + + return validators, nil +} + +// initializeValidators initializes a map of validator.Validator, +// depending on which validators are enabled. +func initializeValidatorsForPullerCleaner(ctx context.Context, cfg *config.PullerCleaner) (map[string]validator.Validator, error) { + validators := make(map[string]validator.Validator) + + if cfg.Validators.EnableCsv { + csvValidator, err := csv.New(ctx, cfg.Validators.CsvUrl, cfg.Validators.CsvTimeoutBase) + if err != nil { + return nil, errors.Wrap(err, "couldn't initialize csv validator") + } + validators["csv"] = csvValidator + } + + if cfg.Validators.EnableJson { + if cfg.Validators.JsonCacheSize > 0 { + if cfg.Validators.JsonUseAltBackend { + validators["json"] = json.NewCachedGoJsonSchemaValidator(cfg.Validators.JsonCacheSize) + } else { + validators["json"] = json.NewCached(cfg.Validators.JsonCacheSize) + } + } else { + if cfg.Validators.JsonUseAltBackend { + validators["json"] = json.NewGoJsonSchemaValidator() + } else { + validators["json"] = json.New() + } + } + } + + return validators, nil +} + +// initializeGenerators initializes a map of enabled schemagen.Generator for puller cleaner. 
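A sketch of using the resulting generator map through SchemaGenerators.Generate (defined earlier in janitor.go). The payload is illustrative, and it assumes the CSV generator accepts a plain CSV payload, which isn't shown in this patch.

```go
package main

import (
	"fmt"

	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor"
	csvgen "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen/csv"
)

func main() {
	generators := janitor.SchemaGenerators{
		"csv": csvgen.New(),
	}

	message := janitor.Message{
		Format:  "csv", // Generate looks the generator up by the exact format string
		Payload: []byte("id,name\n1,foo\n2,bar\n"),
	}

	schema, err := generators.Generate(message)
	if err != nil {
		fmt.Println("schema generation failed:", err)
		return
	}
	fmt.Printf("inferred schema: %s\n", schema)
}
```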
+func initializeGenerators(cfg *config.PullerCleaner) (map[string]schemagen.Generator, error) { + generators := make(map[string]schemagen.Generator) + + if cfg.Validators.EnableCsv { + generators["csv"] = csvgen.New() + } + + if cfg.Validators.EnableJson { + if cfg.Validators.JsonSchemaGenScript == "" { + return nil, errors.New("jsonSchemaGenScript not defined") + } + generators["json"] = jsongen.New(cfg.Validators.JsonSchemaGenScript) + } + + return generators, nil +} + +// initKafkaConsumer initializes a Kafka consumer component +func initKafkaConsumer(ctx context.Context, processor *janitor.Processor, log logger.Log, cfg *config.Consumer, opts []streamproc.RunOption) { + var srv *http.Server + var tlsConfig *tls.Config + var krbConfig *kafka.KerberosConfig + if cfg.Kafka.TlsConfig.Enabled { + var err error + tlsConfig, err = httputil.NewTLSConfig(cfg.Kafka.TlsConfig.ClientCertFile, cfg.Kafka.TlsConfig.ClientKeyFile, cfg.Kafka.TlsConfig.CaCertFile) + if err != nil { + log.Error(err.Error(), errcodes.TLSInitialization) + return + } + } + + if cfg.Kafka.KrbConfig.Enabled { + krbConfig = &kafka.KerberosConfig{ + KeyTabPath: cfg.Kafka.KrbConfig.KrbKeyTabPath, + ConfigPath: cfg.Kafka.KrbConfig.KrbConfigPath, + Realm: cfg.Kafka.KrbConfig.KrbRealm, + Service: cfg.Kafka.KrbConfig.KrbServiceName, + Username: cfg.Kafka.KrbConfig.KrbUsername, + } + } + + iterator, err := kafka.NewBatchIterator( + ctx, + kafka.ConsumerConfig{ + BrokerAddr: cfg.Kafka.Address, + GroupID: cfg.Kafka.GroupId, + Topic: cfg.Kafka.Topic, + TLS: tlsConfig, + Kerberos: krbConfig, + Prometheus: &kafka.PrometheusConfig{ + Namespace: "consumer", + Registerer: prometheus.DefaultRegisterer, + Gatherer: prometheus.DefaultGatherer, + }, + }, + kafka.BatchConsumerSettings{ + ConsumerSettings: kafka.ConsumerSettings{ + MinBytes: cfg.Kafka.Settings.MinBytes, + MaxWait: cfg.Kafka.Settings.MaxWait, + MaxBytes: cfg.Kafka.Settings.MaxBytes, + MaxConcurrentFetches: cfg.Kafka.Settings.MaxConcurrentFetches, + }, + MaxPollRecords: cfg.Kafka.Settings.MaxPollRecords, + }, + ) + if err != nil { + log.Error(err.Error(), errcodes.BrokerInitialization) + return + } + defer iterator.Close() + + srv = runMetricsServer(log) + + flowOpts := janitor.LoggingCallbacks(log, janitor.ShouldReturnFlowControl{ + OnPullErr: streamproc.FlowControlContinue, + OnProcessErr: streamproc.FlowControlStop, + OnUnrecoverable: streamproc.FlowControlStop, + OnThresholdReached: streamproc.FlowControlStop, + }) + + opts = append(opts, flowOpts...) 
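+	// Pull errors are treated as transient for this consumer (OnPullErr: FlowControlContinue), so a failed fetch
+	// does not stop the run; processing errors, unrecoverable errors and a reached error threshold do.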
+ + log.Info("setup complete, running") + if err = processor.AsBatchExecutor().Run(ctx, iterator, opts...); err != nil { + log.Error(err.Error(), errcodes.CompletedWithErrors) + } + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := srv.Shutdown(ctx); err != nil { + log.Error(errors.Wrap(err, "http server shutdown failed").Error(), errcodes.MetricsServerShutdownFailure) + } +} + +// initEventHubsConsumer initializes a PubSub consumer component +func initEventHubsConsumer(ctx context.Context, processor *janitor.Processor, log logger.Log, cfg *config.Consumer, opts []streamproc.RunOption) { + var srv *http.Server + var tlsConfig *tls.Config + saslConfig := &kafka.PlainSASLConfig{ + User: cfg.Eventhubs.SaslConfig.User, + Pass: cfg.Eventhubs.SaslConfig.Password, + } + if cfg.Eventhubs.TlsConfig.Enabled { + var err error + tlsConfig, err = httputil.NewTLSConfig(cfg.Eventhubs.TlsConfig.ClientCertFile, cfg.Eventhubs.TlsConfig.ClientKeyFile, cfg.Eventhubs.TlsConfig.CaCertFile) + if err != nil { + log.Error(err.Error(), errcodes.TLSInitialization) + return + } + } else { + // TLS has to be set when using eventhubs. In case it is not set, we need to skip certificate verification + tlsConfig = &tls.Config{ + InsecureSkipVerify: cfg.Eventhubs.TlsConfig.InsecureSkipVerify, + } + } + + iterator, err := kafka.NewBatchIterator( + ctx, + kafka.ConsumerConfig{ + BrokerAddr: cfg.Eventhubs.Address, + GroupID: cfg.Eventhubs.GroupId, + Topic: cfg.Eventhubs.Topic, + TLS: tlsConfig, + Prometheus: &kafka.PrometheusConfig{ + Namespace: "consumer", + Registerer: prometheus.DefaultRegisterer, + Gatherer: prometheus.DefaultGatherer, + }, + PlainSASL: saslConfig, + }, + kafka.BatchConsumerSettings{ + ConsumerSettings: kafka.ConsumerSettings{ + MinBytes: cfg.Eventhubs.Settings.MinBytes, + MaxWait: cfg.Eventhubs.Settings.MaxWait, + MaxBytes: cfg.Eventhubs.Settings.MaxBytes, + MaxConcurrentFetches: cfg.Eventhubs.Settings.MaxConcurrentFetches, + }, + MaxPollRecords: cfg.Eventhubs.Settings.MaxPollRecords, + }, + ) + if err != nil { + log.Error(err.Error(), errcodes.BrokerInitialization) + return + } + defer iterator.Close() + + srv = runMetricsServer(log) + + log.Info("setup complete, running") + if err = processor.AsBatchExecutor().Run(ctx, iterator, opts...); err != nil { + log.Error(err.Error(), errcodes.CompletedWithErrors) + } + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := srv.Shutdown(ctx); err != nil { + log.Error(errors.Wrap(err, "http server shutdown failed").Error(), errcodes.MetricsServerShutdownFailure) + } +} + +// initPubSubConsumer initializes a PubSub consumer component +func initPubSubConsumer(ctx context.Context, processor *janitor.Processor, log logger.Log, cfg *config.Consumer, opts []streamproc.RunOption) { + var srv *http.Server + receiver, err := pubsub.NewReceiver( + ctx, + pubsub.ReceiverConfig{ + ProjectID: cfg.Pubsub.ProjectId, + SubscriptionID: cfg.Pubsub.SubscriptionId, + }, + pubsub.ReceiveSettings{ + MaxExtension: cfg.Pubsub.Settings.MaxExtension, + MaxExtensionPeriod: cfg.Pubsub.Settings.MaxExtensionPeriod, + MaxOutstandingMessages: cfg.Pubsub.Settings.MaxOutstandingMessages, + MaxOutstandingBytes: cfg.Pubsub.Settings.MaxOutstandingBytes, + NumGoroutines: cfg.Pubsub.Settings.NumGoroutines, + }, + ) + if err != nil { + log.Error(err.Error(), errcodes.BrokerInitialization) + return + } + defer func() { + if err := receiver.Close(); err != nil { + log.Error(err.Error(), 
errcodes.BrokerConnClosed) + } + }() + + srv = runMetricsServer(log) + + flowOpts := janitor.LoggingCallbacks(log, janitor.ShouldReturnFlowControl{ + OnPullErr: streamproc.FlowControlStop, + OnProcessErr: streamproc.FlowControlContinue, + OnUnrecoverable: streamproc.FlowControlContinue, + OnThresholdReached: streamproc.FlowControlStop, + }) + + opts = append(opts, flowOpts...) + + log.Info("setup complete, running") + if err = processor.AsReceiverExecutor().Run(ctx, receiver, opts...); err != nil { + log.Error(err.Error(), errcodes.CompletedWithErrors) + } + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := srv.Shutdown(ctx); err != nil { + log.Error(errors.Wrap(err, "http server shutdown failed").Error(), errcodes.MetricsServerShutdownFailure) + } +} + +// initServiceBusConsumer initializes a ServiceBus consumer component +func initServiceBusConsumer(ctx context.Context, processor *janitor.Processor, log logger.Log, cfg *config.Consumer, opts []streamproc.RunOption) { + var srv *http.Server + iterator, err := servicebus.NewBatchIterator( + servicebus.IteratorConfig{ + ConnectionString: cfg.Servicebus.ConnectionString, + Topic: cfg.Servicebus.Topic, + Subscription: cfg.Servicebus.Subscription, + }, + servicebus.BatchIteratorSettings{ + BatchSize: cfg.Servicebus.Settings.BatchSize, + }, + ) + if err != nil { + log.Error(err.Error(), errcodes.BrokerInitialization) + return + } + defer func() { + if err := iterator.Close(); err != nil { + log.Error(err.Error(), errcodes.BrokerConnClosed) + } + }() + + batchedReceiver := brokerutil.BatchedMessageIteratorIntoBatchedReceiver( + iterator, + brokerutil.IntoBatchedReceiverSettings{ + NumGoroutines: runtime.GOMAXPROCS(0), + }, + ) + + srv = runMetricsServer(log) + + flowOpts := janitor.LoggingCallbacks(log, janitor.ShouldReturnFlowControl{ + OnPullErr: streamproc.FlowControlStop, + OnProcessErr: streamproc.FlowControlContinue, + OnUnrecoverable: streamproc.FlowControlContinue, + OnThresholdReached: streamproc.FlowControlStop, + }) + + opts = append(opts, flowOpts...) 
+ + log.Info("setup complete, running") + if err = processor.AsBatchedReceiverExecutor().Run(ctx, batchedReceiver, opts...); err != nil { + log.Error(err.Error(), errcodes.CompletedWithErrors) + } + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := srv.Shutdown(ctx); err != nil { + log.Error(errors.Wrap(err, "http server shutdown failed").Error(), errcodes.MetricsServerShutdownFailure) + } +} + +// initJetStreamConsumer initializes a JetStream consumer component +func initJetStreamConsumer(ctx context.Context, processor *janitor.Processor, log logger.Log, cfg *config.Consumer, opts []streamproc.RunOption) { + var srv *http.Server + iterator, err := jetstream.NewBatchIterator( + ctx, + jetstream.IteratorConfig{ + URL: cfg.Jetstream.Url, + Subject: cfg.Jetstream.Subject, + ConsumerName: cfg.Jetstream.ConsumerName, + }, + jetstream.BatchIteratorSettings{ + BatchSize: cfg.Jetstream.Settings.BatchSize, + }, + ) + if err != nil { + log.Error(err.Error(), errcodes.BrokerInitialization) + return + } + defer iterator.Close() + + batchedReceiver := brokerutil.BatchedMessageIteratorIntoBatchedReceiver( + iterator, + brokerutil.IntoBatchedReceiverSettings{ + NumGoroutines: runtime.GOMAXPROCS(0), + }, + ) + + srv = runMetricsServer(log) + + flowOpts := janitor.LoggingCallbacks(log, janitor.ShouldReturnFlowControl{ + OnPullErr: streamproc.FlowControlStop, + OnProcessErr: streamproc.FlowControlContinue, + OnUnrecoverable: streamproc.FlowControlContinue, + OnThresholdReached: streamproc.FlowControlStop, + }) + + opts = append(opts, flowOpts...) + + log.Info("setup complete, running") + if err = processor.AsBatchedReceiverExecutor().Run(ctx, batchedReceiver, opts...); err != nil { + log.Error(err.Error(), errcodes.CompletedWithErrors) + } + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := srv.Shutdown(ctx); err != nil { + log.Error(errors.Wrap(err, "http server shutdown failed").Error(), errcodes.MetricsServerShutdownFailure) + } +} + +// initPulsarConsumer initializes a Pulsar consumer component +func initPulsarConsumer(ctx context.Context, processor *janitor.Processor, log logger.Log, cfg *config.Consumer, opts []streamproc.RunOption) { + var srv *http.Server + var tlsConfig *tls.Config + if cfg.Kafka.TlsConfig.Enabled { + var err error + tlsConfig, err = httputil.NewTLSConfig(cfg.Pulsar.TlsConfig.ClientCertFile, cfg.Pulsar.TlsConfig.ClientKeyFile, cfg.Pulsar.TlsConfig.CaCertFile) + if err != nil { + log.Error(err.Error(), errcodes.TLSInitialization) + return + } + } + + iterator, err := pulsar.NewIterator( + pulsar.IteratorConfig{ + ServiceURL: cfg.Pulsar.ServiceUrl, + Topic: cfg.Pulsar.Topic, + Subscription: cfg.Pulsar.Subscription, + TLSConfig: tlsConfig, + }, + pulsar.DefaultIteratorSettings, + ) + if err != nil { + log.Error(err.Error(), errcodes.BrokerInitialization) + } + defer func() { + if err := iterator.Close(); err != nil { + log.Error(err.Error(), errcodes.BrokerConnClosed) + } + }() + + batchedReceiver := brokerutil.MessageIteratorIntoBatchedReceiver( + iterator, + brokerutil.IntoBatchedMessageIteratorSettings{ + BatchSize: pulsar.DefaultIteratorSettings.ReceiverQueueSize, + Timeout: pulsar.DefaultIteratorSettings.OperationTimeout, + }, + brokerutil.IntoBatchedReceiverSettings{ + NumGoroutines: runtime.GOMAXPROCS(0), + }, + ) + + srv = runMetricsServer(log) + + flowOpts := janitor.LoggingCallbacks(log, janitor.ShouldReturnFlowControl{ + OnPullErr: streamproc.FlowControlStop, + 
OnProcessErr: streamproc.FlowControlContinue, + OnUnrecoverable: streamproc.FlowControlContinue, + OnThresholdReached: streamproc.FlowControlStop, + }) + + opts = append(opts, flowOpts...) + + log.Info("setup complete, running") + if err = processor.AsBatchedReceiverExecutor().Run(ctx, batchedReceiver, opts...); err != nil { + log.Error(err.Error(), errcodes.CompletedWithErrors) + } + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := srv.Shutdown(ctx); err != nil { + log.Error(errors.Wrap(err, "http server shutdown failed").Error(), errcodes.MetricsServerShutdownFailure) + } +} + +// initializeSourceSystemAndRunProcessor initializes a consumer component and runs processor. +func initializeSourceSystemAndRunProcessor(ctx context.Context, processor *janitor.Processor, log logger.Log, cfg *config.Consumer, opts []streamproc.RunOption) { + switch cfg.Type { + case "kafka": + initKafkaConsumer(ctx, processor, log, cfg, opts) + case "eventhubs": + initEventHubsConsumer(ctx, processor, log, cfg, opts) + case "pubsub": + initPubSubConsumer(ctx, processor, log, cfg, opts) + case "servicebus": + initServiceBusConsumer(ctx, processor, log, cfg, opts) + case "jetstream": + initJetStreamConsumer(ctx, processor, log, cfg, opts) + case "pulsar": + initPulsarConsumer(ctx, processor, log, cfg, opts) + default: + log.Error(errtemplates.UnsupportedBrokerType(cfg.Type).Error(), errcodes.BrokerInitialization) + } +} + +func loadRunOptions(cfg *config.RunOptions) []streamproc.RunOption { + var opts []streamproc.RunOption + + opts = append(opts, streamproc.WithErrThreshold(cfg.ErrThreshold)) + opts = append(opts, streamproc.WithErrInterval(cfg.ErrInterval)) + opts = append(opts, streamproc.WithNumRetires(cfg.NumRetries)) + + return opts +} diff --git a/validator/internal/janitorctl/metrics.go b/validator/internal/janitorctl/metrics.go new file mode 100644 index 0000000..bb4de9f --- /dev/null +++ b/validator/internal/janitorctl/metrics.go @@ -0,0 +1,32 @@ +package janitorctl + +import ( + "fmt" + "net/http" + + "github.com/prometheus/client_golang/prometheus/promhttp" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errcodes" + "github.com/dataphos/lib-logger/logger" +) + +// runMetricsServer runs a http server on which Prometheus metrics are being exposed. +// All metrics that are registered to default Prometheus Registry are displayed at: +// "localhost:2112/metrics" endpoint. 
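A quick way to sanity-check the endpoint exposed by runMetricsServer (defined next) from Go; it simply dumps whatever the default Prometheus registry is currently reporting.

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Scrape the metrics endpoint exposed on the default port :2112.
	resp, err := http.Get("http://localhost:2112/metrics")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(body))
}
```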
+func runMetricsServer(log logger.Log) *http.Server { + http.Handle("/metrics", promhttp.Handler()) + + port := ":2112" + + srv := &http.Server{Addr: port} + + go func() { + if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed { + log.Error(err.Error(), errcodes.MetricsServerFailure) + } + }() + + log.Info(fmt.Sprintf("exposed metrics at port %s", port)) + + return srv +} diff --git a/validator/internal/janitorctl/run.go b/validator/internal/janitorctl/run.go new file mode 100644 index 0000000..faf4111 --- /dev/null +++ b/validator/internal/janitorctl/run.go @@ -0,0 +1,215 @@ +package janitorctl + +import ( + "context" + "runtime/debug" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/centralconsumer" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/config" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errcodes" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/pullercleaner" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/lib-brokers/pkg/broker" + "github.com/dataphos/lib-logger/logger" + "github.com/dataphos/lib-logger/standardlogger" + "github.com/dataphos/lib-shutdown/pkg/graceful" +) + +type ProcessorInitFunc func(context.Context, registry.SchemaRegistry, broker.Publisher) (*janitor.Processor, error) + +func RunCentralConsumer(configFile string) { + labels := logger.Labels{ + "product": "Schema Registry", + "component": "central_consumer", + } + var Commit = func() string { + if info, ok := debug.ReadBuildInfo(); ok { + for _, setting := range info.Settings { + if setting.Key == "vcs.revision" { + return setting.Value + } + } + } + + return "" + }() + if Commit != "" { + labels["commit"] = Commit + } + + logLevel, logConfigWarnings := config.GetLogLevel() + log := standardlogger.New(labels, standardlogger.WithLogLevel(logLevel)) + + for _, w := range logConfigWarnings { + log.Warn(w) + } + + var cfg config.CentralConsumer + if err := cfg.Read(configFile); err != nil { + log.Fatal(err.Error(), errcodes.ReadConfigFailure) + } + if err := cfg.Validate(); err != nil { + log.Fatal(err.Error(), errcodes.ValidateConfigFailure) + } + + initProcessor := func(ctx context.Context, registry registry.SchemaRegistry, publisher broker.Publisher) (*janitor.Processor, error) { + validators, err := initializeValidatorsForCentralConsumer(ctx, &cfg) + if err != nil { + return nil, err + } + + cc, err := centralconsumer.New( + registry, + publisher, + validators, + centralconsumer.Topics{ + Valid: cfg.Topics.Valid, + InvalidCSV: cfg.Topics.DeadLetter, + InvalidJSON: cfg.Topics.DeadLetter, + Deadletter: cfg.Topics.DeadLetter, + }, + centralconsumer.Settings{ + NumSchemaCollectors: cfg.NumSchemaCollectors, + NumInferrers: cfg.NumInferrers, + }, + log, + centralconsumer.RouterFlags{ + MissingSchema: cfg.ShouldLog.MissingSchema, + Valid: cfg.ShouldLog.Valid, + Deadletter: cfg.ShouldLog.DeadLetter, + }, + centralconsumer.Mode(cfg.Mode), + centralconsumer.SchemaMetadata{ + ID: cfg.SchemaID, + Version: cfg.SchemaVersion, + Format: cfg.SchemaType, + }, + cfg.Encryption.EncryptionKey, + ) + if err != nil { + return nil, err + } + return cc.AsProcessor(), nil + } + + run( + graceful.WithSignalShutdown(context.Background()), + log, + &cfg.Registry, + initProcessor, + &cfg.Producer, + &cfg.RunOptions, + &cfg.Consumer, + ) +} + +func RunPullerCleaner(configFile string) { + labels := 
logger.Labels{ + "product": "Schema Registry", + "component": "puller_cleaner", + } + var Commit = func() string { + if info, ok := debug.ReadBuildInfo(); ok { + for _, setting := range info.Settings { + if setting.Key == "vcs.revision" { + return setting.Value + } + } + } + + return "" + }() + if Commit != "" { + labels["commit"] = Commit + } + + logLevel, logConfigWarnings := config.GetLogLevel() + log := standardlogger.New(labels, standardlogger.WithLogLevel(logLevel)) + + for _, w := range logConfigWarnings { + log.Warn(w) + } + + var cfg config.PullerCleaner + if err := cfg.Read(configFile); err != nil { + log.Fatal(err.Error(), errcodes.ReadConfigFailure) + } + if err := cfg.Validate(); err != nil { + log.Fatal(err.Error(), errcodes.ValidateConfigFailure) + } + + initProcessor := func(ctx context.Context, registry registry.SchemaRegistry, publisher broker.Publisher) (*janitor.Processor, error) { + validators, err := initializeValidatorsForPullerCleaner(ctx, &cfg) + if err != nil { + return nil, err + } + + generators, err := initializeGenerators(&cfg) + if err != nil { + return nil, err + } + + pc, err := pullercleaner.New( + generators, + registry, + validators, + publisher, + pullercleaner.Topics{ + Valid: cfg.Topics.Valid, + Deadletter: cfg.Topics.DeadLetter, + }, + cfg.NumCleaners, + log, + pullercleaner.RouterFlags{ + Valid: cfg.ShouldLog.Valid, + Deadletter: cfg.ShouldLog.DeadLetter, + }, + ) + if err != nil { + return nil, err + } + return pc.AsProcessor(), nil + } + + run( + graceful.WithSignalShutdown(context.Background()), + log, + &cfg.Registry, + initProcessor, + &cfg.Producer, + &cfg.RunOptions, + &cfg.Consumer, + ) +} + +func run(ctx context.Context, log logger.Log, registryCfg *config.Registry, initProcessor ProcessorInitFunc, producerCfg *config.Producer, runOptions *config.RunOptions, consumerCfg *config.Consumer) { + log.Info("initializing schema registry") + schemaRegistry, err := initializeSchemaRegistry(ctx, log, registryCfg) + if err != nil { + log.Error(err.Error(), errcodes.RegistryInitialization) + return + } + + log.Info("initializing publisher") + publisher, err := initializePublisher(ctx, producerCfg) + if err != nil { + log.Error(err.Error(), errcodes.BrokerInitialization) + return + } + + log.Info("initializing main component") + processor, err := initProcessor(ctx, schemaRegistry, publisher) + if err != nil { + log.Error(err.Error(), errcodes.Initialization) + return + } + + log.Info("loading run settings") + opts := loadRunOptions(runOptions) + + log.Info("initializing source system and running processor") + initializeSourceSystemAndRunProcessor(ctx, processor, log, consumerCfg, opts) + + log.Info("shutting down") +} diff --git a/validator/internal/producer/config.go b/validator/internal/producer/config.go new file mode 100644 index 0000000..d5a53f0 --- /dev/null +++ b/validator/internal/producer/config.go @@ -0,0 +1,216 @@ +package producer + +import ( + "reflect" + "strings" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/go-playground/validator/v10" + "github.com/kkyr/fig" + "go.uber.org/multierr" +) + +// Config represents all required configuration to run a standalone producer. 
+type Config struct { + BaseDir string `toml:"base_dir" default:""` + FileName string `toml:"file_name" val:"file"` + NumberOfMessages int `toml:"number_of_messages" default:"100"` + RateLimit int `toml:"rate_limit" default:"100"` + TopicId string `toml:"topic_id" val:"required"` + Type string `toml:"type" val:"oneof=kafka eventhubs pubsub servicebus jetstream"` + EncryptionKey string `toml:"encryption_key"` + Kafka KafkaConfig `toml:"kafka"` + Eventhubs EventhubsConfig `toml:"eventhubs"` + Pubsub PubsubConfig `toml:"pubsub"` + Servicebus ServicebusConfig `toml:"servicebus"` + Jetstream JetstreamConfig `toml:"jetstream"` + RegistryConfig RegistryConfig `toml:"registry_config"` + Mode int `toml:"mode"` +} + +type KafkaConfig struct { + Address string `toml:"address"` + TlsConfig TlsConfig `toml:"tls_config"` + KrbConfig KrbConfig `toml:"krb_config"` + SaslConfig SaslConfig `toml:"sasl_config"` + Settings KafkaPublisherSettings `toml:"settings"` +} + +type EventhubsConfig struct { + Address string `toml:"address"` + TlsConfig TlsConfig `toml:"tls_config"` + SaslConfig SaslConfig `toml:"sasl_config"` + Settings EventhubsPublisherSettings `toml:"settings"` +} + +type TlsConfig struct { + Enabled bool `toml:"enabled"` + ClientCertFile string `toml:"client_cert_file" val:"required_if=Enabled true,omitempty,file"` + ClientKeyFile string `toml:"client_key_file" val:"required_if=Enabled true,omitempty,file"` + CaCertFile string `toml:"ca_cert_file" val:"required_if=Enabled true,omitempty,file"` +} + +type KrbConfig struct { + Enabled bool `toml:"enabled"` + KrbConfigPath string `toml:"krb_config_path"` + KrbKeyTabPath string `toml:"krb_keytab_path"` + KrbRealm string `toml:"krb_realm"` + KrbServiceName string `toml:"krb_service_name"` + KrbUsername string `toml:"krb_username"` +} + +type SaslConfig struct { + User string `toml:"user"` + Password string `toml:"password"` +} + +type KafkaPublisherSettings struct { + BatchSize int `toml:"batch_size" default:"40"` + BatchBytes int64 `toml:"batch_bytes" default:"5242880"` + Linger time.Duration `toml:"linger" default:"10ms"` +} + +type EventhubsPublisherSettings struct { + BatchSize int `toml:"batch_size" default:"40"` + BatchBytes int64 `toml:"batch_bytes" default:"5242880"` + Linger time.Duration `toml:"linger" default:"10ms"` +} + +type PubsubConfig struct { + ProjectId string `toml:"project_id"` + Settings PubsubPublisherSettings `toml:"settings"` +} + +type PubsubPublisherSettings struct { + DelayThreshold time.Duration `toml:"delay_threshold" default:"50ms"` + CountThreshold int `toml:"count_threshold" default:"50"` + ByteThreshold int `toml:"byte_threshold" default:"52428800"` + NumGoroutines int `toml:"num_goroutines" default:"5"` + Timeout time.Duration `toml:"timeout" default:"15s"` + MaxOutstandingMessages int `toml:"max_outstanding_messages" default:"800"` + MaxOutstandingBytes int `toml:"max_outstanding_bytes" default:"1048576000"` + EnableMessageOrdering bool `toml:"enable_message_ordering"` +} + +type ServicebusConfig struct { + ConnectionString string `toml:"connection_string"` +} + +type JetstreamConfig struct { + Url string `toml:"url"` + Settings JetstreamPublisherSettings `toml:"settings"` +} + +type JetstreamPublisherSettings struct { + MaxInflightPending int `toml:"max_inflight_pending" default:"512"` +} + +type RegistryConfig struct { + URL string `toml:"url" val:"url"` + GetTimeout time.Duration `toml:"get_timeout" default:"4s"` + RegisterTimeout time.Duration `toml:"register_timeout" default:"10s"` + UpdateTimeout time.Duration 
`toml:"update_timeout" default:"10s"` + Type string `toml:"type" default:"janitor" val:"oneof=janitor apicurio"` + GroupID string `toml:"groupID"` +} + +// Read loads parameters from configuration file into Config struct. +func (cfg *Config) Read(fileName string) error { + return fig.Load(cfg, fig.File(fileName), fig.Tag("toml"), fig.UseEnv("")) +} + +// Validate validates Config struct. +func (cfg *Config) Validate() error { + validate := validator.New() + validate.SetTagName("val") + + validate.RegisterTagNameFunc(func(fld reflect.StructField) string { + name := strings.SplitN(fld.Tag.Get("toml"), ",", 2)[0] + if name == "-" { + return "" + } + return name + }) + + validate.RegisterStructValidation( + publisherStructLevelValidation, + KafkaConfig{}, + EventhubsConfig{}, + PubsubConfig{}, + ServicebusConfig{}, + JetstreamConfig{}, + ) + + if err := validate.Struct(cfg); err != nil { + if _, ok := err.(*validator.InvalidValidationError); ok { + return err + } + + var errCombined error + for _, err := range err.(validator.ValidationErrors) { + // Trims prefix in order to correspond to TOML key path. + fieldName := strings.TrimPrefix(err.Namespace(), "Config.") + + switch err.Tag() { + case "required": + errCombined = multierr.Append(errCombined, errtemplates.RequiredTagFail(fieldName)) + case "required_if": + errCombined = multierr.Append(errCombined, errtemplates.RequiredTagFail(fieldName)) + case "file": + errCombined = multierr.Append(errCombined, errtemplates.FileTagFail(fieldName, err.Value())) + case "url": + errCombined = multierr.Append(errCombined, errtemplates.UrlTagFail(fieldName, err.Value())) + case "oneof": + errCombined = multierr.Append(errCombined, errtemplates.OneofTagFail(fieldName, err.Value())) + case "hostname_port": + errCombined = multierr.Append(errCombined, errtemplates.HostnamePortTagFail(fieldName, err.Value())) + default: + errCombined = multierr.Append(errCombined, err) + } + } + return errCombined + } + return nil +} + +// publisherStructLevelValidation is a custom validator which validates broker +// structure depending on which type of producer is required. 
+func publisherStructLevelValidation(sl validator.StructLevel) { + source := sl.Parent().Interface().(Config) + + validate := validator.New() + + switch producer := sl.Current().Interface().(type) { + case KafkaConfig: + if source.Type == "kafka" { + if err := validate.Var(producer.Address, "hostname_port"); err != nil { + sl.ReportValidationErrors("address", "", err.(validator.ValidationErrors)) + } + } + case EventhubsConfig: + if source.Type == "eventhubs" { + if err := validate.Var(producer.Address, "hostname_port"); err != nil { + sl.ReportValidationErrors("address", "", err.(validator.ValidationErrors)) + } + } + case PubsubConfig: + if source.Type == "pubsub" { + if err := validate.Var(producer.ProjectId, "required"); err != nil { + sl.ReportValidationErrors("project_id", "", err.(validator.ValidationErrors)) + } + } + case ServicebusConfig: + if source.Type == "servicebus" { + if err := validate.Var(producer.ConnectionString, "required"); err != nil { + sl.ReportValidationErrors("connection_string", "", err.(validator.ValidationErrors)) + } + } + case JetstreamConfig: + if source.Type == "jetstream" { + if err := validate.Var(producer.Url, "url"); err != nil { + sl.ReportValidationErrors("url", "", err.(validator.ValidationErrors)) + } + } + } +} diff --git a/validator/internal/producer/load.go b/validator/internal/producer/load.go new file mode 100644 index 0000000..e3d1801 --- /dev/null +++ b/validator/internal/producer/load.go @@ -0,0 +1,238 @@ +package producer + +import ( + "bytes" + "context" + "encoding/csv" + "io" + "log" + "os" + "path/filepath" + "strconv" + "strings" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/lib-brokers/pkg/broker" + + "github.com/pkg/errors" +) + +// IntoBrokerMessages converts the given ProcessEntry slice into a slice of broker.Message, by calling IntoBrokerMessage on each entry. +func IntoBrokerMessages(ctx context.Context, entries []ProcessedEntry, registry registry.SchemaRegistry) ([]broker.OutboundMessage, error) { + brokerMessages := make([]broker.OutboundMessage, len(entries)) + for i, curr := range entries { + message, err := IntoBrokerMessage(ctx, curr, registry) + if err != nil { + return nil, err + } + brokerMessages[i] = message + } + return brokerMessages, nil +} + +// IntoBrokerMessage converts the given ProcessedEntry into broker.Message, while also performing schema registration, depending +// on the value of ProcessedEntry.ShouldRegister. 
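+// The format attribute is always set and the schema version attribute is set whenever the entry carries a version;
+// registering additionally sets the schema ID and version attributes along with any additional attributes of the entry.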
+func IntoBrokerMessage(ctx context.Context, entry ProcessedEntry, registry registry.SchemaRegistry) (broker.OutboundMessage, error) { + attributes := make(map[string]interface{}) + + attributes[janitor.AttributeFormat] = entry.Format + if entry.Version != "" { + attributes[janitor.AttributeSchemaVersion] = entry.Version + } + + if entry.ShouldRegister { + var schemaId, versionId string + var err error + schemaId, versionId, err = registry.Register(ctx, entry.Schema, entry.Format, entry.CompatibilityMode, entry.ValidityMode) + if err != nil { + return broker.OutboundMessage{}, err + } + + log.Printf("schema registered under %s/%s\n", schemaId, versionId) + + attributes[janitor.AttributeSchemaID] = schemaId + attributes[janitor.AttributeSchemaVersion] = versionId + + for k, v := range entry.AdditionalAttributes { + attributes[k] = v + } + } + + return broker.OutboundMessage{ + Data: entry.Message, + Attributes: attributes, + }, nil +} + +// ProcessedEntry is the processed version of Entry. +type ProcessedEntry struct { + Message []byte + Schema []byte + ShouldRegister bool + Format string + CompatibilityMode string + ValidityMode string + Version string + AdditionalAttributes map[string]interface{} +} + +// ProcessEntries processes the given Entry slice, by calling ProcessEntry on each Entry. +func ProcessEntries(baseDir string, entries []Entry) ([]ProcessedEntry, error) { + messageSchemaPairs := make([]ProcessedEntry, len(entries)) + var loaded ProcessedEntry + var err error + for i, curr := range entries { + loaded, err = ProcessEntry(baseDir, curr) + if err != nil { + return nil, err + } + messageSchemaPairs[i] = loaded + } + return messageSchemaPairs, nil +} + +// ProcessEntry processes the given Entry, by loading ProcessedEntry.Message and ProcessedEntry.Schema from the filenames +// in the relevant Entry fields. It is assumed that the correct absolute path of the given filename can be gained by calling +// filepath.Join with the given baseDir as the first argument. +func ProcessEntry(baseDir string, entry Entry) (ProcessedEntry, error) { + var message, schema []byte + var err error + + messageFilename := filepath.Join(baseDir, entry.BlobFilename) + log.Printf("loading message data from %s\n", messageFilename) + message, err = os.ReadFile(messageFilename) + if err != nil { + return ProcessedEntry{}, err + } + + if entry.SchemaFilename == "" { + if entry.ShouldRegister { + return ProcessedEntry{}, errors.New("invalid entry: can't register an entry since no file was given") + } + } else { + schemaFilename := filepath.Join(baseDir, entry.SchemaFilename) + log.Printf("loading schema data from %s\n", schemaFilename) + schema, err = os.ReadFile(schemaFilename) + if err != nil { + return ProcessedEntry{}, err + } + } + + return ProcessedEntry{ + Message: message, + Schema: schema, + ShouldRegister: entry.ShouldRegister, + Format: entry.Format, + CompatibilityMode: entry.CompatibilityMode, + ValidityMode: entry.ValidityMode, + Version: entry.Version, + AdditionalAttributes: entry.Attributes, + }, nil +} + +type Entry struct { + BlobFilename string + SchemaFilename string + ShouldRegister bool + Format string + CompatibilityMode string + ValidityMode string + Version string + Attributes map[string]interface{} +} + +// LoadEntries loads the entries from the given csv file. 
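+// A full row has seven fields: blob filename, schema filename, should-register flag, format, compatibility mode,
+// validity mode and ';'-separated key=value attributes. Four-field rows (blob filename, format, compatibility mode,
+// validity mode) and five-field rows (blob filename, version, format, compatibility mode, validity mode) are also accepted.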
+func LoadEntries(filename string) ([]Entry, error) { + file, err := os.ReadFile(filename) + if err != nil { + return nil, err + } + + return parseEntriesFile(bytes.NewReader(file)) +} + +// parseEntriesFile parses the given csv file. +func parseEntriesFile(file io.Reader) ([]Entry, error) { + reader := csv.NewReader(file) + + reader.Comma = ',' + + lines, err := reader.ReadAll() + if err != nil { + return nil, err + } + + entries := make([]Entry, len(lines)) + var curr Entry + for i, line := range lines { + if len(line) == 4 { + curr, err = parseLineNoSchema(line) + } else if len(line) == 5 { + curr, err = parseLineOneCCPerTopic(line) + } else { + curr, err = parseLine(line) + } + if err != nil { + return nil, err + } + entries[i] = curr + } + + return entries, nil +} + +// parseLine parses a given line of the csv file. +func parseLine(line []string) (Entry, error) { + shouldRegister, err := strconv.ParseBool(line[2]) + if err != nil { + return Entry{}, err + } + + var attributes = make(map[string]interface{}) + attrs := strings.Split(line[6], ";") + for _, at := range attrs { + if at == "" { + continue + } + pair := strings.SplitN(at, "=", 2) + attributes[pair[0]] = pair[1] + } + + return Entry{ + BlobFilename: line[0], + SchemaFilename: line[1], + ShouldRegister: shouldRegister, + Format: line[3], + CompatibilityMode: line[4], + ValidityMode: line[5], + Version: "", + Attributes: attributes, + }, nil +} + +func parseLineNoSchema(line []string) (Entry, error) { + return Entry{ + BlobFilename: line[0], + SchemaFilename: "", + ShouldRegister: false, + Format: line[1], + CompatibilityMode: line[2], + ValidityMode: line[3], + Version: "", + Attributes: nil, + }, nil +} + +func parseLineOneCCPerTopic(line []string) (Entry, error) { + return Entry{ + BlobFilename: line[0], + SchemaFilename: "", + ShouldRegister: false, + Version: line[1], + Format: line[2], + CompatibilityMode: line[3], + ValidityMode: line[4], + Attributes: nil, + }, nil +} diff --git a/validator/internal/producer/producer.go b/validator/internal/producer/producer.go new file mode 100644 index 0000000..c3fa037 --- /dev/null +++ b/validator/internal/producer/producer.go @@ -0,0 +1,124 @@ +package producer + +import ( + "context" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" + "github.com/pkg/errors" + "go.uber.org/ratelimit" + "log" + "math" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/lib-brokers/pkg/broker" + + "golang.org/x/sync/errgroup" +) + +type Mode int + +const ( + Default Mode = iota + OneCCPerTopic +) + +// Producer models a producer used for schema registry and the publishing of the given dataset. +type Producer struct { + Registry registry.SchemaRegistry + Topic broker.Topic + Frequency int + Mode Mode + EncryptionKey string +} + +// New returns a new Producer instance. +func New(registry registry.SchemaRegistry, topic broker.Topic, rate int, mode Mode, encryptionKey string) *Producer { + return &Producer{ + Registry: registry, + Topic: topic, + Frequency: rate, + Mode: mode, + EncryptionKey: encryptionKey, + } +} + +// LoadAndProduce loads the dataset from the given filename, assuming the directory to the dataset is given with baseDir, +// (optionally) registers and publishes the loaded messages exactly n times. 
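+// A non-positive n publishes the dataset repeatedly until the context is cancelled.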
+func (p *Producer) LoadAndProduce(ctx context.Context, baseDir, filename string, n int) error { + log.Printf("loading entries from %s...\n", filename) + start := time.Now() + entries, err := LoadEntries(filename) + if err != nil { + return err + } + log.Println("entries loaded in", time.Since(start)) + + log.Println("loading messages and schemas...") + start = time.Now() + processedEntries, err := ProcessEntries(baseDir, entries) + if err != nil { + return err + } + log.Println("messages and schemas loaded in", time.Since(start)) + + log.Println("converting into broker messages...") + start = time.Now() + messages, err := IntoBrokerMessages(ctx, processedEntries, p.Registry) + if err != nil { + return err + } + log.Println("converted into broker messages in", time.Since(start)) + + log.Printf("publishing...") + start = time.Now() + if err = p.Publish(ctx, messages, n); err != nil { + if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) { + return nil + } + return err + } + log.Println("published", n, "messages in", time.Since(start)) + return nil +} + +// Publish publishes the given broker.Message slice n times, in a round-robin way in case the size of the dataset is smaller +// than n. +func (p *Producer) Publish(ctx context.Context, messages []broker.OutboundMessage, n int) error { + datasetSize := len(messages) + + if n <= 0 { + n = math.MaxInt64 + } + + rl := ratelimit.NewUnlimited() + if p.Frequency > 0 { + // throws error if rate <= 0 + rl = ratelimit.New(p.Frequency) // per second + } + + eg, ctx := errgroup.WithContext(ctx) +LOOP: + for i := 0; i < n; i++ { + select { + case <-ctx.Done(): + break LOOP + default: + } + + message := messages[i%datasetSize] + if p.EncryptionKey != "" { + encryptedData, err := janitor.Encrypt(message.Data, p.EncryptionKey) + if err != nil { + return err + } + message.Data = encryptedData + } + + rl.Take() + eg.Go(func() error { + return p.Topic.Publish(ctx, message) + }) + + } + return eg.Wait() +} diff --git a/validator/internal/producer/producer_test.go b/validator/internal/producer/producer_test.go new file mode 100644 index 0000000..4f39b24 --- /dev/null +++ b/validator/internal/producer/producer_test.go @@ -0,0 +1,69 @@ +package producer + +import ( + "bytes" + "context" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/lib-brokers/pkg/broker/inmem" +) + +func TestProducer(t *testing.T) { + publisher := inmem.Publisher{} + topic, _ := publisher.Topic("some-topic") + + _, b, _, _ := runtime.Caller(0) + dir := filepath.Dir(b) + testdataDir := filepath.Join(dir, "testdata") + + messageToRegister, err := os.ReadFile(filepath.Join(testdataDir, "data-1.json")) + if err != nil { + t.Fatal(err) + } + schemaToRegister, err := os.ReadFile(filepath.Join(testdataDir, "schema-1.json")) + if err != nil { + t.Fatal(err) + } + + schemaRegistry := registry.NewMock() + schemaRegistry.SetRegisterResponse(schemaToRegister, "1", "1", nil) + + encryptionKey := "" + producer := New(schemaRegistry, topic, 100, 0, encryptionKey) + + root := filepath.Join(dir, "../..") + dataset := filepath.Join(dir, "testdata/dataset.csv") + + if err = producer.LoadAndProduce(context.Background(), root, dataset, 5); err != nil { + t.Fatal(err) + } + + if len(publisher.Spawned[0].Published) != 5 { + t.Fatal("publish count not as expected") + } + for _, published := range 
publisher.Spawned[0].Published { + if published.Attributes[janitor.AttributeFormat] != "json" { + t.Fatal("format not as expected") + } + + if bytes.Equal(published.Data, messageToRegister) { + if published.Attributes[janitor.AttributeSchemaID] != "1" || + published.Attributes[janitor.AttributeSchemaVersion] != "1" { + t.Fatal("schema id and version not as expected") + } + } else { + if _, ok := published.Attributes[janitor.AttributeSchemaID]; ok { + t.Fatal("schema id not empty, empty expected") + } + if _, ok := published.Attributes[janitor.AttributeSchemaVersion]; ok { + t.Fatal("schema version not empty, empty expected") + } + } + } + +} diff --git a/validator/internal/producer/run.go b/validator/internal/producer/run.go new file mode 100644 index 0000000..4510c89 --- /dev/null +++ b/validator/internal/producer/run.go @@ -0,0 +1,175 @@ +package producer + +import ( + "context" + "crypto/tls" + "errors" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry/apicuriosr" + "log" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errcodes" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry/janitorsr" + "github.com/dataphos/lib-brokers/pkg/broker" + "github.com/dataphos/lib-brokers/pkg/broker/jetstream" + "github.com/dataphos/lib-brokers/pkg/broker/kafka" + "github.com/dataphos/lib-brokers/pkg/broker/pubsub" + "github.com/dataphos/lib-brokers/pkg/broker/servicebus" + "github.com/dataphos/lib-httputil/pkg/httputil" + "github.com/dataphos/lib-shutdown/pkg/graceful" +) + +func Run(configFile string) { + var cfg Config + if err := cfg.Read(configFile); err != nil { + log.Fatal(err.Error()+" ", errcodes.ReadConfigFailure) + } + if err := cfg.Validate(); err != nil { + log.Fatal(err.Error()+" ", errcodes.ValidateConfigFailure) + } + + ctx := graceful.WithSignalShutdown(context.Background()) + + var sr registry.SchemaRegistry + var err error + switch cfg.RegistryConfig.Type { + case "apicurio": + sr, err = apicuriosr.New( + ctx, + cfg.RegistryConfig.URL, + apicuriosr.TimeoutSettings{ + GetTimeout: cfg.RegistryConfig.GetTimeout, + RegisterTimeout: cfg.RegistryConfig.RegisterTimeout, + UpdateTimeout: cfg.RegistryConfig.UpdateTimeout, + }, + cfg.RegistryConfig.GroupID, + ) + case "janitor": + sr, err = janitorsr.New( + ctx, + cfg.RegistryConfig.URL, + janitorsr.TimeoutSettings{ + GetTimeout: cfg.RegistryConfig.GetTimeout, + RegisterTimeout: cfg.RegistryConfig.RegisterTimeout, + UpdateTimeout: cfg.RegistryConfig.UpdateTimeout, + }, + cfg.RegistryConfig.GroupID, + ) + default: + sr, err = nil, errtemplates.UnsupportedRegistryType(cfg.Type) + } + if err != nil { + log.Fatal(err) + } + + publisher, err := selectPublisher(ctx, &cfg) + if err != nil { + log.Fatal(err) + } + + topic, err := publisher.Topic(cfg.TopicId) + if err != nil { + log.Fatal(err) + } + + if err = New(sr, topic, cfg.RateLimit, Mode(cfg.Mode), cfg.EncryptionKey).LoadAndProduce(ctx, cfg.BaseDir, cfg.FileName, cfg.NumberOfMessages); err != nil { + log.Fatal(err) + } + log.Println("done") +} + +func selectPublisher(ctx context.Context, cfg *Config) (broker.Publisher, error) { + switch cfg.Type { + case "pubsub": + return pubsub.NewPublisher( + ctx, + pubsub.PublisherConfig{ + ProjectID: cfg.Pubsub.ProjectId, + }, + pubsub.PublishSettings{ + DelayThreshold: cfg.Pubsub.Settings.DelayThreshold, + CountThreshold: 
cfg.Pubsub.Settings.CountThreshold, + ByteThreshold: cfg.Pubsub.Settings.ByteThreshold, + NumGoroutines: cfg.Pubsub.Settings.NumGoroutines, + Timeout: cfg.Pubsub.Settings.Timeout, + MaxOutstandingMessages: cfg.Pubsub.Settings.MaxOutstandingMessages, + MaxOutstandingBytes: cfg.Pubsub.Settings.MaxOutstandingBytes, + EnableMessageOrdering: cfg.Pubsub.Settings.EnableMessageOrdering, + }, + ) + case "kafka": + var tlsConfig *tls.Config + var krbConfig *kafka.KerberosConfig + if cfg.Kafka.TlsConfig.Enabled { + var err error + tlsConfig, err = httputil.NewTLSConfig(cfg.Kafka.TlsConfig.ClientCertFile, cfg.Kafka.TlsConfig.ClientKeyFile, cfg.Kafka.TlsConfig.CaCertFile) + if err != nil { + return nil, err + } + } + if cfg.Kafka.KrbConfig.Enabled { + krbConfig = &kafka.KerberosConfig{ + KeyTabPath: cfg.Kafka.KrbConfig.KrbKeyTabPath, + ConfigPath: cfg.Kafka.KrbConfig.KrbConfigPath, + Realm: cfg.Kafka.KrbConfig.KrbRealm, + Service: cfg.Kafka.KrbConfig.KrbServiceName, + Username: cfg.Kafka.KrbConfig.KrbUsername, + } + } + return kafka.NewPublisher( + ctx, + kafka.ProducerConfig{ + BrokerAddr: cfg.Kafka.Address, + TLS: tlsConfig, + Kerberos: krbConfig, + }, + kafka.ProducerSettings{ + BatchSize: cfg.Kafka.Settings.BatchSize, + BatchBytes: cfg.Kafka.Settings.BatchBytes, + Linger: cfg.Kafka.Settings.Linger, + }, + ) + case "eventhubs": + var tlsConfig *tls.Config + var saslConfig *kafka.PlainSASLConfig + if cfg.Eventhubs.TlsConfig.Enabled { + var err error + tlsConfig, err = httputil.NewTLSConfig(cfg.Eventhubs.TlsConfig.ClientCertFile, cfg.Eventhubs.TlsConfig.ClientKeyFile, cfg.Eventhubs.TlsConfig.CaCertFile) + if err != nil { + return nil, err + } + } + saslConfig = &kafka.PlainSASLConfig{ + User: cfg.Eventhubs.SaslConfig.User, + Pass: cfg.Eventhubs.SaslConfig.Password, + } + + return kafka.NewPublisher( + ctx, + kafka.ProducerConfig{ + BrokerAddr: cfg.Eventhubs.Address, + TLS: tlsConfig, + PlainSASL: saslConfig, + DisableCompression: true, + }, + kafka.ProducerSettings{ + BatchSize: cfg.Eventhubs.Settings.BatchSize, + BatchBytes: cfg.Eventhubs.Settings.BatchBytes, + Linger: cfg.Eventhubs.Settings.Linger, + }, + ) + case "servicebus": + return servicebus.NewPublisher(cfg.Servicebus.ConnectionString) + case "jetstream": + return jetstream.NewPublisher( + ctx, + cfg.Jetstream.Url, + jetstream.PublisherSettings{ + MaxPending: cfg.Jetstream.Settings.MaxInflightPending, + }, + ) + default: + return nil, errors.New("unsupported broker type") + } +} diff --git a/validator/internal/producer/testdata/data-1.json b/validator/internal/producer/testdata/data-1.json new file mode 100644 index 0000000..3fe0812 --- /dev/null +++ b/validator/internal/producer/testdata/data-1.json @@ -0,0 +1,5 @@ +{ + "firstName": "John", + "lastName": "Doe", + "age": 21 +} diff --git a/validator/internal/producer/testdata/data-2.json b/validator/internal/producer/testdata/data-2.json new file mode 100644 index 0000000..7d1ee16 --- /dev/null +++ b/validator/internal/producer/testdata/data-2.json @@ -0,0 +1,13 @@ +{ + "fruits": [ "apple", "orange", "pear" ], + "vegetables": [ + { + "veggieName": "potato", + "veggieLike": true + }, + { + "veggieName": "broccoli", + "veggieLike": false + } + ] +} diff --git a/validator/internal/producer/testdata/dataset.csv b/validator/internal/producer/testdata/dataset.csv new file mode 100644 index 0000000..b5bb5bd --- /dev/null +++ b/validator/internal/producer/testdata/dataset.csv @@ -0,0 +1,2 @@ +internal/producer/testdata/data-1.json,internal/producer/testdata/schema-1.json,true,json,none,none, 
+internal/producer/testdata/data-2.json,internal/producer/testdata/schema-2.json,false,json,none,none, diff --git a/validator/internal/producer/testdata/schema-1.json b/validator/internal/producer/testdata/schema-1.json new file mode 100644 index 0000000..687ae74 --- /dev/null +++ b/validator/internal/producer/testdata/schema-1.json @@ -0,0 +1,21 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Person", + "type": "object", + "properties": { + "firstName": { + "type": "string", + "description": "The person's first name." + }, + "lastName": { + "type": "string", + "description": "The person's last name." + }, + "age": { + "description": "Age in years which must be equal to or greater than zero.", + "type": "integer", + "minimum": 0 + } + } +} diff --git a/validator/internal/producer/testdata/schema-2.json b/validator/internal/producer/testdata/schema-2.json new file mode 100644 index 0000000..72ab606 --- /dev/null +++ b/validator/internal/producer/testdata/schema-2.json @@ -0,0 +1,34 @@ +{ + "$id": "https://example.com/arrays.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "description": "A representation of a person, company, organization, or place", + "type": "object", + "properties": { + "fruits": { + "type": "array", + "items": { + "type": "string" + } + }, + "vegetables": { + "type": "array", + "items": { "$ref": "#/$defs/veggie" } + } + }, + "$defs": { + "veggie": { + "type": "object", + "required": [ "veggieName", "veggieLike" ], + "properties": { + "veggieName": { + "type": "string", + "description": "The name of the vegetable." + }, + "veggieLike": { + "type": "boolean", + "description": "Do I like this vegetable?" + } + } + } + } +} diff --git a/validator/internal/publisher/mock.go b/validator/internal/publisher/mock.go new file mode 100644 index 0000000..eb98e1a --- /dev/null +++ b/validator/internal/publisher/mock.go @@ -0,0 +1,25 @@ +package publisher + +import ( + "golang.org/x/net/context" + + "github.com/dataphos/lib-brokers/pkg/broker" +) + +type MockPublisher struct { +} + +type MockTopic struct { +} + +func (t *MockTopic) BatchPublish(context.Context, ...broker.OutboundMessage) error { + return nil +} + +func (*MockTopic) Publish(context.Context, broker.OutboundMessage) error { + return nil +} + +func (*MockPublisher) Topic(_ string) (broker.Topic, error) { + return &MockTopic{}, nil +} diff --git a/validator/internal/pullercleaner/pullercleaner.go b/validator/internal/pullercleaner/pullercleaner.go new file mode 100644 index 0000000..66415df --- /dev/null +++ b/validator/internal/pullercleaner/pullercleaner.go @@ -0,0 +1,149 @@ +// Package pullercleaner houses a pipeline over a message stream which attempts to generate and register new +// schemas and send the updated messages to a destination topic. +package pullercleaner + +import ( + "context" + + "github.com/pkg/errors" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/lib-brokers/pkg/broker" + "github.com/dataphos/lib-logger/logger" +) + +// PullerCleaner models the pullercleaner process. 
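+// The number of messages cleaned concurrently is capped by the numCleaners argument passed to New.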
+type PullerCleaner struct { + Publisher broker.Publisher + TopicIDs Topics + CleanerRouter janitor.CleanerRouter + cleanerRouterSem chan struct{} + topics map[string]broker.Topic + log logger.Log +} + +// Topics defines the standard destination topics, based on possible validation results. +type Topics struct { + Valid string + Deadletter string +} + +// RouterFlags defines logging levels for logging each routing decision. +type RouterFlags struct { + Valid bool + Deadletter bool +} + +const ( + csvFormat = "csv" + jsonFormat = "json" +) + +// New returns a new instance of PullerCleaner. +func New(schemaGenerators janitor.SchemaGenerators, registry registry.SchemaRegistry, validators janitor.Validators, publisher broker.Publisher, topicIds Topics, numCleaners int, log logger.Log, routerFlags RouterFlags) (*PullerCleaner, error) { + topics, err := setupTopics(topicIds, publisher) + if err != nil { + return nil, errors.Wrap(err, errtemplates.CreatingTopicInstanceFailed(topicIds.Deadletter)) + } + + return &PullerCleaner{ + Publisher: publisher, + TopicIDs: topicIds, + CleanerRouter: janitor.CleanerRouter{ + Cleaner: janitor.NewCachingCleaner(schemaGenerators, validators, registry), + Router: setupRoutingFunc(topicIds, routerFlags, log), + }, + cleanerRouterSem: make(chan struct{}, numCleaners), + topics: topics, + log: log, + }, nil +} + +// setupTopics maps Topics into instances of broker.Topic. +func setupTopics(topicIds Topics, publisher broker.Publisher) (map[string]broker.Topic, error) { + topics := make(map[string]broker.Topic) + + if topicIds.Valid != "" { + topic, err := publisher.Topic(topicIds.Valid) + if err != nil { + return nil, errors.Wrap(err, errtemplates.CreatingTopicInstanceFailed(topicIds.Valid)) + } + topics[topicIds.Valid] = topic + } + + if topicIds.Deadletter != "" { + topic, err := publisher.Topic(topicIds.Deadletter) + if err != nil { + return nil, errors.Wrap(err, errtemplates.CreatingTopicInstanceFailed(topicIds.Deadletter)) + } + topics[topicIds.Deadletter] = topic + } + return topics, nil +} + +// setupRoutingFunc sets up the janitor.LoggingRouter, by first checking if there's a need for logging any of the routing +// decisions (if any logging level flag is set). If none of the flags are set, standard IntoRoutingFunc is used, +// wrapping it with logging middleware otherwise. +func setupRoutingFunc(topicIDs Topics, routerFlags RouterFlags, log logger.Log) janitor.Router { + next := IntoRoutingFunc(topicIDs) + + if routerFlags.Valid || routerFlags.Deadletter { + return janitor.LoggingRouter( + log, + janitor.RouterFlags{ + Valid: routerFlags.Valid, + Deadletter: routerFlags.Deadletter, + }, + next, + ) + } + + return next +} + +// IntoRoutingFunc maps the given Topics into a janitor.LoggingRouter. +// +// If the janitor.Result is janitor.Deadletter, the message are routed to Topics.Deadletter. +// All valid messages are sent to Topics.Valid. 
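+// Invalid and missing-schema results, as well as any unrecognized result, are routed to Topics.Deadletter as well.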
+func IntoRoutingFunc(topics Topics) janitor.Router { + return janitor.RoutingFunc(func(result janitor.Result, _ janitor.Message) string { + switch result { + case janitor.Valid: + return topics.Valid + case janitor.Invalid, janitor.Deadletter, janitor.MissingSchema: + return topics.Deadletter + default: + return topics.Deadletter + } + }) +} + +func (pc *PullerCleaner) AsProcessor() *janitor.Processor { + return janitor.NewProcessor(pc, pc.topics, pc.TopicIDs.Deadletter, pc.log) +} + +func (pc *PullerCleaner) Handle(ctx context.Context, message janitor.Message) (janitor.MessageTopicPair, error) { + acquireIfSet(pc.cleanerRouterSem) + messageTopicPair, err := pc.CleanerRouter.CleanAndReroute(ctx, message) + if err != nil { + releaseIfSet(pc.cleanerRouterSem) + return janitor.MessageTopicPair{}, err + } + releaseIfSet(pc.cleanerRouterSem) + + return messageTopicPair, nil +} + +func acquireIfSet(sem chan struct{}) { + if sem != nil { + sem <- struct{}{} + } +} + +func releaseIfSet(sem chan struct{}) { + if sem != nil { + <-sem + } +} diff --git a/validator/internal/pullercleaner/pullercleaner_test.go b/validator/internal/pullercleaner/pullercleaner_test.go new file mode 100644 index 0000000..738d21f --- /dev/null +++ b/validator/internal/pullercleaner/pullercleaner_test.go @@ -0,0 +1,42 @@ +package pullercleaner + +import ( + "testing" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/janitor" +) + +func TestTopicsIntoRoutingFunc(t *testing.T) { + topics := Topics{ + Valid: "valid-topic", + Deadletter: "deadletter", + } + + tt := []struct { + name string + isValid janitor.Result + format string + destination string + }{ + {"valid json", janitor.Valid, jsonFormat, topics.Valid}, + {"invalid json", janitor.Invalid, jsonFormat, topics.Deadletter}, + {"deadletter json", janitor.Deadletter, jsonFormat, topics.Deadletter}, + {"missing schema json", janitor.MissingSchema, jsonFormat, topics.Deadletter}, + {"valid csv", janitor.Valid, csvFormat, topics.Valid}, + {"invalid csv", janitor.Invalid, csvFormat, topics.Deadletter}, + {"deadletter csv", janitor.Deadletter, csvFormat, topics.Deadletter}, + {"missing schema csv", janitor.MissingSchema, csvFormat, topics.Deadletter}, + } + + routingFunc := IntoRoutingFunc(topics) + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + destination := routingFunc.Route(tc.isValid, janitor.Message{Format: tc.format}) + if destination != tc.destination { + t.Errorf("expected and actual destination not the same (%s != %s)", tc.destination, destination) + } + }) + } +} diff --git a/validator/internal/registry/apicuriosr/apicuriosr.go b/validator/internal/registry/apicuriosr/apicuriosr.go new file mode 100644 index 0000000..6166ad3 --- /dev/null +++ b/validator/internal/registry/apicuriosr/apicuriosr.go @@ -0,0 +1,289 @@ +package apicuriosr + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "github.com/dataphos/lib-retry/pkg/retry" + "io" + "log" + "net/http" + "strings" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/lib-httputil/pkg/httputil" + + "github.com/pkg/errors" +) + +// SchemaRegistry is a proxy for communicating with the janitor schema registry server. +type SchemaRegistry struct { + Url string + Timeouts TimeoutSettings + GroupID string +} + +// TimeoutSettings defines the maximum amount of time for each get, register or update request. 
+type TimeoutSettings struct { + GetTimeout time.Duration + RegisterTimeout time.Duration + UpdateTimeout time.Duration +} + +var DefaultTimeoutSettings = TimeoutSettings{ + GetTimeout: 4 * time.Second, + RegisterTimeout: 10 * time.Second, + UpdateTimeout: 10 * time.Second, +} + +// New returns an instance of SchemaRegistry. +// +// Performs a health check to see if the schema registry is available, retrying periodically until the context is cancelled +// or the health check succeeds. +func New(ctx context.Context, url string, timeouts TimeoutSettings, groupID string) (*SchemaRegistry, error) { + ctx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + if err := retry.Do(ctx, retry.WithJitter(retry.Constant(2*time.Second)), func(ctx context.Context) error { + return httputil.HealthCheck(ctx, url+"/health") + }); err != nil { + return nil, errors.Wrapf(err, "attempting to reach schema registry at %s failed", url) + } + + return &SchemaRegistry{ + Url: url, + Timeouts: timeouts, + GroupID: groupID, + }, nil +} + +func (sr *SchemaRegistry) Get(ctx context.Context, id, version string) ([]byte, error) { + ctx, cancel := context.WithTimeout(ctx, sr.Timeouts.GetTimeout) + defer cancel() + + response, err := sr.sendGetRequest(ctx, id, version) + if err != nil { + return nil, err + } + defer response.Body.Close() + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, errors.Wrap(err, errtemplates.ReadingResponseBodyFailed) + } + + if response.StatusCode != http.StatusOK { + if response.StatusCode == http.StatusNotFound { + return nil, errors.Wrapf(registry.ErrNotFound, "fetching schema %s/%s failed", id, version) + } + return nil, errors.Wrapf(errtemplates.BadHttpStatusCode(response.StatusCode), "fetching schema %s/%s resulted in a bad status code", id, version) + } + + return body, nil +} + +func (sr *SchemaRegistry) sendGetRequest(ctx context.Context, id string, version string) (*http.Response, error) { + url := fmt.Sprintf("%s/apis/registry/v2/groups/%s/artifacts/%s/versions/%s", sr.Url, sr.GroupID, id, version) + + request, err := httputil.Get(ctx, url) + if err != nil { + return nil, err + } + + response, err := http.DefaultClient.Do(request) + if err != nil { + return nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodGet, url)) + } + + return response, nil +} + +func (sr *SchemaRegistry) GetLatest(ctx context.Context, id string) ([]byte, error) { + ctx, cancel := context.WithTimeout(ctx, sr.Timeouts.GetTimeout) + defer cancel() + + response, err := sr.sendGetLatestRequest(ctx, id) + if err != nil { + return nil, err + } + defer func() { + err := response.Body.Close() + if err != nil { + log.Fatal(err) + } + }() + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, errors.Wrap(err, errtemplates.ReadingResponseBodyFailed) + } + + if response.StatusCode != http.StatusOK { + if response.StatusCode == http.StatusNotFound { + return nil, errors.Wrapf(registry.ErrNotFound, "fetching schema %s/latest failed", id) + } + return nil, errors.Wrapf(errtemplates.BadHttpStatusCode(response.StatusCode), "fetching schema %s/latest resulted in a bad status code", id) + } + + return body, nil +} + +func (sr *SchemaRegistry) sendGetLatestRequest(ctx context.Context, id string) (*http.Response, error) { + url := fmt.Sprintf("%s/apis/registry/v2/groups/%s/artifacts/%s/versions/latest", sr.Url, sr.GroupID, id) + + request, err := httputil.Get(ctx, url) + if err != nil { + return nil, err + } + + response, err := 
http.DefaultClient.Do(request) + if err != nil { + return nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodGet, url)) + } + + return response, nil +} + +func (sr *SchemaRegistry) Register(ctx context.Context, schema []byte, schemaType, compMode, valMode string) (string, string, error) { + ctx, cancel := context.WithTimeout(ctx, sr.Timeouts.RegisterTimeout) + defer cancel() + + response, err := sr.sendRegisterRequest(ctx, schema, schemaType) + if err != nil { + return "", "", err + } + defer response.Body.Close() + + // this needs to be before checking the status code because the response body always needs to be read + body, err := io.ReadAll(response.Body) + if err != nil { + return "", "", errors.Wrap(err, errtemplates.ReadingResponseBodyFailed) + } + + // the schema registry returns either 200, if the new schema version is successfully inserted, or 409 if + // the given schema already exists + if response.StatusCode != http.StatusOK && response.StatusCode != http.StatusConflict { + return "", "", errtemplates.BadHttpStatusCode(response.StatusCode) + } + + var info insertInfo + if err = json.Unmarshal(body, &info); err != nil { + return "", "", errors.Wrap(err, errtemplates.UnmarshallingJSONFailed) + } + + compResponse, valResponse, err := sr.sendRulesRequest(ctx, info.Id, compMode, valMode) + if err != nil { + return "", "", err + } + defer compResponse.Body.Close() + defer valResponse.Body.Close() + + // the schema registry returns 204 if the new rule is successfully added + if compResponse.StatusCode != http.StatusNoContent { + return "", "", errtemplates.BadHttpStatusCode(compResponse.StatusCode) + } + if valResponse.StatusCode != http.StatusNoContent { + return "", "", errtemplates.BadHttpStatusCode(valResponse.StatusCode) + } + + return info.Id, info.Version, nil +} + +func (sr *SchemaRegistry) sendRegisterRequest(ctx context.Context, schema []byte, schemaType string) (*http.Response, error) { + url := fmt.Sprintf("%s/apis/registry/v2/groups/%s/artifacts", sr.Url, sr.GroupID) + + request, err := httputil.Post(ctx, url, fmt.Sprintf("application/json; artifactType=%s", strings.ToUpper(schemaType)), bytes.NewBuffer(schema)) + if err != nil { + return nil, err + } + + response, err := http.DefaultClient.Do(request) + if err != nil { + return nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodPost, url)) + } + + return response, nil +} + +type ruleRequest struct { + Type string `json:"type"` + Config string `json:"config"` +} + +func (sr *SchemaRegistry) sendRulesRequest(ctx context.Context, id, compMode, valMode string) (*http.Response, *http.Response, error) { + url := fmt.Sprintf("%s/apis/registry/v2/groups/%s/artifacts/%s/rules", sr.Url, sr.GroupID, id) + + // this can't generate an error, so it's safe to ignore + compRule, _ := json.Marshal(ruleRequest{Type: "COMPATIBILITY", Config: strings.ToUpper(compMode)}) + request, err := httputil.Post(ctx, url, "application/json", bytes.NewBuffer(compRule)) + if err != nil { + return nil, nil, err + } + + compResponse, err := http.DefaultClient.Do(request) + if err != nil { + return nil, nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodPost, url)) + } + + // this can't generate an error, so it's safe to ignore + valRule, _ := json.Marshal(ruleRequest{Type: "VALIDITY", Config: strings.ToUpper(valMode)}) + request, err = httputil.Post(ctx, url, "application/json", bytes.NewBuffer(valRule)) + if err != nil { + return nil, nil, err + } + + valResponse, err := http.DefaultClient.Do(request) 
+ if err != nil { + return nil, nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodPost, url)) + } + + return compResponse, valResponse, nil +} + +func (sr *SchemaRegistry) Update(ctx context.Context, id string, schema []byte) (string, error) { + ctx, cancel := context.WithTimeout(ctx, sr.Timeouts.UpdateTimeout) + defer cancel() + + response, err := sr.sendUpdateRequest(ctx, id, schema) + if err != nil { + return "", err + } + defer response.Body.Close() + + // this needs to be before checking the status code because the response body always needs to be read + body, err := io.ReadAll(response.Body) + if err != nil { + return "", errors.Wrap(err, errtemplates.ReadingResponseBodyFailed) + } + + // the schema registry returns 200, if the new schema version is successfully inserted + if response.StatusCode != http.StatusOK { + return "", errtemplates.BadHttpStatusCode(response.StatusCode) + } + + var info insertInfo + if err = json.Unmarshal(body, &info); err != nil { + return "", errors.Wrap(err, errtemplates.UnmarshallingJSONFailed) + } + + return info.Version, nil +} + +func (sr *SchemaRegistry) sendUpdateRequest(ctx context.Context, id string, schema []byte) (*http.Response, error) { + url := fmt.Sprintf("%s/apis/registry/v2/groups/%s/artifacts/%s", sr.Url, sr.GroupID, id) + + request, err := httputil.Put(ctx, url, "application/json", bytes.NewBuffer(schema)) + if err != nil { + return nil, err + } + + response, err := http.DefaultClient.Do(request) + if err != nil { + return nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodPut, url)) + } + + return response, nil +} diff --git a/validator/internal/registry/apicuriosr/apicuriosr_test.go b/validator/internal/registry/apicuriosr/apicuriosr_test.go new file mode 100644 index 0000000..4e02e07 --- /dev/null +++ b/validator/internal/registry/apicuriosr/apicuriosr_test.go @@ -0,0 +1,192 @@ +package apicuriosr + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/httptest" + "os" + "reflect" + "testing" + "time" + + "github.com/pkg/errors" +) + +func TestNew(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + healthChecked := false + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/health" { + healthChecked = true + w.WriteHeader(http.StatusOK) + } else { + t.Fatal("wrong endpoint hit") + } + })) + + _, err := New(context.Background(), srv.URL, DefaultTimeoutSettings, "default") + if err != nil { + t.Fatal(err) + } + if !healthChecked { + t.Fatal("health check not called") + } +} + +func TestNewTimeout(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/health" { + time.Sleep(2 * time.Minute) + w.WriteHeader(http.StatusOK) + } else { + t.Fatal("wrong endpoint hit") + } + })) + + ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond) + defer cancel() + + _, err := New(ctx, srv.URL, DefaultTimeoutSettings, "default") + if !errors.Is(err, context.DeadlineExceeded) { + t.Fatal("expected timeout") + } +} + +func TestGet(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + schema := []byte("some specification") + + id, version := "1", "1" + + srv := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { + if request.Method == http.MethodGet 
&& request.URL.Path == fmt.Sprintf("/apis/registry/v2/groups/default/artifacts/%s/versions/%s", id, version) { + _, _ = writer.Write(schema) + } else { + t.Fatal("wrong endpoint called") + } + })) + defer srv.Close() + + registry := SchemaRegistry{ + Url: srv.URL, + Timeouts: DefaultTimeoutSettings, + } + + spec, err := registry.Get(context.Background(), id, version) + if err != nil { + t.Fatal(err) + } + + if !reflect.DeepEqual(schema, spec) { + t.Fatalf("expected and actual spec not the same (%s != %s)", schema, spec) + } +} + +func TestRegister(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + schema := []byte("some specification") + schemaType := "json" + + requestResponse := insertInfo{ + Id: "1", + Version: "1", + } + + srv := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { + if request.Method == http.MethodPost && request.URL.Path == "/apis/registry/v2/groups/default/artifacts" { + defer request.Body.Close() + registration, err := io.ReadAll(request.Body) + if err != nil { + t.Fatal(err) + } + if string(registration) != string(schema) { + t.Fatal("expected and actual schema not the same") + } + + writer.WriteHeader(http.StatusOK) + if err := json.NewEncoder(writer).Encode(requestResponse); err != nil { + t.Fatal(err) + } + } else if request.Method == http.MethodPost && request.URL.Path == fmt.Sprintf("/apis/registry/v2/groups/default/artifacts/%s/rules", requestResponse.Id) { + writer.WriteHeader(http.StatusNoContent) + } else { + t.Fatal("wrong endpoint called") + } + })) + defer srv.Close() + + registry := SchemaRegistry{ + Url: srv.URL, + Timeouts: DefaultTimeoutSettings, + } + + id, version, err := registry.Register(context.Background(), schema, schemaType, "none", "none") + if err != nil { + t.Fatal(err) + } + + if id != requestResponse.Id || version != requestResponse.Version { + t.Fatal("response not parsed correctly") + } +} + +func TestUpdate(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + schema := []byte("some specification") + schemaId := "1" + + requestResponse := insertInfo{ + Id: schemaId, + Version: "2", + } + + srv := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { + if request.Method == http.MethodPut && request.URL.Path == fmt.Sprintf("/apis/registry/v2/groups/default/artifacts/%s", schemaId) { + defer request.Body.Close() + registration, err := io.ReadAll(request.Body) + if err != nil { + t.Fatal(err) + } + if string(registration) != string(schema) { + t.Fatal("expected and actual schema not the same") + } + + writer.WriteHeader(http.StatusOK) + if err := json.NewEncoder(writer).Encode(requestResponse); err != nil { + t.Fatal(err) + } + } else { + t.Fatal("wrong endpoint called") + } + })) + defer srv.Close() + + registry := SchemaRegistry{ + Url: srv.URL, + Timeouts: DefaultTimeoutSettings, + } + + version, err := registry.Update(context.Background(), schemaId, schema) + if err != nil { + t.Fatal(err) + } + + if version != requestResponse.Version { + t.Fatal("response not parsed correctly") + } +} diff --git a/validator/internal/registry/apicuriosr/dto.go b/validator/internal/registry/apicuriosr/dto.go new file mode 100644 index 0000000..4b6507c --- /dev/null +++ b/validator/internal/registry/apicuriosr/dto.go @@ -0,0 +1,20 @@ +package apicuriosr + +type insertInfo struct { + Name string `json:"name"` + Description string `json:"description"` + CreatedBy string `json:"createdBy"` + CreatedOn string `json:"createdOn"` + 
ModifiedBy string `json:"modifiedBy"` + ModifiedOn string `json:"modifiedOn"` + Id string `json:"id"` + Version string `json:"version"` + Type string `json:"type"` + GlobalId int64 `json:"globalId"` + State string `json:"state"` + GroupId string `json:"groupId"` + ContentId int64 `json:"contentId"` + Labels []string `json:"labels"` + Properties []interface{} `json:"properties"` + References []interface{} `json:"references"` +} diff --git a/validator/internal/registry/cache.go b/validator/internal/registry/cache.go new file mode 100644 index 0000000..3bd2894 --- /dev/null +++ b/validator/internal/registry/cache.go @@ -0,0 +1,69 @@ +package registry + +import ( + "context" + + lru "github.com/hashicorp/golang-lru" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + "golang.org/x/sync/singleflight" +) + +var cachedHitsCount = promauto.NewCounter(prometheus.CounterOpts{ + Namespace: "schema_registry", + Name: "schemas_cache_hits_total", + Help: "The total number of schemas cache hits", +}) + +// cached decorates SchemaRegistry with a lru cache. +type cached struct { + SchemaRegistry + cache *lru.TwoQueueCache + group singleflight.Group +} + +// newCache returns a new cached. +func newCache(registry SchemaRegistry, size int) (*cached, error) { + cache, err := lru.New2Q(size) + if err != nil { + return nil, err + } + + return &cached{ + SchemaRegistry: registry, + cache: cache, + group: singleflight.Group{}, + }, nil +} + +// Get overrides the SchemaRegistry.Get method, caching each call to the underlying SchemaRegistry, while also +// making sure there's only one inflight request for the same key (if multiple goroutines request the same schema, +// only one request is actually sent down, the rest wait for the first one to share its result). 
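+// Entries are cached per (id, version) pair, so different versions of the same schema are cached independently.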
+func (c *cached) Get(ctx context.Context, id, version string) ([]byte, error) { + // this should be faster than string concatenation + arrKey := [2]string{id, version} + + if v, ok := c.cache.Get(arrKey); ok { + // cache hit + cachedHitsCount.Inc() + return v.([]byte), nil + } + + // cache miss, we need a string version of the key to satisfy the singleflight.Group method signature + key := id + "_" + version + + v, err, _ := c.group.Do(key, func() (interface{}, error) { + schema, err := c.SchemaRegistry.Get(ctx, id, version) + if err != nil { + return nil, err + } + + c.cache.Add(arrKey, schema) + + return schema, nil + }) + if err != nil { + return nil, err + } + return v.([]byte), nil +} diff --git a/validator/internal/registry/cache_test.go b/validator/internal/registry/cache_test.go new file mode 100644 index 0000000..c54581d --- /dev/null +++ b/validator/internal/registry/cache_test.go @@ -0,0 +1,57 @@ +package registry + +import ( + "bytes" + "context" + "testing" + + "github.com/pkg/errors" +) + +func TestCacheGet(t *testing.T) { + t.Run("get returns the correct result", func(t *testing.T) { + sr := NewMock() + c, err := newCache(sr, 10) + if err != nil { + t.Error(err) + } + + id, version := "1", "1" + schema := []byte("schema stored in the registry") + sr.SetGetResponse(id, version, schema, nil) + + result, err := c.Get(context.Background(), id, version) + if err != nil { + t.Error(err) + } + + if !bytes.Equal(result, schema) { + t.Error("result and actual not the same") + } + + result1, err := c.Get(context.Background(), id, version) + if err != nil { + t.Error(err) + } + + if !bytes.Equal(result1, schema) { + t.Error("cached result and actual not the same") + } + }) + + t.Run("error propagation", func(t *testing.T) { + sr := NewMock() + c, err := newCache(sr, 10) + if err != nil { + t.Error(err) + } + + id, version := "1", "1" + sr.SetGetResponse(id, version, nil, errors.New("oops")) + + _, err = c.Get(context.Background(), id, version) + if err == nil { + t.Error("expected an error") + } + }) +} diff --git a/validator/internal/registry/janitorsr/dto.go b/validator/internal/registry/janitorsr/dto.go new file mode 100644 index 0000000..4256dd6 --- /dev/null +++ b/validator/internal/registry/janitorsr/dto.go @@ -0,0 +1,30 @@ +package janitorsr + +import "time" + +type VersionDetails struct { + VersionID string `json:"version_id,omitempty"` + Version string `json:"version"` + SchemaID string `json:"schema_id"` + Specification string `json:"specification"` + Description string `json:"description"` + SchemaHash string `json:"schema_hash"` + CreatedAt time.Time `json:"created_at"` + VersionDeactivated bool `json:"version_deactivated"` +} + +type registrationRequest struct { + Description string `json:"description"` + Specification string `json:"specification"` + Name string `json:"name"` + SchemaType string `json:"schema_type"` + CompatibilityMode string `json:"compatibility_mode"` + ValidityMode string `json:"validity_mode"` + GroupId string `json:"publisher_id"` +} + +type insertInfo struct { + Id string `json:"identification"` + Version string `json:"version"` + Message string `json:"message"` +} diff --git a/validator/internal/registry/janitorsr/janitorsr.go b/validator/internal/registry/janitorsr/janitorsr.go new file mode 100644 index 0000000..24e505b --- /dev/null +++ b/validator/internal/registry/janitorsr/janitorsr.go @@ -0,0 +1,286 @@ +package janitorsr + +import ( + "bytes" + "context" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "time" + + 
"github.com/dataphos/aquarium-janitor-standalone-internal/internal/errtemplates" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/registry" + "github.com/dataphos/lib-httputil/pkg/httputil" + "github.com/dataphos/lib-retry/pkg/retry" + + "github.com/pkg/errors" +) + +// SchemaRegistry is a proxy for communicating with the janitor schema registry server. +type SchemaRegistry struct { + Url string + Timeouts TimeoutSettings + GroupID string +} + +// TimeoutSettings defines the maximum amount of time for each get, register or update request. +type TimeoutSettings struct { + GetTimeout time.Duration + RegisterTimeout time.Duration + UpdateTimeout time.Duration +} + +var DefaultTimeoutSettings = TimeoutSettings{ + GetTimeout: 4 * time.Second, + RegisterTimeout: 10 * time.Second, + UpdateTimeout: 10 * time.Second, +} + +// New returns an instance of SchemaRegistry. +// +// Performs a health check to see if the schema registry is available, retrying periodically until the context is cancelled +// or the health check succeeds. +func New(ctx context.Context, url string, timeouts TimeoutSettings, groupID string) (*SchemaRegistry, error) { + ctx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + if err := retry.Do(ctx, retry.WithJitter(retry.Constant(2*time.Second)), func(ctx context.Context) error { + return httputil.HealthCheck(ctx, url+"/health") + }); err != nil { + return nil, errors.Wrapf(err, "attempting to reach schema registry at %s failed", url) + } + + return &SchemaRegistry{ + Url: url, + Timeouts: timeouts, + GroupID: groupID, + }, nil +} + +func (sr *SchemaRegistry) Get(ctx context.Context, id, version string) ([]byte, error) { + ctx, cancel := context.WithTimeout(ctx, sr.Timeouts.GetTimeout) + defer cancel() + + response, err := sr.sendGetRequest(ctx, id, version) + if err != nil { + return nil, err + } + defer func() { + err := response.Body.Close() + if err != nil { + log.Fatal(err) + } + }() + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, errors.Wrap(err, errtemplates.ReadingResponseBodyFailed) + } + + if response.StatusCode != http.StatusOK { + if response.StatusCode == http.StatusNotFound { + return nil, errors.Wrapf(registry.ErrNotFound, "fetching schema %s/%s failed", id, version) + } + return nil, errors.Wrapf(errtemplates.BadHttpStatusCode(response.StatusCode), "fetching schema %s/%s resulted in a bad status code", id, version) + } + + var schema VersionDetails + if err = json.Unmarshal(body, &schema); err != nil { + return nil, errors.Wrap(err, errtemplates.UnmarshallingJSONFailed) + } + + specification, err := base64.StdEncoding.DecodeString(schema.Specification) + if err != nil { + return nil, errors.Wrap(err, "decoding schema failed") + } + + return specification, nil +} + +func (sr *SchemaRegistry) sendGetRequest(ctx context.Context, id, version string) (*http.Response, error) { + url := fmt.Sprintf("%s/schemas/%s/versions/%s", sr.Url, id, version) + + request, err := httputil.Get(ctx, url) + if err != nil { + return nil, err + } + + response, err := http.DefaultClient.Do(request) + if err != nil { + return nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodGet, url)) + } + + return response, nil +} + +// GetLatest returns latest version of schema +// +// Unlike Get, Register and Update methods, GetLatest returns whole schema with metadata, and not only +// schema specification +func (sr *SchemaRegistry) GetLatest(ctx context.Context, id string) ([]byte, error) { + ctx, cancel := 
context.WithTimeout(ctx, sr.Timeouts.GetTimeout) + defer cancel() + + response, err := sr.sendGetLatestRequest(ctx, id) + if err != nil { + return nil, err + } + defer func() { + err := response.Body.Close() + if err != nil { + log.Fatal(err) + } + }() + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, errors.Wrap(err, errtemplates.ReadingResponseBodyFailed) + } + + if response.StatusCode != http.StatusOK { + if response.StatusCode == http.StatusNotFound { + return nil, errors.Wrapf(registry.ErrNotFound, "fetching schema %s/latest failed", id) + } + return nil, errors.Wrapf(errtemplates.BadHttpStatusCode(response.StatusCode), "fetching schema %s/latest resulted in a bad status code", id) + } + + return body, nil +} + +func (sr *SchemaRegistry) sendGetLatestRequest(ctx context.Context, id string) (*http.Response, error) { + url := fmt.Sprintf("%s/schemas/%s/versions/latest", sr.Url, id) + + request, err := httputil.Get(ctx, url) + if err != nil { + return nil, err + } + + response, err := http.DefaultClient.Do(request) + if err != nil { + return nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodGet, url)) + } + + return response, nil +} + +func (sr *SchemaRegistry) Register(ctx context.Context, schema []byte, schemaType, compMode, valMode string) (string, string, error) { + ctx, cancel := context.WithTimeout(ctx, sr.Timeouts.RegisterTimeout) + defer cancel() + + response, err := sr.sendRegisterRequest(ctx, schema, schemaType, compMode, valMode) + if err != nil { + return "", "", err + } + defer func() { + err := response.Body.Close() + if err != nil { + log.Fatal(err) + } + }() + + // this needs to be before checking the status code because the response body always needs to be read + body, err := io.ReadAll(response.Body) + if err != nil { + return "", "", errors.Wrap(err, errtemplates.ReadingResponseBodyFailed) + } + + // the schema registry returns either 201, if the new schema version is successfully inserted, or 409 if + // the given schema already exists + if response.StatusCode != http.StatusCreated && response.StatusCode != http.StatusConflict { + return "", "", errtemplates.BadHttpStatusCode(response.StatusCode) + } + + var info insertInfo + if err = json.Unmarshal(body, &info); err != nil { + return "", "", errors.Wrap(err, errtemplates.UnmarshallingJSONFailed) + } + + return info.Id, info.Version, nil +} + +func (sr *SchemaRegistry) sendRegisterRequest(ctx context.Context, schema []byte, schemaType, compMode, valMode string) (*http.Response, error) { + // this can't generate an error, so it's safe to ignore + data, _ := json.Marshal(registrationRequest{ + Specification: string(schema), + SchemaType: schemaType, + CompatibilityMode: compMode, + ValidityMode: valMode, + GroupId: sr.GroupID, + }) + + url := fmt.Sprintf("%s/schemas", sr.Url) + + request, err := httputil.Post(ctx, url, "application/json", bytes.NewBuffer(data)) + if err != nil { + return nil, err + } + + response, err := http.DefaultClient.Do(request) + if err != nil { + return nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodPost, url)) + } + + return response, nil +} + +type schemaUpdateRequest struct { + Description string `json:"description"` + Specification string `json:"specification"` +} + +func (sr *SchemaRegistry) Update(ctx context.Context, id string, schema []byte) (string, error) { + ctx, cancel := context.WithTimeout(ctx, sr.Timeouts.UpdateTimeout) + defer cancel() + + response, err := sr.sendUpdateRequest(ctx, id, schema) + if err != nil { + 
return "", err + } + defer func() { + err := response.Body.Close() + if err != nil { + log.Fatal(err) + } + }() + + // this needs to be before checking the status code because the response body always needs to be read + body, err := io.ReadAll(response.Body) + if err != nil { + return "", errors.Wrap(err, errtemplates.ReadingResponseBodyFailed) + } + + // the schema registry returns either 200, if the new schema version is successfully inserted, or 409 if + // the given schema already exists + if response.StatusCode != http.StatusOK && response.StatusCode != http.StatusConflict { + return "", errtemplates.BadHttpStatusCode(response.StatusCode) + } + + var info insertInfo + if err = json.Unmarshal(body, &info); err != nil { + return "", errors.Wrap(err, errtemplates.UnmarshallingJSONFailed) + } + + return info.Version, nil +} + +func (sr *SchemaRegistry) sendUpdateRequest(ctx context.Context, id string, schema []byte) (*http.Response, error) { + // this can't generate an error, so it's safe to ignore + data, _ := json.Marshal(schemaUpdateRequest{Specification: string(schema)}) + + url := fmt.Sprintf("%s/schemas/%s", sr.Url, id) + + request, err := httputil.Put(ctx, url, "application/json", bytes.NewBuffer(data)) + if err != nil { + return nil, err + } + + response, err := http.DefaultClient.Do(request) + if err != nil { + return nil, errors.Wrap(err, errtemplates.HttpRequestToUrlFailed(http.MethodPut, url)) + } + + return response, nil +} diff --git a/validator/internal/registry/janitorsr/janitorsr_test.go b/validator/internal/registry/janitorsr/janitorsr_test.go new file mode 100644 index 0000000..70788c5 --- /dev/null +++ b/validator/internal/registry/janitorsr/janitorsr_test.go @@ -0,0 +1,194 @@ +package janitorsr + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "log" + "net/http" + "net/http/httptest" + "reflect" + "testing" + "time" + + "github.com/pkg/errors" +) + +func TestNew(t *testing.T) { + healthChecked := false + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/health" { + healthChecked = true + w.WriteHeader(http.StatusOK) + } else { + t.Fatal("wrong endpoint hit") + } + })) + + _, err := New(context.Background(), srv.URL, DefaultTimeoutSettings, "default") + if err != nil { + t.Fatal(err) + } + if !healthChecked { + t.Fatal("health check not called") + } +} + +func TestNewTimeout(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/health" { + time.Sleep(2 * time.Minute) + w.WriteHeader(http.StatusOK) + } else { + t.Fatal("wrong endpoint hit") + } + })) + + ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond) + defer cancel() + + _, err := New(ctx, srv.URL, DefaultTimeoutSettings, "default") + if !errors.Is(err, context.DeadlineExceeded) { + t.Fatal("expected timeout") + } +} + +func TestGet(t *testing.T) { + schema := []byte("some specification") + + details := VersionDetails{ + VersionID: "1", + Version: "1", + SchemaID: "1", + Specification: base64.StdEncoding.EncodeToString(schema), + Description: "some description", + SchemaHash: "some schema hash", + CreatedAt: time.Now(), + VersionDeactivated: false, + } + + srv := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { + if request.Method == http.MethodGet && request.URL.Path == fmt.Sprintf("/schemas/%s/versions/%s", details.SchemaID, 
details.Version) { + _ = json.NewEncoder(writer).Encode(details) + } else { + t.Fatal("wrong endpoint called") + } + })) + defer srv.Close() + + registry := SchemaRegistry{ + Url: srv.URL, + Timeouts: DefaultTimeoutSettings, + } + + spec, err := registry.Get(context.Background(), details.SchemaID, details.Version) + if err != nil { + t.Fatal(err) + } + + if !reflect.DeepEqual(schema, spec) { + t.Fatalf("expected and actual spec not the same (%s != %s)", schema, spec) + } +} + +func TestRegister(t *testing.T) { + schema := []byte("some specification") + schemaType := "json" + + requestResponse := insertInfo{ + Id: "1", + Version: "1", + } + + srv := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { + if request.Method == http.MethodPost && request.URL.Path == "/schemas" { + defer func() { + err := request.Body.Close() + if err != nil { + log.Fatal(err) + } + }() + var registration registrationRequest + if err := json.NewDecoder(request.Body).Decode(®istration); err != nil { + t.Fatal(err) + } + if registration.Specification != string(schema) { + t.Fatal("expected and actual schema not the same") + } + + writer.WriteHeader(http.StatusCreated) + if err := json.NewEncoder(writer).Encode(requestResponse); err != nil { + t.Fatal(err) + } + } else { + t.Fatal("wrong endpoint called") + } + })) + defer srv.Close() + + registry := SchemaRegistry{ + Url: srv.URL, + Timeouts: DefaultTimeoutSettings, + } + + id, version, err := registry.Register(context.Background(), schema, schemaType, "none", "none") + if err != nil { + t.Fatal(err) + } + + if id != requestResponse.Id || version != requestResponse.Version { + t.Fatal("response not parsed correctly") + } +} + +func TestUpdate(t *testing.T) { + schema := []byte("some specification") + schemaId := "1" + + requestResponse := insertInfo{ + Id: schemaId, + Version: "2", + } + + srv := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { + if request.Method == http.MethodPut && request.URL.Path == fmt.Sprintf("/schemas/%s", schemaId) { + defer func() { + err := request.Body.Close() + if err != nil { + log.Fatal(err) + } + }() + var registration registrationRequest + if err := json.NewDecoder(request.Body).Decode(®istration); err != nil { + t.Fatal(err) + } + if registration.Specification != string(schema) { + t.Fatal("expected and actual schema not the same") + } + + writer.WriteHeader(http.StatusOK) + if err := json.NewEncoder(writer).Encode(requestResponse); err != nil { + t.Fatal(err) + } + } else { + t.Fatal("wrong endpoint called") + } + })) + defer srv.Close() + + registry := SchemaRegistry{ + Url: srv.URL, + Timeouts: DefaultTimeoutSettings, + } + + version, err := registry.Update(context.Background(), schemaId, schema) + if err != nil { + t.Fatal(err) + } + + if version != requestResponse.Version { + t.Fatal("response not parsed correctly") + } +} diff --git a/validator/internal/registry/janitorsr/testdata/schema.avsc b/validator/internal/registry/janitorsr/testdata/schema.avsc new file mode 100644 index 0000000..bda4977 --- /dev/null +++ b/validator/internal/registry/janitorsr/testdata/schema.avsc @@ -0,0 +1,35 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "name", + "type": "string" + }, + { + "name": "age", + "type": "int" + }, + { + "name": "collection", + "type": { + "type": "array", + "items": "string" + } + }, + { + "name": "foo", + "type": { + "name": "foo", + "type": "record", + "fields": [ + { + "name": 
"bar", + "type": "string" + } + ] + } + } + ] +} \ No newline at end of file diff --git a/validator/internal/registry/janitorsr/testdata/schema.csvs b/validator/internal/registry/janitorsr/testdata/schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/registry/janitorsr/testdata/schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/registry/janitorsr/testdata/schema.json b/validator/internal/registry/janitorsr/testdata/schema.json new file mode 100644 index 0000000..e38e3b0 --- /dev/null +++ b/validator/internal/registry/janitorsr/testdata/schema.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": false, + "required": [ + "id", + "first_name", + "last_name", + "email" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + } + } +} diff --git a/validator/internal/registry/janitorsr/testdata/schema.xsd b/validator/internal/registry/janitorsr/testdata/schema.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/registry/janitorsr/testdata/schema.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/registry/mock.go b/validator/internal/registry/mock.go new file mode 100644 index 0000000..87eadf7 --- /dev/null +++ b/validator/internal/registry/mock.go @@ -0,0 +1,93 @@ +package registry + +import "context" + +type Mock struct { + getSchemaResponse map[string]mockGetSchemaResponse + getLatestSchemaResponse map[string]mockGetLatestSchemaResponse + registrationResponse map[string]mockRegisterResponse + updateResponse map[string]mockUpdateResponse +} + +type mockGetSchemaResponse struct { + schema []byte + err error +} + +type mockGetLatestSchemaResponse struct { + schema []byte + err error +} + +type mockRegisterResponse struct { + id string + version string + err error +} + +type mockUpdateResponse struct { + version string + err error +} + +func NewMock() *Mock { + return &Mock{ + getSchemaResponse: map[string]mockGetSchemaResponse{}, + getLatestSchemaResponse: map[string]mockGetLatestSchemaResponse{}, + registrationResponse: map[string]mockRegisterResponse{}, + updateResponse: map[string]mockUpdateResponse{}, + } +} + +func (m *Mock) SetGetResponse(id, version string, schema []byte, err error) { + key := id + "_" + version + m.getSchemaResponse[key] = mockGetSchemaResponse{ + schema: schema, + err: err, + } +} + +func (m *Mock) Get(_ context.Context, id, version string) ([]byte, error) { + key := id + "_" + version + response := 
m.getSchemaResponse[key] + return response.schema, response.err +} + +func (m *Mock) SetGetLatestResponse(id string, schema []byte, err error) { + key := id + m.getLatestSchemaResponse[key] = mockGetLatestSchemaResponse{ + schema: schema, + err: err, + } +} + +func (m *Mock) GetLatest(_ context.Context, id string) ([]byte, error) { + key := id + response := m.getLatestSchemaResponse[key] + return response.schema, response.err +} + +func (m *Mock) SetRegisterResponse(schema []byte, id, version string, err error) { + m.registrationResponse[string(schema)] = mockRegisterResponse{ + id: id, + version: version, + err: err, + } +} + +func (m *Mock) Register(_ context.Context, schema []byte, _, _, _ string) (string, string, error) { + response := m.registrationResponse[string(schema)] + return response.id, response.version, response.err +} + +func (m *Mock) SetUpdateResponse(id, version string, err error) { + m.updateResponse[id] = mockUpdateResponse{ + version: version, + err: err, + } +} + +func (m *Mock) Update(_ context.Context, id string, _ []byte) (string, error) { + response := m.updateResponse[id] + return response.version, response.err +} diff --git a/validator/internal/registry/registry.go b/validator/internal/registry/registry.go new file mode 100644 index 0000000..cb24aa0 --- /dev/null +++ b/validator/internal/registry/registry.go @@ -0,0 +1,35 @@ +// Package registry exposes common functionalities of all schema registries. +package registry + +import ( + "context" + + "github.com/pkg/errors" +) + +var ErrNotFound = errors.New("no schema registered under given id and version") + +// SchemaRegistry models schema registries. +type SchemaRegistry interface { + // Get returns the schema stored under the given id and version. + // If no schema exists, ErrNotFound must be returned. + Get(ctx context.Context, id, version string) ([]byte, error) + // Get(ctx context.Context, id, version string) ([]byte, error) + + // GetLatest returns the whole schema, including the metadata and all versions. + // If no schema exists under the specified id, ErrNotFound must be returned. + GetLatest(ctx context.Context, id string) ([]byte, error) + + // Register registers a new schema and returns the id and version it was registered under. + Register(ctx context.Context, schema []byte, schemaType, compMode, valMode string) (string, string, error) + // Register(ctx context.Context, schema []byte, schemaType, compMode, valMode string) (string, string, error) + + // Update updates the schema stored under the given id and returns the version it was registered under. + Update(ctx context.Context, id string, schema []byte) (string, error) + // Update(ctx context.Context, id string, schema []byte) (string, error) +} + +// WithCache decorates the given SchemaRegistry with an in-memory cache of the given size. +func WithCache(registry SchemaRegistry, size int) (SchemaRegistry, error) { + return newCache(registry, size) +} diff --git a/validator/internal/schemagen/cmd.go b/validator/internal/schemagen/cmd.go new file mode 100644 index 0000000..827ece3 --- /dev/null +++ b/validator/internal/schemagen/cmd.go @@ -0,0 +1,23 @@ +package schemagen + +import ( + "bytes" + "os/exec" + + "github.com/pkg/errors" +) + +// ExternalCmdSchemaGenerator generates the schema by calling the given cmd and passing the data to its stdin.
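+//
+// A minimal usage sketch (the interpreter and script path are illustrative; they mirror the
+// json schema generator shipped alongside this package):
+//
+//	cmd := exec.Command("python", "json/json_schema_gen.py")
+//	schema, err := ExternalCmdSchemaGenerator(cmd, []byte(`{"id": 100}`))
+//	if errors.Is(err, ErrDeadletter) {
+//		// the command produced no output, so the input is treated as a deadletter
+//	}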
+func ExternalCmdSchemaGenerator(cmd *exec.Cmd, data []byte) ([]byte, error) { + cmd.Stdin = bytes.NewReader(data) + + output, err := cmd.CombinedOutput() + if err != nil { + return nil, errors.Wrap(err, string(output)) + } + if len(output) == 0 { + return nil, ErrDeadletter + } + + return output, nil +} diff --git a/validator/internal/schemagen/cmd_test.go b/validator/internal/schemagen/cmd_test.go new file mode 100644 index 0000000..62a99a0 --- /dev/null +++ b/validator/internal/schemagen/cmd_test.go @@ -0,0 +1,19 @@ +package schemagen + +import ( + "os" + "os/exec" + "testing" +) + +func TestOverScriptSchemaGenerator(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + + cmd := exec.Command("python", "json/json_schema_gen.py") + _, err := ExternalCmdSchemaGenerator(cmd, []byte("{\n \"id\": 100,\n \"first_name\": \"syn jason\",\n \"last_name\": \"syn oblak\",\n \"email\": \"jsonsmail\"\n}")) + if err != nil { + t.Fatal(err) + } +} diff --git a/validator/internal/schemagen/csv/csv.go b/validator/internal/schemagen/csv/csv.go new file mode 100644 index 0000000..10b01e2 --- /dev/null +++ b/validator/internal/schemagen/csv/csv.go @@ -0,0 +1,49 @@ +package csv + +import ( + "bytes" + "encoding/csv" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen" + "io" + "strconv" + "strings" +) + +func New() schemagen.Generator { + return schemagen.Func(func(data []byte) ([]byte, error) { + reader := csv.NewReader(bytes.NewReader(data)) + + reader.ReuseRecord = true + reader.LazyQuotes = true + reader.Comma = ',' + + header, err := reader.Read() + if err != nil { + return nil, schemagen.ErrDeadletter + } + + schema := parseHeaderIntoSchema(header) + + for { + _, err = reader.Read() + if err != nil { + if err == io.EOF { + return schema, nil + } + return nil, schemagen.ErrDeadletter + } + } + }) +} + +func parseHeaderIntoSchema(header []string) []byte { + var schemaBuilder bytes.Buffer + + schemaBuilder.Write([]byte("version 1.0\n")) + schemaBuilder.Write([]byte("@totalColumns " + strconv.Itoa(len(header)) + "\n")) + for _, key := range header { + schemaBuilder.Write([]byte(strings.Trim(key, " ") + ":\n")) + } + + return schemaBuilder.Bytes() +} diff --git a/validator/internal/schemagen/csv/csv_test.go b/validator/internal/schemagen/csv/csv_test.go new file mode 100644 index 0000000..eecf98d --- /dev/null +++ b/validator/internal/schemagen/csv/csv_test.go @@ -0,0 +1,63 @@ +package csv + +import ( + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen" + "github.com/pkg/errors" + "os" + "path/filepath" + "runtime" + "strings" + "testing" +) + +func TestGenerate(t *testing.T) { + gen := New() + + tt := []struct { + name string + dataFilename string + schemaFilename string + deadletter bool + }{ + {"data-1", "data-1.csv", "schema-1.csvs", false}, + {"deadletter-1", "deadletter-1-data.csv", "", true}, + {"deadletter-2", "deadletter-2-data.csv", "", true}, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + data, err := os.ReadFile(filepath.Join(testdataDir, tc.dataFilename)) + if err != nil { + t.Errorf("data read error: %s", err) + } + + generated, err := gen.Generate(data) + if tc.deadletter { + if !errors.Is(err, schemagen.ErrDeadletter) { + t.Error("deadletter expected") + } + } else { + if err != nil { + t.Errorf("validator error: %s", err) + } + + schema, err := 
os.ReadFile(filepath.Join(testdataDir, tc.schemaFilename)) + if err != nil { + t.Errorf("schema read error: %s", err) + } + + schemaStr := string(schema) + schemaStr = strings.ReplaceAll(schemaStr, "\r\n", "\n") + generatedStr := string(generated) + + if schemaStr != generatedStr { + t.Errorf("expected and generated schema not the same") + } + } + }) + } +} diff --git a/validator/internal/schemagen/csv/testdata/data-1.csv b/validator/internal/schemagen/csv/testdata/data-1.csv new file mode 100644 index 0000000..59c43f9 --- /dev/null +++ b/validator/internal/schemagen/csv/testdata/data-1.csv @@ -0,0 +1,4 @@ +name,age,gender +miana,21,m +jure,19,f +stipano,57,m diff --git a/validator/internal/schemagen/csv/testdata/deadletter-1-data.csv b/validator/internal/schemagen/csv/testdata/deadletter-1-data.csv new file mode 100644 index 0000000..dd64322 --- /dev/null +++ b/validator/internal/schemagen/csv/testdata/deadletter-1-data.csv @@ -0,0 +1,6 @@ +{ + "id": 100, + "first_name": "syn jason", + "last_name": "syn oblak", + "email": "jsonsmail" +} diff --git a/validator/internal/schemagen/csv/testdata/deadletter-2-data.csv b/validator/internal/schemagen/csv/testdata/deadletter-2-data.csv new file mode 100644 index 0000000..d9ebf19 --- /dev/null +++ b/validator/internal/schemagen/csv/testdata/deadletter-2-data.csv @@ -0,0 +1,4 @@ +name,age,gender +miana,m +jure,19 +stipano,57,m diff --git a/validator/internal/schemagen/csv/testdata/schema-1.csvs b/validator/internal/schemagen/csv/testdata/schema-1.csvs new file mode 100644 index 0000000..85bd1f5 --- /dev/null +++ b/validator/internal/schemagen/csv/testdata/schema-1.csvs @@ -0,0 +1,5 @@ +version 1.0 +@totalColumns 3 +name: +age: +gender: diff --git a/validator/internal/schemagen/json/json.go b/validator/internal/schemagen/json/json.go new file mode 100644 index 0000000..672ce58 --- /dev/null +++ b/validator/internal/schemagen/json/json.go @@ -0,0 +1,16 @@ +package json + +import ( + "os/exec" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen" +) + +func New(filename string) schemagen.Generator { + return schemagen.Func(func(data []byte) ([]byte, error) { + // #nosec G204 this would usually be a security concern because of remote code execution, + // but it's fine here since we execute a python script from a file, so the attacker would need to have + // full access to the vm to execute the script, and in that case, they could just execute the script themselves + return schemagen.ExternalCmdSchemaGenerator(exec.Command("python", filename), data) + }) +} diff --git a/validator/internal/schemagen/json/json_schema_gen.py b/validator/internal/schemagen/json/json_schema_gen.py new file mode 100644 index 0000000..8696871 --- /dev/null +++ b/validator/internal/schemagen/json/json_schema_gen.py @@ -0,0 +1,22 @@ +import json + +import sys +from genson import SchemaBuilder + + +def main(): + data_file = open(sys.argv[1]) if len(sys.argv) > 1 else sys.stdin + data = data_file.read() + to_add = {"additionalProperties": False} + builder = SchemaBuilder() + try: + builder.add_schema(to_add) + builder.add_object(json.loads(data)) + result_schema = builder.to_json(indent=2) + except ValueError: + result_schema = "" + sys.stdout.write(result_schema) + + +if __name__ == "__main__": + main() diff --git a/validator/internal/schemagen/json/json_test.go b/validator/internal/schemagen/json/json_test.go new file mode 100644 index 0000000..05710f7 --- /dev/null +++ b/validator/internal/schemagen/json/json_test.go @@ -0,0 +1,62 @@ +package json + 
+import ( + "bytes" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/schemagen" + + "github.com/pkg/errors" +) + +func TestGenerate(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + + gen := New("json_schema_gen.py") + + tt := []struct { + name string + dataFilename string + schemaFilename string + deadletter bool + }{ + {"deadletter-1", "deadletter-1-data.json", "", true}, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + data, err := os.ReadFile(filepath.Join(testdataDir, tc.dataFilename)) + if err != nil { + t.Errorf("data read error: %s", err) + } + + generated, err := gen.Generate(data) + if tc.deadletter { + if !errors.Is(err, schemagen.ErrDeadletter) { + t.Error("deadletter expected") + } + } else { + if err != nil { + t.Errorf("validator error: %s", err) + } + + schema, err := os.ReadFile(filepath.Join(testdataDir, tc.schemaFilename)) + if err != nil { + t.Errorf("schema read error: %s", err) + } + if !bytes.Equal(schema, generated) { + t.Errorf("expected and generated schema not the same") + } + } + }) + } +} diff --git a/validator/internal/schemagen/json/testdata/deadletter-1-data.json b/validator/internal/schemagen/json/testdata/deadletter-1-data.json new file mode 100644 index 0000000..ec3bdf2 --- /dev/null +++ b/validator/internal/schemagen/json/testdata/deadletter-1-data.json @@ -0,0 +1,5 @@ + "id": 100, + "first_name": "syn jason", + "last_name": "syn oblak", + "email": "jsonsmail" +} diff --git a/validator/internal/schemagen/schemagen.go b/validator/internal/schemagen/schemagen.go new file mode 100644 index 0000000..ba1dd7b --- /dev/null +++ b/validator/internal/schemagen/schemagen.go @@ -0,0 +1,22 @@ +// Package schemagen exposes common functionalities of all schema generators. +package schemagen + +import "github.com/pkg/errors" + +// ErrDeadletter is a special error type to mark that schema generation was unsuccessful, +// due to the fact that the given message isn't even the right format. +var ErrDeadletter = errors.New("deadletter") + +// Generator defines schema generators. +type Generator interface { + // Generate takes data of some assumed format and returns the schema inferred from that data + Generate([]byte) ([]byte, error) +} + +// Func convenience type which is the functional equivalent of Generator. +type Func func(data []byte) ([]byte, error) + +// Generate implements Generate by forwarding the call to the underlying Func. 
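+//
+// For illustration, any function with the matching signature can act as a Generator
+// (the schema returned below is a hard-coded stand-in, not a real inference):
+//
+//	gen := Func(func(data []byte) ([]byte, error) {
+//		if len(data) == 0 {
+//			return nil, ErrDeadletter
+//		}
+//		return []byte("version 1.0\n@totalColumns 1\nname:\n"), nil
+//	})
+//	schema, err := gen.Generate([]byte("name\nsyntio\n"))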
+func (f Func) Generate(data []byte) ([]byte, error) { + return f(data) +} diff --git a/validator/internal/validator/avro/avro.go b/validator/internal/validator/avro/avro.go new file mode 100644 index 0000000..2aad657 --- /dev/null +++ b/validator/internal/validator/avro/avro.go @@ -0,0 +1,30 @@ +package avro + +import ( + "bytes" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + + "github.com/hamba/avro" +) + +func New() validator.Validator { + return validator.Func(func(message, schema []byte, _, _ string) (bool, error) { + parsedSchema, err := avro.Parse(string(schema)) + if err != nil { + return false, validator.ErrDeadletter + } + + var data interface{} + if err = avro.Unmarshal(parsedSchema, message, &data); err != nil { + return false, nil + } + + reserializedMessage, err := avro.Marshal(parsedSchema, data) + if err != nil { + return false, nil + } + + return bytes.Equal(reserializedMessage, message), nil + }) +} diff --git a/validator/internal/validator/avro/avro_test.go b/validator/internal/validator/avro/avro_test.go new file mode 100644 index 0000000..dfea54b --- /dev/null +++ b/validator/internal/validator/avro/avro_test.go @@ -0,0 +1,209 @@ +package avro + +import ( + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/hamba/avro" +) + +func TestAvroValidator_Validate(t *testing.T) { + avroV := New() + + tt := []struct { + name string + data interface{} + serializationSchemaFilename string + validationSchemaFilename string + valid bool + }{ + { + "valid-1", + struct { + Name string `avro:"name"` + Age int `avro:"age"` + Collection interface{} `avro:"collection"` + Foo interface{} `avro:"foo"` + }{ + "Syntio", + 4, + []string{"data engineering", "avro"}, + struct { + Bar string `avro:"bar"` + }{ + "hello world", + }, + }, + "valid-1-serialization-schema.avsc", + "valid-1-validation-schema.avsc", + true, + }, + { + "invalid-1", + struct { + Name string `avro:"name"` + Age int `avro:"age"` + Collection interface{} `avro:"collection"` + }{ + "Syntio", + 4, + []string{"data engineering", "avro"}, + }, + "invalid-1-serialization-schema.avsc", + "invalid-1-validation-schema.avsc", + false, + }, + { + "invalid-2", + struct { + Age int `avro:"age"` + Tall bool `avro:"tall"` + Handsome bool `avro:"handsome"` + }{ + 2, + true, + true, + }, + "invalid-2-serialization-schema.avsc", + "invalid-2-validation-schema.avsc", + // IT IS FUNDAMENTALLY IMPOSSIBLE TO COVER THIS CASE + // BECAUSE OF THE WAY AVRO WORKS + // THIS IS A KNOWN AND ACCEPTABLE LIMITATION + true, + }, + { + "invalid-3", + struct { + Age int `avro:"age"` + Tall bool `avro:"tall"` + Handsome bool `avro:"handsome"` + }{ + 2, + true, + false, + }, + "invalid-3-serialization-schema.avsc", + "invalid-3-validation-schema.avsc", + // IT IS FUNDAMENTALLY IMPOSSIBLE TO COVER THIS CASE + // BECAUSE OF THE WAY AVRO WORKS + // THIS IS A KNOWN AND ACCEPTABLE LIMITATION + true, + }, + { + "invalid-4", + struct { + Age int `avro:"age"` + Height int `avro:"height"` + Length int `avro:"length"` + }{ + 4, + 64, + -65, + }, + "invalid-4-serialization-schema.avsc", + "invalid-4-validation-schema.avsc", + // IT IS FUNDAMENTALLY IMPOSSIBLE TO COVER THIS CASE + // BECAUSE OF THE WAY AVRO WORKS + // THIS IS A KNOWN AND ACCEPTABLE LIMITATION + true, + }, + { + "invalid-5", + struct { + Age int `avro:"age"` + Pos0 int `avro:"pos0"` + Pos1 int `avro:"pos1"` + Pos2 int `avro:"pos2"` + Pos3 int `avro:"pos3"` + Pos4 int `avro:"pos4"` + }{ + 5, + // 36, // H + // -35, // E + // 38, // L + // 38, // L + // -40, 
// O + 40, // P + -44, // W + 39, // N + -35, // E + 34, // D + }, + "invalid-5-serialization-schema.avsc", + "invalid-5-validation-schema.avsc", + // IT IS FUNDAMENTALLY IMPOSSIBLE TO COVER THIS CASE + // BECAUSE OF THE WAY AVRO WORKS + // THIS IS A KNOWN AND ACCEPTABLE LIMITATION + true, + }, + { + "invalid-6", + struct { + Pos1 string `avro:"pos1"` + Pos0 string `avro:"pos0"` + }{ + "SYNTIO!", + "HELLO, ", + }, + "invalid-6-serialization-schema.avsc", + "invalid-6-validation-schema.avsc", + // IT IS FUNDAMENTALLY IMPOSSIBLE TO COVER THIS CASE + // BECAUSE OF THE WAY AVRO WORKS + // THIS IS A KNOWN AND ACCEPTABLE LIMITATION + true, + }, + { + "invalid-7", + struct { + Pos0 string `avro:"pos0"` + }{ + "HELLO", + }, + "invalid-7-serialization-schema.avsc", + "invalid-7-validation-schema.avsc", + false, + }, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + serializationSchemaTxt, err := os.ReadFile(filepath.Join(testdataDir, tc.serializationSchemaFilename)) + if err != nil { + t.Fatalf("serialization schema read error") + } + + serializationSchema, err := avro.Parse(string(serializationSchemaTxt)) + if err != nil { + t.Fatalf("avro serialization parse error: %s", err) + } + + data, err := avro.Marshal(serializationSchema, tc.data) + if err != nil { + t.Fatalf("avro serialization error: %s", err) + } + + validationSchema, err := os.ReadFile(filepath.Join(testdataDir, tc.validationSchemaFilename)) + if err != nil { + t.Fatalf("validation schema read error") + } + + valid, err := avroV.Validate(data, validationSchema, "", "") + if err != nil { + t.Fatalf("validator error: %s", err) + } + if valid != tc.valid { + if valid { + t.Errorf("message valid, invalid expected") + } else { + t.Errorf("message invalid, valid expected") + } + } + }) + } +} diff --git a/validator/internal/validator/avro/testdata/invalid-1-serialization-schema.avsc b/validator/internal/validator/avro/testdata/invalid-1-serialization-schema.avsc new file mode 100644 index 0000000..fa04ae2 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-1-serialization-schema.avsc @@ -0,0 +1,22 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "name", + "type": "string" + }, + { + "name": "age", + "type": "int" + }, + { + "name": "collection", + "type": { + "type": "array", + "items": "string" + } + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-1-validation-schema.avsc b/validator/internal/validator/avro/testdata/invalid-1-validation-schema.avsc new file mode 100644 index 0000000..bda4977 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-1-validation-schema.avsc @@ -0,0 +1,35 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "name", + "type": "string" + }, + { + "name": "age", + "type": "int" + }, + { + "name": "collection", + "type": { + "type": "array", + "items": "string" + } + }, + { + "name": "foo", + "type": { + "name": "foo", + "type": "record", + "fields": [ + { + "name": "bar", + "type": "string" + } + ] + } + } + ] +} \ No newline at end of file diff --git a/validator/internal/validator/avro/testdata/invalid-2-serialization-schema.avsc b/validator/internal/validator/avro/testdata/invalid-2-serialization-schema.avsc new file mode 100644 index 0000000..f5bfaae --- /dev/null +++ 
b/validator/internal/validator/avro/testdata/invalid-2-serialization-schema.avsc @@ -0,0 +1,19 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "age", + "type": "int" + }, + { + "name": "tall", + "type": "boolean" + }, + { + "name": "handsome", + "type": "boolean" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-2-validation-schema.avsc b/validator/internal/validator/avro/testdata/invalid-2-validation-schema.avsc new file mode 100644 index 0000000..8437e17 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-2-validation-schema.avsc @@ -0,0 +1,11 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "name", + "type": "bytes" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-3-serialization-schema.avsc b/validator/internal/validator/avro/testdata/invalid-3-serialization-schema.avsc new file mode 100644 index 0000000..f5bfaae --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-3-serialization-schema.avsc @@ -0,0 +1,19 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "age", + "type": "int" + }, + { + "name": "tall", + "type": "boolean" + }, + { + "name": "handsome", + "type": "boolean" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-3-validation-schema.avsc b/validator/internal/validator/avro/testdata/invalid-3-validation-schema.avsc new file mode 100644 index 0000000..aec54f7 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-3-validation-schema.avsc @@ -0,0 +1,19 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "age", + "type": "int" + }, + { + "name": "grade", + "type": "int" + }, + { + "name": "height", + "type": "int" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-4-serialization-schema.avsc b/validator/internal/validator/avro/testdata/invalid-4-serialization-schema.avsc new file mode 100644 index 0000000..f585663 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-4-serialization-schema.avsc @@ -0,0 +1,19 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "age", + "type": "int" + }, + { + "name": "height", + "type": "int" + }, + { + "name": "length", + "type": "int" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-4-validation-schema.avsc b/validator/internal/validator/avro/testdata/invalid-4-validation-schema.avsc new file mode 100644 index 0000000..8437e17 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-4-validation-schema.avsc @@ -0,0 +1,11 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "name", + "type": "bytes" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-5-serialization-schema.avsc b/validator/internal/validator/avro/testdata/invalid-5-serialization-schema.avsc new file mode 100644 index 0000000..68fc7e8 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-5-serialization-schema.avsc @@ -0,0 +1,31 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "age", + "type": "int" + }, + { + "name": "pos0", + "type": "int" + }, + { + "name": "pos1", + "type": "int" + }, + { + "name": "pos2", + "type": "int" + }, + { + "name": "pos3", + "type": "int" + }, + { + "name": "pos4", 
+ "type": "int" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-5-validation-schema.avsc b/validator/internal/validator/avro/testdata/invalid-5-validation-schema.avsc new file mode 100644 index 0000000..52dba1d --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-5-validation-schema.avsc @@ -0,0 +1,11 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "message", + "type": "string" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-6-serialization-schema.avsc b/validator/internal/validator/avro/testdata/invalid-6-serialization-schema.avsc new file mode 100644 index 0000000..dd2fb24 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-6-serialization-schema.avsc @@ -0,0 +1,15 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "pos1", + "type": "string" + }, + { + "name": "pos0", + "type": "string" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-6-validation-schema.avsc b/validator/internal/validator/avro/testdata/invalid-6-validation-schema.avsc new file mode 100644 index 0000000..701ce27 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-6-validation-schema.avsc @@ -0,0 +1,15 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "pos0", + "type": "string" + }, + { + "name": "pos1", + "type": "string" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-7-serialization-schema.avsc b/validator/internal/validator/avro/testdata/invalid-7-serialization-schema.avsc new file mode 100644 index 0000000..2fbe0d0 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-7-serialization-schema.avsc @@ -0,0 +1,11 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "pos0", + "type": "string" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/invalid-7-validation-schema.avsc b/validator/internal/validator/avro/testdata/invalid-7-validation-schema.avsc new file mode 100644 index 0000000..701ce27 --- /dev/null +++ b/validator/internal/validator/avro/testdata/invalid-7-validation-schema.avsc @@ -0,0 +1,15 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "pos0", + "type": "string" + }, + { + "name": "pos1", + "type": "string" + } + ] +} diff --git a/validator/internal/validator/avro/testdata/valid-1-serialization-schema.avsc b/validator/internal/validator/avro/testdata/valid-1-serialization-schema.avsc new file mode 100644 index 0000000..bda4977 --- /dev/null +++ b/validator/internal/validator/avro/testdata/valid-1-serialization-schema.avsc @@ -0,0 +1,35 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "name", + "type": "string" + }, + { + "name": "age", + "type": "int" + }, + { + "name": "collection", + "type": { + "type": "array", + "items": "string" + } + }, + { + "name": "foo", + "type": { + "name": "foo", + "type": "record", + "fields": [ + { + "name": "bar", + "type": "string" + } + ] + } + } + ] +} \ No newline at end of file diff --git a/validator/internal/validator/avro/testdata/valid-1-validation-schema.avsc b/validator/internal/validator/avro/testdata/valid-1-validation-schema.avsc new file mode 100644 index 0000000..bda4977 --- /dev/null +++ b/validator/internal/validator/avro/testdata/valid-1-validation-schema.avsc @@ -0,0 
+1,35 @@ +{ + "name": "Test", + "type": "record", + "namespace": "hr.syntio", + "fields": [ + { + "name": "name", + "type": "string" + }, + { + "name": "age", + "type": "int" + }, + { + "name": "collection", + "type": { + "type": "array", + "items": "string" + } + }, + { + "name": "foo", + "type": { + "name": "foo", + "type": "record", + "fields": [ + { + "name": "bar", + "type": "string" + } + ] + } + } + ] +} \ No newline at end of file diff --git a/validator/internal/validator/csv/csv.go b/validator/internal/validator/csv/csv.go new file mode 100644 index 0000000..8b570ab --- /dev/null +++ b/validator/internal/validator/csv/csv.go @@ -0,0 +1,68 @@ +package csv + +import ( + "bytes" + "context" + "encoding/csv" + "io" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + "github.com/dataphos/lib-httputil/pkg/httputil" + "github.com/dataphos/lib-retry/pkg/retry" + + "github.com/pkg/errors" +) + +type Validator struct { + Url string + TimeoutBase time.Duration +} + +const DefaultTimeoutBase = 2 * time.Second + +// New returns a new validator which validates CSV messages against a schema. +// +// Performs a health check to see if the validator is available, retrying periodically until the context is cancelled +// or the health check succeeds. +func New(ctx context.Context, url string, timeoutBase time.Duration) (validator.Validator, error) { + if err := retry.Do(ctx, retry.WithJitter(retry.Constant(2*time.Second)), func(ctx context.Context) error { + return httputil.HealthCheck(ctx, url+"/health") + }); err != nil { + return nil, errors.Wrapf(err, "attempting to reach csv validator at %s failed", url) + } + + return &Validator{ + Url: url, + TimeoutBase: timeoutBase, + }, nil +} + +func (v *Validator) Validate(message, schema []byte, _, _ string) (bool, error) { + if !IsCSV(message) { + return false, validator.ErrDeadletter + } + + ctx, cancel := context.WithTimeout(context.Background(), validator.EstimateHTTPTimeout(len(message), v.TimeoutBase)) + defer cancel() + + return validator.ValidateOverHTTP(ctx, message, schema, v.Url) +} + +// IsCSV checks if the given data is valid csv. +// +// The data is assumed to use ',' as delimiter. 
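+//
+// For example (the inputs below are illustrative):
+//
+//	IsCSV([]byte("name,age\nmia,21\n")) // true
+//	IsCSV([]byte("name,age\nmia\n"))    // false, the second record has a different number of fields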
+func IsCSV(data []byte) bool { + reader := csv.NewReader(bytes.NewReader(data)) + + reader.ReuseRecord = true + reader.Comma = ',' + reader.LazyQuotes = true + + for { + _, err := reader.Read() + if err != nil { + return err == io.EOF + } + } +} diff --git a/validator/internal/validator/csv/csv_test.go b/validator/internal/validator/csv/csv_test.go new file mode 100644 index 0000000..1e2d20b --- /dev/null +++ b/validator/internal/validator/csv/csv_test.go @@ -0,0 +1,121 @@ +package csv + +import ( + "context" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "runtime" + "testing" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + + "github.com/pkg/errors" +) + +func TestNew(t *testing.T) { + healthChecked := false + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/health" { + healthChecked = true + w.WriteHeader(http.StatusOK) + } else { + t.Fatal("wrong endpoint hit") + } + })) + + _, err := New(context.Background(), srv.URL, DefaultTimeoutBase) + if err != nil { + t.Fatal(err) + } + if !healthChecked { + t.Error("health check not called") + } +} + +func TestNewTimeout(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/health" { + time.Sleep(2 * time.Minute) + w.WriteHeader(http.StatusOK) + } else { + t.Fatal("wrong endpoint hit") + } + })) + + ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond) + defer cancel() + + _, err := New(ctx, srv.URL, DefaultTimeoutBase) + if !errors.Is(err, context.DeadlineExceeded) { + t.Fatal("expected timeout") + } +} + +func TestCSVValidator_Validate(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + + csvV, err := New(context.Background(), "http://localhost:8088", DefaultTimeoutBase) + if err != nil { + t.Fatal(err) + } + + tt := []struct { + name string + dataFilename string + schemaFilename string + valid bool + deadletter bool + }{ + {"valid-1", "valid-1-data.csv", "valid-1-schema.csvs", true, false}, + {"valid-2", "valid-2-data.csv", "valid-2-schema.csvs", true, false}, + {"valid-3", "valid-3-data.csv", "valid-3-schema.csvs", true, false}, + {"valid-4", "valid-4-data.csv", "valid-4-schema.csvs", true, false}, + {"invalid-1", "invalid-1-data.csv", "invalid-1-schema.csvs", false, false}, + {"invalid-2", "invalid-2-data.csv", "invalid-2-schema.csvs", false, false}, + {"invalid-3", "invalid-3-data.csv", "invalid-3-schema.csvs", false, false}, + {"deadletter-1", "deadletter-1-data.csv", "deadletter-1-schema.csvs", false, true}, + {"deadletter-2", "deadletter-2-data.csv", "deadletter-2-schema.csvs", false, true}, + {"deadletter-3", "deadletter-3-data.csv", "deadletter-3-schema.csvs", false, true}, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + data, err := os.ReadFile(filepath.Join(testdataDir, tc.dataFilename)) + if err != nil { + t.Fatalf("data read error: %s", err) + } + schema, err := os.ReadFile(filepath.Join(testdataDir, tc.schemaFilename)) + if err != nil { + t.Fatalf("schema read error: %s", err) + } + + valid, err := csvV.Validate(data, schema, "", "") + if tc.deadletter { + if !errors.Is(err, validator.ErrDeadletter) { + t.Error("deadletter expected") + } + } else { + if err != nil { + 
t.Errorf("validator error: %s", err) + } + if valid != tc.valid { + if valid { + t.Errorf("message valid, invalid expected") + } else { + t.Errorf("message invalid, valid expected") + } + } + } + }) + } +} diff --git a/validator/internal/validator/csv/testdata/deadletter-1-data.csv b/validator/internal/validator/csv/testdata/deadletter-1-data.csv new file mode 100644 index 0000000..2b3d598 --- /dev/null +++ b/validator/internal/validator/csv/testdata/deadletter-1-data.csv @@ -0,0 +1,6 @@ +{ + "id": 100, + "first_name": "syn jason", + "last_name": "syn oblak", + "email": "jsonsmail" +} diff --git a/validator/internal/validator/csv/testdata/deadletter-1-schema.csvs b/validator/internal/validator/csv/testdata/deadletter-1-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/deadletter-1-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/deadletter-2-data.csv b/validator/internal/validator/csv/testdata/deadletter-2-data.csv new file mode 100644 index 0000000..b80607f --- /dev/null +++ b/validator/internal/validator/csv/testdata/deadletter-2-data.csv @@ -0,0 +1,4 @@ +name,age,gender +miana,m +jure,19,f +stipano,57,m diff --git a/validator/internal/validator/csv/testdata/deadletter-2-schema.csvs b/validator/internal/validator/csv/testdata/deadletter-2-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/deadletter-2-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/deadletter-3-data.csv b/validator/internal/validator/csv/testdata/deadletter-3-data.csv new file mode 100644 index 0000000..3f95ae0 --- /dev/null +++ b/validator/internal/validator/csv/testdata/deadletter-3-data.csv @@ -0,0 +1,4 @@ +name,gender +miana,20,m +jure,19,f +stipano,57,m diff --git a/validator/internal/validator/csv/testdata/deadletter-3-schema.csvs b/validator/internal/validator/csv/testdata/deadletter-3-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/deadletter-3-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/invalid-1-data.csv b/validator/internal/validator/csv/testdata/invalid-1-data.csv new file mode 100644 index 0000000..58646a0 --- /dev/null +++ b/validator/internal/validator/csv/testdata/invalid-1-data.csv @@ -0,0 +1,4 @@ +name,age,gender +miana,4 years,m +jure,19,f +mia,57,male \ No newline at end of file diff --git a/validator/internal/validator/csv/testdata/invalid-1-schema.csvs b/validator/internal/validator/csv/testdata/invalid-1-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/invalid-1-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/invalid-2-data.csv b/validator/internal/validator/csv/testdata/invalid-2-data.csv new file mode 100644 index 0000000..163b688 --- /dev/null +++ b/validator/internal/validator/csv/testdata/invalid-2-data.csv @@ -0,0 +1,4 @@ 
+name,age,gender +miana,sixty years,m +jure,19,male +mia,57,female \ No newline at end of file diff --git a/validator/internal/validator/csv/testdata/invalid-2-schema.csvs b/validator/internal/validator/csv/testdata/invalid-2-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/invalid-2-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/invalid-3-data.csv b/validator/internal/validator/csv/testdata/invalid-3-data.csv new file mode 100644 index 0000000..b6097f2 --- /dev/null +++ b/validator/internal/validator/csv/testdata/invalid-3-data.csv @@ -0,0 +1,4 @@ +name,age,gender +miana,0 years,male +jure,19,female +mia,0 years,female \ No newline at end of file diff --git a/validator/internal/validator/csv/testdata/invalid-3-schema.csvs b/validator/internal/validator/csv/testdata/invalid-3-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/invalid-3-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/valid-1-data.csv b/validator/internal/validator/csv/testdata/valid-1-data.csv new file mode 100644 index 0000000..59c43f9 --- /dev/null +++ b/validator/internal/validator/csv/testdata/valid-1-data.csv @@ -0,0 +1,4 @@ +name,age,gender +miana,21,m +jure,19,f +stipano,57,m diff --git a/validator/internal/validator/csv/testdata/valid-1-schema.csvs b/validator/internal/validator/csv/testdata/valid-1-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/valid-1-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/valid-2-data.csv b/validator/internal/validator/csv/testdata/valid-2-data.csv new file mode 100644 index 0000000..60933fb --- /dev/null +++ b/validator/internal/validator/csv/testdata/valid-2-data.csv @@ -0,0 +1,4 @@ +name,age,gender +wow,22,f +wew,99,f +stipano,57,f \ No newline at end of file diff --git a/validator/internal/validator/csv/testdata/valid-2-schema.csvs b/validator/internal/validator/csv/testdata/valid-2-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/valid-2-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/valid-3-data.csv b/validator/internal/validator/csv/testdata/valid-3-data.csv new file mode 100644 index 0000000..f00f516 --- /dev/null +++ b/validator/internal/validator/csv/testdata/valid-3-data.csv @@ -0,0 +1,4 @@ +name,age,gender +muchname,50,m +suchname,20,m +name3,57,m diff --git a/validator/internal/validator/csv/testdata/valid-3-schema.csvs b/validator/internal/validator/csv/testdata/valid-3-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/valid-3-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/csv/testdata/valid-4-data.csv 
b/validator/internal/validator/csv/testdata/valid-4-data.csv new file mode 100644 index 0000000..955e446 --- /dev/null +++ b/validator/internal/validator/csv/testdata/valid-4-data.csv @@ -0,0 +1,4 @@ +name,age,gender +num2,21,m +name3,19,f +mamam,57,m \ No newline at end of file diff --git a/validator/internal/validator/csv/testdata/valid-4-schema.csvs b/validator/internal/validator/csv/testdata/valid-4-schema.csvs new file mode 100644 index 0000000..635dc83 --- /dev/null +++ b/validator/internal/validator/csv/testdata/valid-4-schema.csvs @@ -0,0 +1,5 @@ +version 1.1 +@totalColumns 3 +name: notEmpty +age: range(0, 120) +gender: is("m") or is("f") or is("t") or is("n") diff --git a/validator/internal/validator/external/csv-validator/.gitignore b/validator/internal/validator/external/csv-validator/.gitignore new file mode 100644 index 0000000..549e00a --- /dev/null +++ b/validator/internal/validator/external/csv-validator/.gitignore @@ -0,0 +1,33 @@ +HELP.md +target/ +!.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ + +### STS ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ diff --git a/validator/internal/validator/external/csv-validator/pom.xml b/validator/internal/validator/external/csv-validator/pom.xml new file mode 100644 index 0000000..1b31fad --- /dev/null +++ b/validator/internal/validator/external/csv-validator/pom.xml @@ -0,0 +1,219 @@ + + + 4.0.0 + + + org.springframework.boot + spring-boot-starter-parent + 3.1.1 + + + + net.syntio + csv-validator + 0.0.1-SNAPSHOT + csv-validator + CSV validator service + + + 11 + 3.0.2 + 6.0.4 + 10.1.5 + + + + + + org.apache.tomcat.embed + tomcat-embed-core + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-el + ${tomcat.version} + + + org.apache.tomcat.embed + tomcat-embed-websocket + ${tomcat.version} + + + org.apache.tomcat + tomcat-annotations-api + ${tomcat.version} + + + + + org.springframework.boot + spring-boot-starter-web + ${springframework.boot.version} + + + org.springframework.boot + spring-boot + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-autoconfigure + ${springframework.boot.version} + + + + org.springframework.boot + spring-boot-devtools + runtime + true + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-test + test + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-json + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-logging + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-starter-tomcat + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-test + ${springframework.boot.version} + + + org.springframework.boot + spring-boot-test-autoconfigure + ${springframework.boot.version} + + + + + org.springframework + spring-web + ${springframework.version} + + + org.springframework + spring-core + ${springframework.version} + + + org.springframework + spring-aop + ${springframework.version} + + + org.springframework + spring-beans + ${springframework.version} + + + org.springframework + spring-context + ${springframework.version} + + + 
org.springframework + spring-expression + ${springframework.version} + + + org.springframework + spring-jcl + ${springframework.version} + + + org.springframework + spring-test + ${springframework.version} + + + org.springframework + spring-webmvc + ${springframework.version} + + + + ch.qos.logback + logback-classic + 1.4.8 + + + ch.qos.logback + logback-core + 1.4.8 + + + + org.slf4j + slf4j-api + 2.0.7 + + + org.slf4j + slf4j-simple + 2.0.7 + + + + com.fasterxml.jackson.core + jackson-databind + 2.15.2 + + + + org.yaml + snakeyaml + 2.0 + + + + uk.gov.nationalarchives + csv-validator-java-api + 1.1.5 + + + com.gilt + gfc-semver_2.11 + + + + + + com.gilt + gfc-semver_2.11 + 0.0.5 + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/CsvValidatorApplication.java b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/CsvValidatorApplication.java new file mode 100644 index 0000000..a1bdf35 --- /dev/null +++ b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/CsvValidatorApplication.java @@ -0,0 +1,12 @@ +package net.syntio.csvvalidator; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +@SpringBootApplication +public class CsvValidatorApplication { + + public static void main(String[] args) { + SpringApplication.run(CsvValidatorApplication.class, args); + } +} diff --git a/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/controller/ValidatorController.java b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/controller/ValidatorController.java new file mode 100644 index 0000000..0ec7a3b --- /dev/null +++ b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/controller/ValidatorController.java @@ -0,0 +1,37 @@ +package net.syntio.csvvalidator.controller; + +import net.syntio.csvvalidator.dto.ValidationRequestDto; +import net.syntio.csvvalidator.dto.ValidatorResponseDto; +import net.syntio.csvvalidator.validator.CsvValidator; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RestController; + +@RestController +public class ValidatorController { + + @PostMapping(value = "/") + public ResponseEntity validate(@RequestBody ValidationRequestDto req) { + String data = req.getData().replaceAll("\r\n", "\n"); + String schema = req.getSchema().replaceAll("\r\n", "\n"); + try { + boolean validation = CsvValidator.validate(data, schema); + ValidatorResponseDto res = new ValidatorResponseDto(validation); + if (validation) { + res.setInfo("Data is valid"); + return ResponseEntity.ok(res); + } + res.setInfo("Data is invalid"); + return ResponseEntity.ok(res); + } catch (Exception e) { + return ResponseEntity.badRequest().build(); + } + } + + @GetMapping(value = "/health") + public ResponseEntity healthCheck() { + return ResponseEntity.ok().build(); + } +} diff --git a/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/dto/ValidationRequestDto.java 
b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/dto/ValidationRequestDto.java new file mode 100644 index 0000000..585130e --- /dev/null +++ b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/dto/ValidationRequestDto.java @@ -0,0 +1,19 @@ +package net.syntio.csvvalidator.dto; + +public class ValidationRequestDto { + private final String data; + private final String schema; + + public ValidationRequestDto(String data, String schema) { + this.data = data; + this.schema = schema; + } + + public String getData() { + return data; + } + + public String getSchema() { + return schema; + } +} diff --git a/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/dto/ValidatorResponseDto.java b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/dto/ValidatorResponseDto.java new file mode 100644 index 0000000..206451e --- /dev/null +++ b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/dto/ValidatorResponseDto.java @@ -0,0 +1,22 @@ +package net.syntio.csvvalidator.dto; + +public class ValidatorResponseDto { + private final boolean validation; + private String info; + + public ValidatorResponseDto(boolean validation) { + this.validation = validation; + } + + public boolean getValidation() { + return validation; + } + + public String getInfo() { + return info; + } + + public void setInfo(String info) { + this.info = info; + } +} diff --git a/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/validator/CsvValidator.java b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/validator/CsvValidator.java new file mode 100644 index 0000000..07e8410 --- /dev/null +++ b/validator/internal/validator/external/csv-validator/src/main/java/net/syntio/csvvalidator/validator/CsvValidator.java @@ -0,0 +1,21 @@ +package net.syntio.csvvalidator.validator; +import uk.gov.nationalarchives.csv.validator.api.java.FailMessage; +import java.io.Reader; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.List; + +public class CsvValidator { + public static boolean validate(String data, String schema) { + Reader dataReader = new StringReader(data); + Reader schemaReader = new StringReader(schema); + + List messages = uk.gov.nationalarchives.csv.validator.api.java.CsvValidator.validate(dataReader, schemaReader, + false, + new ArrayList<>(), + true, + false); + + return messages.isEmpty(); + } +} diff --git a/validator/internal/validator/external/csv-validator/src/test/java/net/syntio/csvvalidator/CsvValidatorApplicationTests.java b/validator/internal/validator/external/csv-validator/src/test/java/net/syntio/csvvalidator/CsvValidatorApplicationTests.java new file mode 100644 index 0000000..c53b474 --- /dev/null +++ b/validator/internal/validator/external/csv-validator/src/test/java/net/syntio/csvvalidator/CsvValidatorApplicationTests.java @@ -0,0 +1,13 @@ +package net.syntio.csvvalidator; + +import org.junit.jupiter.api.Test; +import org.springframework.boot.test.context.SpringBootTest; + +@SpringBootTest +class CsvValidatorApplicationTests { + + @Test + void contextLoads() { + } + +} diff --git a/validator/internal/validator/external/xml-validator/cloudbuild.yaml b/validator/internal/validator/external/xml-validator/cloudbuild.yaml new file mode 100644 index 0000000..766d43e --- /dev/null +++ 
b/validator/internal/validator/external/xml-validator/cloudbuild.yaml @@ -0,0 +1,5 @@ +steps: + - name: 'gcr.io/cloud-builders/docker' + args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/xml_validator', '.' ] + - name: 'gcr.io/cloud-builders/docker' + args: [ 'push', 'gcr.io/$PROJECT_ID/xml_validator' ] diff --git a/validator/internal/validator/external/xml-validator/main.py b/validator/internal/validator/external/xml-validator/main.py new file mode 100644 index 0000000..8bbd3e6 --- /dev/null +++ b/validator/internal/validator/external/xml-validator/main.py @@ -0,0 +1,57 @@ +# Package handles XML schema validation +import http +import json + +import xmlschema +from flask import Response, Flask, request +from waitress import serve + +app = Flask(__name__) + + +@app.route("/", methods=["POST"]) +def http_validation_handler(): + request_json = request.get_json(silent=True) + is_valid = False + + if request_json and "data" in request_json and "schema" in request_json: + data = request_json["data"] + schema = request_json["schema"] + try: + is_valid = validate(data, schema) + response = make_response(is_valid, "successful validation", 200) + except Exception: + response = make_response( + is_valid, "invalid xml: could not parse 'schema' or 'data'", 400 + ) + else: + response = make_response( + False, "invalid request, needs 'data' and 'schema' fields.", 400 + ) + + return response + + +@app.route("/health", methods=["GET"]) +def http_health_handler(): + response = Response(status=http.HTTPStatus.OK) + return response + + +def validate(data, schema): + schema = xmlschema.XMLSchema(schema) + return schema.is_valid(data) + + +def make_response(validation, info, status): + response_data = {"validation": validation, "info": info} + + response = Response() + response.data = json.dumps(response_data) + response.status_code = status + return response + + +if __name__ == "__main__": + print("* Serving app main") + serve(app=app, host="0.0.0.0", port=8081) diff --git a/validator/internal/validator/external/xml-validator/requirements.txt b/validator/internal/validator/external/xml-validator/requirements.txt new file mode 100644 index 0000000..ca7a4d7 --- /dev/null +++ b/validator/internal/validator/external/xml-validator/requirements.txt @@ -0,0 +1,11 @@ +click==8.0.1 +colorama==0.4.4 +elementpath==2.2.3 +Flask==2.2.5 +itsdangerous==2.0.1 +Jinja2==3.0.1 +MarkupSafe==2.1.1 +waitress==2.1.1 +Werkzeug==2.2.3 +xmlschema==1.7.0 +setuptools==65.5.1 diff --git a/validator/internal/validator/http.go b/validator/internal/validator/http.go new file mode 100644 index 0000000..07fc17a --- /dev/null +++ b/validator/internal/validator/http.go @@ -0,0 +1,85 @@ +package validator + +import ( + "bytes" + "context" + "encoding/json" + "io" + "math" + "net/http" + "time" + + "github.com/dataphos/lib-httputil/pkg/httputil" + + "github.com/pkg/errors" +) + +// HTTPTimeoutBytesUnit is the base amount of bytes used by EstimateHTTPTimeout. +const HTTPTimeoutBytesUnit = 1024 * 100 + +// EstimateHTTPTimeout calculates the expected timeout, by dividing the size given in bytes by HTTPTimeoutBytesUnit, and then +// multiplying the resulting coefficient by the given base duration. +// +// If the given size rounds to at most one HTTPTimeoutBytesUnit, base is returned, to avoid problems due to the HTTP overhead which isn't fully linear.
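+//
+// A worked illustration (numbers chosen for this comment, not taken from any caller): with base = 2s, a
+// 1 MiB (1,048,576 B) payload gives coef = round(1048576 / 102400) = 10, so the estimated timeout is 20s,
+// while a payload of 100 KiB or less keeps the 2s base.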
+func EstimateHTTPTimeout(size int, base time.Duration) time.Duration { + coef := int(math.Round(float64(size) / float64(HTTPTimeoutBytesUnit))) + if coef <= 1 { + return base + } + + return time.Duration(coef) * base +} + +// ValidateOverHTTP requests a message validation over HTTP. +// Function returns the validation boolean result. +func ValidateOverHTTP(ctx context.Context, message, schema []byte, url string) (bool, error) { + response, err := sendValidationRequest(ctx, message, schema, url) + if err != nil { + return false, err + } + defer response.Body.Close() + + body, err := io.ReadAll(response.Body) + if err != nil { + return false, err + } + + var parsedBody validationResponse + if err = json.Unmarshal(body, &parsedBody); err != nil { + return false, err + } + + switch response.StatusCode { + case http.StatusOK: + return parsedBody.Validation, nil + case http.StatusBadRequest: + return false, ErrDeadletter + default: + return false, errors.Errorf("error: status code [%v]", response.StatusCode) + } +} + +func sendValidationRequest(ctx context.Context, message, schema []byte, url string) (*http.Response, error) { + // this can't generate an error, so it's safe to ignore + data, _ := json.Marshal(validationRequest{Data: string(message), Schema: string(schema)}) + + request, err := httputil.Post(ctx, url, "application/json", bytes.NewBuffer(data)) + if err != nil { + return nil, err + } + + return http.DefaultClient.Do(request) +} + +// validationRequest contains the message and schema which are used by the validator. The structure represents an HTTP +// request body. +type validationRequest struct { + Data string `json:"data"` + Schema string `json:"schema"` +} + +// validationResponse contains the validation result and an info message. The structure represents an HTTP response body. 
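+//
+// An illustrative body, matching the JSON tags on the struct below (the info text is whatever the external
+// validator reports, e.g. "Data is valid"):
+//
+//	{"validation": true, "info": "Data is valid"}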
+type validationResponse struct { + Validation bool `json:"validation"` + Info string `json:"info"` +} diff --git a/validator/internal/validator/http_test.go b/validator/internal/validator/http_test.go new file mode 100644 index 0000000..f671db5 --- /dev/null +++ b/validator/internal/validator/http_test.go @@ -0,0 +1,120 @@ +package validator + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "reflect" + "testing" + "time" +) + +func TestEstimateHTTPTimeout(t *testing.T) { + tt := []struct { + name string + size int + timeout time.Duration + adjustedTimeout time.Duration + }{ + {"lower than base", 99 * 1024, 1 * time.Second, 1 * time.Second}, + {"1 byte", 1, 1 * time.Second, 1 * time.Second}, + {"equal to base", HTTPTimeoutBytesUnit, 1 * time.Second, 1 * time.Second}, + {"closer to 1", 149 * 1024, 1 * time.Second, 1 * time.Second}, + {"closer to 2", 151 * 1024, 1 * time.Second, 2 * time.Second}, + } + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + adjusted := EstimateHTTPTimeout(tc.size, tc.timeout) + if adjusted != tc.adjustedTimeout { + t.Error("calculated timeout not the same as expected") + } + }) + } +} + +func TestValidateOverHttp(t *testing.T) { + tt := []struct { + name string + expectedRequest validationRequest + expectedResponse []byte + statusCode int + isValid bool + }{ + { + "valid with status code 200", + validationRequest{ + Data: "data sent as the request for validation", + Schema: "schema for the data to be validated against", + }, + []byte("{\"validation\":true,\"info\":\"\"}"), + http.StatusOK, + true, + }, + { + "invalid with status code 200", + validationRequest{ + Data: "data sent as the request for validation", + Schema: "schema for the data to be validated against", + }, + []byte("{\"validation\":false,\"info\":\"\"}"), + http.StatusOK, + false, + }, + { + "bad request", + validationRequest{ + Data: "data sent as the request for validation", + Schema: "schema for the data to be validated against", + }, + []byte("{\"validation\":false,\"info\":\"\"}"), + http.StatusBadRequest, + false, + }, + { + "valid but malformed response because json is missing closing bracket", + validationRequest{ + Data: "data sent as the request for validation", + Schema: "schema for the data to be validated against", + }, + []byte("{\"validation\":true,\"info\":\"\""), + http.StatusBadRequest, + false, + }, + } + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + handler := http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { + defer request.Body.Close() + + var receivedRequest validationRequest + if err := json.NewDecoder(request.Body).Decode(&receivedRequest); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(receivedRequest, tc.expectedRequest) { + t.Fatal("expected and actual request not the same") + } + + writer.WriteHeader(tc.statusCode) + writer.Write(tc.expectedResponse) + }) + srv := httptest.NewServer(handler) + defer srv.Close() + + isValid, err := ValidateOverHTTP(context.Background(), []byte(tc.expectedRequest.Data), []byte(tc.expectedRequest.Schema), srv.URL) + if err != nil { + if tc.statusCode == http.StatusOK { + t.Fatal("error not expected", err) + } + } + + if isValid != tc.isValid { + t.Fatal("expected and actual validation result not the same") + } + }) + } +} diff --git a/validator/internal/validator/json/json.go b/validator/internal/validator/json/json.go new file mode 100644 index 0000000..3d56ab4 --- /dev/null +++ 
b/validator/internal/validator/json/json.go @@ -0,0 +1,133 @@ +package json + +import ( + "bytes" + "encoding/json" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + + lru "github.com/hashicorp/golang-lru" + "github.com/pkg/errors" + "github.com/santhosh-tekuri/jsonschema/v5" + _ "github.com/santhosh-tekuri/jsonschema/v5/httploader" + "github.com/xeipuuv/gojsonschema" +) + +// New returns a validator.Validator which checks JSON messages against JSON schemas, compiling the schema on every call. +func New() validator.Validator { + return validator.Func(func(message, schema []byte, _, _ string) (bool, error) { + var v interface{} + if err := json.Unmarshal(message, &v); err != nil { + errBroken := errors.WithMessage(validator.ErrBrokenMessage, "Message is not in a valid format - "+err.Error()) + return false, errBroken + } + + compiledSchema, err := compileSchema(schema) + if err != nil { + errCompile := errors.WithMessage(validator.ErrWrongCompile, err.Error()) + return false, errCompile + } + + if err = compiledSchema.Validate(v); err != nil { + errValidation := errors.WithMessage(validator.ErrFailedValidation, err.Error()) + return false, errValidation + + } + return true, nil + }) +} + +// NewCached behaves like New, but keeps up to size compiled schemas in an ARC cache, keyed by schema id and version (size must be positive). +func NewCached(size int) validator.Validator { + cache, _ := lru.NewARC(size) + + return validator.Func(func(message, schema []byte, id, version string) (bool, error) { + var parsedMessage interface{} + if err := json.Unmarshal(message, &parsedMessage); err != nil { + errBroken := errors.WithMessage(validator.ErrBrokenMessage, "Message is not in a valid format - "+err.Error()) + return false, errBroken + } + + var compiledSchema *jsonschema.Schema + key := id + "_" + version + v, ok := cache.Get(key) + if !ok { + var err error + compiledSchema, err = compileSchema(schema) + if err != nil { + errCompile := errors.WithMessage(validator.ErrWrongCompile, err.Error()) + return false, errCompile + } + cache.Add(key, compiledSchema) + } else { + compiledSchema = v.(*jsonschema.Schema) + } + + if err := compiledSchema.Validate(parsedMessage); err != nil { + errValidation := errors.WithMessage(validator.ErrFailedValidation, err.Error()) + return false, errValidation + } + return true, nil + }) +} + +// compileSchema compiles the given JSON schema document so it can be reused for validation. +func compileSchema(schema []byte) (*jsonschema.Schema, error) { + compiler := jsonschema.NewCompiler() + if err := compiler.AddResource("schema.json", bytes.NewReader(schema)); err != nil { + return nil, err + } + compiled, err := compiler.Compile("schema.json") + if err != nil { + return nil, err + } + return compiled, nil +} + +// NewGoJsonSchemaValidator returns a validator.Validator backed by the gojsonschema library, compiling the schema on every call. +func NewGoJsonSchemaValidator() validator.Validator { + return validator.Func(func(message, schema []byte, _, _ string) (bool, error) { + if !json.Valid(message) { + return false, validator.ErrDeadletter + } + + schemaValidator, err := gojsonschema.NewSchema(gojsonschema.NewBytesLoader(schema)) + if err != nil { + return false, validator.ErrDeadletter + } + + result, err := schemaValidator.Validate(gojsonschema.NewBytesLoader(message)) + if err != nil { + return false, err + } + + return result.Valid(), nil + }) +} + +// NewCachedGoJsonSchemaValidator behaves like NewGoJsonSchemaValidator, but keeps up to size compiled schemas in an ARC cache, keyed by schema id and version (size must be positive). +func NewCachedGoJsonSchemaValidator(size int) validator.Validator { + cache, _ := lru.NewARC(size) + + return validator.Func(func(message, schema []byte, id, version string) (bool, error) { + if !json.Valid(message) { + return false, validator.ErrDeadletter + } + + var compiledSchema *gojsonschema.Schema + key := id + "_" + version + v, ok := cache.Get(key) + if !ok { + var err error + compiledSchema, err = gojsonschema.NewSchema(gojsonschema.NewBytesLoader(schema)) + if err != nil { + return false, validator.ErrDeadletter + } + cache.Add(key, compiledSchema) + } else 
{ + compiledSchema = v.(*gojsonschema.Schema) + } + + result, err := compiledSchema.Validate(gojsonschema.NewBytesLoader(message)) + if err != nil { + return false, err + } + + return result.Valid(), nil + }) +} diff --git a/validator/internal/validator/json/json_test.go b/validator/internal/validator/json/json_test.go new file mode 100644 index 0000000..c3d9ba0 --- /dev/null +++ b/validator/internal/validator/json/json_test.go @@ -0,0 +1,301 @@ +package json + +import ( + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + + "github.com/pkg/errors" +) + +func TestJSONValidator_Validate(t *testing.T) { + jsonV := New() + + tt := []struct { + name string + dataFilename string + schemaFilename string + valid bool + deadletter bool + }{ + {"valid-1", "valid-1-data.json", "valid-1-schema.json", true, false}, + {"valid-2", "valid-2-data.json", "valid-2-schema.json", true, false}, + {"valid-3", "valid-3-data.json", "valid-3-schema.json", true, false}, + {"valid-4", "valid-4-data.json", "valid-4-schema.json", true, false}, + // {"invalid-1", "invalid-1-data.json", "invalid-1-schema.json", false, false}, + // {"invalid-2", "invalid-2-data.json", "invalid-2-schema.json", false, false}, + // {"invalid-3", "invalid-3-data.json", "invalid-3-schema.json", false, false}, + {"deadletter-1", "deadletter-1-data.json", "deadletter-1-schema.json", false, true}, + {"deadletter-2", "deadletter-2-data.json", "deadletter-2-schema.json", false, true}, + {"data-1", "data-1.json", "schema-1.json", true, false}, + {"data-2", "data-2.json", "schema-2.json", true, false}, + {"data-3", "data-3.json", "schema-3.json", true, false}, + {"data-4", "data-4.json", "schema-4.json", true, false}, + + {"ref-1", "ref-data-1.json", "ref-1.json", true, false}, + {"ref-2", "ref-data-2.json", "ref-2.json", true, false}, + {"ref-3", "ref-data-3.json", "ref-3.json", true, false}, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + data, err := os.ReadFile(filepath.Join(testdataDir, tc.dataFilename)) + if err != nil { + t.Errorf("data read error: %s", err) + } + schema, err := os.ReadFile(filepath.Join(testdataDir, tc.schemaFilename)) + if err != nil { + t.Errorf("schema read error: %s", err) + } + + valid, err := jsonV.Validate(data, schema, "", "") + if tc.deadletter { + if !(errors.Is(err, validator.ErrDeadletter) || errors.Is(err, validator.ErrFailedValidation) || errors.Is(err, validator.ErrWrongCompile) || errors.Is(err, validator.ErrMissingSchema) || errors.Is(err, validator.ErrBrokenMessage)) { + t.Error("deadletter expected") + } + } else { + if err != nil { + t.Errorf("validator error: %s", err) + } + if valid != tc.valid { + if valid { + t.Errorf("message valid, invalid expected") + } else { + t.Errorf("message invalid, valid expected") + } + } + } + }) + } +} + +func BenchmarkValidateStandardImplementation(b *testing.B) { + v := New() + + tt := []struct { + dataFilename string + schemaFilename string + data []byte + schema []byte + }{ + {dataFilename: "valid-1-data.json", schemaFilename: "valid-1-schema.json"}, + {dataFilename: "valid-2-data.json", schemaFilename: "valid-2-schema.json"}, + {dataFilename: "valid-3-data.json", schemaFilename: "valid-3-schema.json"}, + {dataFilename: "valid-4-data.json", schemaFilename: "valid-4-schema.json"}, + {dataFilename: "data-1.json", schemaFilename: 
"schema-1.json"}, + {dataFilename: "data-2.json", schemaFilename: "schema-2.json"}, + {dataFilename: "data-3.json", schemaFilename: "schema-3.json"}, + {dataFilename: "data-4.json", schemaFilename: "schema-4.json"}, + } + + _, base, _, _ := runtime.Caller(0) + basepath := filepath.Dir(base) + testdataDir := filepath.Join(basepath, "testdata") + for i := range tt { + data, err := os.ReadFile(filepath.Join(testdataDir, tt[i].dataFilename)) + if err != nil { + b.Errorf("data read error: %s", err) + } + schema, err := os.ReadFile(filepath.Join(testdataDir, tt[i].schemaFilename)) + if err != nil { + b.Errorf("schema read error: %s", err) + } + + tt[i].data = data + tt[i].schema = schema + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + for _, tc := range tt { + valid, err := v.Validate(tc.data, tc.schema, "", "") + if err != nil { + b.Errorf("schema read error: %s", err) + } + if !valid { + b.Errorf("expected valid") + } + } + } +} + +func BenchmarkValidateCachedImplementation(b *testing.B) { + v := NewCached(100) + + tt := []struct { + dataFilename string + schemaFilename string + id string + version string + data []byte + schema []byte + }{ + {dataFilename: "valid-1-data.json", schemaFilename: "valid-1-schema.json", id: "1", version: "1"}, + {dataFilename: "valid-2-data.json", schemaFilename: "valid-2-schema.json", id: "2", version: "1"}, + {dataFilename: "valid-3-data.json", schemaFilename: "valid-3-schema.json", id: "3", version: "1"}, + {dataFilename: "valid-4-data.json", schemaFilename: "valid-4-schema.json", id: "4", version: "1"}, + {dataFilename: "data-1.json", schemaFilename: "schema-1.json", id: "5", version: "1"}, + {dataFilename: "data-2.json", schemaFilename: "schema-2.json", id: "6", version: "1"}, + {dataFilename: "data-3.json", schemaFilename: "schema-3.json", id: "7", version: "1"}, + {dataFilename: "data-4.json", schemaFilename: "schema-4.json", id: "8", version: "1"}, + } + + _, base, _, _ := runtime.Caller(0) + basepath := filepath.Dir(base) + testdataDir := filepath.Join(basepath, "testdata") + for i := range tt { + data, err := os.ReadFile(filepath.Join(testdataDir, tt[i].dataFilename)) + if err != nil { + b.Errorf("data read error: %s", err) + } + schema, err := os.ReadFile(filepath.Join(testdataDir, tt[i].schemaFilename)) + if err != nil { + b.Errorf("schema read error: %s", err) + } + + tt[i].data = data + tt[i].schema = schema + } + + for _, tc := range tt { + valid, err := v.Validate(tc.data, tc.schema, tc.id, tc.version) + if err != nil { + b.Errorf("schema read error: %s", err) + } + if !valid { + b.Errorf("expected valid") + } + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + for _, tc := range tt { + valid, err := v.Validate(tc.data, tc.schema, tc.id, tc.version) + if err != nil { + b.Errorf("schema read error: %s", err) + } + if !valid { + b.Errorf("expected valid") + } + } + } +} + +func BenchmarkValidateGoJsonSchema(b *testing.B) { + v := NewGoJsonSchemaValidator() + + tt := []struct { + dataFilename string + schemaFilename string + data []byte + schema []byte + }{ + {dataFilename: "valid-1-data.json", schemaFilename: "valid-1-schema.json"}, + {dataFilename: "valid-2-data.json", schemaFilename: "valid-2-schema.json"}, + {dataFilename: "valid-3-data.json", schemaFilename: "valid-3-schema.json"}, + {dataFilename: "valid-4-data.json", schemaFilename: "valid-4-schema.json"}, + {dataFilename: "data-1.json", schemaFilename: "schema-1.json"}, + {dataFilename: "data-2.json", schemaFilename: "schema-2.json"}, + {dataFilename: "data-3.json", schemaFilename: 
"schema-3.json"}, + {dataFilename: "data-4.json", schemaFilename: "schema-4.json"}, + } + + _, base, _, _ := runtime.Caller(0) + basepath := filepath.Dir(base) + testdataDir := filepath.Join(basepath, "testdata") + for i := range tt { + data, err := os.ReadFile(filepath.Join(testdataDir, tt[i].dataFilename)) + if err != nil { + b.Errorf("data read error: %s", err) + } + schema, err := os.ReadFile(filepath.Join(testdataDir, tt[i].schemaFilename)) + if err != nil { + b.Errorf("schema read error: %s", err) + } + + tt[i].data = data + tt[i].schema = schema + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + for _, tc := range tt { + valid, err := v.Validate(tc.data, tc.schema, "", "") + if err != nil { + b.Errorf("schema read error: %s", err) + } + if !valid { + b.Errorf("expected valid") + } + } + } +} + +func BenchmarkValidateCachedGoJsonSchema(b *testing.B) { + v := NewCachedGoJsonSchemaValidator(100) + + tt := []struct { + dataFilename string + schemaFilename string + id string + version string + data []byte + schema []byte + }{ + {dataFilename: "valid-1-data.json", schemaFilename: "valid-1-schema.json", id: "1", version: "1"}, + {dataFilename: "valid-2-data.json", schemaFilename: "valid-2-schema.json", id: "2", version: "1"}, + {dataFilename: "valid-3-data.json", schemaFilename: "valid-3-schema.json", id: "3", version: "1"}, + {dataFilename: "valid-4-data.json", schemaFilename: "valid-4-schema.json", id: "4", version: "1"}, + {dataFilename: "data-1.json", schemaFilename: "schema-1.json", id: "5", version: "1"}, + {dataFilename: "data-2.json", schemaFilename: "schema-2.json", id: "6", version: "1"}, + {dataFilename: "data-3.json", schemaFilename: "schema-3.json", id: "7", version: "1"}, + {dataFilename: "data-4.json", schemaFilename: "schema-4.json", id: "8", version: "1"}, + } + + _, base, _, _ := runtime.Caller(0) + basepath := filepath.Dir(base) + testdataDir := filepath.Join(basepath, "testdata") + for i := range tt { + data, err := os.ReadFile(filepath.Join(testdataDir, tt[i].dataFilename)) + if err != nil { + b.Errorf("data read error: %s", err) + } + schema, err := os.ReadFile(filepath.Join(testdataDir, tt[i].schemaFilename)) + if err != nil { + b.Errorf("schema read error: %s", err) + } + + tt[i].data = data + tt[i].schema = schema + } + + for _, tc := range tt { + valid, err := v.Validate(tc.data, tc.schema, tc.id, tc.version) + if err != nil { + b.Errorf("schema read error: %s", err) + } + if !valid { + b.Errorf("expected valid") + } + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + for _, tc := range tt { + valid, err := v.Validate(tc.data, tc.schema, tc.id, tc.version) + if err != nil { + b.Errorf("schema read error: %s", err) + } + if !valid { + b.Errorf("expected valid") + } + } + } +} diff --git a/validator/internal/validator/json/testdata/data-1.json b/validator/internal/validator/json/testdata/data-1.json new file mode 100644 index 0000000..3fe0812 --- /dev/null +++ b/validator/internal/validator/json/testdata/data-1.json @@ -0,0 +1,5 @@ +{ + "firstName": "John", + "lastName": "Doe", + "age": 21 +} diff --git a/validator/internal/validator/json/testdata/data-2.json b/validator/internal/validator/json/testdata/data-2.json new file mode 100644 index 0000000..7d1ee16 --- /dev/null +++ b/validator/internal/validator/json/testdata/data-2.json @@ -0,0 +1,13 @@ +{ + "fruits": [ "apple", "orange", "pear" ], + "vegetables": [ + { + "veggieName": "potato", + "veggieLike": true + }, + { + "veggieName": "broccoli", + "veggieLike": false + } + ] +} diff --git 
a/validator/internal/validator/json/testdata/data-3.json b/validator/internal/validator/json/testdata/data-3.json new file mode 100644 index 0000000..68e8620 --- /dev/null +++ b/validator/internal/validator/json/testdata/data-3.json @@ -0,0 +1,13 @@ +{ + "id": 7, + "name": "John Doe", + "age": 22, + "hobbies": { + "indoor": [ + "Chess" + ], + "outdoor": [ + "BasketballStand-up Comedy" + ] + } +} diff --git a/validator/internal/validator/json/testdata/data-4.json b/validator/internal/validator/json/testdata/data-4.json new file mode 100644 index 0000000..1ee80eb --- /dev/null +++ b/validator/internal/validator/json/testdata/data-4.json @@ -0,0 +1,100 @@ +{ + "web-app": { + "servlet": [ + { + "servlet-name": "cofaxCDS", + "servlet-class": "org.cofax.cds.CDSServlet", + "init-param": { + "configGlossary:installationAt": "Philadelphia, PA", + "configGlossary:adminEmail": "ksm@pobox.com", + "configGlossary:poweredBy": "Cofax", + "configGlossary:poweredByIcon": "/images/cofax.gif", + "configGlossary:staticPath": "/content/static", + "templateProcessorClass": "org.cofax.WysiwygTemplate", + "templateLoaderClass": "org.cofax.FilesTemplateLoader", + "templatePath": "templates", + "templateOverridePath": "", + "defaultListTemplate": "listTemplate.htm", + "defaultFileTemplate": "articleTemplate.htm", + "useJSP": false, + "jspListTemplate": "listTemplate.jsp", + "jspFileTemplate": "articleTemplate.jsp", + "cachePackageTagsTrack": 200, + "cachePackageTagsStore": 200, + "cachePackageTagsRefresh": 60, + "cacheTemplatesTrack": 100, + "cacheTemplatesStore": 50, + "cacheTemplatesRefresh": 15, + "cachePagesTrack": 200, + "cachePagesStore": 100, + "cachePagesRefresh": 10, + "cachePagesDirtyRead": 10, + "searchEngineListTemplate": "forSearchEnginesList.htm", + "searchEngineFileTemplate": "forSearchEngines.htm", + "searchEngineRobotsDb": "WEB-INF/robots.db", + "useDataStore": true, + "dataStoreClass": "org.cofax.SqlDataStore", + "redirectionClass": "org.cofax.SqlRedirection", + "dataStoreName": "cofax", + "dataStoreDriver": "com.microsoft.jdbc.sqlserver.SQLServerDriver", + "dataStoreUrl": "jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon", + "dataStoreUser": "sa", + "dataStorePassword": "dataStoreTestQuery", + "dataStoreTestQuery": "SET NOCOUNT ON;select test='test';", + "dataStoreLogFile": "/usr/local/tomcat/logs/datastore.log", + "dataStoreInitConns": 10, + "dataStoreMaxConns": 100, + "dataStoreConnUsageLimit": 100, + "dataStoreLogLevel": "debug", + "maxUrlLength": 500 + } + }, + { + "servlet-name": "cofaxEmail", + "servlet-class": "org.cofax.cds.EmailServlet", + "init-param": { + "mailHost": "mail1", + "mailHostOverride": "mail2" + } + }, + { + "servlet-name": "cofaxAdmin", + "servlet-class": "org.cofax.cds.AdminServlet" + }, + { + "servlet-name": "fileServlet", + "servlet-class": "org.cofax.cds.FileServlet" + }, + { + "servlet-name": "cofaxTools", + "servlet-class": "org.cofax.cms.CofaxToolsServlet", + "init-param": { + "templatePath": "toolstemplates/", + "log": 1, + "logLocation": "/usr/local/tomcat/logs/CofaxTools.log", + "logMaxSize": "", + "dataLog": 1, + "dataLogLocation": "/usr/local/tomcat/logs/dataLog.log", + "dataLogMaxSize": "", + "removePageCache": "/content/admin/remove?cache=pages&id=", + "removeTemplateCache": "/content/admin/remove?cache=templates&id=", + "fileTransferFolder": "/usr/local/tomcat/webapps/content/fileTransferFolder", + "lookInContext": 1, + "adminGroupID": 4, + "betaServer": true + } + } + ], + "servlet-mapping": { + "cofaxCDS": "/", + "cofaxEmail": 
"/cofaxutil/aemail/*", + "cofaxAdmin": "/admin/*", + "fileServlet": "/static/*", + "cofaxTools": "/tools/*" + }, + "taglib": { + "taglib-uri": "cofax.tld", + "taglib-location": "/WEB-INF/tlds/cofax.tld" + } + } +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/deadletter-1-data.json b/validator/internal/validator/json/testdata/deadletter-1-data.json new file mode 100644 index 0000000..ec3bdf2 --- /dev/null +++ b/validator/internal/validator/json/testdata/deadletter-1-data.json @@ -0,0 +1,5 @@ + "id": 100, + "first_name": "syn jason", + "last_name": "syn oblak", + "email": "jsonsmail" +} diff --git a/validator/internal/validator/json/testdata/deadletter-1-schema.json b/validator/internal/validator/json/testdata/deadletter-1-schema.json new file mode 100644 index 0000000..e38e3b0 --- /dev/null +++ b/validator/internal/validator/json/testdata/deadletter-1-schema.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": false, + "required": [ + "id", + "first_name", + "last_name", + "email" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + } + } +} diff --git a/validator/internal/validator/json/testdata/deadletter-2-data.json b/validator/internal/validator/json/testdata/deadletter-2-data.json new file mode 100644 index 0000000..241d4ce --- /dev/null +++ b/validator/internal/validator/json/testdata/deadletter-2-data.json @@ -0,0 +1,6 @@ +{ + "id": 100, + "first_name": "syn jason", + "last_name": "syn oblak", + "email": "jsonsmail" +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/deadletter-2-schema.json b/validator/internal/validator/json/testdata/deadletter-2-schema.json new file mode 100644 index 0000000..162468a --- /dev/null +++ b/validator/internal/validator/json/testdata/deadletter-2-schema.json @@ -0,0 +1,50 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": false, + "required": [ + "id", + "first_name", + "last_name", + "email" + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about 
the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + } + } +} diff --git a/validator/internal/validator/json/testdata/invalid-1-data.json b/validator/internal/validator/json/testdata/invalid-1-data.json new file mode 100644 index 0000000..05a84de --- /dev/null +++ b/validator/internal/validator/json/testdata/invalid-1-data.json @@ -0,0 +1,7 @@ +{ + "id": 100, + "first_name": "Syntio Martina", + "last_name": "Syntio surname", + "email": "businessEmail@syntio.net", + "phone": "090 111 222 33" +} diff --git a/validator/internal/validator/json/testdata/invalid-1-schema.json b/validator/internal/validator/json/testdata/invalid-1-schema.json new file mode 100644 index 0000000..8865b61 --- /dev/null +++ b/validator/internal/validator/json/testdata/invalid-1-schema.json @@ -0,0 +1,72 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": true, + "required": [ + "id", + "first_name", + "last_name", + "email", + "phone", + "address" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + }, + "phone": { + "type": "string", + "title": "The Phone Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "090 111 222 33" + ] + }, + "address": { + "type": "string", + "title": "The Address Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio Building, Zagreb" + ] + } + } +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/invalid-2-data.json b/validator/internal/validator/json/testdata/invalid-2-data.json new file mode 100644 index 0000000..35dc8c3 --- /dev/null +++ b/validator/internal/validator/json/testdata/invalid-2-data.json @@ -0,0 +1,9 @@ +{ + "id": 100, + "first_name": "Syntsreeertina", + "last_name": "Syntio sdjjsurname", + "email": "busiddddnessEmail@syntio.net", + "phone": "090 111 222 33", + "address": "Syntio Building, Zagreb 2", + "additionalProperty": "not allowed" +} diff --git a/validator/internal/validator/json/testdata/invalid-2-schema.json b/validator/internal/validator/json/testdata/invalid-2-schema.json new file mode 100644 index 0000000..71b7472 --- /dev/null +++ b/validator/internal/validator/json/testdata/invalid-2-schema.json @@ -0,0 +1,72 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The 
root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": false, + "required": [ + "id", + "first_name", + "last_name", + "email", + "phone", + "address" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + }, + "phone": { + "type": "string", + "title": "The Phone Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "090 111 222 33" + ] + }, + "address": { + "type": "string", + "title": "The Address Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio Building, Zagreb" + ] + } + } +} diff --git a/validator/internal/validator/json/testdata/invalid-3-data.json b/validator/internal/validator/json/testdata/invalid-3-data.json new file mode 100644 index 0000000..fe89e0b --- /dev/null +++ b/validator/internal/validator/json/testdata/invalid-3-data.json @@ -0,0 +1,16 @@ +{ + "data": { + "id": 101, + "first_name": "name", + "last_name": "surname", + "email": "businessEmail@company.net", + "phone": "090 111 222 33", + "address": "Company Building, Town", + "home number": "33a" + }, + "attributes": { + "schemaId": "ShZjhdu76jjstre", + "versionId": 2, + "format": "json" + } +} diff --git a/validator/internal/validator/json/testdata/invalid-3-schema.json b/validator/internal/validator/json/testdata/invalid-3-schema.json new file mode 100644 index 0000000..8865b61 --- /dev/null +++ b/validator/internal/validator/json/testdata/invalid-3-schema.json @@ -0,0 +1,72 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": true, + "required": [ + "id", + "first_name", + "last_name", + "email", + "phone", + "address" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + }, + "phone": { + "type": "string", + "title": "The Phone Schema", + "description": "An explanation about the purpose of 
this instance.", + "default": "", + "examples": [ + "090 111 222 33" + ] + }, + "address": { + "type": "string", + "title": "The Address Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio Building, Zagreb" + ] + } + } +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/invalid-4-data.json b/validator/internal/validator/json/testdata/invalid-4-data.json new file mode 100644 index 0000000..05a84de --- /dev/null +++ b/validator/internal/validator/json/testdata/invalid-4-data.json @@ -0,0 +1,7 @@ +{ + "id": 100, + "first_name": "Syntio Martina", + "last_name": "Syntio surname", + "email": "businessEmail@syntio.net", + "phone": "090 111 222 33" +} diff --git a/validator/internal/validator/json/testdata/invalid-4-schema.json b/validator/internal/validator/json/testdata/invalid-4-schema.json new file mode 100644 index 0000000..cf350a1 --- /dev/null +++ b/validator/internal/validator/json/testdata/invalid-4-schema.json @@ -0,0 +1,74 @@ +{ + SYNTAX + ERROR + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": true, + "required": [ + "id", + "first_name", + "last_name", + "email", + "phone", + "address" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + }, + "phone": { + "type": "string", + "title": "The Phone Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "090 111 222 33" + ] + }, + "address": { + "type": "string", + "title": "The Address Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio Building, Zagreb" + ] + } + } +} diff --git a/validator/internal/validator/json/testdata/ref-1-child.json b/validator/internal/validator/json/testdata/ref-1-child.json new file mode 100644 index 0000000..8328bc7 --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-1-child.json @@ -0,0 +1,48 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "http://schema.my-site.org/geometry.json", + "type": "object", + "title": "The Address Schema", + "description": "Reusable Address schema", + "default": {}, + "additionalProperties": false, + "required": [ + "street", + "zip", + "city", + "country" + ], + "properties": { + "street": { + "type": "string", + "title": "Street name" + }, + "streetNo": { + "type": "string", + "title": "House/Apartment number" + }, + "place": { + "type": "string", + "title": "Place name", + "default": "" + }, + "zip": { + "type": "string", + "title": "Zip code", + "default": "" + }, + "city": { + "type": "string", + 
"title": "City name", + "default": "" + }, + "region": { + "type": "string", + "title": "Region name" + }, + "country": { + "type": "string", + "title": "Country name" + } + } +} diff --git a/validator/internal/validator/json/testdata/ref-1.json b/validator/internal/validator/json/testdata/ref-1.json new file mode 100644 index 0000000..bfe8bc9 --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-1.json @@ -0,0 +1,26 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Customer master data", + "description": "Customer MDM schema", + "default": {}, + "additionalProperties": true, + "required": [ + "name", + "personalid", + "address" + ], + "properties": { + "name": { + "type": "string", + "title": "Customer name" + }, + "personalid": { + "type": "string", + "title": "Personal document ID number" + }, + "address": { + "$ref": "./testdata/ref-1-child.json" + } + } +} diff --git a/validator/internal/validator/json/testdata/ref-2-child.json b/validator/internal/validator/json/testdata/ref-2-child.json new file mode 100644 index 0000000..24f5ebf --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-2-child.json @@ -0,0 +1,16 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "url": { + "id": "url", + "type": "string", + "format": "uri" + }, + "name": { + "id": "name", + "type": "string" + } + }, + "required": ["name"] +} diff --git a/validator/internal/validator/json/testdata/ref-2.json b/validator/internal/validator/json/testdata/ref-2.json new file mode 100644 index 0000000..90adc06 --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-2.json @@ -0,0 +1,26 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "allOf": [ + { "$ref": "./testdata/ref-2-child.json" }, + { + "type": "object", + "properties": { + "gender": { + "id": "gender", + "type": "string", + "enum": ["F", "M"] + }, + "nationality": { + "id": "nationality", + "type": "string" + }, + "birthDate": { + "id": "birthDate", + "type": "string", + "format": "date-time" + } + }, + "required": ["gender"] + } + ] +} diff --git a/validator/internal/validator/json/testdata/ref-3-child.json b/validator/internal/validator/json/testdata/ref-3-child.json new file mode 100644 index 0000000..b8a7c46 --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-3-child.json @@ -0,0 +1,22 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Address Schema", + "description": "Reusable Address schema", + "default": {}, + "additionalProperties": false, + "required": [ + "street" + ], + "properties": { + "street": { + "type": "string", + "title": "Street name" + }, + "city": { + "type": "string", + "title": "City name", + "default": "" + } + } +} diff --git a/validator/internal/validator/json/testdata/ref-3.json b/validator/internal/validator/json/testdata/ref-3.json new file mode 100644 index 0000000..e4e9750 --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-3.json @@ -0,0 +1,21 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Customer master data", + "description": "Customer MDM schema", + "default": {}, + "additionalProperties": true, + "required": [ + "name", + "address" + ], + "properties": { + "name": { + "type": "string", + "title": "Customer name" + }, + "address": { + "$ref": "./testdata/ref-3-child.json" + } + } +} diff --git a/validator/internal/validator/json/testdata/ref-data-1.json 
b/validator/internal/validator/json/testdata/ref-data-1.json new file mode 100644 index 0000000..69a8390 --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-data-1.json @@ -0,0 +1,10 @@ +{ + "name": "Pero", + "personalid": "123", + "address": { + "street": "123", + "zip": "123", + "city": "asd", + "country": "asd" + } +} diff --git a/validator/internal/validator/json/testdata/ref-data-2.json b/validator/internal/validator/json/testdata/ref-data-2.json new file mode 100644 index 0000000..4701e49 --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-data-2.json @@ -0,0 +1,4 @@ +{ + "name": "Pero", + "gender": "M" +} diff --git a/validator/internal/validator/json/testdata/ref-data-3.json b/validator/internal/validator/json/testdata/ref-data-3.json new file mode 100644 index 0000000..58080aa --- /dev/null +++ b/validator/internal/validator/json/testdata/ref-data-3.json @@ -0,0 +1,7 @@ +{ + "name": "jade", + "address": { + "street": "7th street", + "city": "zagreb" + } +} diff --git a/validator/internal/validator/json/testdata/schema-1.json b/validator/internal/validator/json/testdata/schema-1.json new file mode 100644 index 0000000..687ae74 --- /dev/null +++ b/validator/internal/validator/json/testdata/schema-1.json @@ -0,0 +1,21 @@ +{ + "$id": "https://example.com/person.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Person", + "type": "object", + "properties": { + "firstName": { + "type": "string", + "description": "The person's first name." + }, + "lastName": { + "type": "string", + "description": "The person's last name." + }, + "age": { + "description": "Age in years which must be equal to or greater than zero.", + "type": "integer", + "minimum": 0 + } + } +} diff --git a/validator/internal/validator/json/testdata/schema-2.json b/validator/internal/validator/json/testdata/schema-2.json new file mode 100644 index 0000000..72ab606 --- /dev/null +++ b/validator/internal/validator/json/testdata/schema-2.json @@ -0,0 +1,34 @@ +{ + "$id": "https://example.com/arrays.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "description": "A representation of a person, company, organization, or place", + "type": "object", + "properties": { + "fruits": { + "type": "array", + "items": { + "type": "string" + } + }, + "vegetables": { + "type": "array", + "items": { "$ref": "#/$defs/veggie" } + } + }, + "$defs": { + "veggie": { + "type": "object", + "required": [ "veggieName", "veggieLike" ], + "properties": { + "veggieName": { + "type": "string", + "description": "The name of the vegetable." + }, + "veggieLike": { + "type": "boolean", + "description": "Do I like this vegetable?" 
+ } + } + } + } +} diff --git a/validator/internal/validator/json/testdata/schema-3.json b/validator/internal/validator/json/testdata/schema-3.json new file mode 100644 index 0000000..3a8ad3f --- /dev/null +++ b/validator/internal/validator/json/testdata/schema-3.json @@ -0,0 +1,41 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$id": "https://example.com/employee.schema.json", + "title": "Record of employee", + "description": "This document records the details of an employee", + "type": "object", + "properties": { + "id": { + "description": "A unique identifier for an employee", + "type": "number" + }, + "name": { + "description": "Full name of the employee", + "type": "string" + }, + "age": { + "description": "Age of the employee", + "type": "number" + }, + "hobbies": { + "description": "Hobbies of the employee", + "type": "object", + "properties": { + "indoor": { + "type": "array", + "items": { + "description": "List of indoor hobbies", + "type": "string" + } + }, + "outdoor": { + "type": "array", + "items": { + "description": "List of outdoor hobbies", + "type": "string" + } + } + } + } + } +} diff --git a/validator/internal/validator/json/testdata/schema-4.json b/validator/internal/validator/json/testdata/schema-4.json new file mode 100644 index 0000000..f46f61d --- /dev/null +++ b/validator/internal/validator/json/testdata/schema-4.json @@ -0,0 +1,253 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": false, + "type": "object", + "properties": { + "web-app": { + "type": "object", + "properties": { + "servlet": { + "type": "array", + "items": { + "type": "object", + "properties": { + "servlet-name": { + "type": "string" + }, + "servlet-class": { + "type": "string" + }, + "init-param": { + "type": "object", + "properties": { + "configGlossary:installationAt": { + "type": "string" + }, + "configGlossary:adminEmail": { + "type": "string" + }, + "configGlossary:poweredBy": { + "type": "string" + }, + "configGlossary:poweredByIcon": { + "type": "string" + }, + "configGlossary:staticPath": { + "type": "string" + }, + "templateProcessorClass": { + "type": "string" + }, + "templateLoaderClass": { + "type": "string" + }, + "templatePath": { + "type": "string" + }, + "templateOverridePath": { + "type": "string" + }, + "defaultListTemplate": { + "type": "string" + }, + "defaultFileTemplate": { + "type": "string" + }, + "useJSP": { + "type": "boolean" + }, + "jspListTemplate": { + "type": "string" + }, + "jspFileTemplate": { + "type": "string" + }, + "cachePackageTagsTrack": { + "type": "integer" + }, + "cachePackageTagsStore": { + "type": "integer" + }, + "cachePackageTagsRefresh": { + "type": "integer" + }, + "cacheTemplatesTrack": { + "type": "integer" + }, + "cacheTemplatesStore": { + "type": "integer" + }, + "cacheTemplatesRefresh": { + "type": "integer" + }, + "cachePagesTrack": { + "type": "integer" + }, + "cachePagesStore": { + "type": "integer" + }, + "cachePagesRefresh": { + "type": "integer" + }, + "cachePagesDirtyRead": { + "type": "integer" + }, + "searchEngineListTemplate": { + "type": "string" + }, + "searchEngineFileTemplate": { + "type": "string" + }, + "searchEngineRobotsDb": { + "type": "string" + }, + "useDataStore": { + "type": "boolean" + }, + "dataStoreClass": { + "type": "string" + }, + "redirectionClass": { + "type": "string" + }, + "dataStoreName": { + "type": "string" + }, + "dataStoreDriver": { + "type": "string" + }, + "dataStoreUrl": { + "type": "string" + }, + "dataStoreUser": { + "type": "string" + }, + 
"dataStorePassword": { + "type": "string" + }, + "dataStoreTestQuery": { + "type": "string" + }, + "dataStoreLogFile": { + "type": "string" + }, + "dataStoreInitConns": { + "type": "integer" + }, + "dataStoreMaxConns": { + "type": "integer" + }, + "dataStoreConnUsageLimit": { + "type": "integer" + }, + "dataStoreLogLevel": { + "type": "string" + }, + "maxUrlLength": { + "type": "integer" + }, + "mailHost": { + "type": "string" + }, + "mailHostOverride": { + "type": "string" + }, + "log": { + "type": "integer" + }, + "logLocation": { + "type": "string" + }, + "logMaxSize": { + "type": "string" + }, + "dataLog": { + "type": "integer" + }, + "dataLogLocation": { + "type": "string" + }, + "dataLogMaxSize": { + "type": "string" + }, + "removePageCache": { + "type": "string" + }, + "removeTemplateCache": { + "type": "string" + }, + "fileTransferFolder": { + "type": "string" + }, + "lookInContext": { + "type": "integer" + }, + "adminGroupID": { + "type": "integer" + }, + "betaServer": { + "type": "boolean" + } + } + } + }, + "required": [ + "servlet-class", + "servlet-name" + ] + } + }, + "servlet-mapping": { + "type": "object", + "properties": { + "cofaxCDS": { + "type": "string" + }, + "cofaxEmail": { + "type": "string" + }, + "cofaxAdmin": { + "type": "string" + }, + "fileServlet": { + "type": "string" + }, + "cofaxTools": { + "type": "string" + } + }, + "required": [ + "cofaxAdmin", + "cofaxCDS", + "cofaxEmail", + "cofaxTools", + "fileServlet" + ] + }, + "taglib": { + "type": "object", + "properties": { + "taglib-uri": { + "type": "string" + }, + "taglib-location": { + "type": "string" + } + }, + "required": [ + "taglib-location", + "taglib-uri" + ] + } + }, + "required": [ + "servlet", + "servlet-mapping", + "taglib" + ] + } + }, + "required": [ + "web-app" + ] +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/valid-1-data.json b/validator/internal/validator/json/testdata/valid-1-data.json new file mode 100644 index 0000000..241d4ce --- /dev/null +++ b/validator/internal/validator/json/testdata/valid-1-data.json @@ -0,0 +1,6 @@ +{ + "id": 100, + "first_name": "syn jason", + "last_name": "syn oblak", + "email": "jsonsmail" +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/valid-1-schema.json b/validator/internal/validator/json/testdata/valid-1-schema.json new file mode 100644 index 0000000..e38e3b0 --- /dev/null +++ b/validator/internal/validator/json/testdata/valid-1-schema.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": false, + "required": [ + "id", + "first_name", + "last_name", + "email" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + 
"default": "", + "examples": [ + "businessEmail@syntio.net" + ] + } + } +} diff --git a/validator/internal/validator/json/testdata/valid-2-data.json b/validator/internal/validator/json/testdata/valid-2-data.json new file mode 100644 index 0000000..fa08e34 --- /dev/null +++ b/validator/internal/validator/json/testdata/valid-2-data.json @@ -0,0 +1,6 @@ +{ + "id": 67, + "first_name": "syn mia", + "last_name": "syn oblakyy", + "email": "miasmail" +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/valid-2-schema.json b/validator/internal/validator/json/testdata/valid-2-schema.json new file mode 100644 index 0000000..e38e3b0 --- /dev/null +++ b/validator/internal/validator/json/testdata/valid-2-schema.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": false, + "required": [ + "id", + "first_name", + "last_name", + "email" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + } + } +} diff --git a/validator/internal/validator/json/testdata/valid-3-data.json b/validator/internal/validator/json/testdata/valid-3-data.json new file mode 100644 index 0000000..a22878d --- /dev/null +++ b/validator/internal/validator/json/testdata/valid-3-data.json @@ -0,0 +1,6 @@ +{ + "id": 32, + "first_name": "syn ena", + "last_name": "syn dyy", + "email": "enasmail" +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/valid-3-schema.json b/validator/internal/validator/json/testdata/valid-3-schema.json new file mode 100644 index 0000000..e38e3b0 --- /dev/null +++ b/validator/internal/validator/json/testdata/valid-3-schema.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": false, + "required": [ + "id", + "first_name", + "last_name", + "email" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An 
explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + } + } +} diff --git a/validator/internal/validator/json/testdata/valid-4-data.json b/validator/internal/validator/json/testdata/valid-4-data.json new file mode 100644 index 0000000..ef71bbd --- /dev/null +++ b/validator/internal/validator/json/testdata/valid-4-data.json @@ -0,0 +1,6 @@ +{ + "id": 12, + "first_name": "syn jure", + "last_name": "syn koma", + "email": "juresmail" +} \ No newline at end of file diff --git a/validator/internal/validator/json/testdata/valid-4-schema.json b/validator/internal/validator/json/testdata/valid-4-schema.json new file mode 100644 index 0000000..e38e3b0 --- /dev/null +++ b/validator/internal/validator/json/testdata/valid-4-schema.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "type": "object", + "title": "The Root Schema", + "description": "The root schema comprises the entire JSON document.", + "default": {}, + "additionalProperties": false, + "required": [ + "id", + "first_name", + "last_name", + "email" + ], + "properties": { + "id": { + "type": "integer", + "title": "The Id Schema", + "description": "An explanation about the purpose of this instance.", + "default": 0, + "examples": [ + 100.0 + ] + }, + "first_name": { + "type": "string", + "title": "The First_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio name" + ] + }, + "last_name": { + "type": "string", + "title": "The Last_name Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "Syntio surname" + ] + }, + "email": { + "type": "string", + "title": "The Email Schema", + "description": "An explanation about the purpose of this instance.", + "default": "", + "examples": [ + "businessEmail@syntio.net" + ] + } + } +} diff --git a/validator/internal/validator/protobuf/protobuf.go b/validator/internal/validator/protobuf/protobuf.go new file mode 100644 index 0000000..b303a30 --- /dev/null +++ b/validator/internal/validator/protobuf/protobuf.go @@ -0,0 +1,170 @@ +package protobuf + +import ( + "bufio" + "os" + "path/filepath" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + + lru "github.com/hashicorp/golang-lru" + "github.com/jhump/protoreflect/desc" + "github.com/jhump/protoreflect/desc/protoparse" + "github.com/jhump/protoreflect/dynamic" + "github.com/pkg/errors" + "golang.org/x/sync/singleflight" +) + +type Validator struct { + Dir string + group singleflight.Group + cache *lru.TwoQueueCache +} + +// New returns a new instance of a protobuf validator.Validator. +// +// Since the validator needs to write to disk, a path to the used directory is needed, as well +// as a cache size which will be used to avoid writing to disk for each validation request. 
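+//
+// A minimal usage sketch (illustrative only; the directory path, the cache size of 100,
+// and the schema id/version values below are placeholders, not defaults of this package,
+// and message/schema stand for the raw payload and .proto schema bytes):
+//
+//	v, err := New("./schemas", 100)
+//	if err != nil {
+//		// handle the error
+//	}
+//	ok, err := v.Validate(message, schema, "1", "1")
+//	// ok reports whether the message conforms to the schema.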
+func New(dir string, cacheSize int) (validator.Validator, error) { + if err := os.MkdirAll(dir, os.ModePerm); err != nil { + return nil, err + } + + cache, err := lru.New2Q(cacheSize) + if err != nil { + return nil, err + } + + return &Validator{ + Dir: dir, + cache: cache, + }, nil +} + +func (v *Validator) Validate(message, schema []byte, id, version string) (bool, error) { + filename := id + "_" + version + ".txt" + descriptor, err := v.getMainMessageDescriptor(filename, schema) + if err != nil { + return false, err + } + + if err = descriptor.Unmarshal(message); err != nil { + return false, nil + } + + if hasUnknownFields(descriptor) { + return false, nil + } + if err = descriptor.ValidateRecursive(); err != nil { + return false, nil + } + + return true, nil +} + +// getMainMessageDescriptor returns a fresh dynamic.Message instance for the given schema. +// +// Because the used libraries require schemas to be read from disk, a lru cache is used to avoid I/O operations +// for each validation request. This in turn means every cache miss requires checking if the schema is stored to disk, +// (storing it if necessary), then reading and caching it. +func (v *Validator) getMainMessageDescriptor(filename string, schema []byte) (*dynamic.Message, error) { + var descriptor *desc.FileDescriptor + var err error + + path := filepath.Join(v.Dir, filename) + // try to retrieve the processed .proto message from the cache + val, ok := v.cache.Get(path) + if !ok { + // if it isn't in the cache, check if it is already written to disk + if _, err = os.Stat(path); errors.Is(err, os.ErrNotExist) { + if err = v.writeSchemaToFile(path, schema); err != nil { + return nil, err + } + } + + // now we can load the written .proto schema into a message descriptor + descriptor, err = loadSchemaIntoDescriptor(v.Dir, filename) + if err != nil { + return nil, err + } + + v.cache.Add(path, descriptor) + } else { + descriptor = val.(*desc.FileDescriptor) + } + + return parseDescriptor(descriptor) +} + +// writeSchemaToFile writes the given schema under the given path. +// +// A singleflight.Group is used to ensure concurrent request for the same schema only write +// the schema once (I/O is expensive). +func (v *Validator) writeSchemaToFile(path string, schema []byte) error { + _, err, _ := v.group.Do(path, func() (interface{}, error) { + f, err := os.Create(path) + if err != nil { + return nil, err + } + + w := bufio.NewWriter(f) + if _, err = w.Write(schema); err != nil { + return nil, err + } + if err = w.Flush(); err != nil { + return nil, err + } + + return nil, f.Close() + }) + if err != nil { + return err + } + + return nil +} + +// loadSchemaIntoDescriptor retrieves a file descriptor of a .proto file stored under filename, +// under the given import path. +func loadSchemaIntoDescriptor(importPath, filename string) (*desc.FileDescriptor, error) { + parser := protoparse.Parser{ + ImportPaths: []string{importPath}, + } + fileDescriptors, err := parser.ParseFiles(filename) + if err != nil { + return nil, err + } + + return fileDescriptors[0], nil +} + +// parseDescriptor parses the given file descriptor into a dynamic.Message instance, returning an error +// if there are multiple top-level messages defined in the given descriptor. 
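+//
+// As an illustration only (mirroring the shape of the schemas under testdata/), a schema
+// accepted by this validator declares exactly one top-level message; nested messages are fine:
+//
+//	syntax = "proto3";
+//	message Record {
+//	  string name = 1;
+//	  message Foo { string bar = 1; }
+//	  Foo foo = 2;
+//	}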
+func parseDescriptor(descriptor *desc.FileDescriptor) (*dynamic.Message, error) { + messageDescriptors := descriptor.GetMessageTypes() + if len(messageDescriptors) == 0 { + return nil, errors.Wrap(validator.ErrDeadletter, "no message definitions were found in the .proto file") + } + if len(messageDescriptors) > 1 { + return nil, errors.Wrap(validator.ErrDeadletter, ".proto file must have exactly 1 top level message") + } + return dynamic.NewMessage(messageDescriptors[0]), nil +} + +// hasUnknownFields recursively checks for unknown fields of the given dynamic.Message. +func hasUnknownFields(message *dynamic.Message) bool { + if len(message.GetUnknownFields()) > 0 { + return true + } + + for _, v := range message.GetKnownFields() { + field := message.GetField(v) + if fieldMessage, ok := field.(*dynamic.Message); ok { + if hasUnknownFields(fieldMessage) { + return true + } + } + } + + return false +} diff --git a/validator/internal/validator/protobuf/protobuf_test.go b/validator/internal/validator/protobuf/protobuf_test.go new file mode 100644 index 0000000..eb08d50 --- /dev/null +++ b/validator/internal/validator/protobuf/protobuf_test.go @@ -0,0 +1,340 @@ +package protobuf + +import ( + "bufio" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator/protobuf/testdata/person" + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator/protobuf/testdata/testpb3" + + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" +) + +func TestValidate(t *testing.T) { + dir := "./schemas" + v, err := New(dir, 10) + if err != nil { + t.Fatal(err) + } + defer func() { + _ = os.RemoveAll(dir) + }() + + tt := []struct { + name string + data string + schemaId string + schemaVersion string + schemaFilename string + valid bool + }{ + { + "Proto2-valid-1", + "valid2-1.pb", + "1", + "1", + "schema2-1.proto", + true, + }, + { + "Proto2-valid-2", + "valid2-2.pb", + "1", + "1", + "schema2-1.proto", + true, + }, + { + "Proto2-valid-3", + "valid2-3.pb", + "1", + "1", + "schema2-1.proto", + true, + }, + { + "Proto2-valid-4", + "valid2-4.pb", + "1", + "1", + "schema2-1.proto", + true, + }, + { + "Proto2-invalid-1", + "invalid2-1.pb", + "1", + "1", + "schema2-1.proto", + false, + }, + { + "Proto2-invalid-2", + "invalid2-2.pb", + "1", + "1", + "schema2-1.proto", + false, + }, + { + "Proto2-invalid-3", + "valid3-8.pb", + "1", + "1", + "schema2-1.proto", + false, + }, + { + "Proto3-valid-1", + "valid3-1.pb", + "2", + "1", + "schema3-1.proto", + true, + }, + { + "Proto3-valid-2", + "valid3-2.pb", + "2", + "1", + "schema3-1.proto", + true, + }, + { + "Proto3-valid-3", + "valid3-3.pb", + "2", + "1", + "schema3-1.proto", + true, + }, + { + "Proto3-valid-4", + "valid3-4.pb", + "2", + "1", + "schema3-1.proto", + true, + }, + { + "Proto3-valid-5", + "valid3-5.pb", + "2", + "1", + "schema3-1.proto", + true, + }, + { + "Proto3-valid-6", + "valid3-6.pb", + "2", + "1", + "schema3-1.proto", + true, + }, + { + "Proto3-valid-7", + "valid3-7.pb", + "2", + "1", + "schema3-1.proto", + true, + }, + { + "Proto3-invalid-1", + "valid3-8.pb", + "2", + "1", + "schema3-1.proto", + false, + }, + { + "Proto3-valid-8", + "valid3-8.pb", + "3", + "1", + "schema3-2.proto", + true, + }, + { + "Proto3-invalid-2", + "valid3-8.pb", + "2", + "1", + "schema3-1.proto", + false, + }, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + for _, tc := range tt { 
+ tc := tc + t.Run(tc.name, func(t *testing.T) { + data, err := os.ReadFile(filepath.Join(testdataDir, tc.data)) + if err != nil { + t.Errorf("schema read error: %s", err) + } + + schema, err := os.ReadFile(filepath.Join(testdataDir, tc.schemaFilename)) + if err != nil { + t.Errorf("schema read error: %s", err) + } + + valid, err := v.Validate(data, schema, tc.schemaId, tc.schemaVersion) + if err != nil { + t.Error(err) + } + + if valid != tc.valid { + if valid { + t.Errorf("message valid, invalid expected") + } else { + t.Errorf("message invalid, valid expected") + } + } + }) + } +} + +//nolint:deadcode,unused +func generateData() error { + r1 := &testpb3.Record{ + Name: "test1", + Age: 1, + Collection: []string{"1", "2", "3"}, + Foo: &testpb3.Record_Foo{Bar: "bar1"}, + } + data1, err := proto.Marshal(r1) + if err != nil { + return err + } + if err = saveToFile(data1, "testdata/valid3-1.pb"); err != nil { + return err + } + + r2 := &testpb3.Record{ + Name: "test2", + Collection: []string{"1", "2"}, + Foo: &testpb3.Record_Foo{Bar: "bar2"}, + } + data2, err := proto.Marshal(r2) + if err != nil { + return err + } + if err = saveToFile(data2, "testdata/valid3-2.pb"); err != nil { + return err + } + + r3 := &testpb3.Record{ + Name: "test3", + Collection: []string{"1", "3"}, + Foo: &testpb3.Record_Foo{}, + } + data3, err := proto.Marshal(r3) + if err != nil { + return err + } + if err = saveToFile(data3, "testdata/valid3-3.pb"); err != nil { + return err + } + + r4 := &testpb3.Record{ + Name: "test4", + Collection: []string{"1", "3"}, + Foo: &testpb3.Record_Foo{}, + } + data4, err := proto.Marshal(r4) + if err != nil { + return err + } + if err = saveToFile(data4, "testdata/valid3-4.pb"); err != nil { + return err + } + + r5 := &testpb3.Record{ + Name: "test5", + Foo: &testpb3.Record_Foo{}, + } + data5, err := proto.Marshal(r5) + if err != nil { + return err + } + if err = saveToFile(data5, "testdata/valid3-5.pb"); err != nil { + return err + } + + r6 := &testpb3.Record{ + Collection: []string{"1", "3"}, + Foo: &testpb3.Record_Foo{}, + } + data6, err := proto.Marshal(r6) + if err != nil { + return err + } + if err = saveToFile(data6, "testdata/valid3-6.pb"); err != nil { + return err + } + + r7 := &testpb3.Record{ + Name: "test7", + Collection: []string{"1", "3"}, + Foo: &testpb3.Record_Foo{}, + } + data7, err := proto.Marshal(r7) + if err != nil { + return err + } + if err = saveToFile(data7, "testdata/valid3-7.pb"); err != nil { + return err + } + + p := &person.Person{ + Name: "person", + Id: 1, + Email: "person@real.human", + Phones: []*person.Person_PhoneNumber{ + { + Number: "123456", + Type: person.Person_HOME, + }, + { + Number: "123457", + Type: person.Person_WORK, + }, + }, + LastUpdated: timestamppb.Now(), + } + data8, err := proto.Marshal(p) + if err != nil { + return err + } + if err = saveToFile(data8, "testdata/valid3-8.pb"); err != nil { + return err + } + + return nil +} + +//nolint:deadcode,unused +func saveToFile(data []byte, filename string) error { + file, err := os.Create(filename) + if err != nil { + return err + } + + w := bufio.NewWriter(file) + if _, err = w.Write(data); err != nil { + return err + } + if err = w.Flush(); err != nil { + return err + } + + return file.Close() +} diff --git a/validator/internal/validator/protobuf/testdata/invalid2-1.pb b/validator/internal/validator/protobuf/testdata/invalid2-1.pb new file mode 100644 index 0000000..c7fd144 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/invalid2-1.pb @@ -0,0 +1,2 @@ + +st5 \ No 
newline at end of file diff --git a/validator/internal/validator/protobuf/testdata/invalid2-2.pb b/validator/internal/validator/protobuf/testdata/invalid2-2.pb new file mode 100644 index 0000000..e69de29 diff --git a/validator/internal/validator/protobuf/testdata/invalid3-1.pb b/validator/internal/validator/protobuf/testdata/invalid3-1.pb new file mode 100644 index 0000000..5466760 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/invalid3-1.pb @@ -0,0 +1,6 @@ + +personperson@real.human.com" + +123456" + +123457* Àà°Žàü×ê \ No newline at end of file diff --git a/validator/internal/validator/protobuf/testdata/person/schema3-2.pb.go b/validator/internal/validator/protobuf/testdata/person/schema3-2.pb.go new file mode 100644 index 0000000..aa93df4 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/person/schema3-2.pb.go @@ -0,0 +1,321 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.27.1 +// protoc v3.19.1 +// source: lib/validator/protobuf/testdata/schema3-2.proto + +package person + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Person_PhoneType int32 + +const ( + Person_MOBILE Person_PhoneType = 0 + Person_HOME Person_PhoneType = 1 + Person_WORK Person_PhoneType = 2 +) + +// Enum value maps for Person_PhoneType. +var ( + Person_PhoneType_name = map[int32]string{ + 0: "MOBILE", + 1: "HOME", + 2: "WORK", + } + Person_PhoneType_value = map[string]int32{ + "MOBILE": 0, + "HOME": 1, + "WORK": 2, + } +) + +func (x Person_PhoneType) Enum() *Person_PhoneType { + p := new(Person_PhoneType) + *p = x + return p +} + +func (x Person_PhoneType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Person_PhoneType) Descriptor() protoreflect.EnumDescriptor { + return file_lib_validator_protobuf_testdata_schema3_2_proto_enumTypes[0].Descriptor() +} + +func (Person_PhoneType) Type() protoreflect.EnumType { + return &file_lib_validator_protobuf_testdata_schema3_2_proto_enumTypes[0] +} + +func (x Person_PhoneType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Person_PhoneType.Descriptor instead. +func (Person_PhoneType) EnumDescriptor() ([]byte, []int) { + return file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescGZIP(), []int{0, 0} +} + +type Person struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Id int32 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` // Unique MessageID number for this person. 
+ Email string `protobuf:"bytes,3,opt,name=email,proto3" json:"email,omitempty"` + Phones []*Person_PhoneNumber `protobuf:"bytes,4,rep,name=phones,proto3" json:"phones,omitempty"` + LastUpdated *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=last_updated,json=lastUpdated,proto3" json:"last_updated,omitempty"` +} + +func (x *Person) Reset() { + *x = Person{} + if protoimpl.UnsafeEnabled { + mi := &file_lib_validator_protobuf_testdata_schema3_2_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Person) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Person) ProtoMessage() {} + +func (x *Person) ProtoReflect() protoreflect.Message { + mi := &file_lib_validator_protobuf_testdata_schema3_2_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Person.ProtoReflect.Descriptor instead. +func (*Person) Descriptor() ([]byte, []int) { + return file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescGZIP(), []int{0} +} + +func (x *Person) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Person) GetId() int32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Person) GetEmail() string { + if x != nil { + return x.Email + } + return "" +} + +func (x *Person) GetPhones() []*Person_PhoneNumber { + if x != nil { + return x.Phones + } + return nil +} + +func (x *Person) GetLastUpdated() *timestamppb.Timestamp { + if x != nil { + return x.LastUpdated + } + return nil +} + +type Person_PhoneNumber struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Number string `protobuf:"bytes,1,opt,name=number,proto3" json:"number,omitempty"` + Type Person_PhoneType `protobuf:"varint,2,opt,name=type,proto3,enum=testdata3.Person_PhoneType" json:"type,omitempty"` +} + +func (x *Person_PhoneNumber) Reset() { + *x = Person_PhoneNumber{} + if protoimpl.UnsafeEnabled { + mi := &file_lib_validator_protobuf_testdata_schema3_2_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Person_PhoneNumber) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Person_PhoneNumber) ProtoMessage() {} + +func (x *Person_PhoneNumber) ProtoReflect() protoreflect.Message { + mi := &file_lib_validator_protobuf_testdata_schema3_2_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Person_PhoneNumber.ProtoReflect.Descriptor instead. 
+func (*Person_PhoneNumber) Descriptor() ([]byte, []int) { + return file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *Person_PhoneNumber) GetNumber() string { + if x != nil { + return x.Number + } + return "" +} + +func (x *Person_PhoneNumber) GetType() Person_PhoneType { + if x != nil { + return x.Type + } + return Person_MOBILE +} + +var File_lib_validator_protobuf_testdata_schema3_2_proto protoreflect.FileDescriptor + +var file_lib_validator_protobuf_testdata_schema3_2_proto_rawDesc = []byte{ + 0x0a, 0x2f, 0x6c, 0x69, 0x62, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, + 0x61, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x33, 0x2d, 0x32, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x12, 0x09, 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, 0x61, 0x33, 0x1a, 0x1f, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xbd, 0x02, + 0x0a, 0x06, 0x50, 0x65, 0x72, 0x73, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, + 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x6d, 0x61, + 0x69, 0x6c, 0x12, 0x35, 0x0a, 0x06, 0x70, 0x68, 0x6f, 0x6e, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, 0x61, 0x33, 0x2e, 0x50, + 0x65, 0x72, 0x73, 0x6f, 0x6e, 0x2e, 0x50, 0x68, 0x6f, 0x6e, 0x65, 0x4e, 0x75, 0x6d, 0x62, 0x65, + 0x72, 0x52, 0x06, 0x70, 0x68, 0x6f, 0x6e, 0x65, 0x73, 0x12, 0x3d, 0x0a, 0x0c, 0x6c, 0x61, 0x73, + 0x74, 0x5f, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0b, 0x6c, 0x61, 0x73, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x1a, 0x56, 0x0a, 0x0b, 0x50, 0x68, 0x6f, 0x6e, + 0x65, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, + 0x2f, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, + 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, 0x61, 0x33, 0x2e, 0x50, 0x65, 0x72, 0x73, 0x6f, 0x6e, + 0x2e, 0x50, 0x68, 0x6f, 0x6e, 0x65, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x22, 0x2b, 0x0a, 0x09, 0x50, 0x68, 0x6f, 0x6e, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0a, 0x0a, + 0x06, 0x4d, 0x4f, 0x42, 0x49, 0x4c, 0x45, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x48, 0x4f, 0x4d, + 0x45, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x02, 0x42, 0x0a, 0x5a, + 0x08, 0x2e, 0x2f, 0x70, 0x65, 0x72, 0x73, 0x6f, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, +} + +var ( + file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescOnce sync.Once + file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescData = file_lib_validator_protobuf_testdata_schema3_2_proto_rawDesc +) + +func file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescGZIP() []byte { + 
file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescOnce.Do(func() { + file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescData = protoimpl.X.CompressGZIP(file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescData) + }) + return file_lib_validator_protobuf_testdata_schema3_2_proto_rawDescData +} + +var file_lib_validator_protobuf_testdata_schema3_2_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_lib_validator_protobuf_testdata_schema3_2_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_lib_validator_protobuf_testdata_schema3_2_proto_goTypes = []interface{}{ + (Person_PhoneType)(0), // 0: testdata3.Person.PhoneType + (*Person)(nil), // 1: testdata3.Person + (*Person_PhoneNumber)(nil), // 2: testdata3.Person.PhoneNumber + (*timestamppb.Timestamp)(nil), // 3: google.protobuf.Timestamp +} +var file_lib_validator_protobuf_testdata_schema3_2_proto_depIdxs = []int32{ + 2, // 0: testdata3.Person.phones:type_name -> testdata3.Person.PhoneNumber + 3, // 1: testdata3.Person.last_updated:type_name -> google.protobuf.Timestamp + 0, // 2: testdata3.Person.PhoneNumber.type:type_name -> testdata3.Person.PhoneType + 3, // [3:3] is the sub-list for method output_type + 3, // [3:3] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_lib_validator_protobuf_testdata_schema3_2_proto_init() } +func file_lib_validator_protobuf_testdata_schema3_2_proto_init() { + if File_lib_validator_protobuf_testdata_schema3_2_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_lib_validator_protobuf_testdata_schema3_2_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Person); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_lib_validator_protobuf_testdata_schema3_2_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Person_PhoneNumber); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_lib_validator_protobuf_testdata_schema3_2_proto_rawDesc, + NumEnums: 1, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_lib_validator_protobuf_testdata_schema3_2_proto_goTypes, + DependencyIndexes: file_lib_validator_protobuf_testdata_schema3_2_proto_depIdxs, + EnumInfos: file_lib_validator_protobuf_testdata_schema3_2_proto_enumTypes, + MessageInfos: file_lib_validator_protobuf_testdata_schema3_2_proto_msgTypes, + }.Build() + File_lib_validator_protobuf_testdata_schema3_2_proto = out.File + file_lib_validator_protobuf_testdata_schema3_2_proto_rawDesc = nil + file_lib_validator_protobuf_testdata_schema3_2_proto_goTypes = nil + file_lib_validator_protobuf_testdata_schema3_2_proto_depIdxs = nil +} diff --git a/validator/internal/validator/protobuf/testdata/schema2-1.proto b/validator/internal/validator/protobuf/testdata/schema2-1.proto new file mode 100644 index 0000000..f5890d2 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/schema2-1.proto @@ -0,0 +1,18 @@ +syntax = "proto2"; +package testdata2; + +option go_package = "./testpb2"; + +message Record { + required string name = 1; + 
optional int32 age = 2; + repeated string collection = 3; + + message Foo { + optional string bar = 1; + } + + optional Foo foo = 4; +} + + diff --git a/validator/internal/validator/protobuf/testdata/schema3-1.proto b/validator/internal/validator/protobuf/testdata/schema3-1.proto new file mode 100644 index 0000000..e5e7c91 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/schema3-1.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; +package testdata3; + +option go_package = "./testpb3"; + +message Record { + string name = 1; + int32 age = 2; + repeated string collection = 3; + + message Foo { + string bar = 1; + } + + Foo foo = 4; +} diff --git a/validator/internal/validator/protobuf/testdata/schema3-2.proto b/validator/internal/validator/protobuf/testdata/schema3-2.proto new file mode 100644 index 0000000..57ff06d --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/schema3-2.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; +package testdata3; + +option go_package = "./person"; + +import "google/protobuf/timestamp.proto"; + +message Person { + string name = 1; + int32 id = 2; // Unique ID number for this person. + string email = 3; + + enum PhoneType { + MOBILE = 0; + HOME = 1; + WORK = 2; + } + + message PhoneNumber { + string number = 1; + PhoneType type = 2; + } + + repeated PhoneNumber phones = 4; + + google.protobuf.Timestamp last_updated = 5; +} diff --git a/validator/internal/validator/protobuf/testdata/testpb2/schema2-1.pb.go b/validator/internal/validator/protobuf/testdata/testpb2/schema2-1.pb.go new file mode 100644 index 0000000..bce342a --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/testpb2/schema2-1.pb.go @@ -0,0 +1,235 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.27.1 +// protoc v3.19.1 +// source: lib/validator/protobuf/testdata/schema2-1.proto + +package testpb2 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Record struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + Age *int32 `protobuf:"varint,2,opt,name=age" json:"age,omitempty"` + Collection []string `protobuf:"bytes,3,rep,name=collection" json:"collection,omitempty"` + Foo *Record_Foo `protobuf:"bytes,4,opt,name=foo" json:"foo,omitempty"` +} + +func (x *Record) Reset() { + *x = Record{} + if protoimpl.UnsafeEnabled { + mi := &file_lib_validator_protobuf_testdata_schema2_1_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Record) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Record) ProtoMessage() {} + +func (x *Record) ProtoReflect() protoreflect.Message { + mi := &file_lib_validator_protobuf_testdata_schema2_1_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Record.ProtoReflect.Descriptor instead. 
+func (*Record) Descriptor() ([]byte, []int) { + return file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescGZIP(), []int{0} +} + +func (x *Record) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *Record) GetAge() int32 { + if x != nil && x.Age != nil { + return *x.Age + } + return 0 +} + +func (x *Record) GetCollection() []string { + if x != nil { + return x.Collection + } + return nil +} + +func (x *Record) GetFoo() *Record_Foo { + if x != nil { + return x.Foo + } + return nil +} + +type Record_Foo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Bar *string `protobuf:"bytes,1,opt,name=bar" json:"bar,omitempty"` +} + +func (x *Record_Foo) Reset() { + *x = Record_Foo{} + if protoimpl.UnsafeEnabled { + mi := &file_lib_validator_protobuf_testdata_schema2_1_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Record_Foo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Record_Foo) ProtoMessage() {} + +func (x *Record_Foo) ProtoReflect() protoreflect.Message { + mi := &file_lib_validator_protobuf_testdata_schema2_1_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Record_Foo.ProtoReflect.Descriptor instead. +func (*Record_Foo) Descriptor() ([]byte, []int) { + return file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *Record_Foo) GetBar() string { + if x != nil && x.Bar != nil { + return *x.Bar + } + return "" +} + +var File_lib_validator_protobuf_testdata_schema2_1_proto protoreflect.FileDescriptor + +var file_lib_validator_protobuf_testdata_schema2_1_proto_rawDesc = []byte{ + 0x0a, 0x2f, 0x6c, 0x69, 0x62, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, + 0x61, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x32, 0x2d, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x12, 0x09, 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, 0x61, 0x32, 0x22, 0x90, 0x01, 0x0a, + 0x06, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x02, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x61, + 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x0a, + 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, + 0x03, 0x66, 0x6f, 0x6f, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x65, 0x73, + 0x74, 0x64, 0x61, 0x74, 0x61, 0x32, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x2e, 0x46, 0x6f, + 0x6f, 0x52, 0x03, 0x66, 0x6f, 0x6f, 0x1a, 0x17, 0x0a, 0x03, 0x46, 0x6f, 0x6f, 0x12, 0x10, 0x0a, + 0x03, 0x62, 0x61, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x62, 0x61, 0x72, 0x42, + 0x0b, 0x5a, 0x09, 0x2e, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x70, 0x62, 0x32, +} + +var ( + file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescOnce sync.Once + file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescData = file_lib_validator_protobuf_testdata_schema2_1_proto_rawDesc +) + +func 
file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescGZIP() []byte { + file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescOnce.Do(func() { + file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescData = protoimpl.X.CompressGZIP(file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescData) + }) + return file_lib_validator_protobuf_testdata_schema2_1_proto_rawDescData +} + +var file_lib_validator_protobuf_testdata_schema2_1_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_lib_validator_protobuf_testdata_schema2_1_proto_goTypes = []interface{}{ + (*Record)(nil), // 0: testdata2.Record + (*Record_Foo)(nil), // 1: testdata2.Record.Foo +} +var file_lib_validator_protobuf_testdata_schema2_1_proto_depIdxs = []int32{ + 1, // 0: testdata2.Record.foo:type_name -> testdata2.Record.Foo + 1, // [1:1] is the sub-list for method output_type + 1, // [1:1] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_lib_validator_protobuf_testdata_schema2_1_proto_init() } +func file_lib_validator_protobuf_testdata_schema2_1_proto_init() { + if File_lib_validator_protobuf_testdata_schema2_1_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_lib_validator_protobuf_testdata_schema2_1_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Record); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_lib_validator_protobuf_testdata_schema2_1_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Record_Foo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_lib_validator_protobuf_testdata_schema2_1_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_lib_validator_protobuf_testdata_schema2_1_proto_goTypes, + DependencyIndexes: file_lib_validator_protobuf_testdata_schema2_1_proto_depIdxs, + MessageInfos: file_lib_validator_protobuf_testdata_schema2_1_proto_msgTypes, + }.Build() + File_lib_validator_protobuf_testdata_schema2_1_proto = out.File + file_lib_validator_protobuf_testdata_schema2_1_proto_rawDesc = nil + file_lib_validator_protobuf_testdata_schema2_1_proto_goTypes = nil + file_lib_validator_protobuf_testdata_schema2_1_proto_depIdxs = nil +} diff --git a/validator/internal/validator/protobuf/testdata/testpb3/schema3-1.pb.go b/validator/internal/validator/protobuf/testdata/testpb3/schema3-1.pb.go new file mode 100644 index 0000000..d44646b --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/testpb3/schema3-1.pb.go @@ -0,0 +1,236 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.27.1 +// protoc v3.19.1 +// source: lib/validator/protobuf/testdata/schema3-1.proto + +package testpb3 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Record struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Age int32 `protobuf:"varint,2,opt,name=age,proto3" json:"age,omitempty"` + Collection []string `protobuf:"bytes,3,rep,name=collection,proto3" json:"collection,omitempty"` + Foo *Record_Foo `protobuf:"bytes,4,opt,name=foo,proto3" json:"foo,omitempty"` +} + +func (x *Record) Reset() { + *x = Record{} + if protoimpl.UnsafeEnabled { + mi := &file_lib_validator_protobuf_testdata_schema3_1_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Record) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Record) ProtoMessage() {} + +func (x *Record) ProtoReflect() protoreflect.Message { + mi := &file_lib_validator_protobuf_testdata_schema3_1_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Record.ProtoReflect.Descriptor instead. +func (*Record) Descriptor() ([]byte, []int) { + return file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescGZIP(), []int{0} +} + +func (x *Record) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Record) GetAge() int32 { + if x != nil { + return x.Age + } + return 0 +} + +func (x *Record) GetCollection() []string { + if x != nil { + return x.Collection + } + return nil +} + +func (x *Record) GetFoo() *Record_Foo { + if x != nil { + return x.Foo + } + return nil +} + +type Record_Foo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Bar string `protobuf:"bytes,1,opt,name=bar,proto3" json:"bar,omitempty"` +} + +func (x *Record_Foo) Reset() { + *x = Record_Foo{} + if protoimpl.UnsafeEnabled { + mi := &file_lib_validator_protobuf_testdata_schema3_1_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Record_Foo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Record_Foo) ProtoMessage() {} + +func (x *Record_Foo) ProtoReflect() protoreflect.Message { + mi := &file_lib_validator_protobuf_testdata_schema3_1_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Record_Foo.ProtoReflect.Descriptor instead. 
+func (*Record_Foo) Descriptor() ([]byte, []int) { + return file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *Record_Foo) GetBar() string { + if x != nil { + return x.Bar + } + return "" +} + +var File_lib_validator_protobuf_testdata_schema3_1_proto protoreflect.FileDescriptor + +var file_lib_validator_protobuf_testdata_schema3_1_proto_rawDesc = []byte{ + 0x0a, 0x2f, 0x6c, 0x69, 0x62, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x6f, 0x72, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, + 0x61, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x33, 0x2d, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x12, 0x09, 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, 0x61, 0x33, 0x22, 0x90, 0x01, 0x0a, + 0x06, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x61, + 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x0a, + 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, + 0x03, 0x66, 0x6f, 0x6f, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x65, 0x73, + 0x74, 0x64, 0x61, 0x74, 0x61, 0x33, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x2e, 0x46, 0x6f, + 0x6f, 0x52, 0x03, 0x66, 0x6f, 0x6f, 0x1a, 0x17, 0x0a, 0x03, 0x46, 0x6f, 0x6f, 0x12, 0x10, 0x0a, + 0x03, 0x62, 0x61, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x62, 0x61, 0x72, 0x42, + 0x0b, 0x5a, 0x09, 0x2e, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x70, 0x62, 0x33, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescOnce sync.Once + file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescData = file_lib_validator_protobuf_testdata_schema3_1_proto_rawDesc +) + +func file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescGZIP() []byte { + file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescOnce.Do(func() { + file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescData = protoimpl.X.CompressGZIP(file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescData) + }) + return file_lib_validator_protobuf_testdata_schema3_1_proto_rawDescData +} + +var file_lib_validator_protobuf_testdata_schema3_1_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_lib_validator_protobuf_testdata_schema3_1_proto_goTypes = []interface{}{ + (*Record)(nil), // 0: testdata3.Record + (*Record_Foo)(nil), // 1: testdata3.Record.Foo +} +var file_lib_validator_protobuf_testdata_schema3_1_proto_depIdxs = []int32{ + 1, // 0: testdata3.Record.foo:type_name -> testdata3.Record.Foo + 1, // [1:1] is the sub-list for method output_type + 1, // [1:1] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_lib_validator_protobuf_testdata_schema3_1_proto_init() } +func file_lib_validator_protobuf_testdata_schema3_1_proto_init() { + if File_lib_validator_protobuf_testdata_schema3_1_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_lib_validator_protobuf_testdata_schema3_1_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Record); i { + case 0: + return &v.state + 
case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_lib_validator_protobuf_testdata_schema3_1_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Record_Foo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_lib_validator_protobuf_testdata_schema3_1_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_lib_validator_protobuf_testdata_schema3_1_proto_goTypes, + DependencyIndexes: file_lib_validator_protobuf_testdata_schema3_1_proto_depIdxs, + MessageInfos: file_lib_validator_protobuf_testdata_schema3_1_proto_msgTypes, + }.Build() + File_lib_validator_protobuf_testdata_schema3_1_proto = out.File + file_lib_validator_protobuf_testdata_schema3_1_proto_rawDesc = nil + file_lib_validator_protobuf_testdata_schema3_1_proto_goTypes = nil + file_lib_validator_protobuf_testdata_schema3_1_proto_depIdxs = nil +} diff --git a/validator/internal/validator/protobuf/testdata/valid2-1.pb b/validator/internal/validator/protobuf/testdata/valid2-1.pb new file mode 100644 index 0000000..a7372c4 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/valid2-1.pb @@ -0,0 +1,3 @@ + +test1123" +bar1 \ No newline at end of file diff --git a/validator/internal/validator/protobuf/testdata/valid2-2.pb b/validator/internal/validator/protobuf/testdata/valid2-2.pb new file mode 100644 index 0000000..6054f72 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/valid2-2.pb @@ -0,0 +1,3 @@ + +test212" +bar2 \ No newline at end of file diff --git a/validator/internal/validator/protobuf/testdata/valid2-3.pb b/validator/internal/validator/protobuf/testdata/valid2-3.pb new file mode 100644 index 0000000..8cbf142 Binary files /dev/null and b/validator/internal/validator/protobuf/testdata/valid2-3.pb differ diff --git a/validator/internal/validator/protobuf/testdata/valid2-4.pb b/validator/internal/validator/protobuf/testdata/valid2-4.pb new file mode 100644 index 0000000..f4a180d --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/valid2-4.pb @@ -0,0 +1,2 @@ + +test413 \ No newline at end of file diff --git a/validator/internal/validator/protobuf/testdata/valid3-1.pb b/validator/internal/validator/protobuf/testdata/valid3-1.pb new file mode 100644 index 0000000..a7372c4 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/valid3-1.pb @@ -0,0 +1,3 @@ + +test1123" +bar1 \ No newline at end of file diff --git a/validator/internal/validator/protobuf/testdata/valid3-2.pb b/validator/internal/validator/protobuf/testdata/valid3-2.pb new file mode 100644 index 0000000..6054f72 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/valid3-2.pb @@ -0,0 +1,3 @@ + +test212" +bar2 \ No newline at end of file diff --git a/validator/internal/validator/protobuf/testdata/valid3-3.pb b/validator/internal/validator/protobuf/testdata/valid3-3.pb new file mode 100644 index 0000000..8cbf142 Binary files /dev/null and b/validator/internal/validator/protobuf/testdata/valid3-3.pb differ diff --git a/validator/internal/validator/protobuf/testdata/valid3-4.pb b/validator/internal/validator/protobuf/testdata/valid3-4.pb new file mode 100644 index 0000000..d64acfc Binary files /dev/null and 
b/validator/internal/validator/protobuf/testdata/valid3-4.pb differ diff --git a/validator/internal/validator/protobuf/testdata/valid3-5.pb b/validator/internal/validator/protobuf/testdata/valid3-5.pb new file mode 100644 index 0000000..a3e0ccf Binary files /dev/null and b/validator/internal/validator/protobuf/testdata/valid3-5.pb differ diff --git a/validator/internal/validator/protobuf/testdata/valid3-6.pb b/validator/internal/validator/protobuf/testdata/valid3-6.pb new file mode 100644 index 0000000..f808bb2 Binary files /dev/null and b/validator/internal/validator/protobuf/testdata/valid3-6.pb differ diff --git a/validator/internal/validator/protobuf/testdata/valid3-7.pb b/validator/internal/validator/protobuf/testdata/valid3-7.pb new file mode 100644 index 0000000..2550f26 Binary files /dev/null and b/validator/internal/validator/protobuf/testdata/valid3-7.pb differ diff --git a/validator/internal/validator/protobuf/testdata/valid3-8.pb b/validator/internal/validator/protobuf/testdata/valid3-8.pb new file mode 100644 index 0000000..7d288a1 --- /dev/null +++ b/validator/internal/validator/protobuf/testdata/valid3-8.pb @@ -0,0 +1,6 @@ + +personperson@real.human" + +123456" + +123457* ŽìðŽàˆ¹` \ No newline at end of file diff --git a/validator/internal/validator/validator.go b/validator/internal/validator/validator.go new file mode 100644 index 0000000..5f169b2 --- /dev/null +++ b/validator/internal/validator/validator.go @@ -0,0 +1,37 @@ +// Package validator exposes common functionality of all schema validators. +package validator + +import "github.com/pkg/errors" + +// ErrDeadletter is a special error type to help distinguish between invalid and broken messages. +var ErrDeadletter = errors.New("deadletter") + +// ErrBrokenMessage is a special error type to help distinguish broken messages. +var ErrBrokenMessage = errors.New("Message is not in valid format") + +// ErrWrongCompile is a special error type to help distinguish messages that encountered an error while compiling. +var ErrWrongCompile = errors.New("There is an error while compiling.") + +// ErrMissingSchema is a special error type to help distinguish messages that are missing a schema. +var ErrMissingSchema = errors.New("Message is missing a schema") + +// ErrFailedValidation is a special error type to help distinguish messages that have failed validation. +var ErrFailedValidation = errors.New("An error occurred while validating the message.") + +// Validator is the interface used to model message validators. +type Validator interface { + // Validate takes a message and a schema (along with the schema id and version, in case they are needed for optimization purposes) + // and returns a bool value with the validation result. + // Returns an error in case the implementation encounters an unrecoverable issue. + // If the unrecoverable issue is a broken message or schema (for example, the given message isn't even in the + // valid format), ErrDeadletter MUST be returned. + Validate(message, schema []byte, id string, version string) (bool, error) +} + +// Func is a convenience type which is the functional equivalent of Validator. +type Func func(message, schema []byte, id string, version string) (bool, error) + +// Validate implements Validator by forwarding the call to the underlying Func.
+func (f Func) Validate(message, schema []byte, id string, version string) (bool, error) { + return f(message, schema, id, version) +} diff --git a/validator/internal/validator/xml/testdata/data-1.xml b/validator/internal/validator/xml/testdata/data-1.xml new file mode 100644 index 0000000..7e12b61 --- /dev/null +++ b/validator/internal/validator/xml/testdata/data-1.xml @@ -0,0 +1,24 @@ + + + + John Smith + + Ola Nordmann +
Langgt 23
+ 4000 Stavanger + Norway +
+ + Empire Burlesque + Special Edition + 1 + 10.90 + + + Hide your heart + 1 + 9.90 + +
diff --git a/validator/internal/validator/xml/testdata/data-2.xml b/validator/internal/validator/xml/testdata/data-2.xml new file mode 100644 index 0000000..3c214b0 --- /dev/null +++ b/validator/internal/validator/xml/testdata/data-2.xml @@ -0,0 +1,9 @@ + + + All + my boss + Me + my boss + A new message xml format + lalalalalala + diff --git a/validator/internal/validator/xml/testdata/data-3.xml b/validator/internal/validator/xml/testdata/data-3.xml new file mode 100644 index 0000000..4a76903 --- /dev/null +++ b/validator/internal/validator/xml/testdata/data-3.xml @@ -0,0 +1,7 @@ + + + My Readers + Chaitanya + A Message to my readers + Welcome to beginnersbook.com + diff --git a/validator/internal/validator/xml/testdata/deadletter-1-data.xml b/validator/internal/validator/xml/testdata/deadletter-1-data.xml new file mode 100644 index 0000000..c91f644 --- /dev/null +++ b/validator/internal/validator/xml/testdata/deadletter-1-data.xml @@ -0,0 +1,7 @@ + +beginnersbook> + My Readers + Chaitanya + A Message to my readers + Welcome to beginnersbook.com + diff --git a/validator/internal/validator/xml/testdata/deadletter-1-schema.xsd b/validator/internal/validator/xml/testdata/deadletter-1-schema.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/deadletter-1-schema.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/deadletter-2-data.xml b/validator/internal/validator/xml/testdata/deadletter-2-data.xml new file mode 100644 index 0000000..4a76903 --- /dev/null +++ b/validator/internal/validator/xml/testdata/deadletter-2-data.xml @@ -0,0 +1,7 @@ + + + My Readers + Chaitanya + A Message to my readers + Welcome to beginnersbook.com + diff --git a/validator/internal/validator/xml/testdata/deadletter-2-schema.xsd b/validator/internal/validator/xml/testdata/deadletter-2-schema.xsd new file mode 100644 index 0000000..3ee3ef8 --- /dev/null +++ b/validator/internal/validator/xml/testdata/deadletter-2-schema.xsd @@ -0,0 +1,15 @@ + + elementFormDefault="qualified"> + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/deadletter-3-data.xml b/validator/internal/validator/xml/testdata/deadletter-3-data.xml new file mode 100644 index 0000000..1ec13da --- /dev/null +++ b/validator/internal/validator/xml/testdata/deadletter-3-data.xml @@ -0,0 +1,8 @@ + + + My Readers + Chaitanya + A Message to my readers + Welcome to beginnersbook.com + +<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< diff --git a/validator/internal/validator/xml/testdata/deadletter-3-schema.xsd b/validator/internal/validator/xml/testdata/deadletter-3-schema.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/deadletter-3-schema.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/invalid-1-data.xml b/validator/internal/validator/xml/testdata/invalid-1-data.xml new file mode 100644 index 0000000..3c214b0 --- /dev/null +++ b/validator/internal/validator/xml/testdata/invalid-1-data.xml @@ -0,0 +1,9 @@ + + + All + my boss + Me + my boss + A new message xml format + lalalalalala + diff --git a/validator/internal/validator/xml/testdata/invalid-1-schema.xsd b/validator/internal/validator/xml/testdata/invalid-1-schema.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/invalid-1-schema.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git 
a/validator/internal/validator/xml/testdata/invalid-2-data.xml b/validator/internal/validator/xml/testdata/invalid-2-data.xml new file mode 100644 index 0000000..6f1d49a --- /dev/null +++ b/validator/internal/validator/xml/testdata/invalid-2-data.xml @@ -0,0 +1,5 @@ + + + Myb invalid msg + l53222a + diff --git a/validator/internal/validator/xml/testdata/invalid-2-schema.xsd b/validator/internal/validator/xml/testdata/invalid-2-schema.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/invalid-2-schema.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/schema-1.xsd b/validator/internal/validator/xml/testdata/schema-1.xsd new file mode 100644 index 0000000..22a30db --- /dev/null +++ b/validator/internal/validator/xml/testdata/schema-1.xsd @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/schema-2.xsd b/validator/internal/validator/xml/testdata/schema-2.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/schema-2.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/schema-3.xsd b/validator/internal/validator/xml/testdata/schema-3.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/schema-3.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/valid-1-data.xml b/validator/internal/validator/xml/testdata/valid-1-data.xml new file mode 100644 index 0000000..4a76903 --- /dev/null +++ b/validator/internal/validator/xml/testdata/valid-1-data.xml @@ -0,0 +1,7 @@ + + + My Readers + Chaitanya + A Message to my readers + Welcome to beginnersbook.com + diff --git a/validator/internal/validator/xml/testdata/valid-1-schema.xsd b/validator/internal/validator/xml/testdata/valid-1-schema.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/valid-1-schema.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/valid-2-data.xml b/validator/internal/validator/xml/testdata/valid-2-data.xml new file mode 100644 index 0000000..984f1d9 --- /dev/null +++ b/validator/internal/validator/xml/testdata/valid-2-data.xml @@ -0,0 +1,7 @@ + + + My team + myselfmeandI + Test subject idk + I luv you myteam + diff --git a/validator/internal/validator/xml/testdata/valid-2-schema.xsd b/validator/internal/validator/xml/testdata/valid-2-schema.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/valid-2-schema.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/testdata/valid-3-data.xml b/validator/internal/validator/xml/testdata/valid-3-data.xml new file mode 100644 index 0000000..5aad282 --- /dev/null +++ b/validator/internal/validator/xml/testdata/valid-3-data.xml @@ -0,0 +1,7 @@ + + + All + Me + A new message xml format + lalalalalala + diff --git a/validator/internal/validator/xml/testdata/valid-3-schema.xsd b/validator/internal/validator/xml/testdata/valid-3-schema.xsd new file mode 100644 index 0000000..629ae9f --- /dev/null +++ b/validator/internal/validator/xml/testdata/valid-3-schema.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + diff --git a/validator/internal/validator/xml/xml.go 
b/validator/internal/validator/xml/xml.go new file mode 100644 index 0000000..ccd03a2 --- /dev/null +++ b/validator/internal/validator/xml/xml.go @@ -0,0 +1,61 @@ +package xml + +import ( + "bytes" + "context" + "encoding/xml" + "io" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + "github.com/dataphos/lib-httputil/pkg/httputil" + "github.com/dataphos/lib-retry/pkg/retry" + + "github.com/pkg/errors" +) + +type Validator struct { + Url string + TimeoutBase time.Duration +} + +const DefaultTimeoutBase = 3 * time.Second + +// New returns a new validator which validates XML messages against a schema. +// +// Performs a health check to see if the validator is available, retrying periodically until the context is cancelled +// or the health check succeeds. +func New(ctx context.Context, url string, timeoutBase time.Duration) (validator.Validator, error) { + if err := retry.Do(ctx, retry.WithJitter(retry.Constant(2*time.Second)), func(ctx context.Context) error { + return httputil.HealthCheck(ctx, url+"/health") + }); err != nil { + return nil, errors.Wrapf(err, "attempting to reach xml validator at %s failed", url) + } + + return &Validator{ + Url: url, + TimeoutBase: timeoutBase, + }, nil +} + +func (v *Validator) Validate(message, schema []byte, _, _ string) (bool, error) { + if !IsXML(message) || !IsXML(schema) { + return false, validator.ErrDeadletter + } + + ctx, cancel := context.WithTimeout(context.Background(), validator.EstimateHTTPTimeout(len(message), v.TimeoutBase)) + defer cancel() + + return validator.ValidateOverHTTP(ctx, message, schema, v.Url) +} + +// IsXML checks if given data is valid XML. +func IsXML(data []byte) bool { + decoder := xml.NewDecoder(bytes.NewReader(data)) + for { + _, err := decoder.Token() + if err != nil { + return err == io.EOF + } + } +} diff --git a/validator/internal/validator/xml/xml_test.go b/validator/internal/validator/xml/xml_test.go new file mode 100644 index 0000000..49838f8 --- /dev/null +++ b/validator/internal/validator/xml/xml_test.go @@ -0,0 +1,128 @@ +package xml + +import ( + "context" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "runtime" + "testing" + "time" + + "github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator" + + "github.com/pkg/errors" +) + +func TestNew(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + healthChecked := false + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/health" { + healthChecked = true + w.WriteHeader(http.StatusOK) + } else { + t.Fatal("wrong endpoint hit") + } + })) + + _, err := New(context.Background(), srv.URL, DefaultTimeoutBase) + if err != nil { + t.Fatal(err) + } + if !healthChecked { + t.Error("health check not called") + } +} + +func TestNewTimeout(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + t.Skip() + } + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == http.MethodGet && r.URL.Path == "/health" { + time.Sleep(2 * time.Minute) + w.WriteHeader(http.StatusOK) + } else { + t.Fatal("wrong endpoint hit") + } + })) + + ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond) + defer cancel() + + _, err := New(ctx, srv.URL, DefaultTimeoutBase) + if !errors.Is(err, context.DeadlineExceeded) { + t.Fatal("expected timeout") + } +} + +func TestXMLValidator_Validate(t *testing.T) { + if os.Getenv("MANUAL_TEST") == "" { + 
t.Skip() + } + + xmlV, err := New(context.Background(), "http://localhost:8089", DefaultTimeoutBase) + if err != nil { + t.Fatal("validator constructor error", err) + } + + tt := []struct { + name string + dataFilename string + schemaFilename string + valid bool + deadletter bool + }{ + {"valid-1", "valid-1-data.xml", "valid-1-schema.xsd", true, false}, + {"valid-2", "valid-2-data.xml", "valid-2-schema.xsd", true, false}, + {"valid-3", "valid-3-data.xml", "valid-3-schema.xsd", true, false}, + {"invalid-1", "invalid-1-data.xml", "invalid-1-schema.xsd", false, false}, + {"invalid-2", "invalid-2-data.xml", "invalid-2-schema.xsd", false, false}, + {"deadletter-1", "deadletter-1-data.xml", "deadletter-1-schema.xsd", false, true}, + {"deadletter-2", "deadletter-2-data.xml", "deadletter-2-schema.xsd", false, true}, + {"deadletter-3", "deadletter-3-data.xml", "deadletter-3-schema.xsd", false, true}, + {"data-1", "data-1.xml", "schema-1.xsd", true, false}, + {"data-2", "data-2.xml", "schema-2.xsd", false, false}, + {"data-3", "data-3.xml", "schema-3.xsd", true, false}, + } + + _, b, _, _ := runtime.Caller(0) + basepath := filepath.Dir(b) + testdataDir := filepath.Join(basepath, "testdata") + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + data, err := os.ReadFile(filepath.Join(testdataDir, tc.dataFilename)) + if err != nil { + t.Errorf("data read error: %s", err) + } + schema, err := os.ReadFile(filepath.Join(testdataDir, tc.schemaFilename)) + if err != nil { + t.Errorf("schema read error: %s", err) + } + + valid, err := xmlV.Validate(data, schema, "", "") + if tc.deadletter { + if !errors.Is(err, validator.ErrDeadletter) { + t.Error("deadletter expected") + } + } else { + if err != nil { + t.Errorf("validator error: %s", err) + } + if valid != tc.valid { + if valid { + t.Errorf("message valid, invalid expected") + } else { + t.Errorf("message invalid, valid expected") + } + } + } + }) + } +} diff --git a/validator/licenses/LICENSE-3RD-PARTY.md b/validator/licenses/LICENSE-3RD-PARTY.md new file mode 100644 index 0000000..788299e --- /dev/null +++ b/validator/licenses/LICENSE-3RD-PARTY.md @@ -0,0 +1,99 @@ +| Module | License | +|:----------------------------------------------------------------------------------------|:-------------| +| cloud.google.com/go/compute/metadata v0.2.3 (indirect) | Apache-2.0 | +| cloud.google.com/go/iam v1.1.2 (indirect) | Apache-2.0 | +| cloud.google.com/go/internal | Apache-2.0 | +| cloud.google.com/go/pubsub v1.33.0 (indirect) | Apache-2.0 | +| github.com/99designs/keyring v1.2.2 (indirect) | MIT | +| github.com/AthenZ/athenz v1.11.29 (indirect) | Apache-2.0 | +| github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 (indirect) | MIT | +| github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 (indirect) | MIT | +| github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.3.0 (indirect) | MIT | +| github.com/Azure/go-amqp v1.0.0 (indirect) | MIT | +| github.com/DataDog/zstd v1.5.5 (indirect) | BSD-3-Clause | +| github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 (indirect) | MIT | +| github.com/apache/pulsar-client-go v0.14.0 (indirect) | Apache-2.0 | +| github.com/ardielle/ardielle-go/rdl | Apache-2.0 | +| github.com/beorn7/perks/quantile | MIT | +| github.com/bits-and-blooms/bitset v1.7.0 (indirect) | BSD-3-Clause | +| github.com/cespare/xxhash/v2 v2.2.0 (indirect) | MIT | +| github.com/dvsekhvalnov/jose2go v1.6.0 (indirect) | MIT | +| github.com/go-playground/locales v0.14.0 (indirect) | MIT | +| 
github.com/go-playground/universal-translator v0.18.0 (indirect) | MIT | +| github.com/go-playground/validator/v10 v10.11.1 | MIT | +| github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 (indirect) | BSD-2-Clause | +| github.com/golang-jwt/jwt/v5 v5.2.1 (indirect) | MIT | +| github.com/golang/groupcache/lru | Apache-2.0 | +| github.com/golang/protobuf v1.5.4 (indirect) | BSD-3-Clause | +| github.com/google/s2a-go v0.1.4 (indirect) | Apache-2.0 | +| github.com/googleapis/enterprise-certificate-proxy/client | Apache-2.0 | +| github.com/googleapis/gax-go/v2 v2.12.0 (indirect) | BSD-3-Clause | +| github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c (indirect) | MIT | +| github.com/hamba/avro v1.8.0 | MIT | +| github.com/hamba/avro/v2 v2.22.2-0.20240625062549-66aad10411d9 (indirect) | MIT | +| github.com/hashicorp/errwrap v1.1.0 (indirect) | MPL-2.0 | +| github.com/hashicorp/go-multierror v1.1.1 (indirect) | MPL-2.0 | +| github.com/hashicorp/go-uuid v1.0.3 (indirect) | MPL-2.0 | +| github.com/hashicorp/golang-lru v0.5.4 | MPL-2.0 | +| github.com/jcmturner/aescts/v2 v2.0.0 (indirect) | Apache-2.0 | +| github.com/jcmturner/dnsutils/v2 v2.0.0 (indirect) | Apache-2.0 | +| github.com/jcmturner/gofork v1.7.6 (indirect) | BSD-3-Clause | +| github.com/jcmturner/gokrb5/v8 v8.4.4 (indirect) | Apache-2.0 | +| github.com/jcmturner/rpc/v2 v2.0.3 (indirect) | Apache-2.0 | +| github.com/jhump/protoreflect v1.12.0 | Apache-2.0 | +| github.com/json-iterator/go v1.1.12 (indirect) | MIT | +| github.com/kkyr/fig v0.3.0 | Apache-2.0 | +| github.com/klauspost/compress v1.17.9 (indirect) | Apache-2.0 | +| github.com/klauspost/compress/internal/snapref | BSD-3-Clause | +| github.com/klauspost/compress/s2 | BSD-3-Clause | +| github.com/klauspost/compress/zstd/internal/xxhash | MIT | +| github.com/leodido/go-urn v1.2.1 (indirect) | MIT | +| github.com/matttproud/golang_protobuf_extensions/pbutil | Apache-2.0 | +| github.com/mitchellh/mapstructure v1.5.0 (indirect) | MIT | +| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd (indirect) | Apache-2.0 | +| github.com/modern-go/reflect2 v1.0.2 (indirect) | Apache-2.0 | +| github.com/mtibben/percent v0.2.1 (indirect) | MIT | +| github.com/nats-io/nats.go v1.25.0 (indirect) | Apache-2.0 | +| github.com/nats-io/nkeys v0.4.4 (indirect) | Apache-2.0 | +| github.com/nats-io/nuid v1.0.1 (indirect) | Apache-2.0 | +| github.com/pelletier/go-toml v1.9.3 (indirect) | Apache-2.0 | +| github.com/pierrec/lz4 v2.6.1+incompatible (indirect) | BSD-3-Clause | +| github.com/pierrec/lz4/v4 v4.1.17 (indirect) | BSD-3-Clause | +| github.com/pkg/errors v0.9.1 | BSD-2-Clause | +| github.com/prometheus/client_golang/prometheus | Apache-2.0 | +| github.com/prometheus/client_model/go | Apache-2.0 | +| github.com/prometheus/common v0.43.0 (indirect) | Apache-2.0 | +| github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg | BSD-3-Clause | +| github.com/prometheus/procfs v0.9.0 (indirect) | Apache-2.0 | +| github.com/santhosh-tekuri/jsonschema/v5 v5.0.2 | Apache-2.0 | +| github.com/sirupsen/logrus v1.9.3 (indirect) | MIT | +| github.com/spaolacci/murmur3 v1.1.0 (indirect) | BSD-3-Clause | +| github.com/twmb/franz-go/pkg | BSD-3-Clause | +| github.com/twmb/franz-go/pkg/kmsg v1.5.0 (indirect) | BSD-3-Clause | +| github.com/twmb/franz-go/pkg/sasl/kerberos v1.1.0 (indirect) | BSD-3-Clause | +| github.com/twmb/franz-go/plugin/kprom v1.0.0 (indirect) | BSD-3-Clause | +| github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb (indirect) | Apache-2.0 | 
+| github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 (indirect) | Apache-2.0 | +| github.com/xeipuuv/gojsonschema v1.2.0 | Apache-2.0 | +| go.opencensus.io v0.24.0 (indirect) | Apache-2.0 | +| go.uber.org/atomic v1.11.0 (indirect) | MIT | +| go.uber.org/multierr v1.9.0 | MIT | +| go.uber.org/ratelimit v0.2.0 | MIT | +| go.uber.org/zap v1.23.0 (indirect) | MIT | +| golang.org/x/crypto v0.22.0 (indirect) | BSD-3-Clause | +| golang.org/x/mod/semver | BSD-3-Clause | +| golang.org/x/net v0.23.0 | BSD-3-Clause | +| golang.org/x/oauth2 v0.11.0 (indirect) | BSD-3-Clause | +| golang.org/x/sync v0.3.0 | BSD-3-Clause | +| golang.org/x/sys v0.19.0 (indirect) | BSD-3-Clause | +| golang.org/x/term v0.19.0 (indirect) | BSD-3-Clause | +| golang.org/x/text v0.14.0 (indirect) | BSD-3-Clause | +| google.golang.org/api v0.128.0 (indirect) | BSD-3-Clause | +| google.golang.org/api/internal/third_party/uritemplates | BSD-3-Clause | +| google.golang.org/appengine v1.6.7 (indirect) | Apache-2.0 | +| google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a (indirect) | Apache-2.0 | +| google.golang.org/genproto/googleapis/api v0.0.0-20231002182017-d307bd883b97 (indirect) | Apache-2.0 | +| google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b (indirect) | Apache-2.0 | +| google.golang.org/grpc v1.59.0 (indirect) | Apache-2.0 | +| google.golang.org/protobuf v1.33.0 | BSD-3-Clause | +| gopkg.in/yaml.v2 v2.4.0 (indirect) | Apache-2.0 | \ No newline at end of file diff --git a/validator/licenses/csv-validator/LICENSE-3RD-PARTY.md b/validator/licenses/csv-validator/LICENSE-3RD-PARTY.md new file mode 100644 index 0000000..4489b2a --- /dev/null +++ b/validator/licenses/csv-validator/LICENSE-3RD-PARTY.md @@ -0,0 +1,84 @@ +# Licenses list + +Dependencies sometimes change licenses between versions, please keep this up to date with every new library use. 
+ + (Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Classic Module (ch.qos.logback:logback-classic:1.2.3 - http://logback.qos.ch/logback-classic) + (Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Core Module (ch.qos.logback:logback-core:1.2.3 - http://logback.qos.ch/logback-core) + (The Apache Software License, Version 2.0) Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.11.4 - http://github.com/FasterXML/jackson) + (The Apache Software License, Version 2.0) Jackson-core (com.fasterxml.jackson.core:jackson-core:2.11.4 - https://github.com/FasterXML/jackson-core) + (The Apache Software License, Version 2.0) jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.11.4 - http://github.com/FasterXML/jackson) + (The Apache Software License, Version 2.0) Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) + (The Apache Software License, Version 2.0) Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + (The Apache Software License, Version 2.0) Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.11.4 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + (MIT) gfc-semver (com.gilt:gfc-semver_2.11:0.0.5 - https://github.com/gilt/gfc-semver) + (Scala License) scala-io-core (com.github.scala-incubator.io:scala-io-core_2.11:0.4.3-1 - http://jesseeichar.github.com/scala-io-doc/index.html) + (Scala License) scala-io-file (com.github.scala-incubator.io:scala-io-file_2.11:0.4.3-1 - http://jesseeichar.github.com/scala-io-doc/index.html) + (The Apache Software License, Version 2.0) project ':json-path' (com.jayway.jsonpath:json-path:2.4.0 - https://github.com/jayway/JsonPath) + (BSD-Style) scala-arm (com.madgag:scala-arm_2.11:1.3.4 - http://jsuereth.com/scala-arm) + (Apache 2) opencsv (com.opencsv:opencsv:3.3 - http://opencsv.sf.net) + (Apache License 2.0) JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) + (The Apache Software License, Version 2.0) Commons IO (commons-io:commons-io:1.3.2 - http://jakarta.apache.org/commons/io/) + (EDL 1.0) Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) + (EPL 2.0) (GPL2 w/ CPE) Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) + (Eclipse Distribution License - v 1.0) Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) + (Apache 2) Joda time (joda-time:joda-time:1.6.2 - http://joda-time.sourceforge.net) + (Apache License, Version 2.0) Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.10.22 - https://bytebuddy.net/byte-buddy) + (Apache License, Version 2.0) Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.10.22 - https://bytebuddy.net/byte-buddy-agent) + (The Apache Software License, Version 2.0) ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/) + (The Apache Software License, Version 2.0) JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/) + (Apache License, Version 2.0) Apache 
Commons Lang (org.apache.commons:commons-lang3:3.11 - https://commons.apache.org/proper/commons-lang/) + (Apache License, Version 2.0) Apache Log4j API (org.apache.logging.log4j:log4j-api:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-api/) + (Apache License, Version 2.0) Apache Log4j to SLF4J Adapter (org.apache.logging.log4j:log4j-to-slf4j:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-to-slf4j/) + (Apache License, Version 2.0) tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.44 - https://tomcat.apache.org/) + (Apache License, Version 2.0) tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.44 - https://tomcat.apache.org/) + (The Apache License, Version 2.0) org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian) + (Apache License, Version 2.0) AssertJ fluent assertions (org.assertj:assertj-core:3.18.1 - https://assertj.github.io/doc/assertj-core/) + (EPL 2.0) (GPL2 w/ CPE) Jakarta Expression Language 3.0 (org.glassfish:jakarta.el:3.0.3 - https://projects.eclipse.org/projects/ee4j.el) + (BSD License 3) Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/) + (Eclipse Public License v2.0) JUnit Jupiter (Aggregator) (org.junit.jupiter:junit-jupiter:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter API (org.junit.jupiter:junit-jupiter-api:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter Engine (org.junit.jupiter:junit-jupiter-engine:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter Params (org.junit.jupiter:junit-jupiter-params:5.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Platform Commons (org.junit.platform:junit-platform-commons:1.7.1 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Platform Engine API (org.junit.platform:junit-platform-engine:1.7.1 - https://junit.org/junit5/) + (The MIT License) mockito-core (org.mockito:mockito-core:3.6.28 - https://github.com/mockito/mockito) + (The MIT License) mockito-junit-jupiter (org.mockito:mockito-junit-jupiter:3.6.28 - https://github.com/mockito/mockito) + (Apache License, Version 2.0) Objenesis (org.objenesis:objenesis:3.1 - http://objenesis.org) + (The Apache License, Version 2.0) org.opentest4j:opentest4j (org.opentest4j:opentest4j:1.2.0 - https://github.com/ota4j-team/opentest4j) + (BSD) ASM Core (org.ow2.asm:asm:5.0.4 - http://asm.objectweb.org/asm/) + (BSD 3-Clause) Scala Library (org.scala-lang:scala-library:2.11.7 - http://www.scala-lang.org/) + (BSD 3-clause) scala-parser-combinators (org.scala-lang.modules:scala-parser-combinators_2.11:1.0.1 - http://www.scala-lang.org/) + (BSD 3-clause) scala-continuations-library (org.scala-lang.plugins:scala-continuations-library_2.11:1.0.1 - http://www.scala-lang.org/) + (BSD-style) scalaz-concurrent (org.scalaz:scalaz-concurrent_2.11:7.2.0 - http://scalaz.org) + (BSD-style) scalaz-core (org.scalaz:scalaz-core_2.11:7.2.0 - http://scalaz.org) + (BSD-style) scalaz-effect (org.scalaz:scalaz-effect_2.11:7.2.0 - http://scalaz.org) + (MIT) scalaz-stream (org.scalaz.stream:scalaz-stream_2.11:0.7.3a - https://github.com/scalaz/scalaz-stream) + (Three-clause BSD-style) scodec-bits (org.scodec:scodec-bits_2.11:1.0.6 - http://github.com/scodec/scodec-bits) + (The Apache Software License, Version 2.0) JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) + (MIT License) JUL to SLF4J bridge 
(org.slf4j:jul-to-slf4j:1.7.30 - http://www.slf4j.org) + (MIT License) SLF4J API Module (org.slf4j:slf4j-api:1.7.30 - http://www.slf4j.org) + (Apache License, Version 2.0) Spring AOP (org.springframework:spring-aop:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Beans (org.springframework:spring-beans:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Context (org.springframework:spring-context:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Core (org.springframework:spring-core:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring TestContext Framework (org.springframework:spring-test:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Web (org.springframework:spring-web:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) Spring Web MVC (org.springframework:spring-webmvc:5.3.5 - https://github.com/spring-projects/spring-framework) + (Apache License, Version 2.0) spring-boot (org.springframework.boot:spring-boot:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-devtools (org.springframework.boot:spring-boot-devtools:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter (org.springframework.boot:spring-boot-starter:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-logging (org.springframework.boot:spring-boot-starter-logging:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-test (org.springframework.boot:spring-boot-test:2.4.4 - https://spring.io/projects/spring-boot) + (Apache License, Version 2.0) spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.4.4 - https://spring.io/projects/spring-boot) + (The Apache Software License, Version 2.0) org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.7.0 - https://www.xmlunit.org/) + (Apache License, Version 2.0) SnakeYAML (org.yaml:snakeyaml:1.27 - http://www.snakeyaml.org) + (The Mozilla Public License Version 2.0) csv-validator-core (uk.gov.nationalarchives:csv-validator-core:1.1.5 - http://digital-preservation.github.com/csv-validator/csv-validator-core/) + (The 
Mozilla Public License Version 2.0) csv-validator-java-api (uk.gov.nationalarchives:csv-validator-java-api:1.1.5 - http://digital-preservation.github.com/csv-validator/csv-validator-java-api/) + (The BSD 3-Clause License) UTF-8 Validator (uk.gov.nationalarchives:utf8-validator:1.2 - https://github.com/digital-preservation/utf8-validator) diff --git a/validator/licenses/xml-validator/LICENSE-3RD-PARTY.md b/validator/licenses/xml-validator/LICENSE-3RD-PARTY.md new file mode 100644 index 0000000..cc95099 --- /dev/null +++ b/validator/licenses/xml-validator/LICENSE-3RD-PARTY.md @@ -0,0 +1,29 @@ +# Licenses list + +Dependencies sometimes change licenses between versions, please keep this up to date with every new library use. + +Flask 2.0.1 BSD License + +Jinja2 3.0.1 BSD License + +MarkupSafe 2.0.1 BSD License + +Werkzeug 2.0.1 BSD License + +asgiref 3.5.2 BSD License + +click 8.0.1 BSD License + +colorama 0.4.4 BSD License + +elementpath 2.2.3 MIT License + +itsdangerous 2.0.1 BSD License + +sqlparse 0.4.3 BSD License + +tzdata 2022.5 Apache Software License + +waitress 2.0.0 Zope Public License + +xmlschema 1.7.0 MIT License
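
For reviewers, a minimal sketch of how the Validator contract added in validator/internal/validator/validator.go is meant to be consumed: the Func adapter turns a plain function into a Validator, and callers branch on ErrDeadletter to separate broken messages from merely invalid ones. The import path is the one used by xml.go in this change; the length-based rule and the main wrapper are illustrative assumptions, not part of the package.

```go
// Illustrative usage sketch only; not part of this change.
package main

import (
	"errors"
	"fmt"

	"github.com/dataphos/aquarium-janitor-standalone-internal/internal/validator"
)

func main() {
	// Hypothetical rule used only for this sketch: empty messages are broken
	// (deadletter), everything else is valid as long as a schema is supplied.
	var v validator.Validator = validator.Func(func(message, schema []byte, id, version string) (bool, error) {
		if len(message) == 0 {
			return false, validator.ErrDeadletter
		}
		return len(schema) > 0, nil
	})

	valid, err := v.Validate([]byte(`{"name":"test"}`), []byte(`{"type":"object"}`), "schema-1", "v1")
	switch {
	case errors.Is(err, validator.ErrDeadletter):
		fmt.Println("broken message, route to dead letter")
	case err != nil:
		fmt.Println("unrecoverable validator error:", err)
	case valid:
		fmt.Println("message is valid")
	default:
		fmt.Println("message is invalid")
	}
}
```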