From 6f643af328efa08b6759babf037d5d41150a10ba Mon Sep 17 00:00:00 2001 From: Dominic Pelini <111786059+DomPeliniAerospike@users.noreply.github.com> Date: Thu, 8 Feb 2024 10:21:44 -0700 Subject: [PATCH] Added workflows to pipeline Fixed testing to be compatible with suitable servers. Added npm, yarn, pnpm, bun, and typescript actions Added binding-bindings workflow to compile binaries Added Admin testing and ee server testing --- .github/actions/run-ee-server/action.yml | 75 -- .github/workflows/build-bindings.yml | 379 +++++++--- .github/workflows/combine-bindings/action.yml | 77 ++ .github/workflows/tests.yml | 689 ++++++++++-------- examples/typescript.ts | 26 + test/admin.js | 8 +- test/exp.js | 1 + test/exp_map.js | 2 - test/index.js | 8 +- test/maps.js | 11 +- test/query.js | 387 +++++----- 11 files changed, 1010 insertions(+), 653 deletions(-) delete mode 100644 .github/actions/run-ee-server/action.yml create mode 100644 .github/workflows/combine-bindings/action.yml create mode 100644 examples/typescript.ts diff --git a/.github/actions/run-ee-server/action.yml b/.github/actions/run-ee-server/action.yml deleted file mode 100644 index 515ed0306..000000000 --- a/.github/actions/run-ee-server/action.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: 'Run EE Server' -description: 'Run EE server' -inputs: - use-server-rc: - required: true - default: false - server-tag: - required: true - default: 'latest' - # Github Composite Actions can't access secrets - # so we need to pass them in as inputs - docker-hub-username: - required: false - type: string - docker-hub-password: - required: false - type: string - -runs: - using: "composite" - steps: - - name: Install crudini to manipulate config.conf - run: python3 -m pip install crudini==0.9.4 - shell: bash - - - name: Add enterprise edition config to config.conf - run: | - python3 -m crudini --set config.conf enterprise-edition hosts 127.0.0.1:3000 - python3 -m crudini --set config.conf enterprise-edition user superuser - 
python3 -m crudini --set config.conf enterprise-edition password superuser - working-directory: test - shell: bash - - - name: Create config folder to store configs in - run: mkdir configs - shell: bash - - - name: Use release server - if: ${{ inputs.use-server-rc == 'false' }} - run: echo "SERVER_IMAGE=aerospike/aerospike-server-enterprise" >> $GITHUB_ENV - shell: bash - - - name: Use release candidate server - if: ${{ inputs.use-server-rc == 'true' }} - run: echo "SERVER_IMAGE=aerospike/aerospike-server-enterprise-rc" >> $GITHUB_ENV - shell: bash - - - name: Log into Docker Hub to get server RC - if: ${{ inputs.use-server-rc == 'true' }} - run: docker login --username ${{ inputs.docker-hub-username }} --password ${{ inputs.docker-hub-password }} - shell: bash - - - name: Get default aerospike.conf from Docker server EE container - run: | - docker run -d --name aerospike -p 3000-3002:3000-3002 $SERVER_IMAGE:${{ inputs.server-tag }} - sleep 5 - docker cp aerospike:/etc/aerospike/aerospike.conf ./configs/aerospike.conf - docker container stop aerospike - docker container rm aerospike - shell: bash - - - name: Enable security features using aerospike.conf - # Security stanza - run: echo -e "security {\n\tenable-quotas true\n}\n" >> ./aerospike.conf - working-directory: ./configs - shell: bash - - - name: Run enterprise edition server - run: docker run -tid -v $(pwd)/configs:/opt/aerospike/etc -p 3000:3000 --name aerospike $SERVER_IMAGE:${{ inputs.server-tag }} asd --config-file /opt/aerospike/etc/aerospike.conf - shell: bash - - - name: Create user in database for tests - # Use default admin user to create another user for testing - run: docker exec aerospike asadm --user admin --password admin --enable -e "manage acl create user superuser password superuser roles read-write-udf sys-admin user-admin data-admin" - shell: bash \ No newline at end of file diff --git a/.github/workflows/build-bindings.yml b/.github/workflows/build-bindings.yml index dceec5c63..cc0fc503a 
100644 --- a/.github/workflows/build-bindings.yml +++ b/.github/workflows/build-bindings.yml @@ -9,7 +9,7 @@ on: push: branches: ["stage", "master"] pull_request: - branches: ["stage"] + branches: ["stage", "master"] types: [ # Default triggers opened, @@ -86,15 +86,15 @@ on: jobs: manylinux: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # nodejs versions to build bindings on nodejs: [ - "v108", - "v115", - "v120" + ["v108", 18], + ["v115", 20], + ["v120", 21] ] platform: [ "x86_64", @@ -102,22 +102,31 @@ jobs: ] steps: + - name: Show job status for commit uses: myrotvorets/set-commit-status-action@v2.0.0 with: sha: ${{ github.sha }} - context: "Build bindings (${{ matrix.nodejs }}-manylinux_${{ matrix.platform }})" + context: "Build bindings (${{ matrix.nodejs[0] }}-manylinux_${{ matrix.platform }})" - uses: actions/checkout@v4 with: submodules: recursive ref: ${{ inputs.commit_sha }} + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.nodejs[1] }} + architecture: 'x64' + + - name: print + run: uname -m + - name: Set up QEMU for cross compiling arm64 if: ${{ matrix.platform == 'aarch64' }} - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 with: - platforms: arm64 + platforms: all - uses: ./.github/actions/run-ee-server if: ${{ inputs.run_tests }} @@ -146,21 +155,35 @@ jobs: if: ${{ !inputs.run_tests }} run: echo "TEST_COMMAND=node -e 'aerospike = require(\".\/lib\/aerospike\")'" >> $GITHUB_ENV - - - run: sudo apt update - - name: Install build dependencies (C Client dependency packages) - run: sudo apt install g++ libssl-dev zlib1g-dev; - - name: Install build dependencies (make) - run: sudo apt-get install -y make; - - name: Install build dependencies (make) - run: sudo apt install build-essential; - - - name: Build client + - uses: uraimo/run-on-arch-action@v2 + name: Build client arm + if: ${{ matrix.platform == 'aarch64' }} + id: runcmd + with: + arch: aarch64 + distro: ubuntu22.04 + + 
# Set an output parameter `uname` for use in subsequent steps + run: | + apt update + apt install -y g++ libssl-dev zlib1g-dev make build-essential libuv1-dev wget curl + ./scripts/build-c-client.sh + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash; + source ~/.bashrc; + nvm i 20 + npm install + + - name: Build client x64 + if: ${{ matrix.platform != 'aarch64' }} run: | + sudo apt update + sudo apt install g++ libssl-dev zlib1g-dev; + sudo apt-get install -y make; + sudo apt install build-essential; ./scripts/build-c-client.sh npm install env: - CFLAGS: '-Werror' + CFLAGS: '-Werror' - name: Test client run: | @@ -169,7 +192,7 @@ jobs: - name: Send binding to test jobs uses: actions/upload-artifact@v4 with: - name: ${{ matrix.nodejs }}-manylinux_${{ matrix.platform }}.build} + name: ${{ matrix.nodejs[0] }}-manylinux_${{ matrix.platform }}.build path: ./lib/binding/node-*-linux-*/ - name: Set final commit status @@ -178,7 +201,7 @@ with: sha: ${{ github.sha }} status: ${{ job.status }} - context: "Build bindings (${{ matrix.nodejs }}-manylinux_${{ matrix.platform }})" + context: "Build bindings (${{ matrix.nodejs[0] }}-manylinux_${{ matrix.platform }})" macOS-x86: strategy: @@ -189,7 +212,7 @@ "v115", "v120" ] - runs-on: macos-12-large + runs-on: macos-latest steps: - name: Show job status for commit uses: myrotvorets/set-commit-status-action@v2.0.0 @@ -255,124 +278,254 @@ sha: ${{ github.sha }} context: "Build bindings (${{ matrix.nodejs }}-macosx_x86_64)" - macOS-m1: - runs-on: [ - self-hosted, - macOS, - ARM64 - ] - strategy: - matrix: - nodejs-version: [ - ["v108", "18"], - ["v115", "20"], - ["v120", "21"], - ] - fail-fast: false +# macOS-m1: +# runs-on: [ +# self-hosted, +# macOS, +# ARM64 +# ] +# strategy: +# matrix: +# nodejs-version: [ +# ["v108", "18"], +# ["v115", "20"], +# ["v120", "21"], +# ] +# fail-fast: false +# steps: +# - name: Show job status for commit +# uses:
myrotvorets/set-commit-status-action@v2.0.0 +# with: +# sha: ${{ github.sha }} +# context: "Build bindings (${{ matrix.nodejs-version[1] }}-macosx_arm64)" +# +# - uses: actions/checkout@v4 +# with: +# submodules: recursive +# ref: ${{ inputs.commit_sha }} +# +# - name: Install NVM +# run: | +# curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash +# source ~/.zshrc +# nvm -v +# nvm install ${{ matrix.nodejs-version[1] }} +# +# +# - name: Setup symlink folders +# run: | +# sudo rm -rf /usr/local/opt/openssl; +# sudo rm -rf /usr/local/opt/libuv; +# sudo mkdir -p /usr/local/opt; +# sudo chown -R $(whoami) /usr/local/opt +# +# - name: Install brew packages +# run: | +# brew install openssl@3.2.1 +# brew install libuv@1.47.0 +# +# - name: Set environment for building +# run: | +# echo "export PATH="/usr/local/bin/:/usr/local/opt/openssl/bin:$PATH" +# export LDFLAGS="-L/usr/local/opt/openssl/lib" +# export CPPFLAGS="-I/usr/local/opt/openssl/include" +# export EXT_CFLAGS="-I/usr/local/opt/openssl/include"" >> ~/.zshrc; +# source ~/.zshrc; +# +# - name: Setup symlink folders +# run: | +# sudo ln -s /usr/local/Cellar/libuv/1.47.0/ /usr/local/opt/libuv; +# sudo ln -s /usr/local/Cellar/openssl@3/3.2.1/ /usr/local/opt/openssl; +# +# # Self-hosted runner only +# # Need to be able to save Docker Hub credentials to keychain +# - run: security unlock-keychain -p ${{ secrets.MAC_M1_SELF_HOSTED_RUNNER_PW }} +# if: ${{ inputs.run_tests && inputs.use-server-rc }} +# +# - if: ${{ inputs.run_tests && inputs.use-server-rc }} +# uses: docker/login-action@v3 +# with: +# username: ${{ secrets.DOCKER_HUB_BOT_USERNAME }} +# password: ${{ secrets.DOCKER_HUB_BOT_PW }} +# +# - name: Use server rc +# if: ${{ inputs.run_tests && inputs.use-server-rc }} +# run: echo IMAGE_NAME="${{ vars.SERVER_RC_REPO_LINK }}:${{ inputs.server-tag }}" >> $GITHUB_ENV +# +# - name: Use server release +# if: ${{ inputs.run_tests && !inputs.use-server-rc }} +# run: echo IMAGE_NAME="${{ 
vars.SERVER_REPO_LINK }}:${{ inputs.server-tag }}" >> $GITHUB_ENV +# +# - name: Run server +# if: ${{ inputs.run_tests }} +# run: docker run -d -p 3000:3000 --name aerospike ${{ env.IMAGE_NAME }} +# +# - name: Build client +# run: | +# ./scripts/build-c-client.sh +# npm install +# env: +# CFLAGS: '-Werror' +# +# - name: Enable tests +# if: ${{ inputs.run_tests }} +# run: echo "TEST_COMMAND=npm test -- --h 127.0.0.1 --port 3000" >> $GITHUB_ENV +# +# - name: Disable tests (only run basic import test) +# if: ${{ !inputs.run_tests }} +# run: echo "TEST_COMMAND=node -e 'aerospike = require(\".\/lib\/aerospike\")'" >> $GITHUB_ENV +# +# +# - name: Test client +# run: | +# ${{ env.TEST_COMMAND }} +# +# - name: Save macOS wheel +# uses: actions/upload-artifact@v4 +# with: +# name: ${{ matrix.nodejs-version[0] }}-macosx_arm64.build +# path: ./lib/binding/*/ +# +# - name: Stop server +# if: ${{ always() && inputs.run_tests }} +# run: | +# docker container stop aerospike +# docker container prune -f +# +# - name: Set final commit status +# uses: myrotvorets/set-commit-status-action@v2.0.0 +# if: always() +# with: +# sha: ${{ github.sha }} +# status: ${{ job.status }} +# context: "Build bindings (${{ matrix.nodejs-version[0] }}-macosx_arm64)" + + test-npm-install: + runs-on: ubuntu-latest + needs: [manylinux, macOS-x86] steps: - - name: Show job status for commit - uses: myrotvorets/set-commit-status-action@v2.0.0 - with: - sha: ${{ github.sha }} - context: "Build bindings (${{ matrix.nodejs-version[1] }}-macosx_arm64)" - - - uses: actions/checkout@v4 + - uses: actions/checkout@v2 with: submodules: recursive - ref: ${{ inputs.commit_sha }} - - name: Install NVM - run: | - curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash - source ~/.zshrc - nvm -v - nvm install ${{ matrix.nodejs-version[1] }} + - uses: ./.github/workflows/combine-bindings/ + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + 
architecture: 'x64' - - name: Setup symlink folders + - name: Modify the package.json run: | - sudo rm -rf /usr/local/opt/openssl; - sudo rm -rf /usr/local/opt/libuv; - sudo mkdir -p /usr/local/opt; - sudo chown -R $(whoami) /usr/local/opt + npm install -g json + json -I -f package.json -e "this.scripts.install=\"npm-run-all removeExtraBinaries build\"" - - name: Install brew packages + - name: Run tests run: | - brew install openssl@3.2.1 - brew install libuv@1.47.0 + mkdir -p testDir lib/binding/openssl@3/ lib/binding/openssl@1/ + cd testDir + npm install .. - - name: Set environment for building - run: | - echo "export PATH="/usr/local/bin/:/usr/local/opt/openssl/bin:$PATH" - export LDFLAGS="-L/usr/local/opt/openssl/lib" - export CPPFLAGS="-I/usr/local/opt/openssl/include" - export EXT_CFLAGS="-I/usr/local/opt/openssl/include"" >> ~/.zshrc; - source ~/.zshrc; + test-yarn-install: + runs-on: ubuntu-latest + needs: [manylinux, macOS-x86] + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive - - name: Setup symlink folders + - uses: ./.github/workflows/combine-bindings/ + + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + architecture: 'x64' + + - name: Modify the package.json run: | - sudo ln -s /usr/local/Cellar/libuv/1.47.0/ /usr/local/opt/libuv; - sudo ln -s /usr/local/Cellar/openssl@3/3.2.1/ /usr/local/opt/openssl; + npm install -g json + json -I -f package.json -e "this.scripts.install=\"npm-run-all removeExtraBinaries build\"" - # Self-hosted runner only - # Need to be able to save Docker Hub credentials to keychain - - run: security unlock-keychain -p ${{ secrets.MAC_M1_SELF_HOSTED_RUNNER_PW }} - if: ${{ inputs.run_tests && inputs.use-server-rc }} + - name: Run tests + run: | + mkdir -p testDir lib/binding/openssl@3/ lib/binding/openssl@1/ + cd testDir + yarn install .. 
- - if: ${{ inputs.run_tests && inputs.use-server-rc }} - uses: docker/login-action@v3 + test-pnpm-install: + runs-on: ubuntu-latest + needs: [manylinux, macOS-x86] + steps: + - uses: actions/checkout@v2 with: - username: ${{ secrets.DOCKER_HUB_BOT_USERNAME }} - password: ${{ secrets.DOCKER_HUB_BOT_PW }} + submodules: recursive - - name: Use server rc - if: ${{ inputs.run_tests && inputs.use-server-rc }} - run: echo IMAGE_NAME="${{ vars.SERVER_RC_REPO_LINK }}:${{ inputs.server-tag }}" >> $GITHUB_ENV + - uses: ./.github/workflows/combine-bindings/ - - name: Use server release - if: ${{ inputs.run_tests && !inputs.use-server-rc }} - run: echo IMAGE_NAME="${{ vars.SERVER_REPO_LINK }}:${{ inputs.server-tag }}" >> $GITHUB_ENV + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + architecture: 'x64' - - name: Run server - if: ${{ inputs.run_tests }} - run: docker run -d -p 3000:3000 --name aerospike ${{ env.IMAGE_NAME }} + - name: Modify the package.json + run: | + npm install -g json + json -I -f package.json -e "this.scripts.install=\"npm-run-all removeExtraBinaries build\"" - - name: Build client + - name: Run tests run: | - ./scripts/build-c-client.sh - npm install - env: - CFLAGS: '-Werror' + mkdir -p testDir lib/binding/openssl@3/ lib/binding/openssl@1/ + cd testDir + pnpm install .. 
- - name: Enable tests - if: ${{ inputs.run_tests }} - run: echo "TEST_COMMAND=npm test -- --h 127.0.0.1 --port 3000" >> $GITHUB_ENV + test-bun-install: + runs-on: ubuntu-latest + needs: [manylinux, macOS-x86] + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive - - name: Disable tests (only run basic import test) - if: ${{ !inputs.run_tests }} - run: echo "TEST_COMMAND=node -e 'aerospike = require(\".\/lib\/aerospike\")'" >> $GITHUB_ENV + - uses: ./.github/workflows/combine-bindings/ + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + architecture: 'x64' - - name: Test client + - name: Modify the package.json run: | - ${{ env.TEST_COMMAND }} + npm install -g json + json -I -f package.json -e "this.scripts.install=\"npm-run-all removeExtraBinaries build\"" - - name: Save macOS wheel - uses: actions/upload-artifact@v4 + - name: Run tests + run: | + mkdir -p testDir lib/binding/openssl@3/ lib/binding/openssl@1/ + cd testDir + bun install .. 
+ + test-typescript: + runs-on: ubuntu-latest + needs: [manylinux, macOS-x86] + steps: + - uses: actions/checkout@v2 with: - name: ${{ matrix.nodejs-version[0] }}-macosx_arm64.build - path: ./lib/binding/*/ + submodules: recursive - - name: Stop server - if: ${{ always() && inputs.run_tests }} - run: | - docker container stop aerospike - docker container prune -f + - uses: ./.github/workflows/combine-bindings/ - - name: Set final commit status - uses: myrotvorets/set-commit-status-action@v2.0.0 - if: always() + - uses: actions/setup-node@v4 with: - sha: ${{ github.sha }} - status: ${{ job.status }} - context: "Build bindings (${{ matrix.nodejs-version[0] }}-macosx_arm64)" + node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + architecture: 'x64' + + - name: Modify the package.json + run: | + npm install -g json + json -I -f package.json -e "this.scripts.install=\"npm-run-all removeExtraBinaries build\"" + + - name: Run tests + run: | + mkdir -p testDir lib/binding/openssl@3/ lib/binding/openssl@1/ + cd testDir + bun install ..
diff --git a/.github/workflows/combine-bindings/action.yml b/.github/workflows/combine-bindings/action.yml new file mode 100644 index 000000000..be45e0120 --- /dev/null +++ b/.github/workflows/combine-bindings/action.yml @@ -0,0 +1,77 @@ +name: Combine bindings +description: 'composite run action' + +runs: + using: "composite" + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + +# - uses: actions/download-artifact@v4 +# with: +# name: v108-macosx_aarch64.build + + - uses: actions/download-artifact@v4 + with: + name: v108-macosx_x86_64.build + +# - uses: actions/download-artifact@v4 +# with: +# name: v115-macosx_aarch64.build + + - uses: actions/download-artifact@v4 + with: + name: v115-macosx_x86_64.build + +# - uses: actions/download-artifact@v4 +# with: +# name: v120-macosx_aarch64.build + + - uses: actions/download-artifact@v4 + with: + name: v120-macosx_x86_64.build + + - uses: actions/download-artifact@v4 + with: + name: v108-manylinux_x86_64.build + + - uses: actions/download-artifact@v4 + with: + name: v108-manylinux_aarch64.build + + - uses: actions/download-artifact@v4 + with: + name: v115-manylinux_x86_64.build + + - uses: actions/download-artifact@v4 + with: + name: v115-manylinux_aarch64.build + + - uses: actions/download-artifact@v4 + with: + name: v120-manylinux_x86_64.build + + - uses: actions/download-artifact@v4 + with: + name: v120-manylinux_aarch64.build + + - name: Install client + shell: bash + run: | + mkdir -p lib/binding/node-v108-darwin-arm64 lib/binding/node-v115-darwin-arm64 lib/binding/node-v120-darwin-arm64 + cp -r node-v108-linux-x64 lib/binding/node-v108-linux-x64 + cp -r node-v115-linux-x64 lib/binding/node-v115-linux-x64 + cp -r node-v120-linux-x64 lib/binding/node-v120-linux-x64 + cp -r node-v108-linux-arm64 lib/binding/node-v108-linux-arm64 + cp -r node-v115-linux-arm64 lib/binding/node-v115-linux-arm64 + cp -r node-v120-linux-arm64 lib/binding/node-v120-linux-arm64 + cp -r node-v108-darwin-x64 
lib/binding/node-v108-darwin-x64 + cp -r node-v115-darwin-x64 lib/binding/node-v115-darwin-x64 + cp -r node-v120-darwin-x64 lib/binding/node-v120-darwin-x64 +# cp -r node-v108-darwin-arm64 lib/binding/node-v108-darwin-arm64 +# cp -r node-v115-darwin-arm64 lib/binding/node-v115-darwin-arm64 +# cp -r node-v120-darwin-arm64 lib/binding/node-v120-darwin-arm64 + + + diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1d67ec7a7..3dad94844 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,316 +1,427 @@ -#name: PR pre-merge tests -# -#env: -# LOWEST_SUPPORTED_NODEJS_VERSION: '18' -# LOWEST_SUPPORTED_NODE_MODULE: 'v108' -# -## Trigger test workflow whenever: -## 1. A pull request is updated (e.g with new commits) -## 2. Commits are pushed directly to the stage or master branch -#on: -# push: -# branches: ["stage", "master"] -# pull_request: -# branches: ["stage"] -# types: [ -# # Default triggers -# opened, -# synchronize, -# reopened, -# # Additional triggers -# labeled, -# unlabeled -# ] -# workflow_dispatch: -# -#jobs: -# lint: -# runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v4 -# with: -# submodules: recursive -# - name: install standard -# run: npm install standard -# - name: lint -# run: npm run lint -# -# build-ubuntu: +name: PR pre-merge tests + +env: + LOWEST_SUPPORTED_NODEJS_VERSION: '18' + LOWEST_SUPPORTED_NODE_MODULE: 'v108' + +# Trigger test workflow whenever: +# 1. A pull request is updated (e.g with new commits) +# 2. 
Commits are pushed directly to the stage or master branch +on: + push: + branches: ["stage", "master"] + pull_request: + branches: ["stage", "master"] + types: [ + # Default triggers + opened, + synchronize, + reopened, + # Additional triggers + labeled, + unlabeled + ] + workflow_dispatch: + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: install standard + run: npm install standard + + - name: lint + run: npm run lint + + build-ubuntu: + runs-on: ubuntu-latest + strategy: + matrix: + nodejs-version: ["18", "20", "21"] + fail-fast: false + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.nodejs-version }} + architecture: 'x64' + - name: debugging + run: | + echo "LOWEST_SUPPORTED_NODEJS_VERSION: $LOWEST_SUPPORTED_NODEJS_VERSION" + + - run: sudo apt update + - name: Install build dependencies (C Client dependency packages) + run: sudo apt install g++ libssl-dev zlib1g-dev; + - name: Install build dependencies (make) + run: sudo apt-get install -y make; + - name: Install build dependencies (make) + run: sudo apt install build-essential; + + - name: Build client + run: | + ./scripts/build-c-client.sh + npm install + env: + CFLAGS: '-Werror' + + - name: list + run: ls lib/binding + - name: Send binding to test jobs + uses: actions/upload-artifact@v4 + with: + name: binding-${{ matrix.nodejs-version }} + path: ./lib/binding/node-*-linux-x64/ + +# test-memray: +# needs: build-ubuntu # runs-on: ubuntu-latest -# strategy: -# matrix: -# nodejs-version: ["18", "20", "21"] -# fail-fast: false -# -# # steps: # - uses: actions/checkout@v4 -# with: -# submodules: recursive # # - uses: actions/setup-node@v4 # with: # node-version: ${{ matrix.nodejs-version }} # architecture: 'x64' -# - name: debugging -# run: | -# echo "LOWEST_SUPPORTED_NODEJS_VERSION: $LOWEST_SUPPORTED_NODEJS_VERSION" -# -# - run: sudo apt update -# - 
name: Install build dependencies (C Client dependency packages) -# run: sudo apt install g++ libssl-dev zlib1g-dev; -# - name: Install build dependencies (make) -# run: sudo apt-get install -y make; -# - name: Install build dependencies (make) -# run: sudo apt install build-essential; -# -# - name: Build client -# run: | -# ./scripts/build-c-client.sh -# npm install -# env: -# CFLAGS: '-Werror' -# -# - name: list -# run: ls lib/binding -# - name: Send binding to test jobs -# uses: actions/upload-artifact@v4 -# with: -# name: binding-${{ matrix.nodejs-version }} -# path: ./lib/binding/node-*-linux-x64/ -# -## test-memray: -## needs: build-ubuntu -## runs-on: ubuntu-latest -## steps: -## - uses: actions/checkout@v4 -## -## - uses: actions/setup-node@v4 -## with: -## node-version: ${{ matrix.nodejs-version }} -## architecture: 'x64' -## -## - uses: actions/download-artifact@v4 -## with: -## name: binding-18 -## -## - name: make binding folder -## run: mkdir lib/binding -## -## - name: Install client -## run: cp -r install node-v108-linux-x64 lib/binding/node-v108-linux-x64 -## -## - name: Install client -## run: npm install . 
-## -## - name: Run Aerospike server -## run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server -## -## - name: Wait for database to be ready -## # Should be ready after 3 seconds -## run: sleep 3 -## -## - name: Get number of tests -## run: echo "NUM_TESTS=$(npm run test-dry-run | grep -oP '\d+ (passing|pending)' | awk '{ sum += $1 } END { print sum }')" >> $GITHUB_ENV -## working-directory: test -## -## - name: Run tests -## # Get number of tests since setting to 0 doesn't work properly -## # pytest-memray currently throws a ZeroDivision error due to having a bug -## # We ignore this for now -## run: python -m pytest ./new_tests --memray --memray-bin-path=./ --most-allocations=${{ env.NUM_TESTS }} || true -## working-directory: test -# -# # Run this when testing new server features on server release candidate -# # to make sure the tests don't regress on the last server release. -# test-ce-latest-release: -# runs-on: ubuntu-latest -# if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} -# needs: build-ubuntu -# -# steps: -# - uses: actions/checkout@v4 -# with: -# submodules: recursive -# - name: debugging -# run: | -# echo "LOWEST_SUPPORTED_NODEJS_VERSION: $LOWEST_SUPPORTED_NODEJS_VERSION" -# - uses: actions/setup-node@v4 -# with: -# node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# architecture: 'x64' -# -# - uses: actions/download-artifact@v4 -# with: -# name: binding-${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# env: -# LOWEST_SUPPORTED_NODEJS_VERSION: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# -# -# - name: make binding folder -# run: mkdir lib/binding -# -# - name: Install client -# run: cp -r node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 lib/binding/node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 -# -# - name: Run Aerospike server -# run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server -# -# - name: Wait for database to be ready -# # 
Should be ready after 3 seconds -# run: sleep 3 -# -# - name: Run tests -# run: npm run test -# -# test-ce: -# runs-on: ubuntu-latest -# needs: build-ubuntu -# strategy: -# matrix: -# node-version: [ -# "18", -# "20", -# "21" -# ] -# fail-fast: false -# -# steps: -# - uses: actions/checkout@v2 -# with: -# submodules: recursive -# -# - uses: actions/setup-node@v4 -# with: -# node-version: ${{ matrix.node-version }} -# architecture: 'x64' # # - uses: actions/download-artifact@v4 # with: -# name: binding-${{ matrix.node-version }} +# name: binding-18 # # - name: make binding folder # run: mkdir lib/binding # -# - name: install mocha -# run: npm install mocha -# # - name: Install client -# #fix the convention here -# run: | -# if [ "${{ matrix.node-version }}" = "18" ]; then -# cp -r node-v108-linux-x64 lib/binding/node-v108-linux-x64 -# elif [ "${{ matrix.node-version }}" = "20" ]; then -# cp -r node-v115-linux-x64 lib/binding/node-v115-linux-x64 -# elif [ "${{ matrix.node-version }}" = "21" ]; then -# cp -r node-v120-linux-x64 lib/binding/node-v120-linux-x64 -# fi -# -# - if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} -# uses: docker/login-action@v3 -# with: -# username: ${{ secrets.DOCKER_HUB_BOT_USERNAME }} -# password: ${{ secrets.DOCKER_HUB_BOT_PW }} +# run: cp -r install node-v108-linux-x64 lib/binding/node-v108-linux-x64 # -# - name: Run Aerospike server release candidate with latest tag -# if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} -# run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server-rc:latest +# - name: Install client +# run: npm install . 
# # - name: Run Aerospike server -# if: ${{ !contains(github.event.pull_request.labels.*.name, 'new-server-features') }} # run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server # # - name: Wait for database to be ready # # Should be ready after 3 seconds # run: sleep 3 # -# - name: Run tests -# run: npm run test -# -# test-lowest-supported-server: -# runs-on: ubuntu-latest -# needs: build-ubuntu -# steps: -# - uses: actions/checkout@v2 -# with: -# submodules: recursive -# -# - uses: actions/setup-node@v4 -# with: -# node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# architecture: 'x64' -# -# - uses: actions/download-artifact@v4 -# with: -# name: binding-${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# env: -# LOWEST_SUPPORTED_NODEJS_VERSION: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# -# - name: make binding folder -# run: mkdir lib/binding -# -# - name: install mocha -# run: npm install mocha -# -# - name: Install client -# #fix the convention here -# run: cp -r node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 lib/binding/node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 -# -# - name: Run lowest supported server -# run: | -# SERVER_VERSION=$(curl -s "https://registry.hub.docker.com/v2/repositories/aerospike/aerospike-server/tags?page_size=100" | jq '.results[] | select(.name | startswith("6.0")).name' -r | head -n 1) -# docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server:$SERVER_VERSION -# -# - name: Wait for database to be ready -# # Should be ready after 3 seconds -# run: sleep 3 -# -# - name: Run tests -# run: npm run test -# -# test-ee: -# runs-on: ubuntu-latest -# needs: build-ubuntu -# steps: -# - uses: actions/checkout@v2 -# with: -# submodules: recursive -# -# - uses: actions/setup-node@v4 -# with: -# node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# architecture: 'x64' -# -# - uses: actions/download-artifact@v4 -# with: -# name: binding-${{ 
env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# env: -# LOWEST_SUPPORTED_NODEJS_VERSION: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} -# -# - name: make binding folder -# run: mkdir lib/binding -# -# - name: install mocha -# run: npm install mocha -# -# - name: Install client -# #fix the convention here -# run: cp -r node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 lib/binding/node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 -# -# - uses: ./.github/actions/run-ee-server -# with: -# use-server-rc: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} -# docker-hub-username: ${{ secrets.DOCKER_HUB_BOT_USERNAME }} -# docker-hub-password: ${{ secrets.DOCKER_HUB_BOT_PW }} -# -# - name: Wait for server to start -# run: sleep 5 +# - name: Get number of tests +# run: echo "NUM_TESTS=$(npm run test-dry-run | grep -oP '\d+ (passing|pending)' | awk '{ sum += $1 } END { print sum }')" >> $GITHUB_ENV +# working-directory: test # # - name: Run tests -# run: npm run test -- --h localhost --U superuser --P superuser -# -# - name: Show logs if failed -# if: ${{ failure() }} -# run: | -# docker container logs aerospike -# cat ./configs/aerospike.conf \ No newline at end of file +# # Get number of tests since setting to 0 doesn't work properly +# # pytest-memray currently throws a ZeroDivision error due to having a bug +# # We ignore this for now +# run: python -m pytest ./new_tests --memray --memray-bin-path=./ --most-allocations=${{ env.NUM_TESTS }} || true +# working-directory: test + + # Run this when testing new server features on server release candidate + # to make sure the tests don't regress on the last server release. 
+ test-ce-latest-release: + runs-on: ubuntu-latest + if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} + needs: build-ubuntu + + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: debugging + run: | + echo "LOWEST_SUPPORTED_NODEJS_VERSION: $LOWEST_SUPPORTED_NODEJS_VERSION" + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + architecture: 'x64' + + - uses: actions/download-artifact@v4 + with: + name: binding-${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + env: + LOWEST_SUPPORTED_NODEJS_VERSION: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + + + - name: make binding folder + run: mkdir lib/binding + + - name: Install client + run: cp -r node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 lib/binding/node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 + + - name: Run Aerospike server + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + + - name: Wait for database to be ready + # Should be ready after 3 seconds + run: sleep 3 + + - name: Run tests + run: npm run test + + test-ce: + runs-on: ubuntu-latest + needs: build-ubuntu + strategy: + matrix: + node-version: [ + "18", + "20", + "21" + ] + fail-fast: false + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + architecture: 'x64' + + - uses: actions/download-artifact@v4 + with: + name: binding-${{ matrix.node-version }} + + - name: make binding folder + run: mkdir lib/binding + + - name: install mocha + run: npm install mocha + + - name: Install client + #fix the convention here + run: | + if [ "${{ matrix.node-version }}" = "18" ]; then + cp -r node-v108-linux-x64 lib/binding/node-v108-linux-x64 + elif [ "${{ matrix.node-version }}" = "20" ]; then + cp -r node-v115-linux-x64 lib/binding/node-v115-linux-x64 + elif [ "${{ matrix.node-version }}" = "21" ]; then + cp 
-r node-v120-linux-x64 lib/binding/node-v120-linux-x64 + fi + + - if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_HUB_BOT_USERNAME }} + password: ${{ secrets.DOCKER_HUB_BOT_PW }} + + - name: Run Aerospike server release candidate with latest tag + if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server-rc:latest + + - name: Run Aerospike server + if: ${{ !contains(github.event.pull_request.labels.*.name, 'new-server-features') }} + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + + - name: Wait for database to be ready + # Should be ready after 3 seconds + run: sleep 3 + + - name: Run tests + run: npm run test + + test-lowest-supported-server: + runs-on: ubuntu-latest + needs: build-ubuntu + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + architecture: 'x64' + + - uses: actions/download-artifact@v4 + with: + name: binding-${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + env: + LOWEST_SUPPORTED_NODEJS_VERSION: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + + - name: make binding folder + run: mkdir lib/binding + + - name: install mocha + run: npm install mocha + + - name: Install client + #fix the convention here + run: cp -r node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 lib/binding/node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 + + - name: Run lowest supported server + run: | + SERVER_VERSION=$(curl -s "https://registry.hub.docker.com/v2/repositories/aerospike/aerospike-server/tags?page_size=100" | jq '.results[] | select(.name | startswith("6.0")).name' -r | head -n 1) + docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server:$SERVER_VERSION + + - 
name: Wait for database to be ready + # Should be ready after 3 seconds + run: sleep 3 + + - name: Run tests + run: npm run test + + test-ee: + runs-on: ubuntu-latest + needs: build-ubuntu + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + architecture: 'x64' + + - uses: actions/download-artifact@v4 + with: + name: binding-${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + env: + LOWEST_SUPPORTED_NODEJS_VERSION: ${{ env.LOWEST_SUPPORTED_NODEJS_VERSION }} + + - name: make binding folder + run: mkdir lib/binding + + - name: install mocha + run: npm install mocha + + - name: Install client + #fix the convention here + run: cp -r node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 lib/binding/node-${{ env.LOWEST_SUPPORTED_NODE_MODULE }}-linux-x64 + + - name: Download aerospike-client-python repository + run: git clone https://github.com/aerospike/aerospike-client-python.git + + - name: phil + run: | + ls aerospike-client-python/.github + pwd + + - name: Run ee server + uses: ./aerospike-client-python/.github/actions/run-ee-server/ + with: + use-server-rc: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} + docker-hub-username: ${{ secrets.DOCKER_HUB_BOT_USERNAME }} + docker-hub-password: ${{ secrets.DOCKER_HUB_BOT_PW }} + + + - name: Wait for server to start + run: sleep 5 + + - name: Run tests + run: npm run test -- --h localhost --U admin --P admin --t 40000 test/admin.js + + test-valgrind: + runs-on: ubuntu-latest + needs: build-ubuntu + strategy: + matrix: + node-version: [ + "20", + ] + fail-fast: false + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + architecture: 'x64' + + - uses: actions/download-artifact@v4 + with: + name: binding-${{ matrix.node-version }} + + - name: make binding folder + run: mkdir 
lib/binding + + - name: install mocha + run: npm install mocha + + - name: install valgrind + run: sudo apt install valgrind + + - name: Install client + #fix the convention here + run: | + cp -r node-v115-linux-x64 lib/binding/node-v115-linux-x64 + + - if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_HUB_BOT_USERNAME }} + password: ${{ secrets.DOCKER_HUB_BOT_PW }} + + - name: Run Aerospike server release candidate with latest tag + if: ${{ contains(github.event.pull_request.labels.*.name, 'new-server-features') }} + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server-rc:latest + + - name: Run Aerospike server + if: ${{ !contains(github.event.pull_request.labels.*.name, 'new-server-features') }} + run: docker run -d --name aerospike -p 3000-3002:3000-3002 aerospike/aerospike-server + + - name: Wait for database to be ready + # Should be ready after 3 seconds + run: sleep 3 + + - name: Run tests + run: npm run valgrind -- --t 40000 + + test-typescript: + runs-on: ubuntu-latest + needs: build-ubuntu + strategy: + matrix: + node-version: [ + "20", + ] + fail-fast: false + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + architecture: 'x64' + + - uses: actions/download-artifact@v4 + with: + name: binding-${{ matrix.node-version }} + + - name: make binding folder + run: mkdir lib/binding + + - name: Install client + #fix the convention here + run: | + cp -r node-v115-linux-x64 lib/binding/node-v115-linux-x64 + + - name: Modify the package.json + run: | + mkdir my-aerospike-project + cd my-aerospike-project + npm init -y + npm install typescript ts-node --save-dev + npm install .. 
+ cp ../examples/typescript.ts + npx tsc index.ts + node index.js + + - name: Run tests + run: | + mkdir -p testDir lib/binding/openssl@3/ lib/binding/openssl@1/ + cd testDir + bun install .. \ No newline at end of file diff --git a/examples/typescript.ts b/examples/typescript.ts new file mode 100644 index 000000000..659dc635e --- /dev/null +++ b/examples/typescript.ts @@ -0,0 +1,26 @@ +import * as Aerospike from 'aerospike'; + +(async function () { + let client: Aerospike.Client | undefined; + + try { + client = await Aerospike.connect(); + const key = new Aerospike.Key('test', 'test', 'abcd'); + const bins: Record = { + name: 'Norma', + age: 31 + }; + + await client.put(key, bins); + const record = await client.get(key); + console.info('Record:', record); + await client.remove(key); + } catch (error) { + console.error('Error:', error); + process.exit(1); + } finally { + if (client) { + await client.close(); + } + } +})(); diff --git a/test/admin.js b/test/admin.js index 62f71d58d..015209ad9 100644 --- a/test/admin.js +++ b/test/admin.js @@ -460,24 +460,28 @@ context('admin commands', async function () { describe('Client#changePassword()', function () { it('Changes password for user', async function () { client.changePassword(username1, 'password350', null) + await wait(waitMs + 30000) const config = { hosts: options.host + ':' + options.port, user: username1, password: 'password350' } const dummyClient = await Aerospike.connect(config) - dummyClient.close() + return dummyClient.close() }) it('With policy', async function () { client.changePassword(username2, 'password250', policy) + await wait(waitMs + 3000) + const config = { hosts: options.host + ':' + options.port, user: username2, password: 'password250' } + console.log(config) const dummyClient = await Aerospike.connect(config) - dummyClient.close() + return dummyClient.close() }) }) diff --git a/test/exp.js b/test/exp.js index 73b773880..18f4c5960 100644 --- a/test/exp.js +++ b/test/exp.js @@ -113,6 +113,7 
@@ describe('Aerospike.exp', function () { }) describe('eq on list bin', function () { + helper.skipUnlessVersion('>= 6.3.0', this) it('evaluates to true if a list bin matches a value', async function () { const key = await createRecord({ list: [4, 2, 0] }) await orderByKey(key, 'map') diff --git a/test/exp_map.js b/test/exp_map.js index 5f69179d3..0da8d0c9d 100644 --- a/test/exp_map.js +++ b/test/exp_map.js @@ -720,7 +720,6 @@ describe('Aerospike.exp_operations', function () { ] let result = await client.operate(key, ops, {}) result = await client.get(key) - console.log(result) expect(result.bins).to.eql({ tags: { a: 'blue', c: 'yellow' } }) }) @@ -738,7 +737,6 @@ describe('Aerospike.exp_operations', function () { ] let result = await client.operate(key, ops, {}) result = await client.get(key) - console.log(result) expect(result.bins).to.eql({ tags: { a: 'blue', nested: { d: 'orange', f: 'white', g: 'black' } } }) }) }) diff --git a/test/index.js b/test/index.js index 46a0323b9..797d59c16 100644 --- a/test/index.js +++ b/test/index.js @@ -210,8 +210,8 @@ context('secondary indexes', function () { }) describe('Client#indexRemove()', async function () { - beforeEach(() => { - helper.index.create(testIndex.name, helper.set, testIndex.bin, + beforeEach(async () => { + await helper.index.create(testIndex.name, helper.set, testIndex.bin, Aerospike.indexDataType.STRING, Aerospike.indexType.DEFAULT) }) @@ -240,8 +240,8 @@ context('secondary indexes', function () { } }) - it('should return a Promise if called without callback function', function () { - return client.indexRemove(helper.namespace, testIndex.name) + it('should return a Promise if called without callback function', async function () { + return await client.indexRemove(helper.namespace, testIndex.name) }) }) }) diff --git a/test/maps.js b/test/maps.js index 03daf486f..48460a4c8 100644 --- a/test/maps.js +++ b/test/maps.js @@ -297,7 +297,6 @@ describe('client.operate() - CDT Map operations', function () { }) 
it('adds each item from the Map class to the map and returns the size of the map', function () { - console.log(maps.putItems('map', new Map([['e', 150], ['d', 100], ['c', 99]]))) return initState() .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) .then(operate(maps.putItems('map', new Map([['e', 150], ['d', 100], ['c', 99]])))) @@ -1554,7 +1553,8 @@ describe('client.operate() - CDT Map operations', function () { .then(assertResultEql({ map: ['a', 1, 'b', 2] })) .then(cleanup()) }) - context('Need Server 6.1+', function () { + + context('returnType.EXISTS', function () { helper.skipUnlessVersion('>= 6.1.0', this) it('returns true or false for a single key read', function () { return initState() @@ -1565,7 +1565,7 @@ describe('client.operate() - CDT Map operations', function () { .then(cleanup()) }) - it('returns true if any values exisst', function () { + it('returns true if any values exist', function () { return initState() .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) .then(orderByKey('map')) @@ -1575,7 +1575,7 @@ describe('client.operate() - CDT Map operations', function () { }) }) - context('Need Server 6.3+', function () { + context('returnType.ORDERED_MAP', function () { helper.skipUnlessVersion('>= 6.3.0', this) it('returns key/value for a single read', function () { return initState() @@ -1594,7 +1594,10 @@ describe('client.operate() - CDT Map operations', function () { .then(assertResultEql({ map: { a: 1, b: 2 } })) .then(cleanup()) }) + }) + context('returnType.UNORDERED_MAP', function () { + helper.skipUnlessVersion('>= 6.3.0', this) it('returns key/value for a single read', function () { return initState() .then(createRecord({ map: { a: 1, b: 2, c: 3 } })) diff --git a/test/query.js b/test/query.js index 66c878c4b..6f65a2e63 100644 --- a/test/query.js +++ b/test/query.js @@ -44,12 +44,13 @@ const MAPKEYS = Aerospike.indexType.MAPKEYS const keygen = helper.keygen const metagen = helper.metagen const putgen = helper.putgen +let samples 
describe('Queries', function () { const client = helper.client - this.timeout(40000) + const testSet = 'test/query-' + Math.floor(Math.random() * 100000) - const samples = [ + samples = [ { name: 'int match', i: 5 }, { name: 'int non-match', i: 500 }, { name: 'int list match', li: [1, 5, 9] }, @@ -82,37 +83,7 @@ describe('Queries', function () { { name: 'filter', value: 1 }, { name: 'filter', value: 2 }, { name: 'filter', value: 3 }, - { name: 'filter', value: 4 }, - - { name: 'nested int list match', li: { nested: [1, 5, 9] } }, - { name: 'nested int list non-match', li: { nested: [500, 501, 502] } }, - { name: 'nested int map match', mi: { nested: { a: 1, b: 5, c: 9 } } }, - { name: 'nested int map non-match', mi: { nested: { a: 500, b: 501, c: 502 } } }, - { name: 'nested string list match', ls: { nested: ['banana', 'blueberry'] } }, - { name: 'nested string list non-match', ls: { nested: ['tomato', 'cuccumber'] } }, - { name: 'nested string map match', ms: { nested: { a: 'banana', b: 'blueberry' } } }, - { name: 'nested string map non-match', ms: { nested: { a: 'tomato', b: 'cuccumber' } } }, - { name: 'nested string mapkeys match', mks: { nested: { banana: 1, blueberry: 2 } } }, - { name: 'nested string mapkeys non-match', mks: { nested: { tomato: 3, cuccumber: 4 } } }, - { name: 'nested point match', g: { nested: GeoJSON.Point(103.913, 1.308) } }, - { name: 'nested point non-match', g: { nested: GeoJSON.Point(-122.101, 37.421) } }, - { name: 'nested point list match', lg: { nested: [GeoJSON.Point(103.913, 1.308), GeoJSON.Point(105.913, 3.308)] } }, - { name: 'nested point list non-match', lg: { nested: [GeoJSON.Point(-122.101, 37.421), GeoJSON.Point(-120.101, 39.421)] } }, - { name: 'nested point map match', mg: { nested: { a: GeoJSON.Point(103.913, 1.308), b: GeoJSON.Point(105.913, 3.308) } } }, - { name: 'nested point map non-match', mg: { nested: { a: GeoJSON.Point(-122.101, 37.421), b: GeoJSON.Point(-120.101, 39.421) } } }, - { name: 'nested region 
match', g: { nested: GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } }, - { name: 'nested region non-match', g: { nested: GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421]) } }, - { name: 'nested region list match', lg: { nested: [GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308])] } }, - { name: 'nested region list non-match', lg: { nested: [GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] } }, - { name: 'nested region map match', mg: { nested: { a: GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } } }, - { name: 'nested region map non-match', mg: { nested: [GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] } }, - { name: 'nested aggregate', nested: { value: 10 } }, - { name: 'nested aggregate', nested: { value: 20 } }, - { name: 'nested aggregate', nested: { value: 30 } }, - { name: 'nested aggregate', nested: { doubleNested: { value: 10 } } }, - { name: 'nested aggregate', nested: { doubleNested: { value: 20 } } }, - { name: 'nested aggregate', nested: { doubleNested: { value: 30 } } } - + { name: 'filter', value: 4 } ] const indexes = [ @@ -126,19 +97,8 @@ describe('Queries', function () { ['qidxStrMapKeys', 'mks', STRING, MAPKEYS], ['qidxGeo', 'g', GEO2DSPHERE], ['qidxGeoList', 'lg', GEO2DSPHERE, LIST], - ['qidxGeoMap', 'mg', GEO2DSPHERE, MAPVALUES], - // CDT context indexes - ['qidxNameNested', 'name', STRING, MAPKEYS, new Context().addMapKey('nested')], - ['qidxIntListNested', 'li', NUMERIC, LIST, new Context().addMapKey('nested')], - ['qidxIntMapNested', 'mi', NUMERIC, MAPVALUES, new Context().addMapKey('nested')], - ['qidxStrListNested', 'ls', STRING, LIST, new 
Context().addMapKey('nested')], - ['qidxStrMapNested', 'ms', STRING, MAPVALUES, new Context().addMapKey('nested')], - ['qidxStrMapKeysNested', 'mks', STRING, MAPKEYS, new Context().addMapKey('nested')], - ['qidxGeoListNested', 'lg', GEO2DSPHERE, LIST, new Context().addMapKey('nested')], - ['qidxGeoMapNested', 'mg', GEO2DSPHERE, MAPVALUES, new Context().addMapKey('nested')], - - ['qidxAggregateMapNested', 'nested', STRING, MAPKEYS], - ['qidxAggregateMapDoubleNested', 'nested', STRING, MAPKEYS, new Context().addMapKey('doubleNested')] + ['qidxGeoMap', 'mg', GEO2DSPHERE, MAPVALUES] + ] let keys = [] @@ -191,12 +151,53 @@ describe('Queries', function () { indexes.push(['qidxBlobMapNested', 'mblob', BLOB, MAPVALUES, new Context().addMapKey('nested')]) indexes.push(['qidxBlobMapKeysNested', 'mkblob', BLOB, MAPKEYS, new Context().addMapKey('nested')]) } + + if (helper.cluster.isVersionInRange('>= 6.1.0')) { + samples.push({ name: 'nested int list match', li: { nested: [1, 5, 9] } }) + samples.push({ name: 'nested int list non-match', li: { nested: [500, 501, 502] } }) + samples.push({ name: 'nested int map match', mi: { nested: { a: 1, b: 5, c: 9 } } }) + samples.push({ name: 'nested int map non-match', mi: { nested: { a: 500, b: 501, c: 502 } } }) + samples.push({ name: 'nested string list match', ls: { nested: ['banana', 'blueberry'] } }) + samples.push({ name: 'nested string list non-match', ls: { nested: ['tomato', 'cuccumber'] } }) + samples.push({ name: 'nested string map match', ms: { nested: { a: 'banana', b: 'blueberry' } } }) + samples.push({ name: 'nested string map non-match', ms: { nested: { a: 'tomato', b: 'cuccumber' } } }) + samples.push({ name: 'nested string mapkeys match', mks: { nested: { banana: 1, blueberry: 2 } } }) + samples.push({ name: 'nested string mapkeys non-match', mks: { nested: { tomato: 3, cuccumber: 4 } } }) + samples.push({ name: 'nested point match', g: { nested: GeoJSON.Point(103.913, 1.308) } }) + samples.push({ name: 'nested point 
non-match', g: { nested: GeoJSON.Point(-122.101, 37.421) } }) + samples.push({ name: 'nested point list match', lg: { nested: [GeoJSON.Point(103.913, 1.308), GeoJSON.Point(105.913, 3.308)] } }) + samples.push({ name: 'nested point list non-match', lg: { nested: [GeoJSON.Point(-122.101, 37.421), GeoJSON.Point(-120.101, 39.421)] } }) + samples.push({ name: 'nested point map match', mg: { nested: { a: GeoJSON.Point(103.913, 1.308), b: GeoJSON.Point(105.913, 3.308) } } }) + samples.push({ name: 'nested point map non-match', mg: { nested: { a: GeoJSON.Point(-122.101, 37.421), b: GeoJSON.Point(-120.101, 39.421) } } }) + samples.push({ name: 'nested region match', g: { nested: GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } }) + samples.push({ name: 'nested region non-match', g: { nested: GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421]) } }) + samples.push({ name: 'nested region list match', lg: { nested: [GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308])] } }) + samples.push({ name: 'nested region list non-match', lg: { nested: [GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] } }) + samples.push({ name: 'nested region map match', mg: { nested: { a: GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } } }) + samples.push({ name: 'nested region map non-match', mg: { nested: [GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] } }) + samples.push({ name: 'nested aggregate', nested: { value: 10 } }) + samples.push({ name: 'nested aggregate', nested: { value: 20 } }) + samples.push({ name: 'nested aggregate', nested: { value: 30 } }) + samples.push({ name: 'nested aggregate', nested: { doubleNested: { value: 10 
} } }) + samples.push({ name: 'nested aggregate', nested: { doubleNested: { value: 20 } } }) + samples.push({ name: 'nested aggregate', nested: { doubleNested: { value: 30 } } }) + + indexes.push(['qidxNameNested', 'name', STRING, MAPKEYS, new Context().addMapKey('nested')]) + indexes.push(['qidxIntListNested', 'li', NUMERIC, LIST, new Context().addMapKey('nested')]) + indexes.push(['qidxIntMapNested', 'mi', NUMERIC, MAPVALUES, new Context().addMapKey('nested')]) + indexes.push(['qidxStrListNested', 'ls', STRING, LIST, new Context().addMapKey('nested')]) + indexes.push(['qidxStrMapNested', 'ms', STRING, MAPVALUES, new Context().addMapKey('nested')]) + indexes.push(['qidxStrMapKeysNested', 'mks', STRING, MAPKEYS, new Context().addMapKey('nested')]) + indexes.push(['qidxGeoListNested', 'lg', GEO2DSPHERE, LIST, new Context().addMapKey('nested')]) + indexes.push(['qidxGeoMapNested', 'mg', GEO2DSPHERE, MAPVALUES, new Context().addMapKey('nested')]) + indexes.push(['qidxAggregateMapNested', 'nested', STRING, MAPKEYS]) + indexes.push(['qidxAggregateMapDoubleNested', 'nested', STRING, MAPKEYS, new Context().addMapKey('doubleNested')]) + } + const numberOfSamples = samples.length - console.log("SPOT 1") return Promise.all([ putgen.put(numberOfSamples, generators) - .then((records) => { keys = records.map((rec) => rec.key) - console.log("SPOT 2") }) + .then((records) => { keys = records.map((rec) => rec.key) }) .then(() => Promise.all(indexes.map(idx => helper.index.create(idx[0], testSet, idx[1], idx[2], idx[3], idx[4])))), helper.udf.register('udf.lua') @@ -267,7 +268,7 @@ describe('Queries', function () { stream.on('error', error => { throw error }) stream.on('data', record => results.push(record.bins)) stream.on('end', () => { - expect(results.length).to.be.above(60) + expect(results.length).to.be.above(samples.length) done() }) }) @@ -286,7 +287,7 @@ describe('Queries', function () { recordTotal += recordsReceived if (recordsReceived !== maxRecs) { 
expect(query.hasNextPage()).to.equal(false) - expect(recordTotal).to.be.above(60) + expect(recordTotal).to.be.above(samples.length) break } recordsReceived = 0 @@ -309,19 +310,22 @@ describe('Queries', function () { }) }) - it('should apply a stream UDF to the nested context', function (done) { - const args = { - filters: [filter.contains('name', 'value', MAPKEYS, new Context().addMapKey('nested'))] - } - const query = client.query(helper.namespace, testSet, args) - query.setUdf('udf', 'even') - const stream = query.foreach() - const results = [] - stream.on('error', error => { throw error }) - stream.on('data', record => results.push(record.bins)) - stream.on('end', () => { - expect(results.sort()).to.eql([]) - done() + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should apply a stream UDF to the nested context', function (done) { + const args = { + filters: [filter.contains('name', 'value', MAPKEYS, new Context().addMapKey('nested'))] + } + const query = client.query(helper.namespace, testSet, args) + query.setUdf('udf', 'even') + const stream = query.foreach() + const results = [] + stream.on('error', error => { throw error }) + stream.on('data', record => results.push(record.bins)) + stream.on('end', () => { + expect(results.sort()).to.eql([]) + done() + }) }) }) @@ -416,29 +420,31 @@ describe('Queries', function () { recordsReceived = 0 } }) - - it('Paginates correctly using query.results() on an index with a cdt context', async function () { - let recordTotal = 0 - let recordsReceived = 0 - let pageTotal = 0 - const lastPage = 1 - const maxRecs = 5 - const query = client.query(helper.namespace, testSet, { paginate: true, maxRecords: maxRecs, filters: [filter.contains('nested', 'value', MAPKEYS, new Context().addMapKey('doubleNested'))] }) - let results = [] - while (1) { - results = await query.results() - recordsReceived += results.length - results = [] - pageTotal += 1 - recordTotal += recordsReceived - 
if (recordsReceived !== maxRecs) { - expect(query.hasNextPage()).to.equal(false) - expect(pageTotal).to.equal(lastPage) - expect(recordTotal).to.equal(3) - break + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('Paginates correctly using query.results() on an index with a cdt context', async function () { + let recordTotal = 0 + let recordsReceived = 0 + let pageTotal = 0 + const lastPage = 1 + const maxRecs = 5 + const query = client.query(helper.namespace, testSet, { paginate: true, maxRecords: maxRecs, filters: [filter.contains('nested', 'value', MAPKEYS, new Context().addMapKey('doubleNested'))] }) + let results = [] + while (1) { + results = await query.results() + recordsReceived += results.length + results = [] + pageTotal += 1 + recordTotal += recordsReceived + if (recordsReceived !== maxRecs) { + expect(query.hasNextPage()).to.equal(false) + expect(pageTotal).to.equal(lastPage) + expect(recordTotal).to.equal(3) + break + } + recordsReceived = 0 } - recordsReceived = 0 - } + }) }) it('Throw error when query.UDF is set and query.paginate is true', async function () { @@ -636,9 +642,12 @@ describe('Queries', function () { verifyQueryResults(args, 'int list match', done) }) - it('should match integers in a list within a range in a nested context', function (done) { - const args = { filters: [filter.range('li', 3, 7, LIST, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested int list match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match integers in a list within a range in a nested context', function (done) { + const args = { filters: [filter.range('li', 3, 7, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested int list match', done) + }) }) it('should match integers in a map within a range', function (done) { @@ -646,9 +655,12 @@ describe('Queries', function () { verifyQueryResults(args, 
'int map match', done) }) - it('should match integers in a map within a range in a nested context', function (done) { - const args = { filters: [filter.range('mi', 3, 7, MAPVALUES, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested int map match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match integers in a map within a range in a nested context', function (done) { + const args = { filters: [filter.range('mi', 3, 7, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested int map match', done) + }) }) }) @@ -658,9 +670,12 @@ describe('Queries', function () { verifyQueryResults(args, 'int list match', done) }) - it('should match lists containing an integer in a nested context', function (done) { - const args = { filters: [filter.contains('li', 5, LIST, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested int list match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match lists containing an integer in a nested context', function (done) { + const args = { filters: [filter.contains('li', 5, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested int list match', done) + }) }) it('should match maps containing an integer value', function (done) { @@ -668,9 +683,12 @@ describe('Queries', function () { verifyQueryResults(args, 'int map match', done) }) - it('should match maps containing an integer value in a nested context', function (done) { - const args = { filters: [filter.contains('mi', 5, MAPVALUES, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested int map match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match maps containing an integer value in a nested context', function (done) { + const args = { filters: [filter.contains('mi', 5, 
MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested int map match', done) + }) }) it('should match lists containing a string', function (done) { @@ -678,9 +696,12 @@ describe('Queries', function () { verifyQueryResults(args, 'string list match', done) }) - it('should match lists containing a string in a nested context', function (done) { - const args = { filters: [filter.contains('ls', 'banana', LIST, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested string list match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match lists containing a string in a nested context', function (done) { + const args = { filters: [filter.contains('ls', 'banana', LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested string list match', done) + }) }) it('should match maps containing a string value', function (done) { @@ -688,9 +709,12 @@ describe('Queries', function () { verifyQueryResults(args, 'string map match', done) }) - it('should match maps containing a string value in a nested context', function (done) { - const args = { filters: [filter.contains('ms', 'banana', MAPVALUES, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested string map match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match maps containing a string value in a nested context', function (done) { + const args = { filters: [filter.contains('ms', 'banana', MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested string map match', done) + }) }) it('should match maps containing a string key', function (done) { @@ -698,10 +722,14 @@ describe('Queries', function () { verifyQueryResults(args, 'string mapkeys match', done) }) - it('should match maps containing a string key in a nested context', function (done) { - const args = { filters: 
[filter.contains('mks', 'banana', MAPKEYS, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested string mapkeys match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match maps containing a string key in a nested context', function (done) { + const args = { filters: [filter.contains('mks', 'banana', MAPKEYS, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested string mapkeys match', done) + }) }) + context('Uses blob Secondary indexes', function () { helper.skipUnlessVersion('>= 7.0.0', this) it('should match lists containing a blob', function (done) { @@ -753,10 +781,13 @@ describe('Queries', function () { verifyQueryResults(args, 'point list match', done) }) - it('should match locations in a list within a GeoJSON region in a nested context', function (done) { - const region = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) - const args = { filters: [filter.geoWithinGeoJSONRegion('lg', region, LIST, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested point list match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match locations in a list within a GeoJSON region in a nested context', function (done) { + const region = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) + const args = { filters: [filter.geoWithinGeoJSONRegion('lg', region, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested point list match', done) + }) }) it('should match locations in a map within a GeoJSON region', function (done) { @@ -764,11 +795,13 @@ describe('Queries', function () { const args = { filters: [filter.geoWithinGeoJSONRegion('mg', region, MAPVALUES)] } verifyQueryResults(args, 'point map match', done) }) - - it('should match locations in a 
map within a GeoJSON region in a nested context', function (done) { - const region = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) - const args = { filters: [filter.geoWithinGeoJSONRegion('mg', region, MAPVALUES, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested point map match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match locations in a map within a GeoJSON region in a nested context', function (done) { + const region = new GeoJSON({ type: 'Polygon', coordinates: [[[103, 1.3], [104, 1.3], [104, 1.4], [103, 1.4], [103, 1.3]]] }) + const args = { filters: [filter.geoWithinGeoJSONRegion('mg', region, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested point map match', done) + }) }) it('accepts a plain object as GeoJSON', function (done) { @@ -789,9 +822,12 @@ describe('Queries', function () { verifyQueryResults(args, 'point list match', done) }) - it('should match locations in a list within a radius from another location in a nested context', function (done) { - const args = { filters: [filter.geoWithinRadius('lg', 103.9135, 1.3085, 15000, LIST, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested point list match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match locations in a list within a radius from another location in a nested context', function (done) { + const args = { filters: [filter.geoWithinRadius('lg', 103.9135, 1.3085, 15000, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested point list match', done) + }) }) it('should match locations in a map within a radius from another location', function (done) { @@ -799,9 +835,12 @@ describe('Queries', function () { verifyQueryResults(args, 'point map match', done) }) - it('should match locations in a 
map within a radius from another location in a nested context', function (done) { - const args = { filters: [filter.geoWithinRadius('mg', 103.9135, 1.3085, 15000, MAPVALUES, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested point map match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match locations in a map within a radius from another location in a nested context', function (done) { + const args = { filters: [filter.geoWithinRadius('mg', 103.9135, 1.3085, 15000, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested point map match', done) + }) }) }) @@ -818,10 +857,13 @@ describe('Queries', function () { verifyQueryResults(args, 'region list match', done) }) - it('should match regions in a list that contain a GeoJSON point in a nested context', function (done) { - const point = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) - const args = { filters: [filter.geoContainsGeoJSONPoint('lg', point, LIST, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested region list match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match regions in a list that contain a GeoJSON point in a nested context', function (done) { + const point = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) + const args = { filters: [filter.geoContainsGeoJSONPoint('lg', point, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested region list match', done) + }) }) it('should match regions in a map that contain a GeoJSON point', function (done) { @@ -830,10 +872,13 @@ describe('Queries', function () { verifyQueryResults(args, 'region map match', done) }) - it('should match regions in a map that contain a GeoJSON point in a nested context', function (done) { - const point = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) - 
const args = { filters: [filter.geoContainsGeoJSONPoint('mg', point, MAPVALUES, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested region map match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match regions in a map that contain a GeoJSON point in a nested context', function (done) { + const point = new GeoJSON({ type: 'Point', coordinates: [103.913, 1.308] }) + const args = { filters: [filter.geoContainsGeoJSONPoint('mg', point, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested region map match', done) + }) }) it('accepts a plain object as GeoJSON', function (done) { @@ -854,9 +899,12 @@ describe('Queries', function () { verifyQueryResults(args, 'region list match', done) }) - it('should match regions in a list that contain a lng/lat coordinate pair in a nested context', function (done) { - const args = { filters: [filter.geoContainsPoint('lg', 103.913, 1.308, LIST, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested region list match', done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match regions in a list that contain a lng/lat coordinate pair in a nested context', function (done) { + const args = { filters: [filter.geoContainsPoint('lg', 103.913, 1.308, LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested region list match', done) + }) }) it('should match regions in a map that contain a lng/lat coordinate pair', function (done) { @@ -864,9 +912,12 @@ describe('Queries', function () { verifyQueryResults(args, 'region map match', done) }) - it('should match regions in a map that contain a lng/lat coordinate pair in a nested context', function (done) { - const args = { filters: [filter.geoContainsPoint('mg', 103.913, 1.308, MAPVALUES, new Context().addMapKey('nested'))] } - verifyQueryResults(args, 'nested region map match', 
done) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should match regions in a map that contain a lng/lat coordinate pair in a nested context', function (done) { + const args = { filters: [filter.geoContainsPoint('mg', 103.913, 1.308, MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested region map match', done) + }) }) }) }) @@ -915,28 +966,33 @@ describe('Queries', function () { done() }) }) - - it('should apply a user defined function and aggregate the results from a map', function (done) { - const args = { - filters: [filter.contains('nested', 'value', MAPKEYS)] - } - const query = client.query(helper.namespace, testSet, args) - query.apply('udf', 'count', function (error, result) { - if (error) throw error - expect(result).to.equal(3) - done() + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should apply a user defined function and aggregate the results from a map', function (done) { + const args = { + filters: [filter.contains('nested', 'value', MAPKEYS)] + } + const query = client.query(helper.namespace, testSet, args) + query.apply('udf', 'count', function (error, result) { + if (error) throw error + expect(result).to.equal(3) + done() + }) }) }) - it('should apply a user defined function and aggregate the results from a nested map', function (done) { - const args = { - filters: [filter.contains('nested', 'value', MAPKEYS, new Context().addMapKey('doubleNested'))] - } - const query = client.query(helper.namespace, testSet, args) - query.apply('udf', 'count', function (error, result) { - if (error) throw error - expect(result).to.equal(3) - done() + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('should apply a user defined function and aggregate the results from a nested map', function (done) { + const args = { + filters: [filter.contains('nested', 'value', MAPKEYS, new 
Context().addMapKey('doubleNested'))] + } + const query = client.query(helper.namespace, testSet, args) + query.apply('udf', 'count', function (error, result) { + if (error) throw error + expect(result).to.equal(3) + done() + }) }) }) @@ -987,15 +1043,18 @@ describe('Queries', function () { expect(job).to.be.instanceof(Job) }) }) - it('returns a Promise that resolves to a Job with a filter containing a CDT context', function () { - const args = { - filters: [filter.contains('nested', 'value', MAPKEYS, new Context().addMapKey('doubleNested'))] - } - const query = client.query(helper.namespace, testSet, args) - return query.background('udf', 'noop') - .then(job => { - expect(job).to.be.instanceof(Job) - }) + describe('index with cdt context', function () { + helper.skipUnlessVersion('>= 6.1.0', this) + it('returns a Promise that resolves to a Job with a filter containing a CDT context', function () { + const args = { + filters: [filter.contains('nested', 'value', MAPKEYS, new Context().addMapKey('doubleNested'))] + } + const query = client.query(helper.namespace, testSet, args) + return query.background('udf', 'noop') + .then(job => { + expect(job).to.be.instanceof(Job) + }) + }) }) })