diff --git a/.github/workflows/windows-build.yml b/.github/workflows/windows-build.yml index fa37d6624..b7e526e27 100644 --- a/.github/workflows/windows-build.yml +++ b/.github/workflows/windows-build.yml @@ -37,10 +37,19 @@ jobs: - uses: actions/checkout@v3 with: submodules: recursive + - uses: actions/setup-python@v4 + with: + python-version: '3.11' - name: Setup Windows SDK uses: GuillaumeFalourd/setup-windows10-sdk-action@v1.11 with: sdk-version: 17763 + - name: Setup NuGet.exe + uses: nuget/setup-nuget@v1 + with: + nuget-version: latest + - run: nuget restore aerospike.sln + working-directory: aerospike-client-c\vs\ - name: Setup node uses: actions/setup-node@v3 with: @@ -56,10 +65,10 @@ jobs: $NodeModulesPath = $NodeDirPath + "\node_modules\npm\node_modules\@npmcli\run-script" } cd $NodeModulesPath - npm install node-gyp@9.x + npm install node-gyp@10.x - name: Install nodejs Client shell: pwsh - run: npm install --unsafe-perm --build-from-source + run: npm ci --unsafe-perm --build-from-source # - name: Test nodejs client # run: npm test # env: diff --git a/README.md b/README.md index 07d7ff7b7..a5c45f73a 100644 --- a/README.md +++ b/README.md @@ -14,12 +14,11 @@ The Aerospike Node.js client is a Node.js add-on module, written using V8. The client is compatible with Node.js 20 (Upcoming LTS) and 18 (LTS). It supports the following operating systems: - RHEL 8/9 -- Debian 10 (x86_64 architecture only) - Debian 11 - Amazon Linux 2023 - Ubuntu 20.04/22.04 (Focal Fossa, Jammy Jellyfish) - Many Linux distributions compatible with one of the above OS releases. -- macOS versions 11/12/13 are also supported. (Node.js 14 install unavailable on M1 Mac systems) +- macOS 12/13 The client is compatible with arm64, aarch64, and x86_64 architectures. @@ -72,7 +71,7 @@ record using the Aerospike database. ```js const Aerospike = require('aerospike') -// INSERT HOSTNAME AND PORT NUMBER OF AEROPSIKE SERVER NODE HERE! +// INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
const config = { hosts: '192.168.33.10:3000', } diff --git a/aerospike-client-c b/aerospike-client-c index a45aa32de..15bb8f8fd 160000 --- a/aerospike-client-c +++ b/aerospike-client-c @@ -1 +1 @@ -Subproject commit a45aa32de45c145ea0d0381b73f5df3d74648557 +Subproject commit 15bb8f8fdff24acf0d25a5668b51328f3af640bc diff --git a/aerospike-client-c.ini b/aerospike-client-c.ini index ae66a1201..bd900a48e 100644 --- a/aerospike-client-c.ini +++ b/aerospike-client-c.ini @@ -1,3 +1,3 @@ # Version number of the dependencies package for C client (Windows only) -AEROSPIKE_C_DEPS_VERSION=1.0.1 +AEROSPIKE_C_DEPS_VERSION=1.0.2 diff --git a/aerospike-client-c.sha256 b/aerospike-client-c.sha256 index 88260ba85..c103da3ff 100644 --- a/aerospike-client-c.sha256 +++ b/aerospike-client-c.sha256 @@ -1 +1 @@ -3ef446436aa0b97dc952ab5f90141f7e2c7ff8254d545edb82654e490189d80a aerospike-client-c-dependencies.1.0.1.zip +BB7AEB63571E63FD6C9CD042DC1810743DF9C8F122C91EAFB705C2E08B87733A aerospike-client-c-dependencies.1.0.2.zip diff --git a/binding.gyp b/binding.gyp index fcc379236..e9fcc6dc5 100644 --- a/binding.gyp +++ b/binding.gyp @@ -145,6 +145,8 @@ 'src/main/enums/exp_enum.cc', 'src/main/enums/batch_type.cc', 'src/main/enums/privilege_code.cc', + 'src/main/enums/exp_read_flags.cc', + 'src/main/enums/exp_write_flags.cc', 'src/main/stats.cc', 'src/main/util/conversions.cc', 'src/main/util/conversions_batch.cc', diff --git a/docker/Dockerfile.alpine b/docker/Dockerfile.alpine deleted file mode 100644 index a7856dc30..000000000 --- a/docker/Dockerfile.alpine +++ /dev/null @@ -1,47 +0,0 @@ -# This Dockerfile demonstrates how to build the Aerospike Node.js client on -# Alpine Linux. Since there is no pre-build package for the Aerospike C Client -# SDK for Alpine Linux, this Dockerfile first builds the C Client SDK from source -# (a submodule for the Node.js client), then builds the Node.js client using it. -# Stage 2 install the Node.js client into the final Docker image, to keep the size of -# that image minimal (i.e. no build dependencies). -# -# Note: The AS_NODEJS_VERSION must use version 4.0.3 and up since this is where the -# C client submodule was introduced. 
- -# Stage 1: Build Aerospike C client and Node.js client -FROM node:lts-alpine as as-node-builder -WORKDIR /src - -ENV AS_NODEJS_VERSION v5.0.1 - -RUN apk update -RUN apk add --no-cache \ - build-base \ - linux-headers \ - bash \ - libuv-dev \ - openssl-dev \ - lua5.1-dev \ - zlib-dev \ - git \ - python3 - -RUN git clone --branch ${AS_NODEJS_VERSION} --recursive https://github.com/aerospike/aerospike-client-nodejs -# TODO: build-command.sh might be broken for alpine in some versions, use latest version when it's in git -COPY build-commands.sh /src/aerospike-client-nodejs/scripts/build-commands.sh - -RUN cd /src/aerospike-client-nodejs \ - && /src/aerospike-client-nodejs/scripts/build-c-client.sh \ - && npm install /src/aerospike-client-nodejs --unsafe-perm --build-from-source - -# Stage 2: Deploy Aerospike Node.js Runtime only -FROM node:lts-alpine -WORKDIR /src - -RUN apk add --no-cache \ - zlib \ - openssl - -COPY --from=as-node-builder /src/aerospike-client-nodejs/ aerospike-client-nodejs/ - -RUN npm install /src/aerospike-client-nodejs \ No newline at end of file diff --git a/docker/Dockerfile.bullseye-slim b/docker/Dockerfile.bullseye-slim deleted file mode 100644 index 8b15169f5..000000000 --- a/docker/Dockerfile.bullseye-slim +++ /dev/null @@ -1,18 +0,0 @@ -# This Dockerfile demonstrates how to use the Aerospike Node.js client on -# bullseye-slim (Debian 11) using the pre-built package and minimal prerequirements. -# -# Note: The AS_NODEJS_VERSION must use version 4.0.3 and up since this is where -# the C client submodule was introduced. - -# Stage 1: Install Node.js Client Dependencies -FROM node:lts-bullseye-slim as installer -WORKDIR /src - -ENV AS_NODEJS_VERSION v5.0.1 - -RUN apt update -y -RUN apt install -y \ - openssl \ - zlib1g - -RUN npm install aerospike@${AS_NODEJS_VERSION} diff --git a/docker/Dockerfile.lambda b/docker/Dockerfile.lambda deleted file mode 100644 index 1eb9dbe43..000000000 --- a/docker/Dockerfile.lambda +++ /dev/null @@ -1,44 +0,0 @@ -# This Dockerfile demonstrates how to build the Aerospike Node.js client on -# Amazon Linux 2 for AWS Lambda. Since there is no pre-built package for the -# Aerospike C Client SDK for Amazon Linux 2, this Dockerfile uses a multi-stage -# approach to building the client to minimize the final image size. -# -# Note: The AS_NODEJS_VERSION must use version 4.0.3 and up since this is where -# the C client submodule was introduced. 
- -# Stage 1: Build Aerospike C Client & Node.js Client -FROM public.ecr.aws/lambda/nodejs:16 as builder -WORKDIR /src - -ENV AS_NODEJS_VERSION v5.0.1 - -RUN yum update -y -RUN yum install -y \ - gcc-c++ \ - linux-headers \ - libuv-devel \ - lua5.1-devel \ - openssl11-devel \ - zlib-devel \ - python3 \ - make \ - wget \ - tar \ - git - -RUN git clone --branch ${AS_NODEJS_VERSION} --recursive https://github.com/aerospike/aerospike-client-nodejs aerospike -RUN cd /src/aerospike \ - && /src/aerospike/scripts/build-c-client.sh \ - && npm install /src/aerospike --unsafe-perm --build-from-source - -# Stage 2: Install Node.js Client Dependencies -FROM public.ecr.aws/lambda/nodejs:16 as installer -WORKDIR /src - -COPY --from=builder /src/aerospike ./aerospike - -RUN yum update -y -RUN yum install -y \ - openssl11 \ - zlib -RUN npm install /src/aerospike \ No newline at end of file diff --git a/docker/Dockerfile.ubuntu22.04 b/docker/Dockerfile.ubuntu22.04 deleted file mode 100644 index 59a6d0872..000000000 --- a/docker/Dockerfile.ubuntu22.04 +++ /dev/null @@ -1,45 +0,0 @@ -# This Dockerfile demonstrates how to build the Aerospike Node.js client on -# Ubuntu 22.04. Since there is no pre-built package for the Aerospike C Client SDK -# for Ubuntu 22.04 yet, this Dockerfile uses a multi-stage approach to building the -# client to minimize the final image size. -# -# Note: The AS_NODEJS_VERSION must use version 4.0.3 and up since this is where -# the C client submodule was introduced. - -# Stage 1: Build Aerospike C Client & Node.js Client -FROM sitespeedio/node:ubuntu-22.04-nodejs-16.15.0 as builder -WORKDIR /src - -ENV AS_NODEJS_VERSION v5.0.1 - -RUN apt update -y -RUN apt install -y \ - libc6-dev \ - libssl-dev \ - autoconf \ - automake \ - libtool \ - g++ \ - zlib1g-dev \ - liblua5.1-0-dev \ - ncurses-dev \ - python3 \ - wget \ - git \ - make - -RUN git clone --branch ${AS_NODEJS_VERSION} --recursive https://github.com/aerospike/aerospike-client-nodejs aerospike -RUN cd /src/aerospike \ - && /src/aerospike/scripts/build-c-client.sh \ - && npm install /src/aerospike --unsafe-perm --build-from-source - -# Stage 2: Install Node.js Client Dependencies -FROM sitespeedio/node:ubuntu-22.04-nodejs-16.15.0 as installer -WORKDIR /src - -COPY --from=builder /src/aerospike ./aerospike - -RUN apt update -y -RUN apt install -y \ - zlib1g -RUN npm install /src/aerospike diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 000000000..33b48ad90 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,7 @@ +# Docker Examples + +The docker files that were previously located in this directory have been moved to +the [Nodejs-Client-Examples](https://github.com/aerospike-examples/nodejs-client-examples) directory. + +Files in this directory are not intended for production use, but rather to demonstrate the steps +necessary to install and build the client on various systems. \ No newline at end of file diff --git a/docs/overview.md b/docs/overview.md index 9c67cf178..e736bf954 100644 --- a/docs/overview.md +++ b/docs/overview.md @@ -94,7 +94,7 @@ The following is very simple example of how to write and read a record from Aero ```js const Aerospike = require('aerospike') -// INSERT HOSTNAME AND PORT NUMBER OF AEROPSIKE SERVER NODE HERE! +// INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! 
const config = { hosts: '192.168.33.10:3000', } diff --git a/lib/client.js b/lib/client.js index d2432afb3..cff231723 100644 --- a/lib/client.js +++ b/lib/client.js @@ -321,7 +321,7 @@ Client.prototype.contextFromBase64 = function (serializedContext) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -357,7 +357,7 @@ Client.prototype.changePassword = function (user, password, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -367,7 +367,7 @@ Client.prototype.changePassword = function (user, password, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -419,7 +419,7 @@ Client.prototype.createUser = function (user, password, roles, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -456,7 +456,7 @@ Client.prototype.createRole = function (roleName, privileges, policy, whitelist, * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -466,7 +466,7 @@ Client.prototype.createRole = function (roleName, privileges, policy, whitelist, * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -505,7 +505,7 @@ Client.prototype.dropRole = function (roleName, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -515,7 +515,7 @@ Client.prototype.dropRole = function (roleName, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -555,7 +555,7 @@ Client.prototype.dropUser = function (user, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -565,7 +565,7 @@ Client.prototype.dropUser = function (user, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -604,7 +604,7 @@ Client.prototype.grantPrivileges = function (roleName, privileges, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -614,7 +614,7 @@ Client.prototype.grantPrivileges = function (roleName, privileges, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', 
+ * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -655,7 +655,7 @@ Client.prototype.grantRoles = function (user, roles, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -665,7 +665,7 @@ Client.prototype.grantRoles = function (user, roles, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -701,7 +701,7 @@ Client.prototype.queryRole = async function (roleName, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -711,7 +711,7 @@ Client.prototype.queryRole = async function (roleName, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -747,7 +747,7 @@ Client.prototype.queryRoles = function (policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -757,7 +757,7 @@ Client.prototype.queryRoles = function (policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -793,7 +793,7 @@ Client.prototype.queryUser = function (user, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -803,7 +803,7 @@ Client.prototype.queryUser = function (user, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -838,7 +838,7 @@ Client.prototype.queryUsers = function (policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -848,7 +848,7 @@ Client.prototype.queryUsers = function (policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -886,7 +886,7 @@ Client.prototype.revokePrivileges = function (roleName, privileges, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -896,7 +896,7 @@ Client.prototype.revokePrivileges = function (roleName, privileges, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -936,7 +936,7 @@ 
Client.prototype.revokeRoles = function (user, roles, policy) { * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -946,7 +946,7 @@ Client.prototype.revokeRoles = function (user, roles, policy) { * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -986,7 +986,7 @@ Client.prototype.setQuotas = function (roleName, readQuota, writeQuota, policy) * * @example * - * const Aerospike = require('./lib/aerospike') + * const Aerospike = require('aerospike') * * function wait (ms) { * return new Promise(resolve => setTimeout(resolve, ms)) @@ -996,7 +996,7 @@ Client.prototype.setQuotas = function (roleName, readQuota, writeQuota, policy) * let client * try { * client = await Aerospike.connect({ - * hosts: '172.17.0.2:3000', + * hosts: '192.168.33.10:3000', * policies: { * write : new Aerospike.WritePolicy({socketTimeout : 1, totalTimeout : 1}), * }, @@ -1089,27 +1089,35 @@ Client.prototype.removeSeedHost = function (hostname, port) { * new Key('test', 'demo', 'key3') * ] * - * Aerospike.connect(config, (error, client) => { - * if (error) throw error - * client.batchExists(keys, (error, results) => { - * if (error) throw error + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(keys[0], {example: 30}) + * await client.put(keys[1], {example: 35}) + * await client.put(keys[2], {example: 40}) + * + * let results = await client.batchExists(keys) * results.forEach((result) => { - * switch (result.status) { - * case Aerospike.status.OK: - * console.log("Record found") - * break - * case Aerospike.status.ERR_RECORD_NOT_FOUND: - * console.log("Record not found") - * break - * default: - * // error while reading record - * console.log("Other error") - * break - * } + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } * }) - * client.close() - * }) - * }) + * + * // Close the connection to the server + * await client.close(); + * })(); + * * */ Client.prototype.batchExists = function (keys, policy, callback) { @@ -1157,27 +1165,34 @@ Client.prototype.batchExists = function (keys, policy, callback) { * new Key('test', 'demo', 'key3') * ] * - * Aerospike.connect(config, (error, client) => { - * if (error) throw error - * client.batchGet(keys, (error, results) => { - * if (error) throw error + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(keys[0], {example: 30}) + * await client.put(keys[1], {example: 35}) + * await client.put(keys[2], {example: 40}) + * + * let results = await client.batchGet(keys) * results.forEach((result) => { - * switch (result.status) { - * case Aerospike.status.OK: - * console.log("Record found") - * break - * case Aerospike.status.ERR_RECORD_NOT_FOUND: - * console.log("Record not found") - * break - * default: - * // error while reading record - * console.log("Other error") - * break - * } + 
* switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } * }) - * client.close() - * }) - * }) + * + * // Close the connection to the server + * await client.close(); + * })(); * */ Client.prototype.batchGet = function (keys, policy, callback) { @@ -1220,6 +1235,7 @@ Client.prototype.batchGet = function (keys, policy, callback) { * @since v2.0 * * @example + * * const Aerospike = require('aerospike') * const batchType = Aerospike.batchType * const op = Aerospike.operations @@ -1235,36 +1251,51 @@ Client.prototype.batchGet = function (keys, policy, callback) { * * var batchRecords = [ * { type: batchType.BATCH_READ, - * key: new Aerospike.Key('test', 'demo', 'key1'), bins: ['i', 's'] }, + * key: new Aerospike.Key('test', 'demo', 'key1'), bins: ['example'] }, * { type: batchType.BATCH_READ, * key: new Aerospike.Key('test', 'demo', 'key2'), readAllBins: true }, * { type: batchType.BATCH_READ, * key: new Aerospike.Key('test', 'demo', 'key3'), * ops:[ - * op.read('blob-bin') - * ]} + * op.read('example') + * ]}, + * { type: batchType.BATCH_READ, + * key: new Aerospike.Key('test', 'demo', 'key4')} * ] - * Aerospike.connect(config, (error, client) => { - * if (error) throw error - * client.batchRead(batchRecords, (error, results) => { - * if (error) throw error + * + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(batchRecords[0].key, {example: 30}) + * await client.put(batchRecords[1].key, {example: 35}) + * await client.put(batchRecords[2].key, {example: 40}) + * await client.put(batchRecords[3].key, {example: 45}) + * + * let results = await client.batchRead(batchRecords) * results.forEach((result) => { - * switch (result.status) { - * case Aerospike.status.OK: - * console.log("Record found") - * break - * case Aerospike.status.ERR_RECORD_NOT_FOUND: - * console.log("Record not found") - * break - * default: - * // error while reading record - * console.log("Other error") - * break - * } + * + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * // Since the fourth record didn't specify bins to read, + * // the fourth record will return no bins, eventhough the batchRead succeeded. 
+ * console.log(result.record.bins) + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } * }) - * client.close() - * }) - * }) + * // Close the connection to the server + * await client.close(); + * })(); */ Client.prototype.batchRead = function (records, policy, callback) { if (typeof policy === 'function') { @@ -1298,41 +1329,109 @@ Client.prototype.batchRead = function (records, policy, callback) { * * @since v5.0.0 * - * @example Apply a UDF to a batch of ten keys - * - * const Aerospike = require('aerospike'); - * const batchType = Aerospike.batchType; + * @example * - * // Define host configuration - * let config = {hosts: '127.0.0.1:3000'}; + * const Aerospike = require('aerospike') + * const batchType = Aerospike.batchType + * const Key = Aerospike.Key + * const op = Aerospike.operations * - * // Create batch of keys - * let keys = []; - * for(i = 0; i < 10; i++){ - * keys.push(new Aerospike.Key('sandbox', 'ufodata', i + 1)); + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed. + * policies: { + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), + * } * } * + * const batchRecords = [ + * { + * type: batchType.BATCH_REMOVE, + * key: new Key("test", "demo", 'key1') + * }, + * { + * type: batchType.BATCH_WRITE, + * key: new Key("test", "demo", 'key2'), + * ops: [ + * op.write('example', 30), + * op.write('blob', Buffer.from('foo')) + * ], + * policy: new Aerospike.BatchWritePolicy({ + * exists: Aerospike.policy.exists.IGNORE + * }) + * }, + * { + * type: batchType.BATCH_WRITE, + * key: new Key("test", "demo", 'key3'), + * ops: [ + * op.write('example', 35), + * op.write('blob', Buffer.from('bar')) + * ], + * policy: new Aerospike.BatchWritePolicy({ + * exists: Aerospike.policy.exists.IGNORE + * }) + * } + * ] + * + * const batchReadRecords = [ + * { + * type: batchType.BATCH_READ, + * key: new Key("test", "demo", 'key1'), + * readAllBins: true + * }, + * { + * type: batchType.BATCH_READ, + * key: new Key("test", "demo", 'key2'), + * readAllBins: true + * }, + * { + * type: batchType.BATCH_READ, + * key: new Key("test", "demo", 'key3'), + * readAllBins: true + * } + * ] + * * ;(async () => { * // Establishes a connection to the server * let client = await Aerospike.connect(config); * - * // Execute the UDF - * let batchResult = await client.batchApply(batchRecords, - * { - * module: 'example', - * funcname: 'getDaysBetween', - * args: ['occurred', 'posted'] - * } - * ); + * // Place a record for demonstration + * await client.put(new Key("test", "demo", 'key1'), {example: 30, user: 'Doug', extra: 'unused'}) * - * // Access the records - * batchResult.forEach(result => { - * // Do something - * console.info("%o days between occurrence and post", result.record.bins.SUCCESS); - * }); + * let results = await client.batchWrite(batchRecords) + * results.forEach((result) => { + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) * + * results = await client.batchWrite(batchRecords) + * results.forEach((result) => { + * switch 
(result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } + * }) * // Close the connection to the server - * client.close(); + * await client.close(); * })(); */ Client.prototype.batchWrite = function (records, policy, callback) { @@ -1367,49 +1466,66 @@ Client.prototype.batchWrite = function (records, policy, callback) { * * @since v5.0.0 * - * @example + * @example Simple batchApply example + * * const Aerospike = require('aerospike') - * const batchType = Aerospike.batchType; + * var path = require('path'); * * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! * const config = { * hosts: '192.168.33.10:3000', * // Timeouts disabled, latency dependent on server location. Configure as needed. * policies: { - * batchApply : new Aerospike.BatchApplyPolicy({socketTimeout : 0, totalTimeout : 0}), + * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), * } * } * - * // Create batch of keys - * let keys = []; - * for(i = 0; i < 10; i++){ - * keys.push(new Aerospike.Key('sandbox', 'ufodata', i + 1)); - * } + * // This must be a path to a UDF file + * const scriptLocation = path.join(__dirname, 'udf-list.lua') * * ;(async () => { * // Establishes a connection to the server * let client = await Aerospike.connect(config); * + * // Place some records for demonstration + * await client.put(new Aerospike.Key('test', 'demo', 'key1'), {example: 30}) + * await client.put(new Aerospike.Key('test', 'demo', 'key2'), {example: 35}) + * await client.udfRegister(scriptLocation) + * * // Execute the UDF - * let batchResult = await client.batchApply(batchRecords, + * let batchResult = await client.batchApply([new Aerospike.Key('test', 'demo', 'key1'), new Aerospike.Key('test', 'demo', 'key2')], * { - * module: 'example', - * funcname: 'getDaysBetween', - * args: ['occurred', 'posted'] + * module: 'udf-list', + * funcname: 'updateRecord', + * args: ['example', 45] * } * ); * * // Access the records * batchResult.forEach(result => { * // Do something - * console.info("%o days between occurrence and post", result.record.bins.SUCCESS); + * console.info("New value of example bin is %o \n", result.record.bins.SUCCESS); * }); * + * //Additional verfication + * let result = await client.get(new Aerospike.Key('test', 'demo', 'key1')) + * console.log(result.bins) // { example: 45 } + * result = await client.get(new Aerospike.Key('test', 'demo', 'key2')) + * console.log(result.bins) // { example: 45 } + * * // Close the connection to the server - * client.close(); - * })(); + * await client.close(); + * })(); * + * + * @example Simple lua script to be used in example above + * + * function updateRecord(rec, binName, binValue) + * rec[binName] = binValue + * aerospike:update(rec) + * return binValue + * end */ -Client.prototype.batchApply = function (records, udf, batchPolicy, batchApplyPolicy, callback) { +Client.prototype.batchApply = function (keys, udf, batchPolicy, batchApplyPolicy, callback) { if (typeof batchPolicy === 'function') { callback = batchPolicy batchPolicy = null @@ -1421,7 +1537,7 @@ Client.prototype.batchApply = function (records, udf, batchPolicy, batchApplyPol } } - const cmd = new Commands.BatchApply(this, [records, udf, batchPolicy, batchApplyPolicy], callback) + const cmd = new Commands.BatchApply(this, [keys, udf, batchPolicy, batchApplyPolicy], 
callback) return cmd.execute() } @@ -1447,6 +1563,7 @@ Client.prototype.batchApply = function (records, udf, batchPolicy, batchApplyPol * @since v5.0.0 * * @example + * * const Aerospike = require('aerospike') * const batchType = Aerospike.batchType * const exp = Aerospike.exp @@ -1459,52 +1576,42 @@ Client.prototype.batchApply = function (records, udf, batchPolicy, batchApplyPol * batch : new Aerospike.BatchPolicy({socketTimeout : 0, totalTimeout : 0}), * } * } - * var batchRecords = [ - * { type: batchType.BATCH_REMOVE, - * key: new Aerospike.Key('test', 'demo', 'key5'), - * policy: new Aerospike.BatchRemovePolicy({ - * filterExpression: exp.eq(exp.binInt('i'), exp.int(37)), - * key: Aerospike.policy.key.SEND, - * commitLevel: Aerospike.policy.commitLevel.ALL, - * gen: Aerospike.policy.gen.EQ, - * durableDelete: true - * }), - * }, - * { type: batchType.BATCH_REMOVE, - * key: new Aerospike.Key('test', 'demo', 'key6'), - * policy: new Aerospike.BatchRemovePolicy({ - * filterExpression: exp.eq(exp.binInt('i'), exp.int(37)), - * key: Aerospike.policy.key.SEND, - * commitLevel: Aerospike.policy.commitLevel.ALL, - * gen: Aerospike.policy.gen.EQ, - * durableDelete: true - * }), - * } + * + * var keys = [ + * new Aerospike.Key('test', 'demo', 'key1'), + * new Aerospike.Key('test', 'demo', 'key2'), + * new Aerospike.Key('test', 'demo', 'key3') * ] * - * Aerospike.connect(config, (error, client) => { - * if (error) throw error - * client.batchRemove(batchRecords, (error, results) => { - * if (error) throw error + * + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(keys[0], {example: 30}) + * await client.put(keys[1], {example: 35}) + * + * let results = await client.batchRemove(keys) * results.forEach((result) => { - * switch (result.status) { - * case Aerospike.status.OK: - * console.log("Record found") - * break - * case Aerospike.status.ERR_RECORD_NOT_FOUND: - * console.log("Record not found") - * break - * default: - * // error while reading record - * console.log("Other error") - * break - * } + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record deleted") + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } * }) - * client.close() - * }) - * }) + * // Close the connection to the server + * await client.close(); + * })(); */ -Client.prototype.batchRemove = function (records, batchPolicy, batchRemovePolicy, callback) { +Client.prototype.batchRemove = function (keys, batchPolicy, batchRemovePolicy, callback) { if (typeof batchPolicy === 'function') { callback = batchPolicy batchPolicy = null @@ -1516,7 +1623,7 @@ Client.prototype.batchRemove = function (records, batchPolicy, batchRemovePolicy } } - const cmd = new Commands.BatchRemove(this, [records, batchPolicy, batchRemovePolicy], callback) + const cmd = new Commands.BatchRemove(this, [keys, batchPolicy, batchRemovePolicy], callback) return cmd.execute() } @@ -1537,8 +1644,10 @@ Client.prototype.batchRemove = function (records, batchPolicy, batchRemovePolicy * @deprecated since v2.0 - use {@link Client#batchRead} instead. 
* * @example + * + * const Aerospike = require('aerospike') - * const Key = Aerospike.Key + * const batchType = Aerospike.batchType + * const exp = Aerospike.exp * * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! * var config = { @@ -1550,34 +1659,42 @@ Client.prototype.batchRemove = function (records, batchPolicy, batchRemovePolicy * } * * var keys = [ - * new Key('test', 'demo', 'key1'), - * new Key('test', 'demo', 'key2'), - * new Key('test', 'demo', 'key3') + * new Aerospike.Key('test', 'demo', 'key1'), + * new Aerospike.Key('test', 'demo', 'key2'), + * new Aerospike.Key('test', 'demo', 'key3') * ] * - * var bins = ['s', 'i'] + * var bins = ['example', 'user'] * - * Aerospike.connect(config, (error, client) => { - * if (error) throw error - * client.batchSelect(keys, bins, (error, results) => { - * if (error) throw error + * ;(async () => { + * // Establishes a connection to the server + * let client = await Aerospike.connect(config); + * + * // Place some records for demonstration + * await client.put(keys[0], {example: 30, user: 'Doug', extra: 'unused'}) + * await client.put(keys[1], {example: 35}) + * + * let results = await client.batchSelect(keys, bins) * results.forEach((result) => { - * switch (result.status) { - * case Aerospike.status.OK: - * console.log("Record found") - * break - * case Aerospike.status.ERR_RECORD_NOT_FOUND: - * console.log("Record not found") - * break - * default: - * // error while reading record - * console.log("Other error") - * break - * } + * switch (result.status) { + * case Aerospike.status.OK: + * console.log("Record found") + * // Only the selected bins ('example' and 'user') are returned, + * // even though the record may contain additional bins. + * console.log(result.record.bins) + * break + * case Aerospike.status.ERR_RECORD_NOT_FOUND: + * console.log("Record not found") + * break + * default: + * // error while reading record + * console.log("Other error") + * break + * } * }) - * client.close() - * }) - * }) + * // Close the connection to the server + * await client.close(); + * })(); */ Client.prototype.batchSelect = function (keys, bins, policy, callback) { if (typeof policy === 'function') { @@ -1945,7 +2062,7 @@ Client.prototype.createStringIndex = function (options, policy, callback) { /** * @function Client#createGeo2DSphereIndex * - * @summary Creates a secondary, geospatial index. + * @summary Creates a geospatial secondary index. * * @description This is a short-hand for calling {@link Client#createIndex} * with the datatype option set to Aerospike.indexDataType.GEO2DSPHERE. @@ -1997,6 +2114,61 @@ Client.prototype.createGeo2DSphereIndex = function (options, policy, callback) { return this.createIndex(options, policy, callback) } +/** + * @function Client#createBlobIndex + * + * @summary Creates a blob secondary index. + * + * @description This is a short-hand for calling {@link Client#createIndex} + * with the datatype option set to Aerospike.indexDataType.BLOB. + * + * @param {Object} options - Options for creating the index. + * @param {string} options.ns - The namespace on which the index is to be created. + * @param {string} options.set - The set on which the index is to be created. + * @param {string} options.bin - The name of the bin which values are to be indexed. + * @param {string} options.index - The name of the index to be created. + * @param {module:aerospike.indexType} [options.type] - Type of index to be + * created based on the type of values stored in the bin.
This option needs to + * be specified if the bin to be indexed contains list or map values and the + * individual entries of the list or keys/values of the map should be indexed. + * @param {InfoPolicy} [policy] - The Info Policy to use for this operation. + * @param {jobCallback} [callback] - The function to call when the operation completes. + * + * @returns {?Promise} - If no callback function is passed, the function + * returns a Promise that will resolve to an {@link IndexJob} instance. + * + * @see {@link Client#indexCreate} + * + * @example + * + * const Aerospike = require('aerospike') + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * } + * + * Aerospike.connect(config, (error, client) => { + * if (error) throw error + * + * var binName = 'location' + * var indexName = 'locationIndex' + * var options = { ns: 'test', + * set: 'demo', + * bin: binName, + * index: indexName } + * + * client.createBlobIndex(options, function (error) { + * if (error) throw error + * console.info('SI %s on %s was created successfully', indexName, binName) + * client.close() + * }) + * }) + */ +Client.prototype.createBlobIndex = function (options, policy, callback) { + options.datatype = as.indexDataType.BLOB + return this.createIndex(options, policy, callback) +} + /** * @function Client#apply * diff --git a/lib/config.js b/lib/config.js index 12afacf37..042202b31 100644 --- a/lib/config.js +++ b/lib/config.js @@ -400,6 +400,42 @@ class Config { this.maxConnsPerNode = config.maxConnsPerNode } + /** + * @name Config#maxErrorRate + * + * @summary Maximum number of errors allowed per node per error_rate_window before backoff algorithm returns + * AEROSPIKE_MAX_ERROR_RATE for database commands to that node. If max_error_rate is zero, there is no error limit. + * The counted error types are any error that causes the connection to close (socket errors and client timeouts), + * server device overload and server timeouts. + * + * The application should backoff or reduce the transaction load until AEROSPIKE_MAX_ERROR_RATE stops being returned. + * + * @description If the backoff algorithm has been activated, transactions will fail with {@link + * module:aerospike/status.AEROSPIKE_MAX_ERROR_RATE|AEROSPIKE_MAX_ERROR_RATE} until the {@link Config#errorRateWindow} has passed and the + * error count has been reset. + * + * @type {number} + * + * @default 100 + */ + if (Number.isInteger(config.maxErrorRate)) { + this.maxErrorRate = config.maxErrorRate + } + + /** + * @name Config#errorRateWindow + * + * @summary The number of cluster tend iterations that defines the window for {@link Config#maxErrorRate} to be surpassed. One tend iteration is defined + * as {@link Config#tendInterval} plus the time to tend all nodes. At the end of the window, the error count is reset to zero and backoff state is removed on all nodes. + * + * @type {number} + * + * @default 1 + */ + if (Number.isInteger(config.errorRateWindow)) { + this.errorRateWindow = config.errorRateWindow + } + /** * @name Config#minConnsPerNode * diff --git a/lib/exp.js b/lib/exp.js index b6a853719..933f97b23 100644 --- a/lib/exp.js +++ b/lib/exp.js @@ -18,6 +18,8 @@ const as = require('bindings')('aerospike.node') const exp = as.exp +const writeFlags = as.expWriteFlags +const readFlags = as.expReadFlags const BIN_TYPE_UNDEF = 0 /** @@ -430,6 +432,8 @@ exports.setName = _metaExp(exp.ops.SET_NAME) * Create expression that returns record size on disk. 
If server storage-engine is * memory, then zero is returned. This expression usually evaluates quickly * because record meta data is cached in memory. + * Requires server version between 5.3.0 inclusive and 7.0 exclusive. + * Use {@link #recordSize} for server version 7.0+. * * @function * @return {AerospikeExp} integer value Uncompressed storage size of the record. @@ -489,13 +493,25 @@ exports.isTombstone = _metaExp(exp.ops.IS_TOMBSTONE) * storage-engine is memory or data-in-memory is true, otherwise returns 0. * This expression usually evaluates quickly because record meta data is cached * in memory. - * Requires server version 5.3.0+. + * Requires server version between 5.3.0 inclusive and 7.0 exclusive. + * Use {@link #recordSize} for server version 7.0+. * * @function * @return {AerospikeExp} integer value memory size of the record. */ exports.memorySize = _metaExp(exp.ops.MEMORY_SIZE) +/** + * Create expression that returns the record size. This expression usually evaluates + * quickly because record meta data is cached in memory. + * Requires server version 7.0+. This expression replaces {@link #deviceSize} and + * {@link #memorySize} since those older expressions are equivalent on server version 7.0+. + * + * @function + * @return {AerospikeExp} integer value size of the record in bytes. + */ +exports.recordSize = _metaExp(exp.ops.RECORD_SIZE) + /** * Create expression that returns record digest modulo as integer. * @@ -1010,3 +1026,67 @@ exports.bit = require('./exp_bit') * @summary HyperLogLog expressions. */ exports.hll = require('./exp_hll') + +/** + * + * @readonly + * @enum {number} + * @description Expression read bit flags. Use BITWISE OR to combine flags. + */ +exports.expReadFlags = { + /** + * Default. + * @const {number} + */ + DEFAULT: readFlags.DEFAULT, + + /** + * Ignore failures caused by the expression resolving to unknown or a non-bin type. + * @const {number} + */ + EVAL_NO_FAIL: readFlags.EVAL_NO_FAIL +} + +/** + * + * @readonly + * @enum {number} + * @description Expression write bit flags. Use BITWISE OR to combine flags. + */ +exports.expWriteFlags = { + /** + * Default. + * @const {number} + */ + DEFAULT: writeFlags.DEFAULT, + + /** + * If bin does not exist, a new bin will be created. + * @const {number} + */ + CREATE_ONLY: writeFlags.CREATE_ONLY, + + /** + * If bin exists, the bin will be overwritten. + * @const {number} + */ + UPDATE_ONLY: writeFlags.UPDATE_ONLY, + + /** + * If expression results in nil value, then delete the bin. + * @const {number} + */ + ALLOW_DELETE: writeFlags.ALLOW_DELETE, + + /** + * Do not raise error if operation is denied. + * @const {number} + */ + POLICY_NO_FAIL: writeFlags.POLICY_NO_FAIL, + + /** + * Ignore failures caused by the expression resolving to unknown or a non-bin type. + * @const {number} + */ + EVAL_NO_FAIL: writeFlags.EVAL_NO_FAIL +} diff --git a/lib/exp_operations.js b/lib/exp_operations.js index 5ab142db7..c0c88bc93 100644 --- a/lib/exp_operations.js +++ b/lib/exp_operations.js @@ -48,7 +48,7 @@ * op.incr('b', 10), * exp.operations.read(tempBin, * exp.add(exp.binInt('b'), exp.binInt('b')), - * 0), + * exp.expReadFlags.DEFAULT), * op.read('a'), * op.read('b') * ] @@ -96,6 +96,9 @@ exports.ExpOperation = ExpOperation * @summary Read the value of the bin. * * @param {string} bin - The name of the bin. + * @param {string} name - The name of the bin in which to store the expression result + * @param {AerospikeExp} exp - The expression to evaluate + * @param {number} flags - Expression read flags. flags must be an integer.
See {@link exp.expReadFlags} for more information. * @returns {Operation} Operation that can be passed to the {@link Client#operate} command. */ exports.read = function (bin, exp, flags) { @@ -107,6 +110,9 @@ exports.read = function (bin, exp, flags) { * * @param {string} bin - The name of the bin. * @param {any} value - The value to set the bin to. + * @param {string} binName - The name of the bin in which the expression result is stored. This name can be used as the bin name when retrieving bin results from the record. + * @param {AerospikeExp} exp - The expression to evaluate + * @param {number} flags - Expression write flags. flags must be an integer. See {@link exp.expWriteFlags} for more information. * @returns {Operation} Operation that can be passed to the {@link Client#operate} command. */ exports.write = function (bin, exp, flags) { diff --git a/lib/filter.js b/lib/filter.js index 0c820786e..3cfd362ee 100644 --- a/lib/filter.js +++ b/lib/filter.js @@ -110,6 +110,10 @@ function dataTypeOf (value) { case 'double': return as.indexDataType.NUMERIC default: + if (Buffer.isBuffer(value)) { + return as.indexDataType.BLOB + } + throw new TypeError('Unknown data type for filter value.') } } diff --git a/lib/maps.js b/lib/maps.js index 0b9c40987..49dec454d 100644 --- a/lib/maps.js +++ b/lib/maps.js @@ -1275,3 +1275,54 @@ exports.getByRankRange = function (bin, rank, count, returnType) { op.returnType = returnType return op } + +/** + * @summary Creates a map create operation. + * + * + * @param {string} bin - bin name. + * @param {number} order - map order. + * @param {boolean} persistIndex - if true, persist map index. A map index improves lookup performance, but requires more storage. + * A map index can be created for a top-level ordered map only. Nested and unordered map indexes are not supported. + * @param {CdtContext} [ctx] - optional path to nested map. If not defined, the top-level map is used. + * + * @returns {Object} Operation that can be passed to the {@link Client#operate} command. + * + * @example + * + * const Aerospike = require('aerospike') + * const maps = Aerospike.maps + * const key = new Aerospike.Key('test', 'demo', 'mapKey') + * + * // INSERT HOSTNAME AND PORT NUMBER OF AEROSPIKE SERVER NODE HERE! + * var config = { + * hosts: '192.168.33.10:3000', + * // Timeouts disabled, latency dependent on server location. Configure as needed.
+ * policies: { + * operate : new Aerospike.OperatePolicy({socketTimeout : 0, totalTimeout : 0}) + * } + * } + * + * Aerospike.connect(config).then(async client => { + * let ops = [ + * maps.create('map', maps.order.KEY_ORDERED, true) + * ] + * let result = await client.operate(key, ops) + * console.log(result.bins) // => { map: null } + * let record = await client.get(key) + * console.log(record.bins) // => { map: {} } + * + * await client.remove(key) + * client.close() + * }) + */ +exports.create = function (bin, order, persistIndex = false, ctx) { + const op = new MapOperation(opcodes.MAP_CREATE, bin) + op.order = order + op.persistIndex = persistIndex + if (ctx === undefined) { + return op + } + + return op.withContext(ctx) +} diff --git a/package-lock.json b/package-lock.json index 42991f5e4..ccd1f906d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "aerospike", - "version": "5.8.0", + "version": "5.9.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "aerospike", - "version": "5.8.0", + "version": "5.9.0", "cpu": [ "x64", "arm64" @@ -83,18 +83,18 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.20.tgz", - "integrity": "sha512-BQYjKbpXjoXwFW5jGqiizJQQT/aC7pFm9Ok1OWssonuguICi264lbgMzRp2ZMmRSlfkX6DsWDDcsrctK8Rwfiw==", + "version": "7.23.2", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.2.tgz", + "integrity": "sha512-0S9TQMmDHlqAZ2ITT95irXKfxN9bncq8ZCoJhun3nHL/lLUxd2NKBJYoNGWH7S0hz6fRQwWlAWn/ILM0C70KZQ==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.0.tgz", - "integrity": "sha512-97z/ju/Jy1rZmDxybphrBuI+jtJjFVoz7Mr9yUQVVVi+DNZE333uFQeMOqcCIy1x3WYBIbWftUSLmbNXNT7qFQ==", + "version": "7.23.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.2.tgz", + "integrity": "sha512-n7s51eWdaWZ3vGT2tD4T7J6eJs3QoBXydv7vkUM06Bf1cbVD2Kc2UrkzhiQwobfV7NwOnQXYL7UBJ5VPU+RGoQ==", "dev": true, "dependencies": { "@ampproject/remapping": "^2.2.0", @@ -102,10 +102,10 @@ "@babel/generator": "^7.23.0", "@babel/helper-compilation-targets": "^7.22.15", "@babel/helper-module-transforms": "^7.23.0", - "@babel/helpers": "^7.23.0", + "@babel/helpers": "^7.23.2", "@babel/parser": "^7.23.0", "@babel/template": "^7.22.15", - "@babel/traverse": "^7.23.0", + "@babel/traverse": "^7.23.2", "@babel/types": "^7.23.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", @@ -308,13 +308,13 @@ } }, "node_modules/@babel/helpers": { - "version": "7.23.1", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.1.tgz", - "integrity": "sha512-chNpneuK18yW5Oxsr+t553UZzzAs3aZnFm4bxhebsNTeshrC95yA7l5yl7GBAG+JG1rF0F7zzD2EixK9mWSDoA==", + "version": "7.23.2", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.2.tgz", + "integrity": "sha512-lzchcp8SjTSVe/fPmLwtWVBFC7+Tbn8LGHDVfDp9JGxpAY5opSaEFgt8UQvrnECWOTdji2mOWMz1rOhkHscmGQ==", "dev": true, "dependencies": { "@babel/template": "^7.22.15", - "@babel/traverse": "^7.23.0", + "@babel/traverse": "^7.23.2", "@babel/types": "^7.23.0" }, "engines": { @@ -362,9 +362,9 @@ } }, "node_modules/@babel/traverse": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.0.tgz", - "integrity": "sha512-t/QaEvyIoIkwzpiZ7aoSKK8kObQYeF7T2v+dazAYCb8SXtp58zEVkWW7zAnju8FNKNdr4ScAOEDmMItbyOmEYw==", + "version": 
"7.23.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz", + "integrity": "sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==", "dev": true, "dependencies": { "@babel/code-frame": "^7.22.13", @@ -553,9 +553,9 @@ "dev": true }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.19", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz", - "integrity": "sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==", + "version": "0.3.20", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz", + "integrity": "sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==", "dev": true, "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", @@ -970,9 +970,9 @@ "dev": true }, "node_modules/browserslist": { - "version": "4.21.11", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.11.tgz", - "integrity": "sha512-xn1UXOKUz7DjdGlg9RrUr0GGiWzI97UQJnugHtH0OLDfJB7jMgoIkYvRIEO1l9EeEERVqeqLYOcFBW9ldjypbQ==", + "version": "4.22.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.1.tgz", + "integrity": "sha512-FEVc202+2iuClEhZhrWy6ZiAcRLvNMyYcxZ8raemul1DYVOVdFsbqckWLdsixQZCpJlwe77Z3UTalE7jsjnKfQ==", "dev": true, "funding": [ { @@ -989,8 +989,8 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001538", - "electron-to-chromium": "^1.4.526", + "caniuse-lite": "^1.0.30001541", + "electron-to-chromium": "^1.4.535", "node-releases": "^2.0.13", "update-browserslist-db": "^1.0.13" }, @@ -1045,13 +1045,14 @@ } }, "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", + "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.1", + "set-function-length": "^1.1.1" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -1076,9 +1077,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001539", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001539.tgz", - "integrity": "sha512-hfS5tE8bnNiNvEOEkm8HElUHroYwlqMMENEzELymy77+tJ6m+gA2krtHl5hxJaj71OlpC2cHZbdSMX1/YEqEkA==", + "version": "1.0.30001561", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001561.tgz", + "integrity": "sha512-NTt0DNoKe958Q0BE0j0c1V9jbUzhBxHIEJy7asmGrpE0yG63KTV7PLHPnK2E1O9RsQrQ081I3NLuXGS6zht3cw==", "dev": true, "funding": [ { @@ -1096,18 +1097,18 @@ ] }, "node_modules/chai": { - "version": "4.3.8", - "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.8.tgz", - "integrity": "sha512-vX4YvVVtxlfSZ2VecZgFUTU5qPCYsobVI2O9FmwEXBhDigYGQA6jRXCycIs1yJnnWbZ6/+a2zNIF5DfVCcJBFQ==", + "version": "4.3.10", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.10.tgz", + "integrity": "sha512-0UXG04VuVbruMUYbJ6JctvH0YnC/4q3/AkT18q4NaITo91CUm0liMS9VqzT9vZhVQ/1eqPanMWjBM+Juhfb/9g==", "dev": true, "dependencies": { "assertion-error": "^1.1.0", - "check-error": "^1.0.2", - "deep-eql": "^4.1.2", - "get-func-name": "^2.0.0", - 
"loupe": "^2.3.1", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": "^2.0.2", + "loupe": "^2.3.6", "pathval": "^1.1.1", - "type-detect": "^4.0.5" + "type-detect": "^4.0.8" }, "engines": { "node": ">=4" @@ -1128,10 +1129,13 @@ } }, "node_modules/check-error": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", - "integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", "dev": true, + "dependencies": { + "get-func-name": "^2.0.2" + }, "engines": { "node": "*" } @@ -1364,9 +1368,9 @@ } }, "node_modules/define-data-property": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.0.tgz", - "integrity": "sha512-UzGwzcjyv3OtAvolTj1GoyNYzfFR+iqbGjcnBEENZVCpM4/Ng1yhGNvS3lR/xDS74Tb2wGG9WzNSNIOS9UVb2g==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", + "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", "dev": true, "dependencies": { "get-intrinsic": "^1.2.1", @@ -1438,9 +1442,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.530", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.530.tgz", - "integrity": "sha512-rsJ9O8SCI4etS8TBsXuRfHa2eZReJhnGf5MHZd3Vo05PukWHKXhk3VQGbHHnDLa8nZz9woPCpLCMQpLGgkGNRA==", + "version": "1.4.578", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.578.tgz", + "integrity": "sha512-V0ZhSu1BQZKfG0yNEL6Dadzik8E1vAzfpVOapdSiT9F6yapEJ3Bk+4tZ4SMPdWiUchCgnM/ByYtBzp5ntzDMIA==", "dev": true }, "node_modules/emoji-regex": { @@ -1493,26 +1497,26 @@ } }, "node_modules/es-abstract": { - "version": "1.22.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.2.tgz", - "integrity": "sha512-YoxfFcDmhjOgWPWsV13+2RNjq1F6UQnfs+8TftwNqtzlmFzEXvlUwdrNrYeaizfjQzRMxkZ6ElWMOJIFKdVqwA==", + "version": "1.22.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", + "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", "dev": true, "dependencies": { "array-buffer-byte-length": "^1.0.0", "arraybuffer.prototype.slice": "^1.0.2", "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "call-bind": "^1.0.5", "es-set-tostringtag": "^2.0.1", "es-to-primitive": "^1.2.1", "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.1", + "get-intrinsic": "^1.2.2", "get-symbol-description": "^1.0.0", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has": "^1.0.3", "has-property-descriptors": "^1.0.0", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", + "hasown": "^2.0.0", "internal-slot": "^1.0.5", "is-array-buffer": "^3.0.2", "is-callable": "^1.2.7", @@ -1522,7 +1526,7 @@ "is-string": "^1.0.7", "is-typed-array": "^1.1.12", "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", + "object-inspect": "^1.13.1", "object-keys": "^1.1.1", "object.assign": "^4.1.4", "regexp.prototype.flags": "^1.5.1", @@ -1536,7 +1540,7 @@ "typed-array-byte-offset": "^1.0.0", "typed-array-length": "^1.0.4", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.11" + "which-typed-array": "^1.1.13" 
}, "engines": { "node": ">= 0.4" @@ -1546,26 +1550,26 @@ } }, "node_modules/es-set-tostringtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", + "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", "dev": true, "dependencies": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" + "get-intrinsic": "^1.2.2", + "has-tostringtag": "^1.0.0", + "hasown": "^2.0.0" }, "engines": { "node": ">= 0.4" } }, "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", + "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", "dev": true, "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.0" } }, "node_modules/es-to-primitive": { @@ -1991,12 +1995,12 @@ } }, "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", "dev": true, "dependencies": { - "is-core-module": "^2.9.0", + "is-core-module": "^2.13.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, @@ -2374,12 +2378,12 @@ } }, "node_modules/flat-cache": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.1.0.tgz", - "integrity": "sha512-OHx4Qwrrt0E4jEIcI5/Xb+f+QmJYNj2rrK8wiIdQOIrB9WrrJL8cjZvXdXuBTkkEwEqLycb5BeZDV1o2i9bTew==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.1.1.tgz", + "integrity": "sha512-/qM2b3LUIaIgviBQovTLvijfyOQXPtSRnRK26ksj2J7rzPIecePUIpJsZ4T02Qg+xiAEKIs5K8dsHEd+VaKa/Q==", "dev": true, "dependencies": { - "flatted": "^3.2.7", + "flatted": "^3.2.9", "keyv": "^4.5.3", "rimraf": "^3.0.2" }, @@ -2510,10 +2514,13 @@ } }, "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/function.prototype.name": { "version": "1.1.6", @@ -2595,15 +2602,15 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": 
"sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", + "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", + "function-bind": "^1.1.2", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2728,13 +2735,10 @@ } }, "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.4.tgz", + "integrity": "sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ==", "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - }, "engines": { "node": ">= 0.4.0" } @@ -2758,12 +2762,12 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", + "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.1.1" + "get-intrinsic": "^1.2.2" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2829,6 +2833,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/hasown": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", + "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", @@ -2994,13 +3010,13 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "node_modules/internal-slot": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", + "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", + "get-intrinsic": "^1.2.2", + "hasown": "^2.0.0", "side-channel": "^1.0.4" }, "engines": { @@ -3085,12 +3101,12 @@ } }, "node_modules/is-core-module": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz", - "integrity": "sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==", + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": 
"sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", "dev": true, "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3326,9 +3342,9 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, "node_modules/istanbul-lib-coverage": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", - "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.1.tgz", + "integrity": "sha512-opCrKqbthmq3SKZ10mFMQG9dk3fTa3quaOLD35kJa5ejwZHd9xAr+kLuziiZz2cG32s4lMZxNdmdcEQnTDP4+g==", "dev": true, "engines": { "node": ">=8" @@ -3603,9 +3619,9 @@ } }, "node_modules/keyv": { - "version": "4.5.3", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.3.tgz", - "integrity": "sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug==", + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", "dev": true, "dependencies": { "json-buffer": "3.0.1" @@ -3780,12 +3796,12 @@ } }, "node_modules/loupe": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.6.tgz", - "integrity": "sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==", + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", "dev": true, "dependencies": { - "get-func-name": "^2.0.0" + "get-func-name": "^2.0.1" } }, "node_modules/lru-cache": { @@ -4542,9 +4558,9 @@ } }, "node_modules/object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5176,9 +5192,9 @@ } }, "node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true, "engines": { "node": ">=6" @@ -5384,9 +5400,9 @@ "dev": true }, "node_modules/resolve": { - "version": "1.22.6", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.6.tgz", - "integrity": "sha512-njhxM7mV12JfufShqGy3Rz8j11RPdLy4xi15UurGJeoHLfJpVXKdh3ueuOqbYUcDZnffr6X739JBo5LzyahEsw==", + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": 
"sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", "dev": true, "dependencies": { "is-core-module": "^2.13.0", @@ -5522,6 +5538,21 @@ "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" }, + "node_modules/set-function-length": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", + "integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.1", + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/set-function-name": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", @@ -5723,9 +5754,9 @@ } }, "node_modules/spdx-license-ids": { - "version": "3.0.15", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.15.tgz", - "integrity": "sha512-lpT8hSQp9jAKp9mhtBU4Xjon8LPGBvLIuBiSVhMEtmLecTh2mO0tlqrAMp47tBXzMr13NJMQ2lf7RpQGLJ3HsQ==", + "version": "3.0.16", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.16.tgz", + "integrity": "sha512-eWN+LnM3GR6gPu35WxNgbGl8rmY1AEmoMDvL/QD6zYmPWgywxWqJWNdLGT+ke8dKNWrcYgYjPpG5gbTfghP8rw==", "dev": true }, "node_modules/sprintf-js": { @@ -6429,13 +6460,13 @@ "dev": true }, "node_modules/which-typed-array": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.11.tgz", - "integrity": "sha512-qe9UWWpkeG5yzZ0tNYxDmd7vo58HDBc39mZ0xWWpolAGADdFOzkfamWLDxkOWcvHQKVmdTyQdLD4NOfjLWTKew==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", + "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", "dev": true, "dependencies": { "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "call-bind": "^1.0.4", "for-each": "^0.3.3", "gopd": "^1.0.1", "has-tostringtag": "^1.0.0" diff --git a/package.json b/package.json index ef73e0666..fe749294e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "aerospike", - "version": "5.8.0", + "version": "5.9.0", "description": "Aerospike Client Library", "keywords": [ "aerospike", diff --git a/scripts/build-package.ps1 b/scripts/build-package.ps1 index f5ed6f579..36afce550 100644 --- a/scripts/build-package.ps1 +++ b/scripts/build-package.ps1 @@ -35,6 +35,17 @@ function build_nodejs_client { build_nodejs_client v18 +Copy-Item -Recurse build\release\* lib\binding\node-v108-win32-x64 +Remove-Item .\lib\binding\node-v108-win32-x64\obj +Remove-Item .\lib\binding\node-v108-win32-x64\aerospike.pdb +Remove-Item .\lib\binding\node-v108-win32-x64\aerospike.ipdb +Remove-Item .\lib\binding\node-v108-win32-x64\aerospike.iobj + build_nodejs_client v20 +Copy-Item -Recurse build\release\* lib\binding\node-v115-win32-x64 +Remove-Item -Recurse .\lib\binding\node-v115-win32-x64\obj +Remove-Item .\lib\binding\node-v115-win32-x64\aerospike.pdb +Remove-Item .\lib\binding\node-v115-win32-x64\aerospike.ipdb +Remove-Item .\lib\binding\node-v115-win32-x64\aerospike.iobj nvm use v18.12.1 diff --git a/src/include/enums.h b/src/include/enums.h index 66815551d..6aff4cdfa 100644 --- 
a/src/include/enums.h +++ b/src/include/enums.h @@ -41,3 +41,6 @@ v8::Local status(); v8::Local ttl_enum_values(); v8::Local batchTypes(); v8::Local privilegeCode(); +v8::Local expReadFlags(); +v8::Local expWriteFlags(); + diff --git a/src/main/aerospike.cc b/src/main/aerospike.cc index fce7fc725..a4f2b4758 100644 --- a/src/main/aerospike.cc +++ b/src/main/aerospike.cc @@ -157,6 +157,8 @@ NAN_MODULE_INIT(Aerospike) export("auth", auth_mode_enum_values()); export("batchTypes", batchTypes()); export("privilegeCode", privilegeCode()); + export("expReadFlags", expReadFlags()); + export("expWriteFlags", expWriteFlags()); } NODE_MODULE(aerospike, Aerospike); diff --git a/src/main/async.cc b/src/main/async.cc index 1c449256c..d8eb7a99b 100644 --- a/src/main/async.cc +++ b/src/main/async.cc @@ -117,22 +117,15 @@ void async_batch_listener(as_error *err, as_batch_read_records *records, { Nan::HandleScope scope; AsyncCommand *cmd = reinterpret_cast(udata); + if (!err || (err->code == AEROSPIKE_BATCH_FAILED && records->list.size != 0)) { - if (err) { - if (err->code == AEROSPIKE_BATCH_FAILED) { - Local argv[]{Nan::Null(), - batch_records_to_jsarray(records, cmd->log)}; - cmd->Callback(2, argv); - } - else { - cmd->ErrorCallback(err); - } - } - else { Local argv[]{Nan::Null(), batch_records_to_jsarray(records, cmd->log)}; cmd->Callback(2, argv); } + else { + cmd->ErrorCallback(err); + } batch_records_free(records, cmd->log); delete cmd; diff --git a/src/main/client.cc b/src/main/client.cc index 67e6dc9da..71c28e5d9 100644 --- a/src/main/client.cc +++ b/src/main/client.cc @@ -108,7 +108,7 @@ NAN_METHOD(AerospikeClient::Connect) } /** - * Close the connections to the Aeropsike cluster. + * Close the connections to the Aerospike cluster. */ NAN_METHOD(AerospikeClient::Close) { diff --git a/src/main/commands/batch_apply.cc b/src/main/commands/batch_apply.cc index 4953c7284..08955bd8b 100644 --- a/src/main/commands/batch_apply.cc +++ b/src/main/commands/batch_apply.cc @@ -206,10 +206,7 @@ static void respond(uv_work_t *req, int status) BatchApplyCommand *cmd = reinterpret_cast(req->data); LogInfo *log = cmd->log; - if (cmd->IsError()) { - cmd->ErrorCallback(); - } - else { + if (!(cmd->IsError()) || ((cmd->err.code == AEROSPIKE_BATCH_FAILED) && (cmd->results_len != 0))) { as_batch_read *batch_results = cmd->results; Local results = Nan::New(cmd->results_len); for (uint32_t i = 0; i < cmd->results_len; i++) { @@ -242,6 +239,9 @@ static void respond(uv_work_t *req, int status) Local argv[] = {Nan::Null(), results}; cmd->Callback(2, argv); } + else { + cmd->ErrorCallback(); + } delete cmd; delete req; diff --git a/src/main/commands/batch_exists.cc b/src/main/commands/batch_exists.cc index 13ed34e4d..4d205238c 100644 --- a/src/main/commands/batch_exists.cc +++ b/src/main/commands/batch_exists.cc @@ -138,10 +138,7 @@ static void respond(uv_work_t *req, int status) BatchExistsCommand *cmd = reinterpret_cast(req->data); LogInfo *log = cmd->log; - if (cmd->IsError()) { - cmd->ErrorCallback(); - } - else { + if (!(cmd->IsError()) || ((cmd->err.code == AEROSPIKE_BATCH_FAILED) && (cmd->results_len != 0))) { as_batch_read *batch_results = cmd->results; Local results = Nan::New(cmd->results_len); for (uint32_t i = 0; i < cmd->results_len; i++) { @@ -171,6 +168,9 @@ static void respond(uv_work_t *req, int status) Local argv[] = {Nan::Null(), results}; cmd->Callback(2, argv); } + else { + cmd->ErrorCallback(); + } delete cmd; delete req; diff --git a/src/main/commands/batch_get.cc b/src/main/commands/batch_get.cc index 
1984670a1..b65d2886c 100644 --- a/src/main/commands/batch_get.cc +++ b/src/main/commands/batch_get.cc @@ -137,10 +137,7 @@ static void respond(uv_work_t *req, int status) BatchGetCommand *cmd = reinterpret_cast(req->data); LogInfo *log = cmd->log; - if (cmd->IsError()) { - cmd->ErrorCallback(); - } - else { + if (!(cmd->IsError()) || ((cmd->err.code == AEROSPIKE_BATCH_FAILED) && (cmd->results_len != 0))) { as_batch_read *batch_results = cmd->results; Local results = Nan::New(cmd->results_len); for (uint32_t i = 0; i < cmd->results_len; i++) { @@ -173,6 +170,9 @@ static void respond(uv_work_t *req, int status) Local argv[] = {Nan::Null(), results}; cmd->Callback(2, argv); } + else { + cmd->ErrorCallback(); + } delete cmd; delete req; diff --git a/src/main/commands/batch_remove.cc b/src/main/commands/batch_remove.cc index eb677d193..411778aca 100644 --- a/src/main/commands/batch_remove.cc +++ b/src/main/commands/batch_remove.cc @@ -28,8 +28,10 @@ extern "C" { #include #include #include +#include } + using namespace v8; class BatchRemoveCommand : public AerospikeCommand { @@ -167,10 +169,8 @@ static void respond(uv_work_t *req, int status) BatchRemoveCommand *cmd = reinterpret_cast(req->data); LogInfo *log = cmd->log; - if (cmd->IsError()) { - cmd->ErrorCallback(); - } - else { + + if (!(cmd->IsError()) || ((cmd->err.code == AEROSPIKE_BATCH_FAILED) && (cmd->results_len != 0))) { as_batch_read *batch_results = cmd->results; Local results = Nan::New(cmd->results_len); for (uint32_t i = 0; i < cmd->results_len; i++) { @@ -203,6 +203,9 @@ static void respond(uv_work_t *req, int status) Local argv[] = {Nan::Null(), results}; cmd->Callback(2, argv); } + else { + cmd->ErrorCallback(); + } delete cmd; delete req; diff --git a/src/main/commands/batch_select.cc b/src/main/commands/batch_select.cc index 0f03bf984..bafbe4000 100644 --- a/src/main/commands/batch_select.cc +++ b/src/main/commands/batch_select.cc @@ -28,6 +28,7 @@ extern "C" { #include #include #include +#include } using namespace v8; @@ -153,10 +154,7 @@ static void respond(uv_work_t *req, int status) BatchSelectCommand *cmd = reinterpret_cast(req->data); LogInfo *log = cmd->log; - if (cmd->IsError()) { - cmd->ErrorCallback(); - } - else { + if (!(cmd->IsError()) || ((cmd->err.code == AEROSPIKE_BATCH_FAILED) && (cmd->results_len != 0))) { as_batch_read *batch_results = cmd->results; Local results = Nan::New(cmd->results_len); for (uint32_t i = 0; i < cmd->results_len; i++) { @@ -188,6 +186,9 @@ static void respond(uv_work_t *req, int status) Local argv[] = {Nan::Null(), results}; cmd->Callback(2, argv); } + else { + cmd->ErrorCallback(); + } delete cmd; delete req; diff --git a/src/main/config.cc b/src/main/config.cc index 9bc72e96a..44da4f40a 100644 --- a/src/main/config.cc +++ b/src/main/config.cc @@ -419,7 +419,7 @@ int config_from_jsobject(as_config *config, Local configObj, goto Cleanup; } if ((rc = get_optional_uint32_property(&config->tender_interval, NULL, - configObj, "tendInterval", log)) != + configObj, "tenderInterval", log)) != AS_NODE_PARAM_OK) { goto Cleanup; } @@ -428,6 +428,16 @@ int config_from_jsobject(as_config *config, Local configObj, log)) != AS_NODE_PARAM_OK) { goto Cleanup; } + if ((rc = get_optional_uint32_property(&config->max_error_rate, + NULL, configObj, "maxErrorRate", + log)) != AS_NODE_PARAM_OK) { + goto Cleanup; + } + if ((rc = get_optional_uint32_property(&config->error_rate_window, + NULL, configObj, "errorRateWindow", + log)) != AS_NODE_PARAM_OK) { + goto Cleanup; + } if ((rc = 
get_optional_uint32_property(&config->async_min_conns_per_node, NULL, configObj, "minConnsPerNode", log)) != AS_NODE_PARAM_OK) { diff --git a/src/main/enums/exp_enum.cc b/src/main/enums/exp_enum.cc index c3ced9b02..1654bbdd2 100644 --- a/src/main/enums/exp_enum.cc +++ b/src/main/enums/exp_enum.cc @@ -56,6 +56,7 @@ Local exp_opcode_values() set(exp_ops, "KEY_EXIST", as_exp_ops::_AS_EXP_CODE_KEY_EXIST); set(exp_ops, "IS_TOMBSTONE", as_exp_ops::_AS_EXP_CODE_NOT); set(exp_ops, "MEMORY_SIZE", as_exp_ops::_AS_EXP_CODE_MEMORY_SIZE); + set(exp_ops, "RECORD_SIZE", as_exp_ops::_AS_EXP_CODE_RECORD_SIZE); set(exp_ops, "KEY", as_exp_ops::_AS_EXP_CODE_KEY); set(exp_ops, "BIN", as_exp_ops::_AS_EXP_CODE_BIN); diff --git a/src/main/enums/exp_read_flags.cc b/src/main/enums/exp_read_flags.cc new file mode 100644 index 000000000..17252b2d1 --- /dev/null +++ b/src/main/enums/exp_read_flags.cc @@ -0,0 +1,37 @@ +/******************************************************************************* + * Copyright 2023 Aerospike, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ + +#include +#include + +extern "C" { +#include +} + +using namespace v8; + +#define set(__obj, __name, __value) \ + Nan::Set(__obj, Nan::New(__name).ToLocalChecked(), Nan::New(__value)) + +Local expReadFlags() +{ + Nan::EscapableHandleScope scope; + Local obj = Nan::New(); + set(obj, "DEFAULT", AS_EXP_READ_DEFAULT); + set(obj, "EVAL_NO_FAIL", AS_EXP_READ_EVAL_NO_FAIL); + + return scope.Escape(obj); +} \ No newline at end of file diff --git a/src/main/enums/exp_write_flags.cc b/src/main/enums/exp_write_flags.cc new file mode 100644 index 000000000..65e786ab0 --- /dev/null +++ b/src/main/enums/exp_write_flags.cc @@ -0,0 +1,41 @@ +/******************************************************************************* + * Copyright 2023 Aerospike, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ + +#include +#include + +extern "C" { +#include +} + +using namespace v8; + +#define set(__obj, __name, __value) \ + Nan::Set(__obj, Nan::New(__name).ToLocalChecked(), Nan::New(__value)) + +Local expWriteFlags() +{ + Nan::EscapableHandleScope scope; + Local obj = Nan::New(); + set(obj, "DEFAULT", AS_EXP_WRITE_DEFAULT); + set(obj, "CREATE_ONLY", AS_EXP_WRITE_CREATE_ONLY); + set(obj, "UPDATE_ONLY", AS_EXP_WRITE_UPDATE_ONLY); + set(obj, "ALLOW_DELETE", AS_EXP_WRITE_ALLOW_DELETE); + set(obj, "POLICY_NO_FAIL", AS_EXP_WRITE_POLICY_NO_FAIL); + set(obj, "EVAL_NO_FAIL", AS_EXP_WRITE_EVAL_NO_FAIL); + + return scope.Escape(obj); +} \ No newline at end of file diff --git a/src/main/enums/index.cc b/src/main/enums/index.cc index f578de600..6196a7191 100644 --- a/src/main/enums/index.cc +++ b/src/main/enums/index.cc @@ -33,6 +33,7 @@ Local indexDataType() set(obj, "STRING", AS_INDEX_STRING); set(obj, "NUMERIC", AS_INDEX_NUMERIC); set(obj, "GEO2DSPHERE", AS_INDEX_GEO2DSPHERE); + set(obj, "BLOB", AS_INDEX_BLOB); return scope.Escape(obj); } diff --git a/src/main/map_operations.cc b/src/main/map_operations.cc index ab244d5a9..afc51f6da 100644 --- a/src/main/map_operations.cc +++ b/src/main/map_operations.cc @@ -1027,6 +1027,28 @@ bool add_map_get_by_rank_range_op(as_operations *ops, const char *bin, return true; } +bool add_map_create_op(as_operations *ops, const char *bin, + as_cdt_ctx *context, Local obj, + LogInfo *log) +{ + as_map_order order; + if (get_uint32_property((uint32_t*)&order, obj, "order", log) != AS_NODE_PARAM_OK) { + return false; + } + + bool persist_index; + if (get_bool_property(&persist_index, obj, "persistIndex", log) != AS_NODE_PARAM_OK) { + return false; + } + + + as_v8_debug(log, "order=%i, persist_index=%s", order, persist_index ? "true" : "false"); + as_operations_map_create_all(ops, bin, context, order, persist_index); + + + return true; +} + typedef bool (*MapOperation)(as_operations *ops, const char *bin, as_cdt_ctx *context, Local op, LogInfo *log); @@ -1069,7 +1091,8 @@ const ops_table_entry ops_table[] = { {"MAP_GET_BY_INDEX", add_map_get_by_index_op}, {"MAP_GET_BY_INDEX_RANGE", add_map_get_by_index_range_op}, {"MAP_GET_BY_RANK", add_map_get_by_rank_op}, - {"MAP_GET_BY_RANK_RANGE", add_map_get_by_rank_range_op}}; + {"MAP_GET_BY_RANK_RANGE", add_map_get_by_rank_range_op}, + {"MAP_CREATE", add_map_create_op}}; int add_map_op(as_operations *ops, uint32_t opcode, Local op, LogInfo *log) @@ -1120,4 +1143,4 @@ Local map_opcode_values() } return scope.Escape(obj); -} +} \ No newline at end of file diff --git a/src/main/query.cc b/src/main/query.cc index 2c03d3f92..5ad894087 100644 --- a/src/main/query.cc +++ b/src/main/query.cc @@ -187,6 +187,24 @@ void setup_options(as_query *query, Local options, as_cdt_ctx* context, bin_val); as_v8_debug(log, "String equality predicate %s", bin_val); } + else if (datatype == AS_INDEX_BLOB) { + Local value = + Nan::Get(filter, Nan::New("val").ToLocalChecked()) + .ToLocalChecked(); + if (!node::Buffer::HasInstance(value)) { + as_v8_error( + log, + "The region value passed must be a Buffer"); + Nan::ThrowError( + "The region value passed is not a buffer"); + } + uint8_t *bytes; + int size = 0; + get_bytes_property(&bytes, &size, filter, "val" , log); + + as_query_where_with_ctx(query, bin_name, *with_context ? 
context : NULL, predicate, type, datatype, bytes, size, true); + as_v8_debug(log, "Blob equality predicate"); + } break; } } diff --git a/src/main/util/conversions.cc b/src/main/util/conversions.cc index 3ea176eea..3e109634c 100644 --- a/src/main/util/conversions.cc +++ b/src/main/util/conversions.cc @@ -1116,10 +1116,14 @@ int map_from_jsmap(as_map **map, Local obj, const LogInfo *log) const Local name = Nan::Get(data, i).ToLocalChecked(); const Local value = Nan::Get(data, i+1).ToLocalChecked(); as_val *val = NULL; + as_val *nameVal = NULL; if (asval_from_jsvalue(&val, value, log) != AS_NODE_PARAM_OK) { return AS_NODE_PARAM_ERR; } - as_stringmap_set(*map, *Nan::Utf8String(name), val); + if (asval_from_jsvalue(&nameVal, name, log) != AS_NODE_PARAM_OK) { + return AS_NODE_PARAM_ERR; + } + as_map_set(*map, nameVal, val); } return AS_NODE_PARAM_OK; } diff --git a/test/batch_remove.js b/test/batch_remove.js index 5af50d148..b6c7f5c20 100644 --- a/test/batch_remove.js +++ b/test/batch_remove.js @@ -72,5 +72,30 @@ describe('client.batchRemove()', function () { done() }) }) + + it('Will return records even if the generation value is incorrect', async function () { + const batchRecords = [ + new Key(helper.namespace, helper.set, 'test/batch_remove/6'), + new Key(helper.namespace, helper.set, 'test/batch_remove/7'), + new Key(helper.namespace, helper.set, 'test/batch_remove/8'), + new Key(helper.namespace, helper.set, 'test/batch_remove/9'), + new Key(helper.namespace, helper.set, 'test/batch_remove/0') + ] + try { + await client.batchRemove(batchRecords, null, new Aerospike.BatchRemovePolicy({ gen: Aerospike.policy.gen.EQ, generation: 10 })) + // Will fail if code makes it here + expect(1).to.eql(2) + } catch (error) { + // code will fail with undefined if expect(1).to.eql(2) executes + expect(error.code).to.eql(-16) + const results = await client.batchRemove(batchRecords) + expect(results.length).to.equal(5) + results.forEach(function (result) { + expect(result.status).to.equal(Aerospike.status.OK) + // expect(results.record.bins).to.be.empty() + // console.log(util.inspect(result, true, 10, true)) + }) + } + }) }) }) diff --git a/test/batch_write.js b/test/batch_write.js index 23ea97aa4..477001bf1 100644 --- a/test/batch_write.js +++ b/test/batch_write.js @@ -281,6 +281,28 @@ describe('client.batchWrite()', function () { }) }) }) + + it('Returns the correct status and error when using async/await', async function () { + const batchRecords = [ + { + type: batchType.BATCH_WRITE, + key: new Key(helper.namespace, helper.set, 'test/batch_write/11'), + ops: [ + op.write('geo', new GeoJSON({ type: 'Point', coordinates: [123.456, 1.308] })), + op.write('blob', Buffer.from('bar')) + ], + policy: new Aerospike.BatchWritePolicy({ + exists: Aerospike.policy.exists.CREATE + }) + } + + ] + + await client.batchWrite(batchRecords) + const results = await client.batchWrite(batchRecords) + + expect(results[0].status).to.equal(status.ERR_RECORD_EXISTS) + }) }) context('with exists.CREATE returning promise', function () { diff --git a/test/config.js b/test/config.js index 05dd355fe..58d1f3ea2 100644 --- a/test/config.js +++ b/test/config.js @@ -48,6 +48,8 @@ describe('Config #noserver', function () { maxConnsPerNode: 200, maxSocketIdle: 30, minConnsPerNode: 10, + maxErrorRate: 100, + errorRateWindow: 1, modlua: { userPath: '/user/path' }, password: 'sekret', port: 3333, @@ -74,6 +76,8 @@ describe('Config #noserver', function () { expect(config).to.have.property('authMode') expect(config).to.have.property('clusterName')
expect(config).to.have.property('connTimeoutMs') + expect(config).to.have.property('maxErrorRate') + expect(config).to.have.property('errorRateWindow') expect(config).to.have.property('hosts') expect(config).to.have.property('log') expect(config).to.have.property('loginTimeoutMs') diff --git a/test/exp.js index e00c30a55..a3f4ff2a1 100644 --- a/test/exp.js +++ b/test/exp.js @@ -255,6 +255,21 @@ describe('Aerospike.exp', function () { }) }) + describe('recordSize', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('matches the size of a record with short bin values', async function () { + const key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.recordSize(), exp.int(1))) + await testMatch(key, exp.eq(exp.recordSize(), exp.int(96))) + }) + + it('matches the size of a record with a longer bin value', async function () { + const key = await createRecord({ tags: { a: '123456789', b: 'green', c: 'yellow' } }) + await testNoMatch(key, exp.eq(exp.recordSize(), exp.int(1))) + await testMatch(key, exp.eq(exp.recordSize(), exp.int(112))) + }) + }) + describe('wildcard', function () { it('evaluates to true if any expression evaluates to true', async function () { const key = await createRecord({ tags: { a: 'blue', b: 'green', c: 'yellow' } }) @@ -264,6 +279,23 @@ }) }) + describe('expWriteFlags', function () { + it('write flags have correct value', async function () { + expect(exp.expWriteFlags).to.have.property('DEFAULT', 0) + expect(exp.expWriteFlags).to.have.property('CREATE_ONLY', 1) + expect(exp.expWriteFlags).to.have.property('UPDATE_ONLY', 2) + expect(exp.expWriteFlags).to.have.property('ALLOW_DELETE', 4) + expect(exp.expWriteFlags).to.have.property('POLICY_NO_FAIL', 8) + expect(exp.expWriteFlags).to.have.property('EVAL_NO_FAIL', 16) + }) + }) + describe('expReadFlags', function () { + it('read flags have correct value', async function () { + expect(exp.expReadFlags).to.have.property('DEFAULT', 0) + expect(exp.expReadFlags).to.have.property('EVAL_NO_FAIL', 16) + }) + }) + describe('arithmetic expressions', function () { describe('int bin add expression', function () { it('evaluates exp_read op to true if temp bin equals the sum of bin and given value', async function () { @@ -271,7 +303,7 @@ const ops = [ exp.operations.read(tempBin, exp.add(exp.binInt('intVal'), exp.binInt('intVal')), - 0), + exp.expWriteFlags.DEFAULT), op.read('intVal') ] const result = await client.operate(key, ops, {}) @@ -284,7 +316,7 @@ const ops = [ exp.operations.write('intVal', exp.add(exp.binInt('intVal'), exp.binInt('intVal')), - 0), + exp.expWriteFlags.DEFAULT), op.read('intVal') ] const result = await client.operate(key, ops, {}) @@ -296,7 +328,7 @@ const ops = [ exp.operations.read(tempBin, exp.add(exp.binInt('intVal'), exp.binInt('intVal')), - 0), + exp.expWriteFlags.DEFAULT), op.read('intVal') ] const result = await client.operate(key, ops, {}) diff --git a/test/index.js index 9fbf5b748..46a0323b9 100644 --- a/test/index.js +++ b/test/index.js @@ -194,6 +194,21 @@ context('secondary indexes', function () { }) }) + describe('Client#createBlobIndex()', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('should create a blob index', function () { + const args = { + ns: helper.namespace, + set: helper.set, + bin: testIndex.bin, + index:
testIndex.name + } + + return client.createBlobIndex(args) + .then(() => verifyIndexExists(helper.namespace, testIndex.name)) + }) + }) + describe('Client#indexRemove()', async function () { beforeEach(() => { helper.index.create(testIndex.name, helper.set, testIndex.bin, diff --git a/test/maps.js b/test/maps.js index 2455db4eb..8c72ec4a4 100644 --- a/test/maps.js +++ b/test/maps.js @@ -22,6 +22,7 @@ const Aerospike = require('../lib/aerospike') const helper = require('./test_helper') const maps = Aerospike.maps +const op = Aerospike.operations const Context = Aerospike.cdt.Context const status = Aerospike.status @@ -62,6 +63,48 @@ describe('client.operate() - CDT Map operations', function () { }) }) + describe('maps.create', function () { + it('Creates a new map', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.create('emptyMap', maps.order.KEY_ORDERED))) + .then(operate(op.read('dap'))) + .then(assertRecordEql({ emptyMap: {}, map: { a: 3, b: 2, c: 1 } })) + .then(cleanup()) + }) + + it('Creates a new map from a cdt context', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.create('map', maps.order.KEY_ORDERED).withContext(ctx => ctx.addMapKeyCreate('nested')))) + .then(assertRecordEql({ map: { a: 3, b: 2, c: 1, nested: {} } })) + .then(cleanup()) + }) + + it('Creates a new map from a cdt context as parameter', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.create('map', maps.order.KEY_ORDERED, false, new Context().addMapKeyCreate('nested')))) + .then(assertRecordEql({ map: { a: 3, b: 2, c: 1, nested: {} } })) + .then(cleanup()) + }) + + context('persistent indexes added in 7.0', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('Creates a new map with persistent index', function () { + return initState() + .then(createRecord({ map: { c: 1, b: 2, a: 3 } })) + .then(orderByKey('map')) + .then(operate(maps.create('emptyMap', maps.order.KEY_ORDERED, true))) + .then(assertRecordEql({ emptyMap: {}, map: { a: 3, b: 2, c: 1 } })) + .then(cleanup()) + }) + }) + }) + describe('maps.put', function () { it('adds the item to the map and returns the size of the map', function () { return initState() diff --git a/test/query.js b/test/query.js index 8561959f4..63b4499b6 100644 --- a/test/query.js +++ b/test/query.js @@ -35,6 +35,8 @@ const op = Aerospike.operations const NUMERIC = Aerospike.indexDataType.NUMERIC const STRING = Aerospike.indexDataType.STRING const GEO2DSPHERE = Aerospike.indexDataType.GEO2DSPHERE +const BLOB = Aerospike.indexDataType.BLOB + const LIST = Aerospike.indexType.LIST const MAPVALUES = Aerospike.indexType.MAPVALUES const MAPKEYS = Aerospike.indexType.MAPKEYS @@ -112,7 +114,7 @@ describe('Queries', function () { { name: 'nested aggregate', nested: { doubleNested: { value: 30 } } } ] - const numberOfSamples = samples.length + const indexes = [ ['qidxName', 'name', STRING], ['qidxInt', 'i', NUMERIC], @@ -134,9 +136,11 @@ describe('Queries', function () { ['qidxStrMapKeysNested', 'mks', STRING, MAPKEYS, new Context().addMapKey('nested')], ['qidxGeoListNested', 'lg', GEO2DSPHERE, LIST, new Context().addMapKey('nested')], ['qidxGeoMapNested', 'mg', GEO2DSPHERE, MAPVALUES, new Context().addMapKey('nested')], + ['qidxAggregateMapNested', 'nested', STRING, MAPKEYS], ['qidxAggregateMapDoubleNested', 'nested', 
STRING, MAPKEYS, new Context().addMapKey('doubleNested')] ] + let keys = [] function verifyQueryResults (queryOptions, matchName, done) { @@ -160,6 +164,34 @@ describe('Queries', function () { recgen: () => samples.pop(), metagen: metagen.constant({ ttl: 300 }) } + + if (helper.cluster.isVersionInRange('>= 7.0.0')) { + samples.push({ name: 'blob match', blob: Buffer.from('guava') }) + samples.push({ name: 'blob non-match', blob: Buffer.from('pumpkin') }) + samples.push({ name: 'blob list match', lblob: [Buffer.from('guava'), Buffer.from('papaya')] }) + samples.push({ name: 'blob list non-match', lblob: [Buffer.from('pumpkin'), Buffer.from('turnip')] }) + samples.push({ name: 'blob map match', mblob: { a: Buffer.from('guava'), b: Buffer.from('papaya') } }) + samples.push({ name: 'blob map non-match', mblob: { a: Buffer.from('pumpkin'), b: Buffer.from('turnip') } }) + samples.push({ name: 'blob mapkeys match', mkblob: new Map([[Buffer.from('guava'), 1], [Buffer.from('papaya'), 2]]) }) + samples.push({ name: 'blob mapkeys non-match', mkblob: new Map([[Buffer.from('pumpkin'), 3], [Buffer.from('turnip'), 4]]) }) + samples.push({ name: 'nested blob match', blob: { nested: Buffer.from('guava') } }) + samples.push({ name: 'nested blob non-match', blob: { nested: Buffer.from('pumpkin') } }) + samples.push({ name: 'nested blob list match', lblob: { nested: [Buffer.from('guava'), Buffer.from('papaya')] } }) + samples.push({ name: 'nested blob list non-match', lblob: { nested: [Buffer.from('pumpkin'), Buffer.from('turnip')] } }) + samples.push({ name: 'nested blob map match', mblob: { nested: { a: Buffer.from('guava'), b: Buffer.from('papaya') } } }) + samples.push({ name: 'nested blob map non-match', mblob: { nested: { a: Buffer.from('pumpkin'), b: Buffer.from('turnip') } } }) + samples.push({ name: 'nested blob mapkeys match', mkblob: { nested: new Map([[Buffer.from('guava'), 1], [Buffer.from('papaya'), 2]]) } }) + samples.push({ name: 'nested blob mapkeys non-match', mkblob: { nested: new Map([[Buffer.from('pumpkin'), 3], [Buffer.from('turnip'), 4]]) } }) + + indexes.push(['qidxBlob', 'blob', BLOB]) + indexes.push(['qidxBlobList', 'lblob', BLOB, LIST]) + indexes.push(['qidxBlobMap', 'mblob', BLOB, MAPVALUES]) + indexes.push(['qidxBlobMapKeys', 'mkblob', BLOB, MAPKEYS]) + indexes.push(['qidxBlobListNested', 'lblob', BLOB, LIST, new Context().addMapKey('nested')]) + indexes.push(['qidxBlobMapNested', 'mblob', BLOB, MAPVALUES, new Context().addMapKey('nested')]) + indexes.push(['qidxBlobMapKeysNested', 'mkblob', BLOB, MAPKEYS, new Context().addMapKey('nested')]) + } + const numberOfSamples = samples.length return Promise.all([ putgen.put(numberOfSamples, generators) .then((records) => { keys = records.map((rec) => rec.key) }) @@ -573,7 +605,13 @@ describe('Queries', function () { const args = { filters: [filter.equal('i', 5)] } verifyQueryResults(args, 'int match', done) }) - + context('Uses blob Secondary indexes', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('should match equal blob values', function (done) { + const args = { filters: [filter.equal('blob', Buffer.from('guava'))] } + verifyQueryResults(args, 'blob match', done) + }) + }) it('should match equal string values', function (done) { const args = { filters: [filter.equal('s', 'banana')] } verifyQueryResults(args, 'string match', done) @@ -662,7 +700,38 @@ describe('Queries', function () { const args = { filters: [filter.contains('mks', 'banana', MAPKEYS, new Context().addMapKey('nested'))] } verifyQueryResults(args, 
'nested string mapkeys match', done) }) + context('Uses blob Secondary indexes', function () { + helper.skipUnlessVersion('>= 7.0.0', this) + it('should match lists containing a blob', function (done) { + const args = { filters: [filter.contains('lblob', Buffer.from('guava'), LIST)] } + verifyQueryResults(args, 'blob list match', done) + }) + it('should match lists containing a blob in a nested context', function (done) { + const args = { filters: [filter.contains('lblob', Buffer.from('guava'), LIST, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested blob list match', done) + }) + + it('should match maps containing a blob value', function (done) { + const args = { filters: [filter.contains('mblob', Buffer.from('guava'), MAPVALUES)] } + verifyQueryResults(args, 'blob map match', done) + }) + + it('should match maps containing a blob value in a nested context', function (done) { + const args = { filters: [filter.contains('mblob', Buffer.from('guava'), MAPVALUES, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested blob map match', done) + }) + + it('should match maps containing a blob key', function (done) { + const args = { filters: [filter.contains('mkblob', Buffer.from('guava'), MAPKEYS)] } + verifyQueryResults(args, 'blob mapkeys match', done) + }) + + it('should match maps containing a blob key in a nested context', function (done) { + const args = { filters: [filter.contains('mkblob', Buffer.from('guava'), MAPKEYS, new Context().addMapKey('nested'))] } + verifyQueryResults(args, 'nested blob mapkeys match', done) + }) + }) it('throws a type error if the comparison value is of invalid type', function () { const fn = () => filter.contains('list', { foo: 'bar' }, LIST) expect(fn).to.throw(TypeError) diff --git a/test/util/options.js b/test/util/options.js index 643515cad..73ae917d9 100644 --- a/test/util/options.js +++ b/test/util/options.js @@ -177,6 +177,8 @@ options.getConfig = function () { if (options.auth) { config.auth = options.auth } + // Disable maxErrorRate + config.maxErrorRate = 0 return config } diff --git a/typings/index.d.ts b/typings/index.d.ts index f5371c15c..27f82a347 100644 --- a/typings/index.d.ts +++ b/typings/index.d.ts @@ -198,14 +198,8 @@ declare module 'client' { type: number; key: Key; }, policy?: BatchPolicy, callback?: batchRecordsCallback | undefined): Promise | null; - batchApply(records: { - type: number; - key: Key; - }, udf: object[], batchPolicy?: BatchPolicy, batchApplyPolicy?: any, callback?: batchRecordsCallback | undefined): Promise | null; - batchRemove(records: { - type: number; - key: Key; - }, batchPolicy?: BatchPolicy, batchRemovePolicy?: any, callback?: batchRecordsCallback | undefined): Promise | null; + batchApply(keys: Key[], udf: object[], batchPolicy?: BatchPolicy, batchApplyPolicy?: BatchApplyPolicy, callback?: batchRecordsCallback | undefined): Promise | null; + batchRemove(keys: Key[], batchPolicy?: BatchPolicy, batchRemovePolicy?: BatchRemovePolicy, callback?: batchRecordsCallback | undefined): Promise | null; batchSelect(keys: Key[], bins: string[], policy?: BatchPolicy, callback?: batchRecordsCallback | undefined): Promise | null; close(releaseEventLoop?: boolean | undefined): void; connect(callback?: connectCallback | undefined): Promise | null; @@ -239,6 +233,13 @@ declare module 'client' { index: string; type?: any; }, policy?: InfoPolicy, callback?: jobCallback | undefined): Promise | null; + createBlobIndex(options: { + ns: string; + set: string; + bin: string; + index: string; + type?: 
any; + }, policy?: InfoPolicy, callback?: jobCallback | undefined): Promise | null; apply(key: Key, udfArgs: { module: string; funcname: string; @@ -273,6 +274,7 @@ declare module 'client' { import Config = require("config"); import Query = require("query"); import Scan = require("scan"); + } declare module 'commands/batch_command' { function _exports(asCommand: any): { @@ -281,10 +283,12 @@ declare module 'commands/batch_command' { }; }; export = _exports; + } declare module 'commands/command' { const _exports: Class; export = _exports; + } declare module 'commands/connect_command' { function _exports(asCommand: any): { @@ -293,6 +297,7 @@ declare module 'commands/connect_command' { }; }; export = _exports; + } declare module 'commands/exists_command' { function _exports(asCommand: any): { @@ -301,6 +306,7 @@ declare module 'commands/exists_command' { }; }; export = _exports; + } declare module 'commands/index' { class ApplyCommand { @@ -546,6 +552,7 @@ declare module 'commands/index' { class UserDropCommand { } export { ApplyCommand as Apply, BatchExistsCommand as BatchExists, BatchGetCommand as BatchGet, BatchReadCommand as BatchRead, BatchWriteCommand as BatchWrite, BatchApplyCommand as BatchApply, BatchRemoveCommand as BatchRemove, BatchSelectCommand as BatchSelect, ChangePasswordCommand as ChangePassword, ConnectCommand as Connect, ExistsCommand as Exists, GetCommand as Get, IndexCreateCommand as IndexCreate, IndexRemoveCommand as IndexRemove, InfoAnyCommand as InfoAny, InfoForeachCommand as InfoForeach, InfoHostCommand as InfoHost, InfoNodeCommand as InfoNode, JobInfoCommand as JobInfo, OperateCommand as Operate, PrivilegeGrantCommand as PrivilegeGrant, PrivilegeRevokeCommand as PrivilegeRevoke, PutCommand as Put, QueryCommand as Query, QueryPagesCommand as QueryPages, QueryApplyCommand as QueryApply, QueryBackgroundCommand as QueryBackground, QueryOperateCommand as QueryOperate, QueryForeachCommand as QueryForeach, QueryRoleCommand as QueryRole, QueryRolesCommand as QueryRoles, QueryUserCommand as QueryUser, QueryUsersCommand as QueryUsers, RemoveCommand as Remove, RoleCreateCommand as RoleCreate, RoleDropCommand as RoleDrop, RoleGrantCommand as RoleGrant, RoleRevokeCommand as RoleRevoke, RoleSetWhitelistCommand as RoleSetWhitelist, RoleSetQuotasCommand as RoleSetQuotas, ScanCommand as Scan, ScanPagesCommand as ScanPages, ScanBackgroundCommand as ScanBackground, ScanOperateCommand as ScanOperate, SelectCommand as Select, TruncateCommand as Truncate, UdfRegisterCommand as UdfRegister, UdfRemoveCommand as UdfRemove, UserCreateCommand as UserCreate, UserDropCommand as UserDrop }; + } declare module 'commands/query_background_command' { function _exports(asCommand: any): { @@ -558,6 +565,7 @@ declare module 'commands/query_background_command' { }; export = _exports; import Job = require(".job"); + } declare module 'commands/read_record_command' { function _exports(asCommand: any): { @@ -567,6 +575,7 @@ declare module 'commands/read_record_command' { }; }; export = _exports; + } declare module 'commands/stream_command' { function _exports(asCommand: any): { @@ -577,6 +586,7 @@ declare module 'commands/stream_command' { }; }; export = _exports; + } declare module 'commands/write_record_command' { function _exports(asCommand: any): { @@ -586,6 +596,7 @@ declare module 'commands/write_record_command' { }; }; export = _exports; + } declare module 'config' { export = Config; @@ -605,6 +616,8 @@ declare module 'config' { maxSocketIdle: any; tenderInterval: any; maxConnsPerNode: any; + 
maxErrorRate: any; + errorRateWindow: any; minConnsPerNode: any; modlua: any; sharedMemory: any; @@ -615,6 +628,7 @@ declare module 'config' { private [inspect]; } const inspect: unique symbol; + } declare module 'double' { export = Double; @@ -624,6 +638,7 @@ declare module 'double' { Double: number; value(): number; } + } declare module 'error' { export = AerospikeError; @@ -642,6 +657,7 @@ declare module 'error' { isServerError(): boolean; get client(): any; } + } declare module 'event_loop' { export function releaseEventLoop(): void; @@ -649,6 +665,7 @@ declare module 'event_loop' { export function referenceEventLoop(): void; export function unreferenceEventLoop(): void; export function setCommandQueuePolicy(policy: any): void; + } declare module 'exp' { export function bool(value: any): { @@ -780,6 +797,10 @@ declare module 'exp' { op: any; count: number; }[]; + export function recordSize(): { + op: any; + count: number; + }[]; export function digestModulo(): { op: any; count: number; @@ -951,11 +972,26 @@ declare module 'exp' { describe: (bin: any) => any; mayContain: (bin: any, list: any) => any; }; + export const expReadFlags { + const DEFAULT: 0; + const EVAL_NO_FAIL: 16; + } + export type expReadFlags = number; + export const expWriteFlags { + const DEFAULT: 0; + const CREATE_ONLY: 1; + const UPDATE_ONLY: 2; + const ALLOW_DELETE: 4; + const POLICY_NO_FAIL: 8; + const EVAL_NO_FAIL: 16; + } + export type expWriteFlags = number; function _val(value: any): { [x: number]: any; op: any; }[]; export { _val as list, _val as map, _let as let, _var as var }; + } declare module 'exp_bit' { export function reSize(bin: any, flags: number, byteSize: number, policy?: any): any; @@ -976,6 +1012,7 @@ declare module 'exp_bit' { export function lScan(bin: any, value: any, bitSize: any, bitOffset: any): number; export function rScan(bin: any, value: any, bitSize: any, bitOffset: any): number; export function getInt(bin: any, sign: boolean, bitSize: any, bitOffset: any): any; + } declare module 'exp_hll' { export function initMH(bin: any, mhBitCount: number, indexBitCount: number, policy?: any): any; @@ -990,6 +1027,7 @@ declare module 'exp_hll' { export function getSimilarity(bin: any, list: any): any[]; export function describe(bin: any): any; export function mayContain(bin: any, list: any): any; + } declare module 'exp_lists' { export function size(bin: any, ctx?: any): any; @@ -1023,6 +1061,7 @@ declare module 'exp_lists' { export function removeByRank(bin: any, rank: any, ctx?: any, returnType?: any): any; export function removeByRankRangeToEnd(bin: any, rank: any, ctx?: any, returnType?: any): any; export function removeByRankRange(bin: any, count: any, rank: any, ctx?: any, returnType?: any): any; + } declare module 'exp_maps' { export function put(bin: any, value: any, key: any, policy?: any, ctx?: any): any; @@ -1062,10 +1101,11 @@ declare module 'exp_maps' { export function getByRank(bin: any, rank: any, valueType: any, returnType: any, ctx?: any): any; export function getByRankRangeToEnd(bin: any, rank: any, returnType: any, ctx?: any): any; export function getByRankRange(bin: any, count: any, rank: any, returnType: any, ctx?: any): any; + } declare module 'exp_operations' { - export function read(bin: string, exp: any, flags: any): Operation; - export function write(bin: string, exp: any, flags: any): Operation; + export function read(bin: string, exp: AerospikeExp, flags: expReadFlags): Operation; + export function write(bin: string, exp: AerospikeExp, flags: expWriteFlags): Operation; 
export class ExpOperation { protected constructor(); op: any; @@ -1073,11 +1113,13 @@ declare module 'exp_operations' { exp: any; flags: any; } + } declare module 'features' { export const CDT_MAP: "cdt-map"; export const CDT_LIST: "cdt-list"; export const BLOB_BITS: "blob-bits"; + } declare module 'filter' { export function range(bin: string, min: number, max: number, indexType?: number | undefined, context?: any): any; @@ -1096,6 +1138,7 @@ declare module 'filter' { context: any; } import GeoJSON = require("geojson"); + } declare module 'geojson' { export = GeoJSON; @@ -1112,6 +1155,7 @@ declare module 'geojson' { function Polygon(...args: number[][]): GeoJSON; function Circle(lng: number, lat: number, radius: number): GeoJSON; } + } declare module 'hll' { export function init(bin: string, indexBits: number, minhashBits?: number | undefined): any; @@ -1125,6 +1169,7 @@ declare module 'hll' { export function getIntersectCount(bin: string, list: any[]): any; export function getSimilarity(bin: string, list: any[]): any; export function describe(bin: string): any; + } declare module 'index_job' { export = IndexJob; @@ -1137,6 +1182,7 @@ declare module 'index_job' { private hasCompleted; private info; } + } declare module 'info' { export function parse(info: string): any; @@ -1160,6 +1206,7 @@ declare module 'info' { }; function chop(str: any): any; export {}; + } declare module 'job' { export = Job; @@ -1179,6 +1226,7 @@ declare module 'job' { function safeRandomJobID(): number; function pollUntilDone(statusFunction: any, pollInterval: any): Promise; } + } declare module 'key' { export = Key; @@ -1194,6 +1242,7 @@ declare module 'key' { namespace Key { function fromASKey(keyObj: any): Key | null; } + } declare module 'lists' { export function setOrder(bin: string, order: number): any; @@ -1229,6 +1278,7 @@ declare module 'lists' { export function getByRankRange(bin: string, rank: any, count?: number | undefined, returnType?: number | undefined): any; export function increment(bin: string, index: number, value?: number | undefined, policy?: any): any; export function size(bin: string): any; + } declare module 'maps' { export function setPolicy(bin: string, policy: MapPolicy): any; @@ -1262,6 +1312,8 @@ declare module 'maps' { export function getByIndexRange(bin: string, index: number, count?: number | undefined, returnType?: number | undefined): any; export function getByRank(bin: string, rank: number, returnType?: number | undefined): any; export function getByRankRange(bin: string, rank: any, count: number, returnType?: number | undefined): any; + export function create(bin: string, order: number, persistIndex: boolean | undefined, ctx: any): any; + } declare module 'operations' { export function read(bin: string): Operation; @@ -1278,6 +1330,7 @@ declare module 'operations' { bin: any; } export { _delete as delete }; + } declare module 'policies/admin_policy' { export = AdminPolicy; @@ -1285,6 +1338,7 @@ declare module 'policies/admin_policy' { constructor(props?: any); timeout: number; } + } declare module 'policies/apply_policy' { export = ApplyPolicy; @@ -1295,6 +1349,7 @@ declare module 'policies/apply_policy' { durableDelete: boolean; } import BasePolicy = require("policies/base_policy"); + } declare module 'policies/base_policy' { export = BasePolicy; @@ -1306,6 +1361,7 @@ declare module 'policies/base_policy' { filterExpression: any; compress: boolean; } + } declare module 'policies/batch_apply_policy' { export = BatchApplyPolicy; @@ -1317,6 +1373,7 @@ declare module 
'policies/batch_apply_policy' { ttl: number; durableDelete: boolean; } + } declare module 'policies/batch_policy' { export = BatchPolicy; @@ -1332,6 +1389,7 @@ declare module 'policies/batch_policy' { deserialize: boolean; } import BasePolicy = require("policies/base_policy"); + } declare module 'policies/batch_read_policy' { export = BatchReadPolicy; @@ -1341,6 +1399,7 @@ declare module 'policies/batch_read_policy' { readModeAP: number; readModeSC: number; } + } declare module 'policies/batch_remove_policy' { export = BatchRemovePolicy; @@ -1353,6 +1412,7 @@ declare module 'policies/batch_remove_policy' { generation: number; durableDelete: boolean; } + } declare module 'policies/batch_write_policy' { export = BatchWritePolicy; @@ -1365,6 +1425,7 @@ declare module 'policies/batch_write_policy' { exists: number; durableDelete: boolean; } + } declare module 'policies/bitwise_policy' { export = BitwisePolicy; @@ -1372,6 +1433,7 @@ declare module 'policies/bitwise_policy' { constructor(props?: any); writeFlags: number; } + } declare module 'policies/command_queue_policy' { export = CommandQueuePolicy; @@ -1385,6 +1447,7 @@ declare module 'policies/command_queue_policy' { maxCommandsInQueue: number; queueInitialCapacity: number; } + } declare module 'policies/hll_policy' { export = HLLPolicy; @@ -1392,6 +1455,7 @@ declare module 'policies/hll_policy' { constructor(props?: any); writeFlags: number; } + } declare module 'policies/info_policy' { export = InfoPolicy; @@ -1401,6 +1465,7 @@ declare module 'policies/info_policy' { sendAsIs: boolean; checkBounds: boolean; } + } declare module 'policies/list_policy' { export = ListPolicy; @@ -1409,6 +1474,7 @@ declare module 'policies/list_policy' { order: number; writeFlags: number; } + } declare module 'policies/map_policy' { export = MapPolicy; @@ -1418,6 +1484,7 @@ declare module 'policies/map_policy' { writeMode: number; writeFlags: number; } + } declare module 'policies/operate_policy' { export = OperatePolicy; @@ -1433,6 +1500,7 @@ declare module 'policies/operate_policy' { readModeSC: number; } import BasePolicy = require("policies/base_policy"); + } declare module 'policies/query_policy' { export = QueryPolicy; @@ -1443,6 +1511,7 @@ declare module 'policies/query_policy' { infoTimeout: number; } import BasePolicy = require("policies/base_policy"); + } declare module 'policies/read_policy' { export = ReadPolicy; @@ -1454,6 +1523,7 @@ declare module 'policies/read_policy' { deserialize: boolean; } import BasePolicy = require("policies/base_policy"); + } declare module 'policies/remove_policy' { export = RemovePolicy; @@ -1465,6 +1535,7 @@ declare module 'policies/remove_policy' { durableDelete: boolean; } import BasePolicy = require("policies/base_policy"); + } declare module 'policies/scan_policy' { export = ScanPolicy; @@ -1475,6 +1546,7 @@ declare module 'policies/scan_policy' { maxRecords: number; } import BasePolicy = require("policies/base_policy"); + } declare module 'policies/write_policy' { export = WritePolicy; @@ -1487,6 +1559,7 @@ declare module 'policies/write_policy' { durableDelete: boolean; } import BasePolicy = require("policies/base_policy"); + } declare module 'policy' { export function createPolicy(type: any, values: any): CommandQueuePolicy | BasePolicy | BatchApplyPolicy | BatchReadPolicy | BatchRemovePolicy | BatchWritePolicy | HLLPolicy | InfoPolicy | AdminPolicy | undefined; @@ -1510,6 +1583,7 @@ declare module 'policy' { import ListPolicy = require("policies/list_policy"); import MapPolicy = 
require("policies/map_policy"); export { BasePolicy, ApplyPolicy, OperatePolicy, QueryPolicy, ReadPolicy, RemovePolicy, ScanPolicy, WritePolicy, BatchPolicy, BatchApplyPolicy, BatchReadPolicy, BatchRemovePolicy, BatchWritePolicy, CommandQueuePolicy, HLLPolicy, InfoPolicy, AdminPolicy, ListPolicy, MapPolicy }; + } declare module 'privilege' { export = Privilege; @@ -1520,6 +1594,7 @@ declare module 'privilege' { namespace: any; set: any; } + } declare module 'privilege_code' { export const USER_ADMIN: any; @@ -1532,6 +1607,7 @@ declare module 'privilege_code' { export const READ_WRITE_UDF: any; export const WRITE: any; export const TRUNCATE: any; + } declare module 'query' { export = Query; @@ -1575,11 +1651,12 @@ declare module 'query' { foreach(policy?: QueryPolicy, dataCb?: recordCallback | undefined, errorCb?: errorCallback | undefined, endCb?: doneCallback | undefined): RecordStream; results(policy?: QueryPolicy): Promise; apply(udfModule: string, udfFunction: string, udfArgs?: any[] | undefined, policy?: QueryPolicy, callback?: QueryaggregationResultCallback | undefined): Promise | null; - background(udfModule: string, udfFunction: string, udfArgs?: any[] | undefined, policy?: WritePolicy, queryID?: number | undefined, callback?: jobCallback | undefined): Promise | null; + background(udfModule: string, udfFunction: string, udfArgs?: any[] | undefined, policy?: QueryPolicy, queryID?: number | undefined, callback?: jobCallback | undefined): Promise | null; operate(operations: any, policy?: QueryPolicy, queryID?: number | undefined, callback?: jobCallback | undefined): Promise | null; ops: any; } import RecordStream = require("record_stream"); + } declare module 'record' { export = Record; @@ -1595,6 +1672,7 @@ declare module 'record' { ops: any; udf: any; } + } declare module 'record_stream' { export = RecordStream; @@ -1608,6 +1686,7 @@ declare module 'record_stream' { _read(): void; abort(): void; } + } declare module 'role' { export = Role; @@ -1620,6 +1699,7 @@ declare module 'role' { whitelist: any; privileges: any; } + } declare module 'scan' { export = Scan; @@ -1667,6 +1747,7 @@ declare module 'scan' { results(policy?: ScanPolicy): Promise; } import RecordStream = require("record_stream"); + } declare module 'status' { export const ERR_ASYNC_QUEUE_FULL: any; @@ -1749,7 +1830,9 @@ declare module 'status' { export const ERR_UDF_NOT_FOUND: any; export const ERR_LUA_FILE_NOT_FOUND: any; export function getMessage(code: any): string; + } + type Host = object; type ClientStats = any; type doneCallback = () => any; @@ -1816,6 +1899,7 @@ declare module 'udf_job' { const REGISTER: string; const UNREGISTER: string; } + } declare module 'user' { export = User; @@ -1828,6 +1912,7 @@ declare module 'user' { writeInfo: any; roles: any; } + } declare module 'utils' { export function parseHostString(hostString: any): { @@ -1836,4 +1921,5 @@ declare module 'utils' { port: number; }; export function print(err: any, result: any): void; + } \ No newline at end of file
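The reworked `respond()` handlers in the batch commands above return per-record results instead of rejecting the whole call when the batch fails with `AEROSPIKE_BATCH_FAILED` but individual results are available. Callers can therefore inspect each record's status, as the batch_write.js test does. A minimal sketch, assuming a connected `client` and a prepared `batchRecords` array:

```js
const Aerospike = require('aerospike')
const status = Aerospike.status

async function writeBatch (client, batchRecords) {
  const results = await client.batchWrite(batchRecords)
  for (const result of results) {
    if (result.status !== status.OK) {
      // e.g. status.ERR_RECORD_EXISTS when exists.CREATE hits an existing record
      console.warn('batch record failed with status', result.status)
    }
  }
  return results
}
```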
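`config.cc` now forwards two new client config fields, `maxErrorRate` and `errorRateWindow`, to the C client's `max_error_rate` and `error_rate_window` settings (the test helper above sets `maxErrorRate: 0` to disable the limit). A sketch of a config that uses them; the host is a placeholder, the values mirror test/config.js, and the comments describe the C client fields these map to:

```js
const Aerospike = require('aerospike')

const config = {
  hosts: 'localhost:3000', // placeholder host
  maxErrorRate: 100,       // per-node error threshold before the client backs off
  errorRateWindow: 1       // number of tend intervals over which errors are counted
}

Aerospike.connect(config)
  .then(client => client.close())
  .catch(console.error)
```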
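The new `expReadFlags` and `expWriteFlags` enums exported above replace the bare `0` previously passed to `exp.operations.read`/`exp.operations.write`. A minimal usage sketch, assuming a connected `client`, an existing record with an integer bin `intVal`, and an illustrative result bin name `doubled`; since the values are bit flags (see the exp.js test above), they are combined here with `|`:

```js
const Aerospike = require('aerospike')
const exp = Aerospike.exp
const op = Aerospike.operations

async function doubleIntVal (client, key) {
  const ops = [
    // UPDATE_ONLY: only overwrite an existing 'intVal' bin;
    // POLICY_NO_FAIL: treat a policy violation as a no-op rather than an error.
    exp.operations.write('intVal',
      exp.add(exp.binInt('intVal'), exp.binInt('intVal')),
      exp.expWriteFlags.UPDATE_ONLY | exp.expWriteFlags.POLICY_NO_FAIL),
    // Evaluate an expression into a result-only bin, ignoring evaluation errors.
    exp.operations.read('doubled',
      exp.binInt('intVal'),
      exp.expReadFlags.EVAL_NO_FAIL),
    op.read('intVal')
  ]
  const result = await client.operate(key, ops, {})
  return result.bins
}
```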
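`exp.recordSize()` (backed by the new `RECORD_SIZE` opcode) can also be used outside of `operate` result bins, for example as a filter expression on a transaction policy. A sketch under a few assumptions: server 7.0+, the usual `operate(key, ops, metadata, policy)` argument order, and the client's comparison helper `exp.gt`; the 1 KiB threshold is arbitrary:

```js
const Aerospike = require('aerospike')
const exp = Aerospike.exp
const op = Aerospike.operations

// Apply the read only when the stored record is larger than 1 KiB.
async function readIfLarge (client, key) {
  const policy = { filterExpression: exp.gt(exp.recordSize(), exp.int(1024)) }
  const result = await client.operate(key, [op.read('intVal')], {}, policy)
  return result.bins.intVal
}
```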
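The new `maps.create` operation (wired up via `add_map_create_op` and the `MAP_CREATE` table entry above) creates a map bin with a chosen ordering and, per the typings, an optional persistent index and CDT context. A sketch based on the maps.js tests; the bin name `profile` and nested key `settings` are illustrative:

```js
const Aerospike = require('aerospike')
const maps = Aerospike.maps
const Context = Aerospike.cdt.Context

async function createMaps (client, key) {
  await client.operate(key, [
    // Create a key-ordered top-level map bin.
    maps.create('profile', maps.order.KEY_ORDERED),
    // Create a key-ordered map under profile.settings, passing the CDT
    // context as the fourth argument (persistIndex left as false here).
    maps.create('profile', maps.order.KEY_ORDERED, false,
      new Context().addMapKeyCreate('settings'))
  ])
}
```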
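With `AS_INDEX_BLOB` exposed as `indexDataType.BLOB` and the new `Client#createBlobIndex()` wrapper, Buffer values can be indexed and queried on server 7.0+. A sketch of building such an index and filtering on it, following the query.js tests; the namespace, set, bin, and index names are placeholders, and `job.wait()` assumes the usual IndexJob helper:

```js
const Aerospike = require('aerospike')
const filter = Aerospike.filter

async function findByBlob (client) {
  const job = await client.createBlobIndex({
    ns: 'test',
    set: 'demo',
    bin: 'blob',
    index: 'demo_blob_idx'
  })
  await job.wait() // wait for the index build to finish

  const query = client.query('test', 'demo')
  query.where(filter.equal('blob', Buffer.from('guava')))
  return query.results()
}
```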
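Finally, the `conversions.cc` change switches `map_from_jsmap` from `as_stringmap_set` to `as_map_set`, converting each JS `Map` key through `asval_from_jsvalue` instead of coercing it to a string. In practice, maps with non-string keys, such as the Buffer-keyed `mkblob` samples in query.js, should now round-trip as-is; a sketch with illustrative bin and key names:

```js
async function putBlobKeyedMap (client, key) {
  // Buffer (and other non-string) Map keys are preserved rather than
  // being stringified on the way into the C client.
  await client.put(key, {
    mkblob: new Map([
      [Buffer.from('guava'), 1],
      [Buffer.from('papaya'), 2]
    ])
  })
  return client.get(key)
}
```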