Skip to content

Commit

Permalink
Linted repository
Browse files Browse the repository at this point in the history
use "npm run lint"
Added additional logic to the batchRemove test to clean up records for which batchRemove failed
  • Loading branch information
DomPeliniAerospike committed Nov 1, 2023
1 parent 6f43a85 commit 8b75631
Show file tree
Hide file tree
Showing 6 changed files with 42 additions and 39 deletions.
16 changes: 8 additions & 8 deletions lib/exp.js
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,7 @@ exports.setName = _metaExp(exp.ops.SET_NAME)
* because record meta data is cached in memory.
* Requires server version between 5.3.0 inclusive and 7.0 exclusive.
* Use {@link #recordSize} for server version 7.0+.
*
*
* @function
* @return {AerospikeExp} integer value Uncompressed storage size of the record.
*/
Expand Down Expand Up @@ -495,7 +495,7 @@ exports.isTombstone = _metaExp(exp.ops.IS_TOMBSTONE)
* in memory.
* Requires server version between 5.3.0 inclusive and 7.0 exclusive.
* Use {@link #recordSize} for server version 7.0+.
*
*
* @function
* @return {AerospikeExp} integer value memory size of the record.
*/
Expand Down Expand Up @@ -1028,8 +1028,8 @@ exports.bit = require('./exp_bit')
exports.hll = require('./exp_hll')

/**
*
* @readonly
*
* @readonly
* @enum {number}
* @description Expression read bit flags. Use BITWISE OR to combine flags.
*/
Expand All @@ -1044,12 +1044,12 @@ exports.expReadFlags = {
* Ignore failures caused by the expression resolving to unknown or a non-bin type.
* @const {number}
*/
EVAL_NO_FAIL: readFlags.EVAL_NO_FAIL,
EVAL_NO_FAIL: readFlags.EVAL_NO_FAIL
}

/**
*
* @readonly
*
* @readonly
* @enum {number}
* @description Expression write bit flags. Use BITWISE OR to combine flags.
*/
Expand Down Expand Up @@ -1089,4 +1089,4 @@ exports.expWriteFlags = {
* @const {number}
*/
EVAL_NO_FAIL: writeFlags.EVAL_NO_FAIL
}
}
2 changes: 1 addition & 1 deletion lib/filter.js
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ function dataTypeOf (value) {
case 'double':
return as.indexDataType.NUMERIC
default:
if(Buffer.isBuffer(value)){
if (Buffer.isBuffer(value)) {
return as.indexDataType.BLOB
}

Expand Down
2 changes: 1 addition & 1 deletion lib/maps.js
Original file line number Diff line number Diff line change
Expand Up @@ -1295,7 +1295,7 @@ exports.getByRankRange = function (bin, rank, count, returnType) {
* {@link module:aerospike/maps~MapOperation#andReturn|MapOperation#andReturn} to
* select what data to return.
*/
exports.create = function (bin, order, persistIndex=false) {
exports.create = function (bin, order, persistIndex = false) {
const op = new MapOperation(opcodes.MAP_CREATE, bin)
op.order = order
op.persistIndex = persistIndex
Expand Down
11 changes: 10 additions & 1 deletion test/batch_remove.js
Original file line number Diff line number Diff line change
Expand Up @@ -82,10 +82,19 @@ describe('client.batchRemove()', function () {
new Key(helper.namespace, helper.set, 'test/batch_remove/0')
]
try {
await client.batchRemove(batchRecords, null, new Aerospike.BatchRemovePolicy({ gen: Aerospike.policy.gen.EQ, generation: 3 }))
await client.batchRemove(batchRecords, null, new Aerospike.BatchRemovePolicy({ gen: Aerospike.policy.gen.EQ, generation: 10 }))
// Will fail if code makes it here
expect(1).to.eql(2)
} catch (error) {
// code will fail with undefined if expect(1).to.eql(2) executes
expect(error.code).to.eql(-16)
const results = await client.batchRemove(batchRecords)
expect(results.length).to.equal(5)
results.forEach(function (result) {
expect(result.status).to.equal(Aerospike.status.OK)
// expect(results.record.bins).to.be.empty()
// console.log(util.inspect(result, true, 10, true))
})
}
})
})
Expand Down
9 changes: 3 additions & 6 deletions test/maps.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,6 @@
const Aerospike = require('../lib/aerospike')
const helper = require('./test_helper')

const { Buffer } = require('node:buffer');


const maps = Aerospike.maps
const op = Aerospike.operations
const Context = Aerospike.cdt.Context
Expand Down Expand Up @@ -73,7 +70,7 @@ describe('client.operate() - CDT Map operations', function () {
.then(orderByKey('map'))
.then(operate(maps.create('emptyMap', maps.order.KEY_ORDERED)))
.then(operate(op.read('dap')))
.then(assertRecordEql({emptyMap: {}, map: {a: 3, b: 2, c: 1}}))
.then(assertRecordEql({ emptyMap: {}, map: { a: 3, b: 2, c: 1 } }))
.then(cleanup())
})

Expand All @@ -82,7 +79,7 @@ describe('client.operate() - CDT Map operations', function () {
.then(createRecord({ map: { c: 1, b: 2, a: 3 } }))
.then(orderByKey('map'))
.then(operate(maps.create('map', maps.order.KEY_ORDERED).withContext(ctx => ctx.addMapKeyCreate('nested'))))
.then(assertRecordEql({map: {a: 3, b: 2, c: 1, nested: {}}}))
.then(assertRecordEql({ map: { a: 3, b: 2, c: 1, nested: {} } }))
.then(cleanup())
})

Expand All @@ -91,7 +88,7 @@ describe('client.operate() - CDT Map operations', function () {
.then(createRecord({ map: { c: 1, b: 2, a: 3 } }))
.then(orderByKey('map'))
.then(operate(maps.create('emptyMap', maps.order.KEY_ORDERED, true)))
.then(assertRecordEql({ emptyMap: {}, map: {a: 3, b: 2, c: 1}}))
.then(assertRecordEql({ emptyMap: {}, map: { a: 3, b: 2, c: 1 } }))
.then(cleanup())
})
})
Expand Down
41 changes: 19 additions & 22 deletions test/query.js
Original file line number Diff line number Diff line change
Expand Up @@ -76,14 +76,14 @@ describe('Queries', function () {
{ name: 'region list non-match', lg: [GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] },
{ name: 'region map match', mg: { a: GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } },
{ name: 'region map non-match', mg: [GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] },
{ name: 'blob match', blob: Buffer.from('guava')},
{ name: 'blob non-match', blob: Buffer.from('pumpkin')},
{ name: 'blob list match', lblob: [Buffer.from('guava'), Buffer.from('papaya')]},
{ name: 'blob list non-match', lblob: [Buffer.from('pumpkin'), Buffer.from('turnip')]},
{ name: 'blob map match', mblob: {a: Buffer.from('guava'), b: Buffer.from('papaya')}},
{ name: 'blob map non-match', mblob: {a: Buffer.from('pumpkin'), b: Buffer.from('turnip')}},
{ name: 'blob mapkeys match', mkblob: new Map([[Buffer.from('guava'), 1], [Buffer.from('papaya'), 2]])},
{ name: 'blob mapkeys non-match', mkblob: new Map([[Buffer.from('pumpkin'), 3], [Buffer.from('turnip'), 4]])},
{ name: 'blob match', blob: Buffer.from('guava') },
{ name: 'blob non-match', blob: Buffer.from('pumpkin') },
{ name: 'blob list match', lblob: [Buffer.from('guava'), Buffer.from('papaya')] },
{ name: 'blob list non-match', lblob: [Buffer.from('pumpkin'), Buffer.from('turnip')] },
{ name: 'blob map match', mblob: { a: Buffer.from('guava'), b: Buffer.from('papaya') } },
{ name: 'blob map non-match', mblob: { a: Buffer.from('pumpkin'), b: Buffer.from('turnip') } },
{ name: 'blob mapkeys match', mkblob: new Map([[Buffer.from('guava'), 1], [Buffer.from('papaya'), 2]]) },
{ name: 'blob mapkeys non-match', mkblob: new Map([[Buffer.from('pumpkin'), 3], [Buffer.from('turnip'), 4]]) },
{ name: 'aggregate', value: 10 },
{ name: 'aggregate', value: 20 },
{ name: 'aggregate', value: 30 },
Expand Down Expand Up @@ -114,14 +114,14 @@ describe('Queries', function () {
{ name: 'nested region list non-match', lg: { nested: [GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] } },
{ name: 'nested region map match', mg: { nested: { a: GeoJSON.Polygon([102.913, 0.308], [102.913, 2.308], [104.913, 2.308], [104.913, 0.308], [102.913, 0.308]) } } },
{ name: 'nested region map non-match', mg: { nested: [GeoJSON.Polygon([-121.101, 36.421], [-121.101, 38.421], [-123.101, 38.421], [-123.101, 36.421], [-121.101, 36.421])] } },
{ name: 'nested blob match', blob: {nested: Buffer.from('guava')}},
{ name: 'nested blob non-match', blob: {nested: Buffer.from('pumpkin')}},
{ name: 'nested blob list match', lblob: {nested: [Buffer.from('guava'), Buffer.from('papaya')]}},
{ name: 'nested blob list non-match', lblob: {nested: [Buffer.from('pumpkin'), Buffer.from('turnip')]}},
{ name: 'nested blob map match', mblob: {nested: {a: Buffer.from('guava'), b: Buffer.from('papaya')}}},
{ name: 'nested blob map non-match', mblob: {nested: {a: Buffer.from('pumpkin'), b: Buffer.from('turnip')}}},
{ name: 'nested blob mapkeys match', mkblob: {nested: new Map([[Buffer.from('guava'), 1], [Buffer.from('papaya'), 2]])}},
{ name: 'nested blob mapkeys non-match', mkblob: {nested: new Map([[Buffer.from('pumpkin'), 3], [Buffer.from('turnip'), 4]])}},
{ name: 'nested blob match', blob: { nested: Buffer.from('guava') } },
{ name: 'nested blob non-match', blob: { nested: Buffer.from('pumpkin') } },
{ name: 'nested blob list match', lblob: { nested: [Buffer.from('guava'), Buffer.from('papaya')] } },
{ name: 'nested blob list non-match', lblob: { nested: [Buffer.from('pumpkin'), Buffer.from('turnip')] } },
{ name: 'nested blob map match', mblob: { nested: { a: Buffer.from('guava'), b: Buffer.from('papaya') } } },
{ name: 'nested blob map non-match', mblob: { nested: { a: Buffer.from('pumpkin'), b: Buffer.from('turnip') } } },
{ name: 'nested blob mapkeys match', mkblob: { nested: new Map([[Buffer.from('guava'), 1], [Buffer.from('papaya'), 2]]) } },
{ name: 'nested blob mapkeys non-match', mkblob: { nested: new Map([[Buffer.from('pumpkin'), 3], [Buffer.from('turnip'), 4]]) } },
{ name: 'nested aggregate', nested: { value: 10 } },
{ name: 'nested aggregate', nested: { value: 20 } },
{ name: 'nested aggregate', nested: { value: 30 } },
Expand Down Expand Up @@ -604,7 +604,6 @@ describe('Queries', function () {
verifyQueryResults(args, 'string match', done)
})


it('should match equal blob values', function (done) {
const args = { filters: [filter.equal('blob', Buffer.from('guava'))] }
verifyQueryResults(args, 'blob match', done)
Expand Down Expand Up @@ -712,9 +711,8 @@ describe('Queries', function () {
it('should match maps containing a blob value in a nested context', function (done) {
const args = { filters: [filter.contains('mblob', Buffer.from('guava'), MAPVALUES, new Context().addMapKey('nested'))] }
verifyQueryResults(args, 'nested blob map match', done)
})
})


it('should match maps containing a blob key', function (done) {
const args = { filters: [filter.contains('mkblob', Buffer.from('guava'), MAPKEYS)] }
verifyQueryResults(args, 'blob mapkeys match', done)
Expand All @@ -723,13 +721,12 @@ describe('Queries', function () {
it('should match maps containing a blob key in a nested context', function (done) {
const args = { filters: [filter.contains('mkblob', Buffer.from('guava'), MAPKEYS, new Context().addMapKey('nested'))] }
verifyQueryResults(args, 'nested blob mapkeys match', done)
})
})

it('throws a type error if the comparison value is of invalid type', function () {
const fn = () => filter.contains('list', { foo: 'bar' }, LIST)
expect(fn).to.throw(TypeError)
})

})

describe('filter.geoWithinGeoJSONRegion()', function () {
Expand Down

0 comments on commit 8b75631

Please sign in to comment.