Merge pull request #122 from Expensify/marcaaron-fixMultiMerge
Add sync writes to multiMerge() in WebStorage. Fix caching when multiMerge() runs.
marcaaron authored Mar 18, 2022
2 parents 032ff54 + f6bad97 commit 2d42566
Showing 8 changed files with 296 additions and 63 deletions.
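As a rough sketch of the behaviour being fixed (illustrative only, based on the new test added below; not part of the diff): consecutive merges against the same collection key should all end up reflected in both the cache and storage.

    // Illustrative only (not part of the diff)
    Onyx.mergeCollection('test_', {test_1: {b: 'b', c: 'c'}});
    Onyx.mergeCollection('test_', {test_1: {d: 'd'}});
    // Once the queued writes settle, test_1 should hold the previously stored value
    // merged with both payloads, e.g. {a: 'a', b: 'b', c: 'c', d: 'd'} when storage
    // started out with {a: 'a'}.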
3 changes: 0 additions & 3 deletions __mocks__/lib/storage/index.js

This file was deleted.

34 changes: 32 additions & 2 deletions __mocks__/localforage.js
@@ -1,7 +1,37 @@
+import _ from 'underscore';
+
+let storageMap = {};
+
const localforageMock = {
+    get storageMap() {
+        return storageMap;
+    },
+    keys() {
+        return new Promise((resolve) => {
+            resolve(_.keys(storageMap));
+        });
+    },
+    setInitialMockData: (data) => {
+        storageMap = data;
+    },
    config: jest.fn(),
-    getItem: jest.fn(),
-    setItem: jest.fn(() => Promise.resolve()),
+    getItem: jest.fn(key => new Promise((resolve) => {
+        resolve(storageMap[key]);
+    })),
+    setItem: jest.fn((key, value) => new Promise((resolve) => {
+        storageMap[key] = value;
+        resolve();
+    })),
+    removeItem: jest.fn(key => new Promise((resolve) => {
+        delete storageMap[key];
+        resolve();
+    })),
+    clear() {
+        return new Promise((resolve) => {
+            storageMap = {};
+            resolve();
+        });
+    },
};

export default localforageMock;
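A minimal sketch of how a test can drive this mock (illustrative only; it assumes the mock has been wired up through Jest's module mocking):

    // Illustrative only (not part of the diff)
    localforageMock.setInitialMockData({test_1: {a: 'a'}});

    localforageMock.getItem('test_1').then(value => console.log(value)); // {a: 'a'}
    localforageMock.setItem('test_2', {b: 'b'})
        .then(() => console.log(localforageMock.storageMap.test_2)); // {b: 'b'}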
13 changes: 7 additions & 6 deletions lib/Onyx.js
@@ -768,11 +768,12 @@ function mergeCollection(collectionKey, collection)
        promises.push(Storage.multiSet(keyValuePairsForNewCollection));
    }

-    // Merge original data to cache
-    cache.merge(collection);
-
-    // Optimistically inform collection subscribers
-    keysChanged(collectionKey, collection);
+    // Prefill cache if necessary by calling get() on any existing keys and then merge original data to cache
+    // and update all subscribers
+    Promise.all(_.map(existingKeys, get)).then(() => {
+        cache.merge(collection);
+        keysChanged(collectionKey, collection);
+    });

    return Promise.all(promises)
        .catch(error => evictStorageAndRetry(error, mergeCollection, collection));
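The reordering matters when a collection member exists in storage but is missing from the cache, for example after eviction. A rough sketch of the failure mode the prefill avoids (illustrative only; mirrors the eviction test added below):

    // Illustrative only (not part of the diff)
    // Storage holds test_1 = {a: 'a'}, but the key has been dropped from the cache
    OnyxCache.drop('test_1');

    Onyx.mergeCollection('test_', {test_1: {b: 'b'}});

    // Because mergeCollection() now waits for get('test_1') before cache.merge(),
    // the cache ends up with {a: 'a', b: 'b'} rather than just {b: 'b'},
    // matching what multiMerge() writes to storage.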
@@ -808,7 +809,7 @@ function init({
    keys = {},
    initialKeyStates = {},
    safeEvictionKeys = [],
-    maxCachedKeysCount = 55,
+    maxCachedKeysCount = 1000,
    captureMetrics = false,
    shouldSyncMultipleInstances = Boolean(global.localStorage),
} = {}) {
51 changes: 51 additions & 0 deletions lib/SyncQueue.js
@@ -0,0 +1,51 @@
/**
 * Synchronous queue that can be used to ensure promise based tasks are run in sequence.
 * Pass to the constructor a function that returns a promise to run the task then add data.
 *
 * @example
 *
 * const queue = new SyncQueue(({key, val}) => {
 *     return someAsyncProcess(key, val);
 * });
 *
 * queue.push({key: 1, val: '1'});
 * queue.push({key: 2, val: '2'});
 */
export default class SyncQueue {
    /**
     * @param {Function} run - must return a promise
     */
    constructor(run) {
        this.queue = [];
        this.isProcessing = false;
        this.run = run;
    }

    process() {
        if (this.isProcessing || this.queue.length === 0) {
            return;
        }

        this.isProcessing = true;

        const {data, resolve, reject} = this.queue.shift();
        this.run(data)
            .then(resolve)
            .catch(reject)
            .finally(() => {
                this.isProcessing = false;
                this.process();
            });
    }

    /**
     * @param {*} data
     * @returns {Promise}
     */
    push(data) {
        return new Promise((resolve, reject) => {
            this.queue.push({resolve, reject, data});
            this.process();
        });
    }
}
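A small usage sketch (illustrative only): tasks pushed onto a SyncQueue resolve strictly in order, even when a later task's underlying work would finish sooner.

    // Illustrative only (not part of the diff)
    const queue = new SyncQueue(({id, delay}) => new Promise(resolve => setTimeout(() => resolve(id), delay)));

    queue.push({id: 1, delay: 50}).then(id => console.log(id)); // logs 1 first
    queue.push({id: 2, delay: 0}).then(id => console.log(id)); // logs 2 second, despite the shorter delay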
3 changes: 3 additions & 0 deletions lib/storage/__mocks__/index.js
@@ -0,0 +1,3 @@
import WebStorage from '../WebStorage';

export default WebStorage;
67 changes: 24 additions & 43 deletions lib/storage/providers/LocalForage.js
@@ -7,39 +7,32 @@
import localforage from 'localforage';
import _ from 'underscore';
import lodashMerge from 'lodash/merge';
+import SyncQueue from '../../SyncQueue';

localforage.config({
    name: 'OnyxDB'
});

-const writeQueue = [];
-let isQueueProcessing = false;
-
-/**
- * Writing very quickly to IndexedDB causes performance issues and can lock up the page and lead to jank.
- * So, we are slowing this process down by waiting until one write is complete before moving on
- * to the next.
- */
-function processNextWrite() {
-    if (isQueueProcessing || writeQueue.length === 0) {
-        return;
-    }
-
-    isQueueProcessing = true;
-
-    const {
-        key, value, resolve, reject,
-    } = writeQueue.shift();
-    localforage.setItem(key, value)
-        .then(resolve)
-        .catch(reject)
-        .finally(() => {
-            isQueueProcessing = false;
-            processNextWrite();
-        });
-}
-
const provider = {
+    /**
+     * Writing very quickly to IndexedDB causes performance issues and can lock up the page and lead to jank.
+     * So, we are slowing this process down by waiting until one write is complete before moving on
+     * to the next.
+     */
+    setItemQueue: new SyncQueue(({key, value, shouldMerge}) => {
+        if (shouldMerge) {
+            return localforage.getItem(key)
+                .then((existingValue) => {
+                    const newValue = _.isObject(existingValue)
+                        ? lodashMerge({}, existingValue, value)
+                        : value;
+                    return localforage.setItem(key, newValue);
+                });
+        }
+
+        return localforage.setItem(key, value);
+    }),

/**
* Get multiple key-value pairs for the given array of keys in a batch
* @param {String[]} keys
@@ -61,14 +54,7 @@ const provider = {
* @return {Promise<void>}
*/
    multiMerge(pairs) {
-        const tasks = _.map(pairs, ([key, partialValue]) => this.getItem(key)
-            .then((existingValue) => {
-                const newValue = _.isObject(existingValue)
-                    ? lodashMerge(existingValue, partialValue)
-                    : partialValue;
-
-                return this.setItem(key, newValue);
-            }));
+        const tasks = _.map(pairs, ([key, value]) => this.setItemQueue.push({key, value, shouldMerge: true}));

        // We're returning Promise.resolve, otherwise the array of task results will be returned to the caller
        return Promise.all(tasks).then(() => Promise.resolve());
@@ -117,14 +103,9 @@
* @param {*} value
* @return {Promise<void>}
*/
-    setItem: (key, value) => (
-        new Promise((resolve, reject) => {
-            writeQueue.push({
-                key, value, resolve, reject,
-            });
-            processNextWrite();
-        })
-    ),
+    setItem(key, value) {
+        return this.setItemQueue.push({key, value});
+    },
};

export default provider;
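Because setItem() and multiMerge() now push onto the same queue, a merge queued after a plain write always sees that write in storage. A rough sketch (illustrative only; it assumes the provider's getItem() passthrough defined elsewhere in this file):

    // Illustrative only (not part of the diff)
    provider.setItem('test_1', {a: 'a'});
    provider.multiMerge([['test_1', {b: 'b'}]])
        .then(() => provider.getItem('test_1'))
        .then(value => console.log(value)); // {a: 'a', b: 'b'}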
172 changes: 172 additions & 0 deletions tests/unit/onyxMultiMergeWebStorageTest.js
@@ -0,0 +1,172 @@
import OnyxCache from '../../lib/OnyxCache';
import waitForPromisesToResolve from '../utils/waitForPromisesToResolve';
import localforageMock from '../../__mocks__/localforage';

const ONYX_KEYS = {
    COLLECTION: {
        TEST_KEY: 'test_',
    },
};

const initialTestObject = {a: 'a'};
const initialData = {
    test_1: initialTestObject,
    test_2: initialTestObject,
    test_3: initialTestObject,
};

localforageMock.setInitialMockData(initialData);

describe('Onyx.mergeCollection() and WebStorage', () => {
    let Onyx;

    beforeAll(() => {
        // Force using WebStorage provider for these tests
        jest.mock('../../lib/storage');
        Onyx = require('../../index').default;
        jest.useRealTimers();

        Onyx.init({
            keys: ONYX_KEYS,
            registerStorageEventListener: () => {},
            initialKeyStates: {},
        });
    });

    afterEach(() => Onyx.clear());

    it('merges two sets of data consecutively', () => {
        // Given initial data in storage
        expect(localforageMock.storageMap.test_1).toEqual(initialTestObject);
        expect(localforageMock.storageMap.test_2).toEqual(initialTestObject);
        expect(localforageMock.storageMap.test_3).toEqual(initialTestObject);

        // And empty cache values for the collection keys
        expect(OnyxCache.getValue('test_1')).not.toBeDefined();
        expect(OnyxCache.getValue('test_2')).not.toBeDefined();
        expect(OnyxCache.getValue('test_3')).not.toBeDefined();

        // When we merge additional data
        const additionalDataOne = {b: 'b', c: 'c'};
        Onyx.mergeCollection(ONYX_KEYS.COLLECTION.TEST_KEY, {
            test_1: additionalDataOne,
            test_2: additionalDataOne,
            test_3: additionalDataOne,
        });

        // And call again consecutively with different data
        const additionalDataTwo = {d: 'd'};
        Onyx.mergeCollection(ONYX_KEYS.COLLECTION.TEST_KEY, {
            test_1: additionalDataTwo,
            test_2: additionalDataTwo,
            test_3: additionalDataTwo,
        });

        return waitForPromisesToResolve()
            .then(() => {
                const finalObject = {
                    a: 'a', b: 'b', c: 'c', d: 'd',
                };

                // Then our new data should merge with the existing data in the cache
                expect(OnyxCache.getValue('test_1')).toEqual(finalObject);
                expect(OnyxCache.getValue('test_2')).toEqual(finalObject);
                expect(OnyxCache.getValue('test_3')).toEqual(finalObject);

                // And the storage should reflect the same state
                expect(localforageMock.storageMap.test_1).toEqual(finalObject);
                expect(localforageMock.storageMap.test_2).toEqual(finalObject);
                expect(localforageMock.storageMap.test_3).toEqual(finalObject);
            });
    });

    it('cache updates correctly when accessed again if keys are removed or evicted', () => {
        // Given empty storage
        expect(localforageMock.storageMap.test_1).toBeFalsy();
        expect(localforageMock.storageMap.test_2).toBeFalsy();
        expect(localforageMock.storageMap.test_3).toBeFalsy();

        // And empty cache values for the collection keys
        expect(OnyxCache.getValue('test_1')).toBeFalsy();
        expect(OnyxCache.getValue('test_2')).toBeFalsy();
        expect(OnyxCache.getValue('test_3')).toBeFalsy();

        // When we merge additional data and wait for the change
        const data = {a: 'a', b: 'b'};
        Onyx.mergeCollection(ONYX_KEYS.COLLECTION.TEST_KEY, {
            test_1: data,
            test_2: data,
            test_3: data,
        });

        return waitForPromisesToResolve()
            .then(() => {
                // Then the cache and storage should match
                expect(OnyxCache.getValue('test_1')).toEqual(data);
                expect(OnyxCache.getValue('test_2')).toEqual(data);
                expect(OnyxCache.getValue('test_3')).toEqual(data);
                expect(localforageMock.storageMap.test_1).toEqual(data);
                expect(localforageMock.storageMap.test_2).toEqual(data);
                expect(localforageMock.storageMap.test_3).toEqual(data);

                // When we drop all the cache keys (but do not modify the underlying storage) and merge another object
                OnyxCache.drop('test_1');
                OnyxCache.drop('test_2');
                OnyxCache.drop('test_3');

                const additionalData = {c: 'c'};
                Onyx.mergeCollection(ONYX_KEYS.COLLECTION.TEST_KEY, {
                    test_1: additionalData,
                    test_2: additionalData,
                    test_3: additionalData,
                });

                return waitForPromisesToResolve();
            })
            .then(() => {
                const finalObject = {
                    a: 'a', b: 'b', c: 'c',
                };

                // Then our new data should merge with the existing data in the cache
                expect(OnyxCache.getValue('test_1')).toEqual(finalObject);
                expect(OnyxCache.getValue('test_2')).toEqual(finalObject);
                expect(OnyxCache.getValue('test_3')).toEqual(finalObject);

                // And the storage should reflect the same state
                expect(localforageMock.storageMap.test_1).toEqual(finalObject);
                expect(localforageMock.storageMap.test_2).toEqual(finalObject);
                expect(localforageMock.storageMap.test_3).toEqual(finalObject);
            });
    });

    it('setItem() and multiMerge()', () => {
        expect(localforageMock.storageMap).toEqual({});

        // Given no previous data and several calls to setItem(), merge(), and mergeCollection() to update a given key

        // 1st call
        Onyx.set('test_1', {a: 'a'});

        // These merges will all queue together
        Onyx.merge('test_1', {b: 'b'});
        Onyx.merge('test_1', {c: 'c'});

        // 2nd call
        Onyx.mergeCollection(ONYX_KEYS.COLLECTION.TEST_KEY, {
            test_1: {d: 'd', e: 'e'},
        });

        // Last call
        Onyx.merge('test_1', {f: 'f'});
        return waitForPromisesToResolve()
            .then(() => {
                const finalObject = {
                    a: 'a', b: 'b', c: 'c', f: 'f', d: 'd', e: 'e',
                };

                expect(OnyxCache.getValue('test_1')).toEqual(finalObject);
                expect(localforageMock.storageMap.test_1).toEqual(finalObject);
            });
    });
});