v10.1.4: fix batch JSON request limit with max chunk size
10xSebastian committed Oct 22, 2022
1 parent 0fd7763 commit 2a7a2f1
Showing 6 changed files with 142 additions and 102 deletions.
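In short, the commit splits each pending JSON-RPC batch into chunks of at most CHUNK_SIZE (99) requests and sends one batch HTTP request per chunk, presumably to stay under the batch-size limits that many RPC endpoints enforce. A minimal, self-contained sketch of the splitting logic introduced below (the helper name splitIntoChunks is illustrative, not part of the package):

const CHUNK_SIZE = 99;

// Split an array of pending requests into chunks of at most CHUNK_SIZE,
// mirroring the loop added in this commit.
function splitIntoChunks(batch) {
  const chunks = [];
  for (let i = 0; i < Math.ceil(batch.length / CHUNK_SIZE); i++) {
    chunks[i] = batch.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE);
  }
  return chunks;
}

// Worked example: 250 pending requests become 3 chunks of 99, 99 and 52.
const batch = Array.from({ length: 250 }, (_, id) => ({ request: { id } }));
console.log(splitIntoChunks(batch).map((chunk) => chunk.length)); // [ 99, 99, 52 ]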
48 changes: 28 additions & 20 deletions dist/esm/index.evm.js
@@ -15119,6 +15119,7 @@ class JsonRpcBatchProvider extends JsonRpcProvider {
 new Logger(version$3);

 const BATCH_INTERVAL = 10;
+const CHUNK_SIZE = 99;

 class StaticJsonRpcBatchProvider extends JsonRpcBatchProvider {

@@ -15150,31 +15151,38 @@ class StaticJsonRpcBatchProvider extends JsonRpcBatchProvider {
 if (!this._pendingBatchAggregator) {
     // Schedule batch for next event loop + short duration
     this._pendingBatchAggregator = setTimeout(() => {
-        // Get teh current batch and clear it, so new requests
+        // Get the current batch and clear it, so new requests
         // go into the next batch
         const batch = this._pendingBatch;
         this._pendingBatch = null;
         this._pendingBatchAggregator = null;
-        // Get the request as an array of requests
-        const request = batch.map((inflight) => inflight.request);
-        return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
-            // For each result, feed it to the correct Promise, depending
-            // on whether it was a success or error
-            batch.forEach((inflightRequest, index) => {
-                const payload = result[index];
-                if (payload.error) {
-                    const error = new Error(payload.error.message);
-                    error.code = payload.error.code;
-                    error.data = payload.error.data;
-                    inflightRequest.reject(error);
-                }
-                else {
-                    inflightRequest.resolve(payload.result);
-                }
-            });
-        }, (error) => {
-            batch.forEach((inflightRequest) => {
-                inflightRequest.reject(error);
-            });
-        });
+        // Prepare Chunks of CHUNK_SIZE
+        const chunks = [];
+        for (let i = 0; i < Math.ceil(batch.length / CHUNK_SIZE); i++) {
+            chunks[i] = batch.slice(i*CHUNK_SIZE, (i+1)*CHUNK_SIZE);
+        }
+        chunks.forEach((chunk)=>{
+            // Get the request as an array of requests
+            const request = chunk.map((inflight) => inflight.request);
+            return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
+                // For each result, feed it to the correct Promise, depending
+                // on whether it was a success or error
+                chunk.forEach((inflightRequest, index) => {
+                    const payload = result[index];
+                    if (payload.error) {
+                        const error = new Error(payload.error.message);
+                        error.code = payload.error.code;
+                        error.data = payload.error.data;
+                        inflightRequest.reject(error);
+                    }
+                    else {
+                        inflightRequest.resolve(payload.result);
+                    }
+                });
+            }, (error) => {
+                chunk.forEach((inflightRequest) => {
+                    inflightRequest.reject(error);
+                });
+            });
+        });
     }, BATCH_INTERVAL);
48 changes: 28 additions & 20 deletions dist/esm/index.js
@@ -15120,6 +15120,7 @@ class JsonRpcBatchProvider extends JsonRpcProvider {
 new Logger(version$3);

 const BATCH_INTERVAL = 10;
+const CHUNK_SIZE = 99;

 class StaticJsonRpcBatchProvider extends JsonRpcBatchProvider {

@@ -15151,31 +15152,38 @@ class StaticJsonRpcBatchProvider extends JsonRpcBatchProvider {
 if (!this._pendingBatchAggregator) {
     // Schedule batch for next event loop + short duration
     this._pendingBatchAggregator = setTimeout(() => {
-        // Get teh current batch and clear it, so new requests
+        // Get the current batch and clear it, so new requests
         // go into the next batch
         const batch = this._pendingBatch;
         this._pendingBatch = null;
         this._pendingBatchAggregator = null;
-        // Get the request as an array of requests
-        const request = batch.map((inflight) => inflight.request);
-        return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
-            // For each result, feed it to the correct Promise, depending
-            // on whether it was a success or error
-            batch.forEach((inflightRequest, index) => {
-                const payload = result[index];
-                if (payload.error) {
-                    const error = new Error(payload.error.message);
-                    error.code = payload.error.code;
-                    error.data = payload.error.data;
-                    inflightRequest.reject(error);
-                }
-                else {
-                    inflightRequest.resolve(payload.result);
-                }
-            });
-        }, (error) => {
-            batch.forEach((inflightRequest) => {
-                inflightRequest.reject(error);
-            });
-        });
+        // Prepare Chunks of CHUNK_SIZE
+        const chunks = [];
+        for (let i = 0; i < Math.ceil(batch.length / CHUNK_SIZE); i++) {
+            chunks[i] = batch.slice(i*CHUNK_SIZE, (i+1)*CHUNK_SIZE);
+        }
+        chunks.forEach((chunk)=>{
+            // Get the request as an array of requests
+            const request = chunk.map((inflight) => inflight.request);
+            return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
+                // For each result, feed it to the correct Promise, depending
+                // on whether it was a success or error
+                chunk.forEach((inflightRequest, index) => {
+                    const payload = result[index];
+                    if (payload.error) {
+                        const error = new Error(payload.error.message);
+                        error.code = payload.error.code;
+                        error.data = payload.error.data;
+                        inflightRequest.reject(error);
+                    }
+                    else {
+                        inflightRequest.resolve(payload.result);
+                    }
+                });
+            }, (error) => {
+                chunk.forEach((inflightRequest) => {
+                    inflightRequest.reject(error);
+                });
+            });
+        });
     }, BATCH_INTERVAL);
48 changes: 28 additions & 20 deletions dist/umd/index.evm.js
@@ -15122,6 +15122,7 @@
 new Logger(version$3);

 const BATCH_INTERVAL = 10;
+const CHUNK_SIZE = 99;

 class StaticJsonRpcBatchProvider extends JsonRpcBatchProvider {

@@ -15153,31 +15154,38 @@
 if (!this._pendingBatchAggregator) {
     // Schedule batch for next event loop + short duration
     this._pendingBatchAggregator = setTimeout(() => {
-        // Get teh current batch and clear it, so new requests
+        // Get the current batch and clear it, so new requests
         // go into the next batch
         const batch = this._pendingBatch;
         this._pendingBatch = null;
         this._pendingBatchAggregator = null;
-        // Get the request as an array of requests
-        const request = batch.map((inflight) => inflight.request);
-        return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
-            // For each result, feed it to the correct Promise, depending
-            // on whether it was a success or error
-            batch.forEach((inflightRequest, index) => {
-                const payload = result[index];
-                if (payload.error) {
-                    const error = new Error(payload.error.message);
-                    error.code = payload.error.code;
-                    error.data = payload.error.data;
-                    inflightRequest.reject(error);
-                }
-                else {
-                    inflightRequest.resolve(payload.result);
-                }
-            });
-        }, (error) => {
-            batch.forEach((inflightRequest) => {
-                inflightRequest.reject(error);
-            });
-        });
+        // Prepare Chunks of CHUNK_SIZE
+        const chunks = [];
+        for (let i = 0; i < Math.ceil(batch.length / CHUNK_SIZE); i++) {
+            chunks[i] = batch.slice(i*CHUNK_SIZE, (i+1)*CHUNK_SIZE);
+        }
+        chunks.forEach((chunk)=>{
+            // Get the request as an array of requests
+            const request = chunk.map((inflight) => inflight.request);
+            return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
+                // For each result, feed it to the correct Promise, depending
+                // on whether it was a success or error
+                chunk.forEach((inflightRequest, index) => {
+                    const payload = result[index];
+                    if (payload.error) {
+                        const error = new Error(payload.error.message);
+                        error.code = payload.error.code;
+                        error.data = payload.error.data;
+                        inflightRequest.reject(error);
+                    }
+                    else {
+                        inflightRequest.resolve(payload.result);
+                    }
+                });
+            }, (error) => {
+                chunk.forEach((inflightRequest) => {
+                    inflightRequest.reject(error);
+                });
+            });
+        });
     }, BATCH_INTERVAL);
48 changes: 28 additions & 20 deletions dist/umd/index.js
@@ -15122,6 +15122,7 @@
 new Logger(version$3);

 const BATCH_INTERVAL = 10;
+const CHUNK_SIZE = 99;

 class StaticJsonRpcBatchProvider extends JsonRpcBatchProvider {

@@ -15153,31 +15154,38 @@
 if (!this._pendingBatchAggregator) {
     // Schedule batch for next event loop + short duration
     this._pendingBatchAggregator = setTimeout(() => {
-        // Get teh current batch and clear it, so new requests
+        // Get the current batch and clear it, so new requests
         // go into the next batch
         const batch = this._pendingBatch;
         this._pendingBatch = null;
         this._pendingBatchAggregator = null;
-        // Get the request as an array of requests
-        const request = batch.map((inflight) => inflight.request);
-        return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
-            // For each result, feed it to the correct Promise, depending
-            // on whether it was a success or error
-            batch.forEach((inflightRequest, index) => {
-                const payload = result[index];
-                if (payload.error) {
-                    const error = new Error(payload.error.message);
-                    error.code = payload.error.code;
-                    error.data = payload.error.data;
-                    inflightRequest.reject(error);
-                }
-                else {
-                    inflightRequest.resolve(payload.result);
-                }
-            });
-        }, (error) => {
-            batch.forEach((inflightRequest) => {
-                inflightRequest.reject(error);
-            });
-        });
+        // Prepare Chunks of CHUNK_SIZE
+        const chunks = [];
+        for (let i = 0; i < Math.ceil(batch.length / CHUNK_SIZE); i++) {
+            chunks[i] = batch.slice(i*CHUNK_SIZE, (i+1)*CHUNK_SIZE);
+        }
+        chunks.forEach((chunk)=>{
+            // Get the request as an array of requests
+            const request = chunk.map((inflight) => inflight.request);
+            return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
+                // For each result, feed it to the correct Promise, depending
+                // on whether it was a success or error
+                chunk.forEach((inflightRequest, index) => {
+                    const payload = result[index];
+                    if (payload.error) {
+                        const error = new Error(payload.error.message);
+                        error.code = payload.error.code;
+                        error.data = payload.error.data;
+                        inflightRequest.reject(error);
+                    }
+                    else {
+                        inflightRequest.resolve(payload.result);
+                    }
+                });
+            }, (error) => {
+                chunk.forEach((inflightRequest) => {
+                    inflightRequest.reject(error);
+                });
+            });
+        });
     }, BATCH_INTERVAL);
2 changes: 1 addition & 1 deletion package.json
@@ -1,7 +1,7 @@
 {
   "name": "@depay/web3-client",
   "moduleName": "Web3Client",
-  "version": "10.1.3",
+  "version": "10.1.4",
   "description": "A web3 client to fetch blockchain data just like you are used to with HTTP clients.",
   "main": "dist/umd/index.js",
   "module": "dist/esm/index.js",
50 changes: 29 additions & 21 deletions src/clients/ethers/provider.js
@@ -3,6 +3,7 @@ import { fetchJson } from "@ethersproject/web"
 import { JsonRpcBatchProvider } from '@ethersproject/providers'

 const BATCH_INTERVAL = 10
+const CHUNK_SIZE = 99

 class StaticJsonRpcBatchProvider extends JsonRpcBatchProvider {

@@ -34,33 +35,40 @@ class StaticJsonRpcBatchProvider extends JsonRpcBatchProvider {
 if (!this._pendingBatchAggregator) {
     // Schedule batch for next event loop + short duration
     this._pendingBatchAggregator = setTimeout(() => {
-        // Get teh current batch and clear it, so new requests
+        // Get the current batch and clear it, so new requests
         // go into the next batch
         const batch = this._pendingBatch;
         this._pendingBatch = null;
         this._pendingBatchAggregator = null;
-        // Get the request as an array of requests
-        const request = batch.map((inflight) => inflight.request);
-        return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
-            // For each result, feed it to the correct Promise, depending
-            // on whether it was a success or error
-            batch.forEach((inflightRequest, index) => {
-                const payload = result[index];
-                if (payload.error) {
-                    const error = new Error(payload.error.message);
-                    error.code = payload.error.code;
-                    error.data = payload.error.data;
-                    inflightRequest.reject(error);
-                }
-                else {
-                    inflightRequest.resolve(payload.result);
-                }
-            });
-        }, (error) => {
-            batch.forEach((inflightRequest) => {
-                inflightRequest.reject(error);
-            });
-        });
+        // Prepare Chunks of CHUNK_SIZE
+        const chunks = []
+        for (let i = 0; i < Math.ceil(batch.length / CHUNK_SIZE); i++) {
+            chunks[i] = batch.slice(i*CHUNK_SIZE, (i+1)*CHUNK_SIZE);
+        }
+        chunks.forEach((chunk)=>{
+            // Get the request as an array of requests
+            const request = chunk.map((inflight) => inflight.request);
+            return fetchJson(this.connection, JSON.stringify(request)).then((result) => {
+                // For each result, feed it to the correct Promise, depending
+                // on whether it was a success or error
+                chunk.forEach((inflightRequest, index) => {
+                    const payload = result[index];
+                    if (payload.error) {
+                        const error = new Error(payload.error.message);
+                        error.code = payload.error.code;
+                        error.data = payload.error.data;
+                        inflightRequest.reject(error);
+                    }
+                    else {
+                        inflightRequest.resolve(payload.result);
+                    }
+                });
+            }, (error) => {
+                chunk.forEach((inflightRequest) => {
+                    inflightRequest.reject(error);
+                });
+            });
+        })
     }, BATCH_INTERVAL);
 }
 return promise;
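For reference, the control flow after this change: each chunk is serialized and sent as its own JSON-RPC batch via fetchJson, and each chunk settles only its own in-flight promises, so a transport error now rejects just the requests in that chunk rather than the whole pending batch. A minimal, self-contained sketch of that per-chunk settlement, with a stubbed sendBatch standing in for fetchJson(this.connection, body) (names here are illustrative, not part of the package):

// Sketch only: sendBatch is a stand-in for fetchJson(connection, body).
function dispatchChunk(chunk, sendBatch) {
  const request = chunk.map((inflight) => inflight.request);
  return sendBatch(JSON.stringify(request)).then((result) => {
    // Results are matched back to requests by position within the chunk.
    chunk.forEach((inflightRequest, index) => {
      const payload = result[index];
      if (payload.error) {
        const error = new Error(payload.error.message);
        error.code = payload.error.code;
        error.data = payload.error.data;
        inflightRequest.reject(error);
      } else {
        inflightRequest.resolve(payload.result);
      }
    });
  }, (error) => {
    // A failed chunk rejects only its own requests.
    chunk.forEach((inflightRequest) => inflightRequest.reject(error));
  });
}

// Usage with a stub that returns one successful result:
dispatchChunk(
  [{ request: { id: 1, method: 'eth_chainId' }, resolve: console.log, reject: console.error }],
  async () => [{ result: '0x1' }]
);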
