@@ -865,33 +865,18 @@ function createRpcRequest(
   connection: Connection,
 ): RpcRequest {
   return (method, args) => {
-    if (connection._autoBatch) {
+    if (connection._numRequestsToBatch > 0) {
       return new Promise((resolve, reject) => {
-        // Automatically batch requests every 100 ms.
-        const BATCH_INTERVAL_MS = 100;
-
-        connection._batchRequests.push([
-          client.request(method, args),
+        // Automatically queue request to be processed in this batch.
+        connection._batchedRequests.push({
+          params: {methodName: method, args},
           resolve,
           reject,
-        ]);
-
-        if (!connection._pendingBatchTimer) {
-          connection._pendingBatchTimer = setTimeout(() => {
-            const batch = client.batchRequests.map((e: any) => e[0]);
-            client.request(batch, (err: any, response: any) => {
-              if (err) {
-                // Call reject handler of each promise
-                connection._batchRequests.map((e: any) => e[2](err));
-              } else {
-                // Call resolve handler of each promise
-                connection._batchRequests.map((e: any, i: number) =>
-                  e[1](response[i]),
-                );
-              }
-              connection._pendingBatchTimer = 0;
-            });
-          }, BATCH_INTERVAL_MS);
+        });
+        if (
+          connection._batchedRequests.length === connection._numRequestsToBatch
+        ) {
+          connection._resolvePendingBatchRequests();
         }
       });
     } else {
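For reference, each entry queued above stores only the raw {methodName, args} pair; turning those entries into an actual JSON RPC batch body happens later, on the connection's existing _rpcBatchRequest path. A minimal sketch of that translation, assuming a standard JSON-RPC 2.0 envelope and a sequential id scheme (both are illustrative assumptions, not part of this diff):

    // Sketch only: how queued {methodName, args} entries could map onto a
    // JSON-RPC 2.0 batch body. The id scheme and envelope construction here
    // are assumptions for illustration; the real work is done by the
    // existing _rpcBatchRequest implementation.
    const batchBody = connection._batchedRequests.map(({params}, id) => ({
      jsonrpc: '2.0',
      id,
      method: params.methodName,
      params: params.args,
    }));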
@@ -2119,9 +2104,14 @@ export class Connection {
   /** @internal */ _confirmTransactionInitialTimeout?: number;
   /** @internal */ _rpcEndpoint: string;
   /** @internal */ _rpcWsEndpoint: string;
-  /** @internal */ _autoBatch?: boolean;
-  /** @internal */ _batchRequests: any[] = [];
-  /** @internal */ _pendingBatchTimer: number = 0;
+  /** @internal */ _numRequestsToBatch: number = 0;
+  /** @internal */ _resolvePendingBatchRequests: (value?: unknown) => void =
+    () => null;
+  /** @internal */ _batchedRequests: {
+    params: RpcParams;
+    resolve: (value?: unknown) => void;
+    reject: (reason?: any) => void;
+  }[] = [];
   /** @internal */ _rpcClient: RpcClient;
   /** @internal */ _rpcRequest: RpcRequest;
   /** @internal */ _rpcBatchRequest: RpcBatchRequest;
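The new _batchedRequests field references an RpcParams type that is defined elsewhere in connection.ts and not shown in this diff. Based on the {methodName, args} objects pushed in createRpcRequest, its shape is presumably along these lines (an assumption, not taken from the diff):

    // Assumed shape of RpcParams, matching the objects pushed by createRpcRequest.
    type RpcParams = {
      methodName: string;
      args: Array<any>;
    };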
@@ -2210,7 +2200,6 @@ export class Connection {
       httpHeaders = commitmentOrConfig.httpHeaders;
       fetchMiddleware = commitmentOrConfig.fetchMiddleware;
       disableRetryOnRateLimit = commitmentOrConfig.disableRetryOnRateLimit;
-      this._autoBatch = commitmentOrConfig.autoBatch;
     }
 
     this._rpcEndpoint = endpoint;
@@ -4009,6 +3998,63 @@ export class Connection {
     return res.result;
   }
 
+  /**
+   * Perform the provided requests in a single batched JSON RPC request. This method lets you
+   * replace the following code, which executes multiple JSON RPC requests in parallel:
+   *
+   *   Promise.all(addresses.map(address => connection.getSignaturesForAddress(address, undefined, 'confirmed')))
+   *
+   * with the code below, which batches all of the requests into a single JSON RPC request:
+   *
+   *   connection.performBatchRequest(addresses.map(address => () => connection.getSignaturesForAddress(address, undefined, 'confirmed')))
+   *
+   * @param deferredRequests an array of functions, each of which returns a promise to be batched. Each promise should call a
+   *                         method on this Connection instance that performs a non-batched request. Note: only methods on
+   *                         the Connection class that call _rpcRequest are supported (most do).
+   * @return {Promise<Array<any>>} an array of responses corresponding to each request.
+   */
+  async performBatchRequest(
+    deferredRequests: Array<() => Promise<any>>,
+  ): Promise<Array<any>> {
+    this._numRequestsToBatch = deferredRequests.length;
+    let promises: Array<any> = [];
+    await new Promise((resolve, reject) => {
+      this._resolvePendingBatchRequests = resolve;
+
+      // Begin executing the promises.
+      promises = deferredRequests.map(e => e().catch(reject));
+
+      // Each promise generates an RPC payload and stores
+      // that payload, resolve function, and reject function
+      // in this._batchedRequests.
+      //
+      // This outer Promise is resolved only when all of the entries
+      // in this._batchedRequests have been created, at which
+      // point _resolvePendingBatchRequests is called.
+    });
+
+    assert(
+      this._batchedRequests.length === this._numRequestsToBatch,
+      'all requests were not properly batched',
+    );
+
+    // Now perform the RPC batch request with the collected payloads.
+    try {
+      const unsafeRes = await this._rpcBatchRequest(
+        this._batchedRequests.map(e => e.params),
+      );
+
+      // Finally, resolve the promises created by deferredRequests with the appropriate data for each one.
+      this._batchedRequests.forEach(({resolve}, i) => resolve(unsafeRes[i]));
+    } catch (err) {
+      // Propagate the error to the promises created by deferredRequests.
+      this._batchedRequests.forEach(({reject}) => reject(err));
+    }
+
+    // Await all promises so we return a list of the results from each one.
+    return Promise.all(promises);
+  }
+
   /**
    * @internal
   */
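A usage sketch of the new method, assuming this change lands as shown; the endpoint and address list below are placeholders:

    import {Connection, PublicKey} from '@solana/web3.js';

    // Fetch signature lists for several addresses with a single batched JSON RPC request.
    async function fetchSignaturesBatched(addresses: Array<PublicKey>) {
      const connection = new Connection(
        'https://api.mainnet-beta.solana.com', // placeholder endpoint
        'confirmed',
      );

      // Each deferred request is a zero-argument function that performs exactly one
      // RPC call on this connection; performBatchRequest sends them as one batch.
      const results = await connection.performBatchRequest(
        addresses.map(
          address => () =>
            connection.getSignaturesForAddress(address, undefined, 'confirmed'),
        ),
      );

      // Responses come back in the same order as the deferred requests.
      return results;
    }

Note that every deferred request must target the same Connection instance, since the batch is assembled from that connection's internal queue.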