Skip to content

Commit 1cb287f

Browse files
committed
Add maxRequestsPerBatch to batchMiddleware.
1 parent 524299a commit 1cb287f

File tree

3 files changed

+63
-0
lines changed

3 files changed

+63
-0
lines changed

src/index.d.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -146,6 +146,7 @@ export type BatchMiddlewareOpts = {
146146
batchUrl?: string | Promise<string> | ((requestMap: BatchRequestMap) => string | Promise<string>);
147147
batchTimeout?: number;
148148
maxBatchSize?: number;
149+
maxRequestsPerBatch?: number;
149150
allowMutations?: boolean;
150151
method?: 'POST' | 'GET';
151152
headers?: Headers | Promise<Headers> | ((req: RelayRequestBatch) => Headers | Promise<Headers>);

src/middlewares/__tests__/batch-test.js

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -554,4 +554,56 @@ describe('middlewares/batch', () => {
554554
);
555555
});
556556
});
557+
558+
describe('option `maxRequestsPerBatch`', () => {
559+
beforeEach(() => {
560+
fetchMock.restore();
561+
});
562+
563+
it('should split batch requests based on max requests limit', async () => {
564+
// Set up mocks for single and batch requests
565+
fetchMock.mock({
566+
matcher: '/graphql',
567+
response: {
568+
status: 200,
569+
body: { data: {} },
570+
},
571+
method: 'POST',
572+
});
573+
574+
fetchMock.mock({
575+
matcher: '/graphql/batch',
576+
response: {
577+
status: 200,
578+
body: [{ data: {} }, { data: {} }],
579+
},
580+
method: 'POST',
581+
});
582+
583+
// Create a network layer with maxRequestsPerBatch set to 2
584+
const rnl = new RelayNetworkLayer([batchMiddleware({ maxRequestsPerBatch: 2 })]);
585+
586+
// Create 5 mock requests
587+
const req1 = mockReq(1);
588+
const req2 = mockReq(2);
589+
const req3 = mockReq(3);
590+
const req4 = mockReq(4);
591+
const req5 = mockReq(5);
592+
593+
// Execute all requests simultaneously
594+
await Promise.all([
595+
req1.execute(rnl),
596+
req2.execute(rnl),
597+
req3.execute(rnl),
598+
req4.execute(rnl),
599+
req5.execute(rnl),
600+
]);
601+
602+
// Check if the requests were properly split into batches
603+
const batchReqs = fetchMock.calls('/graphql/batch');
604+
const singleReqs = fetchMock.calls('/graphql');
605+
expect(batchReqs).toHaveLength(2);
606+
expect(singleReqs).toHaveLength(1);
607+
});
608+
});
557609
});

src/middlewares/batch.js

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ export type BatchMiddlewareOpts = {|
2020
| ((requestList: RequestWrapper[]) => string | Promise<string>),
2121
batchTimeout?: number,
2222
maxBatchSize?: number,
23+
maxRequestsPerBatch?: number,
2324
allowMutations?: boolean,
2425
method?: 'POST' | 'GET',
2526
headers?: Headers | Promise<Headers> | ((req: RelayRequestBatch) => Headers | Promise<Headers>),
@@ -57,6 +58,7 @@ export default function batchMiddleware(options?: BatchMiddlewareOpts): Middlewa
5758
const allowMutations = opts.allowMutations || false;
5859
const batchUrl = opts.batchUrl || '/graphql/batch';
5960
const maxBatchSize = opts.maxBatchSize || DEFAULT_BATCH_SIZE;
61+
const maxRequestsPerBatch = opts.maxRequestsPerBatch || 0; // 0 is the same as no limit
6062
const singleton = {};
6163

6264
const fetchOpts = {};
@@ -94,6 +96,7 @@ export default function batchMiddleware(options?: BatchMiddlewareOpts): Middlewa
9496
batchUrl,
9597
singleton,
9698
maxBatchSize,
99+
maxRequestsPerBatch,
97100
fetchOpts,
98101
});
99102
};
@@ -111,6 +114,13 @@ function passThroughBatch(req: RelayRequest, next, opts) {
111114
singleton.batcher = prepareNewBatcher(next, opts);
112115
}
113116

117+
if (
118+
opts.maxRequestsPerBatch &&
119+
singleton.batcher.requestList.length + 1 > opts.maxRequestsPerBatch
120+
) {
121+
singleton.batcher = prepareNewBatcher(next, opts);
122+
}
123+
114124
if (singleton.batcher.bodySize + bodyLength + 1 > opts.maxBatchSize) {
115125
singleton.batcher = prepareNewBatcher(next, opts);
116126
}

0 commit comments

Comments (0)