@@ -2,11 +2,9 @@ import { DataSource, DataSourceConfig } from 'apollo-datasource'
 import { Pool } from 'undici'
 import { STATUS_CODES } from 'http'
 import QuickLRU from '@alloc/quick-lru'
-import pTimeout from 'p-timeout'
-import sjson from 'secure-json-parse'
 
 import { KeyValueCache } from 'apollo-server-caching'
-import Dispatcher, { ResponseData } from 'undici/types/dispatcher'
+import Dispatcher, { HttpMethod, ResponseData } from 'undici/types/dispatcher'
 import { toApolloError } from 'apollo-server-errors'
 import { EventEmitter } from 'stream'
 import { Logger } from 'apollo-server-types'
@@ -28,12 +26,9 @@ export class RequestError<T = unknown> extends Error {
 
 export type CacheTTLOptions = {
   requestCache?: {
-    // In case of the cache does not respond for any reason. This defines the max duration (ms) until the operation is aborted.
-    maxCacheTimeout: number
     // The maximum time an item is cached in seconds.
     maxTtl: number
-    // The maximum time an item fetched from the cache is case of an error in seconds.
-    // This value must be greater than `maxTtl`.
+    // The maximum time the cache should be used when the re-fetch from the origin fails.
     maxTtlIfError: number
   }
 }
@@ -52,7 +47,7 @@ export type Request<T = unknown> = {
   json?: boolean
   origin: string
   path: string
-  method: string
+  method: HttpMethod
   headers: Dictionary<string>
 } & CacheTTLOptions
 
@@ -267,6 +262,7 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
     cacheKey: string,
   ): Promise<Response<TResult>> {
     try {
+      // in case of JSON set appropriate content-type header
       if (request.body !== null && typeof request.body === 'object') {
         if (request.headers['content-type'] === undefined) {
           request.headers['content-type'] = 'application/json; charset=utf-8'
@@ -286,43 +282,42 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
       }
 
       const responseData = await this.pool.request(requestOptions)
-      responseData.body.setEncoding('utf8')
 
-      let data = ''
-      for await (const chunk of responseData.body) {
-        data += chunk
-      }
-
-      let json = null
-      if (responseData.headers['content-type']?.includes('application/json')) {
-        if (data.length && typeof data === 'string') {
-          json = sjson.parse(data)
-        }
+      let body = await responseData.body.text()
+      // can we parse it as JSON?
+      if (
+        responseData.headers['content-type']?.includes('application/json') &&
+        body.length &&
+        typeof body === 'string'
+      ) {
+        body = JSON.parse(body)
       }
 
       const response: Response<TResult> = {
         isFromCache: false,
         memoized: false,
         ...responseData,
-        body: json ?? data,
+        // in case the server does not properly respond with JSON we pass it as text.
+        // this is necessary since POST, DELETE don't always have a JSON body.
+        body: body as unknown as TResult,
       }
 
       this.onResponse<TResult>(request, response)
 
       // let's see if we can fill the shared cache
       if (request.requestCache && this.isResponseCacheable<TResult>(request, response)) {
-        response.maxTtl = Math.max(request.requestCache.maxTtl, request.requestCache.maxTtlIfError)
+        response.maxTtl = request.requestCache.maxTtl
         const cachedResponse = JSON.stringify(response)
 
-        // respond with the result immedialty without waiting for the cache
+        // respond with the result immediately without waiting for the cache
         this.cache
           .set(cacheKey, cachedResponse, {
-            ttl: request.requestCache?.maxTtl,
+            ttl: request.requestCache.maxTtl,
           })
           .catch((err) => this.logger?.error(err))
         this.cache
           .set(`staleIfError:${cacheKey}`, cachedResponse, {
-            ttl: request.requestCache?.maxTtlIfError,
+            ttl: request.requestCache.maxTtl + request.requestCache.maxTtlIfError,
           })
           .catch((err) => this.logger?.error(err))
       }
@@ -331,15 +326,12 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
     } catch (error) {
       this.onError?.(error, request)
 
+      // in case of an error we try to respond with a stale result from the stale-if-error cache
       if (request.requestCache) {
-        // short circuit in case of the cache does not fail fast enough for any reason
-        const cacheItem = await pTimeout(
-          this.cache.get(`staleIfError:${cacheKey}`),
-          request.requestCache.maxCacheTimeout,
-        )
+        const cacheItem = await this.cache.get(`staleIfError:${cacheKey}`)
 
         if (cacheItem) {
-          const response: Response<TResult> = sjson.parse(cacheItem)
+          const response: Response<TResult> = JSON.parse(cacheItem)
           response.isFromCache = true
           return response
         }
@@ -356,7 +348,7 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
 
     const cacheKey = this.onCacheKeyCalculation(request)
 
-    // check if we have any GET call in the cache to respond immediatly
+    // check if we have any GET call in the cache to respond immediately
     if (request.method === 'GET') {
       // Memoize GET calls for the same data source instance
       // a single instance of the data sources is scoped to one graphql request
@@ -377,13 +369,9 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
     // try to fetch from shared cache
     if (request.requestCache) {
       try {
-        // short circuit in case of the cache does not fail fast enough for any reason
-        const cacheItem = await pTimeout(
-          this.cache.get(cacheKey),
-          request.requestCache.maxCacheTimeout,
-        )
+        const cacheItem = await this.cache.get(cacheKey)
         if (cacheItem) {
-          const cachedResponse: Response<TResult> = sjson.parse(cacheItem)
+          const cachedResponse: Response<TResult> = JSON.parse(cacheItem)
          cachedResponse.memoized = false
          cachedResponse.isFromCache = true
          return cachedResponse
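
As a quick illustration of the changed cache semantics, here is a minimal consumer sketch (assuming the package is published as apollo-datasource-http and that the class exposes a get helper; the MoviesAPI names below are hypothetical, not part of this diff): a fresh response is stored under the plain cache key for maxTtl seconds, and a second copy is stored under staleIfError:<cacheKey> for maxTtl + maxTtlIfError seconds, which is only read when the re-fetch from the origin throws.

// Hypothetical consumer of the data source changed above (names are illustrative).
import { HTTPDataSource } from 'apollo-datasource-http'

interface Movie {
  id: string
  title: string
}

class MoviesAPI extends HTTPDataSource {
  constructor() {
    // the origin is handed to the underlying undici Pool
    super('https://movies.example.com')
  }

  async getMovie(id: string): Promise<Movie> {
    const response = await this.get<Movie>(`/movies/${id}`, {
      requestCache: {
        // served from the shared cache for up to 60s
        maxTtl: 60,
        // after that, a stale copy is kept for another 120s and only
        // returned when the re-fetch from the origin fails
        maxTtlIfError: 120,
      },
    })
    return response.body
  }
}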