Skip to content

Commit

Permalink
upgrade undici, dont include staleIfError in maxTTL, remove cache tim…
Browse files Browse the repository at this point in the history
…eout
  • Loading branch information
StarpTech committed Aug 14, 2021
1 parent 38602fc commit 2240409
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 296 deletions.
3 changes: 1 addition & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -109,9 +109,8 @@ const datasource = new (class MoviesAPI extends HTTPDataSource {
'X-Foo': 'bar',
},
requestCache: {
maxCacheTimeout: 50 // In case of the cache does not respond for any reason (ms).
maxTtl: 1000 * 60 * 10, // 10min, will respond for 10min with the cached result (updated every 10min)
maxTtlIfError: 1000 * 60 * 30, // 30min, will respond in an error case with the cached response (for further 20min)
maxTtlIfError: 1000 * 60 * 30, // 30min, will respond with the cached response in case of an error (for further 20min)
},
})
}
Expand Down
12 changes: 6 additions & 6 deletions benchmarks/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,14 @@ Compare `apollo-datasource-http` (HTTP1 + Undici Pool) with apollo's `apollo-dat
```
❯ node benchmarks/http.js
{
'apollo-datasource-rest (http1)': { startTime: 114330370557900n, endTime: 114331160850400n },
'apollo-datasource-http (http1)': { startTime: 114330327205800n, endTime: 114330690627800n }
'apollo-datasource-rest (http1)': { startTime: 5974754539400n, endTime: 5975292928900n },
'apollo-datasource-http (http1)': { startTime: 5974751416200n, endTime: 5974986816000n }
}
Results for 1000 subsequent requests:
apollo-datasource-rest (http1) | total time: 790292500ns (790.293ms)
apollo-datasource-http (http1) | total time: 363422000ns (363.422ms)
apollo-datasource-rest (http1) | total time: 538389500ns (538.389ms)
apollo-datasource-http (http1) | total time: 235399800ns (235.400ms)
---
apollo-datasource-http (http1) <> apollo-datasource-rest (http1) percent change: -54.014%
apollo-datasource-http (http1) <> apollo-datasource-rest (http1) percent change: -56.277%
```

**Result:** `apollo-datasource-http` is around `54%` faster than `apollo-datasource-rest`
**Result:** `apollo-datasource-http` is around `56%` faster than `apollo-datasource-rest`
12 changes: 5 additions & 7 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -55,9 +55,7 @@
"apollo-server-errors": "^2.5.0",
"apollo-server-types": "^0.9.0",
"graphql": "^15.5.1",
"p-timeout": "^4.1.0",
"secure-json-parse": "^2.4.0",
"undici": "^4.1.0"
"undici": "^4.4.2"
},
"devDependencies": {
"@tsconfig/node12": "^1.0.9",
Expand All @@ -66,12 +64,12 @@
"apollo-datasource-rest": "^0.14.0",
"ava": "^3.15.0",
"h2url": "^0.2.0",
"nock": "^13.1.0",
"nock": "^13.1.1",
"nyc": "^15.1.0",
"prettier": "^2.3.2",
"release-it": "^14.10.0",
"ts-node": "^10.0.0",
"typescript": "^4.3.4",
"release-it": "^14.11.3",
"ts-node": "^10.2.0",
"typescript": "^4.3.5",
"uid": "^2.0.0"
}
}
62 changes: 25 additions & 37 deletions src/http-data-source.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,9 @@ import { DataSource, DataSourceConfig } from 'apollo-datasource'
import { Pool } from 'undici'
import { STATUS_CODES } from 'http'
import QuickLRU from '@alloc/quick-lru'
import pTimeout from 'p-timeout'
import sjson from 'secure-json-parse'

import { KeyValueCache } from 'apollo-server-caching'
import Dispatcher, { ResponseData } from 'undici/types/dispatcher'
import Dispatcher, { HttpMethod, ResponseData } from 'undici/types/dispatcher'
import { toApolloError } from 'apollo-server-errors'
import { EventEmitter } from 'stream'
import { Logger } from 'apollo-server-types'
Expand All @@ -28,12 +26,9 @@ export class RequestError<T = unknown> extends Error {

export type CacheTTLOptions = {
requestCache?: {
// In case of the cache does not respond for any reason. This defines the max duration (ms) until the operation is aborted.
maxCacheTimeout: number
// The maximum time an item is cached in seconds.
maxTtl: number
// The maximum time an item fetched from the cache is case of an error in seconds.
// This value must be greater than `maxTtl`.
// The maximum time the cache should be used when the re-fetch from the origin fails.
maxTtlIfError: number
}
}
Expand All @@ -52,7 +47,7 @@ export type Request<T = unknown> = {
json?: boolean
origin: string
path: string
method: string
method: HttpMethod
headers: Dictionary<string>
} & CacheTTLOptions

Expand Down Expand Up @@ -267,6 +262,7 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
cacheKey: string,
): Promise<Response<TResult>> {
try {
// in case the body is a JSON-serializable object, set the appropriate content-type header
if (request.body !== null && typeof request.body === 'object') {
if (request.headers['content-type'] === undefined) {
request.headers['content-type'] = 'application/json; charset=utf-8'
Expand All @@ -286,43 +282,42 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
}

const responseData = await this.pool.request(requestOptions)
responseData.body.setEncoding('utf8')

let data = ''
for await (const chunk of responseData.body) {
data += chunk
}

let json = null
if (responseData.headers['content-type']?.includes('application/json')) {
if (data.length && typeof data === 'string') {
json = sjson.parse(data)
}
let body = await responseData.body.text()
// can we parse it as JSON?
if (
responseData.headers['content-type']?.includes('application/json') &&
body.length &&
typeof body === 'string'
) {
body = JSON.parse(body)
}

const response: Response<TResult> = {
isFromCache: false,
memoized: false,
...responseData,
body: json ?? data,
// in case the server does not respond with proper JSON, we pass the body through as text.
// this is necessary since POST, DELETE don't always have a JSON body.
body: body as unknown as TResult,
}

this.onResponse<TResult>(request, response)

// let's see if we can fill the shared cache
if (request.requestCache && this.isResponseCacheable<TResult>(request, response)) {
response.maxTtl = Math.max(request.requestCache.maxTtl, request.requestCache.maxTtlIfError)
response.maxTtl = request.requestCache.maxTtl
const cachedResponse = JSON.stringify(response)

// respond with the result immedialty without waiting for the cache
// respond with the result immediately without waiting for the cache
this.cache
.set(cacheKey, cachedResponse, {
ttl: request.requestCache?.maxTtl,
ttl: request.requestCache.maxTtl,
})
.catch((err) => this.logger?.error(err))
this.cache
.set(`staleIfError:${cacheKey}`, cachedResponse, {
ttl: request.requestCache?.maxTtlIfError,
ttl: request.requestCache.maxTtl + request.requestCache.maxTtlIfError,
})
.catch((err) => this.logger?.error(err))
}
Expand All @@ -331,15 +326,12 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
} catch (error) {
this.onError?.(error, request)

// in case of an error we try to respond with a stale result from the stale-if-error cache
if (request.requestCache) {
// short circuit in case of the cache does not fail fast enough for any reason
const cacheItem = await pTimeout(
this.cache.get(`staleIfError:${cacheKey}`),
request.requestCache.maxCacheTimeout,
)
const cacheItem = await this.cache.get(`staleIfError:${cacheKey}`)

if (cacheItem) {
const response: Response<TResult> = sjson.parse(cacheItem)
const response: Response<TResult> = JSON.parse(cacheItem)
response.isFromCache = true
return response
}
Expand All @@ -356,7 +348,7 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {

const cacheKey = this.onCacheKeyCalculation(request)

// check if we have any GET call in the cache to respond immediatly
// check if we have any GET call in the cache to respond immediately
if (request.method === 'GET') {
// Memoize GET calls for the same data source instance
// a single instance of the data sources is scoped to one graphql request
Expand All @@ -377,13 +369,9 @@ export abstract class HTTPDataSource<TContext = any> extends DataSource {
// try to fetch from shared cache
if (request.requestCache) {
try {
// short circuit in case of the cache does not fail fast enough for any reason
const cacheItem = await pTimeout(
this.cache.get(cacheKey),
request.requestCache.maxCacheTimeout,
)
const cacheItem = await this.cache.get(cacheKey)
if (cacheItem) {
const cachedResponse: Response<TResult> = sjson.parse(cacheItem)
const cachedResponse: Response<TResult> = JSON.parse(cacheItem)
cachedResponse.memoized = false
cachedResponse.isFromCache = true
return cachedResponse
Expand Down
Loading

0 comments on commit 2240409

Please sign in to comment.