Add search segment #529

Open · wants to merge 1 commit into master

11 changes: 7 additions & 4 deletions src/HttpClient/HttpClient.ts
@@ -10,6 +10,7 @@ import {
FORWARDED_HOST_HEADER,
LOCALE_HEADER,
PRODUCT_HEADER,
SEARCH_SEGMENT_HEADER,
SEGMENT_HEADER,
SESSION_HEADER,
TENANT_HEADER,
@@ -56,6 +57,7 @@ export class HttpClient {
userAgent,
timeout = DEFAULT_TIMEOUT_MS,
segmentToken,
searchSegmentToken,
sessionToken,
retries,
concurrency,
@@ -88,6 +90,7 @@ export class HttpClient {
...operationId ? { 'x-vtex-operation-id': operationId } : null,
...product ? { [PRODUCT_HEADER]: product } : null,
...segmentToken ? { [SEGMENT_HEADER]: segmentToken } : null,
...searchSegmentToken ? { [SEARCH_SEGMENT_HEADER]: searchSegmentToken } : null,
...sessionToken ? { [SESSION_HEADER]: sessionToken } : null,
}

@@ -131,16 +134,16 @@ export class HttpClient {
return typeof v !== 'object' || v === null || Array.isArray(v) ? v :
Object.fromEntries(Object.entries(v).sort(([ka], [kb]) =>
ka < kb ? -1 : ka > kb ? 1 : 0))
}
catch(error) {
}
catch(error) {
// I don't believe this will ever happen, but just in case
// Also, I didn't include error as I am unsure if it would have sensitive information
this.logger.warn({message: 'Error while sorting object for cache key'})
return v
}
}


const bodyHash = createHash('md5').update(JSON.stringify(data, deterministicReplacer)).digest('hex')
const cacheableConfig = this.getConfig(url, {
...config,
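With this commit, an `HttpClient` constructed with a `searchSegmentToken` forwards it on every request as the `x-vtex-search-segment` header, next to the existing `x-vtex-segment` token. A minimal sketch of that conditional-spread pattern, assuming only these two options are set (the `buildTokenHeaders` helper is illustrative and not part of the PR):

```ts
// Sketch only: mirrors how HttpClient spreads optional tokens into its default headers.
const SEGMENT_HEADER = 'x-vtex-segment'
const SEARCH_SEGMENT_HEADER = 'x-vtex-search-segment'

interface TokenOptions {
  segmentToken?: string
  searchSegmentToken?: string
}

// Hypothetical helper, shown to illustrate the pattern used when assembling the client's headers.
const buildTokenHeaders = ({ segmentToken, searchSegmentToken }: TokenOptions): Record<string, string> => ({
  ...(segmentToken ? { [SEGMENT_HEADER]: segmentToken } : null),
  ...(searchSegmentToken ? { [SEARCH_SEGMENT_HEADER]: searchSegmentToken } : null),
})

// buildTokenHeaders({ segmentToken: 'seg-abc', searchSegmentToken: 'search-xyz' })
// => { 'x-vtex-segment': 'seg-abc', 'x-vtex-search-segment': 'search-xyz' }
```
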
74 changes: 40 additions & 34 deletions src/HttpClient/middlewares/cache.ts
@@ -1,7 +1,7 @@
import { AxiosRequestConfig, AxiosResponse } from 'axios'

import { CacheLayer } from '../../caches/CacheLayer'
import { LOCALE_HEADER, SEGMENT_HEADER, SESSION_HEADER } from '../../constants'
import { LOCALE_HEADER, SEARCH_SEGMENT_HEADER, SEGMENT_HEADER, SESSION_HEADER } from '../../constants'
import { HttpLogEvents } from '../../tracing/LogEvents'
import { HttpCacheLogFields } from '../../tracing/LogFields'
import { CustomHttpTags } from '../../tracing/Tags'
@@ -11,7 +11,7 @@ const RANGE_HEADER_QS_KEY = '__range_header'
const cacheableStatusCodes = [200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501] // https://tools.ietf.org/html/rfc7231#section-6.1

export const cacheKey = (config: AxiosRequestConfig) => {
const {baseURL = '', url = '', params, headers} = config
const { baseURL = '', url = '', params, headers } = config
const locale = headers[LOCALE_HEADER]

const encodedBaseURL = baseURL.replace(/\//g, '\\')
@@ -20,9 +20,9 @@ export const cacheKey = (config: AxiosRequestConfig) => {
let key = `${locale}--${encodedBaseURL}--${encodedURL}?`

if (params) {
Object.keys(params).sort().forEach(p =>
key = key.concat(`--${p}=${params[p]}`)
)
Object.keys(params)
.sort()
.forEach((p) => (key = key.concat(`--${p}=${params[p]}`)))
}
if (headers?.range) {
key = key.concat(`--${RANGE_HEADER_QS_KEY}=${headers.range}`)
@@ -32,9 +32,9 @@
}

const parseCacheHeaders = (headers: Record<string, string>) => {
const {'cache-control': cacheControl = '', etag, age: ageStr} = headers
const cacheDirectives = cacheControl.split(',').map(d => d.trim())
const maxAgeDirective = cacheDirectives.find(d => d.startsWith('max-age'))
const { 'cache-control': cacheControl = '', etag, age: ageStr } = headers
const cacheDirectives = cacheControl.split(',').map((d) => d.trim())
const maxAgeDirective = cacheDirectives.find((d) => d.startsWith('max-age'))
const [, maxAgeStr] = maxAgeDirective ? maxAgeDirective.split('=') : [null, null]
const maxAge = maxAgeStr ? parseInt(maxAgeStr, 10) : 0
const age = ageStr ? parseInt(ageStr, 10) : 0
@@ -48,13 +48,12 @@ const parseCacheHeaders = (headers: Record<string, string>) => {
}
}

export function isLocallyCacheable (arg: RequestConfig, type: CacheType): arg is CacheableRequestConfig {
return arg && !!arg.cacheable
&& (arg.cacheable === type || arg.cacheable === CacheType.Any || type === CacheType.Any)
export function isLocallyCacheable(arg: RequestConfig, type: CacheType): arg is CacheableRequestConfig {
return arg && !!arg.cacheable && (arg.cacheable === type || arg.cacheable === CacheType.Any || type === CacheType.Any)
}

const addNotModified = (validateStatus: (status: number) => boolean) =>
(status: number) => validateStatus(status) || status === 304
const addNotModified = (validateStatus: (status: number) => boolean) => (status: number) =>
validateStatus(status) || status === 304

export enum CacheType {
None,
@@ -82,7 +81,8 @@ interface CacheOptions {
}

export const cacheMiddleware = ({ type, storage }: CacheOptions) => {
const CACHE_RESULT_TAG = type === CacheType.Disk ? CustomHttpTags.HTTP_DISK_CACHE_RESULT : CustomHttpTags.HTTP_MEMORY_CACHE_RESULT
const CACHE_RESULT_TAG =
type === CacheType.Disk ? CustomHttpTags.HTTP_DISK_CACHE_RESULT : CustomHttpTags.HTTP_MEMORY_CACHE_RESULT
const cacheType = CacheTypeNames[type]

return async (ctx: MiddlewareContext, next: () => Promise<void>) => {
@@ -93,21 +93,24 @@ export const cacheMiddleware = ({ type, storage }: CacheOptions) => {
const span = ctx.tracing!.rootSpan

const key = cacheKey(ctx.config)
const segmentToken = ctx.config.headers[SEGMENT_HEADER]
const keyWithSegment = key + segmentToken
const segmentToken = ctx.config.headers[SEGMENT_HEADER] ?? ''
const searchSegmentToken = ctx.config.headers[SEARCH_SEGMENT_HEADER] ?? ''
const keyWithSegment = `${key}${segmentToken}`
const keyWithSegmentAndSearchSegment = `${keyWithSegment}${searchSegmentToken}`

span.log({
event: HttpLogEvents.CACHE_KEY_CREATE,
[HttpCacheLogFields.CACHE_TYPE]: cacheType,
[HttpCacheLogFields.KEY]: key,
[HttpCacheLogFields.KEY_WITH_SEGMENT]: keyWithSegment,
[HttpCacheLogFields.KEY_WITH_SEGMENT]: `${key}${segmentToken}`,
[HttpCacheLogFields.KEY_WITH_SEGMENT_AND_SEARCH_SEGMENT]: keyWithSegmentAndSearchSegment,
})

const cacheHasWithSegment = await storage.has(keyWithSegment)
const cached = cacheHasWithSegment ? await storage.get(keyWithSegment) : await storage.get(key)
const hasCache = await storage.has(keyWithSegmentAndSearchSegment)
const cached = hasCache ? await storage.get(keyWithSegmentAndSearchSegment) : null

if (cached && cached.response) {
const {etag: cachedEtag, response, expiration, responseType, responseEncoding} = cached as Cached
const { etag: cachedEtag, response, expiration, responseType, responseEncoding } = cached as Cached

if (type === CacheType.Disk && responseType === 'arraybuffer') {
response.data = Buffer.from(response.data, responseEncoding)
@@ -119,7 +122,7 @@ export const cacheMiddleware = ({ type, storage }: CacheOptions) => {
event: HttpLogEvents.LOCAL_CACHE_HIT_INFO,
[HttpCacheLogFields.CACHE_TYPE]: cacheType,
[HttpCacheLogFields.ETAG]: cachedEtag,
[HttpCacheLogFields.EXPIRATION_TIME]: (expiration-now)/1000,
[HttpCacheLogFields.EXPIRATION_TIME]: (expiration - now) / 1000,
[HttpCacheLogFields.RESPONSE_TYPE]: responseType,
[HttpCacheLogFields.RESPONSE_ENCONDING]: responseEncoding,
})
@@ -162,11 +165,12 @@ export const cacheMiddleware = ({ type, storage }: CacheOptions) => {
}
}

const {data, headers, status} = ctx.response as AxiosResponse
const {age, etag, maxAge: headerMaxAge, noStore, noCache} = parseCacheHeaders(headers)
const { data, headers, status } = ctx.response as AxiosResponse
const { age, etag, maxAge: headerMaxAge, noStore, noCache } = parseCacheHeaders(headers)

const {forceMaxAge} = ctx.config
const maxAge = forceMaxAge && cacheableStatusCodes.includes(status) ? Math.max(forceMaxAge, headerMaxAge) : headerMaxAge
const { forceMaxAge } = ctx.config
const maxAge =
forceMaxAge && cacheableStatusCodes.includes(status) ? Math.max(forceMaxAge, headerMaxAge) : headerMaxAge

span.log({
event: HttpLogEvents.CACHE_CONFIG,
@@ -189,20 +193,22 @@ export const cacheMiddleware = ({ type, storage }: CacheOptions) => {
const shouldCache = maxAge || etag
const varySession = ctx.response.headers.vary && ctx.response.headers.vary.includes(SESSION_HEADER)
if (shouldCache && !varySession) {
const {responseType, responseEncoding: configResponseEncoding} = ctx.config
const { responseType, responseEncoding: configResponseEncoding } = ctx.config
const currentAge = revalidated ? 0 : age
const varySegment = ctx.response.headers.vary && ctx.response.headers.vary.includes(SEGMENT_HEADER)
const setKey = varySegment ? keyWithSegment : key
const varySearchSegment = ctx.response.headers.vary && ctx.response.headers.vary.includes(SEARCH_SEGMENT_HEADER)

const setKey = `${key}${varySegment ? segmentToken : ''}${varySearchSegment ? searchSegmentToken : ''}`

const responseEncoding = configResponseEncoding || (responseType === 'arraybuffer' ? 'base64' : undefined)
const cacheableData = type === CacheType.Disk && responseType === 'arraybuffer'
? (data as Buffer).toString(responseEncoding)
: data
const cacheableData =
type === CacheType.Disk && responseType === 'arraybuffer' ? (data as Buffer).toString(responseEncoding) : data

const expiration = Date.now() + (maxAge - currentAge) * 1000
await storage.set(setKey, {
etag,
expiration,
response: {data: cacheableData, headers, status},
response: { data: cacheableData, headers, status },
responseEncoding,
responseType,
})
@@ -213,7 +219,7 @@ export const cacheMiddleware = ({ type, storage }: CacheOptions) => {
[HttpCacheLogFields.KEY_SET]: setKey,
[HttpCacheLogFields.AGE]: currentAge,
[HttpCacheLogFields.ETAG]: etag,
[HttpCacheLogFields.EXPIRATION_TIME]: (expiration - Date.now())/1000,
[HttpCacheLogFields.EXPIRATION_TIME]: (expiration - Date.now()) / 1000,
[HttpCacheLogFields.RESPONSE_ENCONDING]: responseEncoding,
[HttpCacheLogFields.RESPONSE_TYPE]: responseType,
})
@@ -234,7 +240,7 @@ export interface Cached {
}

export type CacheableRequestConfig = RequestConfig & {
url: string,
cacheable: CacheType,
url: string
cacheable: CacheType
memoizable: boolean
}
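
Taken together, the cache middleware now reads with the most specific key it can build (base key plus segment token plus search-segment token, each defaulting to an empty string) and writes back under a key that appends a token only when the response's Vary header names the matching request header. A hedged sketch of that key construction, with helper functions invented for illustration:

```ts
// Illustration only: how the composite cache keys are derived in the updated middleware.
const SEGMENT_HEADER = 'x-vtex-segment'
const SEARCH_SEGMENT_HEADER = 'x-vtex-search-segment'

interface KeyInput {
  key: string                                 // locale + baseURL + url + sorted params, from cacheKey()
  headers: Record<string, string | undefined> // request headers
  vary?: string                               // response Vary header, relevant when writing
}

// Reads always use the fully qualified key.
const readKey = ({ key, headers }: KeyInput): string => {
  const segment = headers[SEGMENT_HEADER] ?? ''
  const searchSegment = headers[SEARCH_SEGMENT_HEADER] ?? ''
  return `${key}${segment}${searchSegment}`
}

// Writes append a token only if the response varies on the corresponding header.
const writeKey = ({ key, headers, vary = '' }: KeyInput): string => {
  const segment = vary.includes(SEGMENT_HEADER) ? (headers[SEGMENT_HEADER] ?? '') : ''
  const searchSegment = vary.includes(SEARCH_SEGMENT_HEADER) ? (headers[SEARCH_SEGMENT_HEADER] ?? '') : ''
  return `${key}${segment}${searchSegment}`
}
```
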
3 changes: 2 additions & 1 deletion src/constants.ts
@@ -9,6 +9,7 @@ export const PID = process.pid

export const CACHE_CONTROL_HEADER = 'cache-control'
export const SEGMENT_HEADER = 'x-vtex-segment'
export const SEARCH_SEGMENT_HEADER = 'x-vtex-search-segment'
export const SESSION_HEADER = 'x-vtex-session'
export const PRODUCT_HEADER = 'x-vtex-product'
export const LOCALE_HEADER = 'x-vtex-locale'
@@ -36,7 +37,7 @@ export const COLOSSUS_PARAMS_HEADER = 'x-colossus-params'
export const TRACE_ID_HEADER = 'x-trace-id'
export const PROVIDER_HEADER = 'x-vtex-provider'

export type VaryHeaders = typeof SEGMENT_HEADER | typeof SESSION_HEADER | typeof PRODUCT_HEADER | typeof LOCALE_HEADER
export type VaryHeaders = typeof SEGMENT_HEADER | typeof SESSION_HEADER | typeof PRODUCT_HEADER | typeof LOCALE_HEADER | typeof SEARCH_SEGMENT_HEADER

export const BODY_HASH = '__graphqlBodyHash'

9 changes: 7 additions & 2 deletions src/service/worker/runtime/graphql/middlewares/response.ts
@@ -3,11 +3,10 @@ import {
ETAG_HEADER,
FORWARDED_HOST_HEADER,
META_HEADER,
SEARCH_SEGMENT_HEADER,
SEGMENT_HEADER,
SESSION_HEADER,
} from '../../../../../constants'
import { Maybe } from '../../typings'
import { Recorder } from '../../utils/recorder'
import { GraphQLCacheControl, GraphQLServiceContext } from '../typings'
import { cacheControlHTTP } from '../utils/cacheControl'

@@ -17,8 +16,14 @@ function setVaryHeaders (ctx: GraphQLServiceContext, cacheControl: GraphQLCacheC
ctx.vary(SEGMENT_HEADER)
}

if (cacheControl.scope === 'search_segment') {
ctx.vary(SEGMENT_HEADER)
ctx.vary(SEARCH_SEGMENT_HEADER)
}

if (cacheControl.scope === 'private' || ctx.query.scope === 'private') {
ctx.vary(SEGMENT_HEADER)
ctx.vary(SEARCH_SEGMENT_HEADER)
ctx.vary(SESSION_HEADER)
} else if (ctx.vtex.sessionToken) {
ctx.vtex.logger.warn({
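On the GraphQL side, the new `search_segment` cache scope sits between `segment` and `private`: it makes the response vary on both segment headers without forcing a per-session split. A rough sketch of the mapping the updated `setVaryHeaders` implements (the `varyHeadersFor` helper is illustrative, not code from the PR, and it ignores the extra `ctx.query.scope === 'private'` check):

```ts
// Illustrative only: which request headers each GraphQL cache scope varies on after this change.
type GraphQLCacheScope = 'public' | 'segment' | 'search_segment' | 'private'

const SEGMENT_HEADER = 'x-vtex-segment'
const SEARCH_SEGMENT_HEADER = 'x-vtex-search-segment'
const SESSION_HEADER = 'x-vtex-session'

const varyHeadersFor = (scope: GraphQLCacheScope): string[] => {
  switch (scope) {
    case 'segment':
      return [SEGMENT_HEADER]
    case 'search_segment':
      return [SEGMENT_HEADER, SEARCH_SEGMENT_HEADER]
    case 'private':
      return [SEGMENT_HEADER, SEARCH_SEGMENT_HEADER, SESSION_HEADER]
    default:
      return [] // 'public' adds no vary headers
  }
}
```
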
2 changes: 1 addition & 1 deletion src/service/worker/runtime/graphql/typings.ts
@@ -15,7 +15,7 @@ export type GraphQLResponse = TypeFromPromise<ReturnType<typeof execute>>

export interface GraphQLCacheControl {
maxAge: number
scope: 'private' | 'public' | 'segment'
scope: 'private' | 'public' | 'segment' | 'search_segment'
noCache: boolean
noStore: boolean
}
2 changes: 1 addition & 1 deletion src/service/worker/runtime/http/middlewares/error.ts
@@ -1,5 +1,5 @@
import { IOClients } from '../../../../../clients/IOClients'
import { LINKED } from '../../../../../constants'
import { LINKED, SEARCH_SEGMENT_HEADER, SEGMENT_HEADER } from '../../../../../constants'
import {
cancelledRequestStatus,
RequestCancelledError,
26 changes: 16 additions & 10 deletions src/service/worker/runtime/http/middlewares/vary.ts
@@ -1,6 +1,7 @@
import { IOClients } from '../../../../../clients/IOClients'
import {
LOCALE_HEADER,
SEARCH_SEGMENT_HEADER,
SEGMENT_HEADER,
SESSION_HEADER,
VaryHeaders,
@@ -17,18 +18,23 @@ const cachingStrategies: CachingStrategy[] = [
{
forbidden: [],
path: '/_v/private/',
vary: [SEGMENT_HEADER, SESSION_HEADER],
vary: [SEGMENT_HEADER, SEARCH_SEGMENT_HEADER, SESSION_HEADER],
},
{
forbidden: [SEGMENT_HEADER, SESSION_HEADER],
forbidden: [SEGMENT_HEADER, SEARCH_SEGMENT_HEADER, SESSION_HEADER],
path: '/_v/public/',
vary: [],
},
{
forbidden: [SESSION_HEADER],
forbidden: [SESSION_HEADER, SEARCH_SEGMENT_HEADER],
path: '/_v/segment/',
vary: [SEGMENT_HEADER],
},
{
forbidden: [SESSION_HEADER],
path: '/_v/search-segment/',
vary: [SEGMENT_HEADER, SEARCH_SEGMENT_HEADER],
},
]

const shouldVaryByHeader = <T extends IOClients, U extends RecorderState, V extends ParamsContext>(
@@ -47,12 +53,10 @@ const shouldVaryByHeader = <T extends IOClients, U extends RecorderState, V exte
return !!ctx.get(header)
}


export async function vary <
T extends IOClients,
U extends RecorderState,
V extends ParamsContext
> (ctx: ServiceContext<T, U, V>, next: () => Promise<void>) {
export async function vary<T extends IOClients, U extends RecorderState, V extends ParamsContext>(
ctx: ServiceContext<T, U, V>,
next: () => Promise<void>
) {
const { method, path } = ctx
const strategy = cachingStrategies.find((cachingStrategy) => path.indexOf(cachingStrategy.path) === 0)

@@ -62,7 +66,6 @@ export async function vary <
})
}


// We don't need to vary non GET requests, since they are never cached
if (method.toUpperCase() !== 'GET') {
await next()
@@ -76,6 +79,9 @@
if (shouldVaryByHeader(ctx, SESSION_HEADER, strategy)) {
ctx.vary(SESSION_HEADER)
}
if (shouldVaryByHeader(ctx, SEARCH_SEGMENT_HEADER, strategy)) {
ctx.vary(SEARCH_SEGMENT_HEADER)
}

await next()
}
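
The vary middleware gains a fourth route strategy: GETs under `/_v/search-segment/` may vary on both segment headers (when the request carries them) but never on the session header. A compact, illustrative restatement of the four strategies after this change (not code lifted from the PR):

```ts
// Summary sketch of the route caching strategies; header constants match src/constants.ts.
const SEGMENT_HEADER = 'x-vtex-segment'
const SEARCH_SEGMENT_HEADER = 'x-vtex-search-segment'
const SESSION_HEADER = 'x-vtex-session'

interface RouteStrategy {
  path: string
  vary: string[]        // headers the route varies on when present on the request
  forbidden: string[]   // headers the route never varies on
}

const strategies: RouteStrategy[] = [
  { path: '/_v/private/', vary: [SEGMENT_HEADER, SEARCH_SEGMENT_HEADER, SESSION_HEADER], forbidden: [] },
  { path: '/_v/public/', vary: [], forbidden: [SEGMENT_HEADER, SEARCH_SEGMENT_HEADER, SESSION_HEADER] },
  { path: '/_v/segment/', vary: [SEGMENT_HEADER], forbidden: [SESSION_HEADER, SEARCH_SEGMENT_HEADER] },
  { path: '/_v/search-segment/', vary: [SEGMENT_HEADER, SEARCH_SEGMENT_HEADER], forbidden: [SESSION_HEADER] },
]

// e.g. strategies.find((s) => '/_v/search-segment/products'.startsWith(s.path))?.vary
// => ['x-vtex-segment', 'x-vtex-search-segment']
```
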
1 change: 1 addition & 0 deletions src/service/worker/runtime/typings.ts
@@ -137,6 +137,7 @@ export interface IOContext {
userAgent: string
workspace: string
segmentToken?: string
searchSegmentToken?: string
sessionToken?: string
requestId: string
operationId: string
2 changes: 2 additions & 0 deletions src/service/worker/runtime/utils/context.ts
@@ -13,6 +13,7 @@ import {
REGION,
REQUEST_ID_HEADER,
SEGMENT_HEADER,
SEARCH_SEGMENT_HEADER,
SESSION_HEADER,
TENANT_HEADER,
WORKSPACE_HEADER,
@@ -44,6 +45,7 @@ export const prepareHandlerCtx = (header: Context['request']['header'], tracingC
region: REGION,
requestId: header[REQUEST_ID_HEADER],
segmentToken: header[SEGMENT_HEADER],
searchSegmentToken: header[SEARCH_SEGMENT_HEADER],
sessionToken: header[SESSION_HEADER],
tenant: header[TENANT_HEADER] ? parseTenantHeaderValue(header[TENANT_HEADER]) : undefined,
tracer: new UserLandTracer(tracingContext.tracer, tracingContext.currentSpan),
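Finally, the plumbing changes tie the pieces together: `prepareHandlerCtx` copies the incoming `x-vtex-search-segment` header into `IOContext.searchSegmentToken`, which services can then pass back to `HttpClient` so downstream calls carry the same token. A tiny sketch of the incoming side, assuming a plain header map (the helper name is made up):

```ts
// Sketch: extracting the search-segment token from incoming request headers, as prepareHandlerCtx now does.
const SEARCH_SEGMENT_HEADER = 'x-vtex-search-segment'

const getSearchSegmentToken = (header: Record<string, string | undefined>): string | undefined =>
  header[SEARCH_SEGMENT_HEADER]

// getSearchSegmentToken({ 'x-vtex-search-segment': 'search-xyz' }) === 'search-xyz'
```
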
3 changes: 3 additions & 0 deletions src/tracing/LogFields.ts
@@ -36,6 +36,9 @@ export const enum HttpCacheLogFields {
/** The generated cache key for local cache with the segment added to it */
KEY_WITH_SEGMENT = 'key-with-segment',

/** The generated cache key for local cache with the segment and the searchSegment added to it */
KEY_WITH_SEGMENT_AND_SEARCH_SEGMENT = 'key-with-segment-and-search-segment',

/** The key that was just set on the cache */
KEY_SET = 'key-set',
