Commit

fix: handle missing vary header values (#4031)
gurgunday authored Feb 16, 2025
1 parent c14781c commit 4269dab
Showing 7 changed files with 166 additions and 22 deletions.
8 changes: 7 additions & 1 deletion lib/cache/memory-cache-store.js
@@ -79,7 +79,13 @@ class MemoryCacheStore {
     const entry = this.#entries.get(topLevelKey)?.find((entry) => (
       entry.deleteAt > now &&
       entry.method === key.method &&
-      (entry.vary == null || Object.keys(entry.vary).every(headerName => entry.vary[headerName] === key.headers?.[headerName]))
+      (entry.vary == null || Object.keys(entry.vary).every(headerName => {
+        if (entry.vary[headerName] === null) {
+          return key.headers[headerName] === undefined
+        }
+
+        return entry.vary[headerName] === key.headers[headerName]
+      }))
     ))

     return entry == null
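
A minimal sketch of the matching rule introduced above, shown outside the class: a stored vary value of null records that the response was cached for a request that omitted that header, so only a later request that also omits it may reuse the entry. The standalone helper below is illustrative only, not part of undici.

// Illustrative only: the vary predicate used by MemoryCacheStore#get, in isolation.
function varyMatches (entryVary, requestHeaders = {}) {
  if (entryVary == null) return true
  return Object.keys(entryVary).every((name) => {
    if (entryVary[name] === null) {
      // Cached without this header: only a request that also omits it matches.
      return requestHeaders[name] === undefined
    }
    return entryVary[name] === requestHeaders[name]
  })
}

// Entry cached under `Vary: Accept-Encoding` by a request that sent no Accept-Encoding:
console.log(varyMatches({ 'accept-encoding': null }, {}))                              // true
console.log(varyMatches({ 'accept-encoding': null }, { 'accept-encoding': 'gzip' }))   // false
console.log(varyMatches({ 'accept-encoding': 'gzip' }, { 'accept-encoding': 'gzip' })) // true
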
21 changes: 10 additions & 11 deletions lib/cache/sqlite-cache-store.js
@@ -411,10 +411,6 @@ module.exports = class SqliteCacheStore {
      let matches = true

      if (value.vary) {
-        if (!headers) {
-          return undefined
-        }
-
        const vary = JSON.parse(value.vary)

        for (const header in vary) {
@@ -440,18 +436,21 @@
  * @returns {boolean}
  */
 function headerValueEquals (lhs, rhs) {
+  if (lhs == null && rhs == null) {
+    return true
+  }
+
+  if ((lhs == null && rhs != null) ||
+    (lhs != null && rhs == null)) {
+    return false
+  }
+
   if (Array.isArray(lhs) && Array.isArray(rhs)) {
     if (lhs.length !== rhs.length) {
       return false
     }

-    for (let i = 0; i < lhs.length; i++) {
-      if (rhs.includes(lhs[i])) {
-        return false
-      }
-    }
-
-    return true
+    return lhs.every((x, i) => x === rhs[i])
   }

   return lhs === rhs
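
The rewrite of headerValueEquals also fixes its array branch: the old loop returned false whenever rhs contained lhs[i], so two identical arrays never compared equal. A rough sketch of the intended semantics, written as a standalone function for illustration rather than copied from the file:

// Illustrative re-statement of the comparison used by the SQLite store.
function headerValueEquals (lhs, rhs) {
  if (lhs == null && rhs == null) return true   // header absent on both sides
  if (lhs == null || rhs == null) return false  // absent on one side only
  if (Array.isArray(lhs) && Array.isArray(rhs)) {
    // Order-sensitive, element-wise equality.
    return lhs.length === rhs.length && lhs.every((x, i) => x === rhs[i])
  }
  return lhs === rhs
}

console.log(headerValueEquals(null, null))             // true
console.log(headerValueEquals(null, 'gzip'))           // false
console.log(headerValueEquals(['a', 'b'], ['a', 'b'])) // true (the old loop returned false here)
console.log(headerValueEquals(['a', 'b'], ['b', 'a'])) // false
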
17 changes: 9 additions & 8 deletions lib/util/cache.js
@@ -26,10 +26,14 @@ function makeCacheKey (opts) {
       if (typeof key !== 'string' || typeof val !== 'string') {
         throw new Error('opts.headers is not a valid header map')
       }
-      headers[key] = val
+      headers[key.toLowerCase()] = val
     }
   } else if (typeof opts.headers === 'object') {
-    headers = opts.headers
+    headers = {}
+
+    for (const key of Object.keys(opts.headers)) {
+      headers[key.toLowerCase()] = opts.headers[key]
+    }
   } else {
     throw new Error('opts.headers is not an object')
   }
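
Both hunks in this file feed the vary matching above: the change here lower-cases header names before they are stored on the cache key, so lookups are case-insensitive regardless of how the caller spelled the header. A minimal sketch of that normalization in isolation (the helper below is illustrative, not an undici export):

// Illustrative only: lower-case header names, leave values untouched.
function normalizeHeaderKeys (headers) {
  /** @type {Record<string, string | string[]>} */
  const normalized = {}
  for (const key of Object.keys(headers)) {
    normalized[key.toLowerCase()] = headers[key]
  }
  return normalized
}

console.log(normalizeHeaderKeys({ 'Accept-Encoding': 'gzip', Authorization: 'token' }))
// -> { 'accept-encoding': 'gzip', authorization: 'token' }
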
@@ -260,19 +264,16 @@ function parseVaryHeader (varyHeader, headers) {
     return headers
   }

-  const output = /** @type {Record<string, string | string[]>} */ ({})
+  const output = /** @type {Record<string, string | string[] | null>} */ ({})

   const varyingHeaders = typeof varyHeader === 'string'
     ? varyHeader.split(',')
     : varyHeader
+
   for (const header of varyingHeaders) {
     const trimmedHeader = header.trim().toLowerCase()

-    if (headers[trimmedHeader]) {
-      output[trimmedHeader] = headers[trimmedHeader]
-    } else {
-      return undefined
-    }
+    output[trimmedHeader] = headers[trimmedHeader] ?? null
   }

   return output
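
The effect of the parseVaryHeader change, as a usage sketch: a header named in Vary but absent from the request is now recorded as null instead of making the whole call return undefined, which is the behavior the new tests below exercise. The require path shown here is assumed:

const { parseVaryHeader } = require('./lib/util/cache.js') // path assumed

// Request that omitted Accept-Encoding: the missing value is recorded as null.
console.log(parseVaryHeader('Accept-Encoding, Authorization', { authorization: 'token' }))
// -> { 'accept-encoding': null, authorization: 'token' }

// Before this commit the same call returned undefined, so a cached response with
// a Vary header could never be matched by a request missing one of those headers.
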
34 changes: 34 additions & 0 deletions test/cache-interceptor/utils.js
@@ -214,6 +214,40 @@ describe('parseVaryHeader', () => {
'another-one': '123'
})
})

test('handles missing headers with null', () => {
const result = parseVaryHeader('Accept-Encoding, Authorization', {})
deepStrictEqual(result, {
'accept-encoding': null,
authorization: null
})
})

test('handles mix of present and missing headers', () => {
const result = parseVaryHeader('Accept-Encoding, Authorization', {
authorization: 'example-value'
})
deepStrictEqual(result, {
'accept-encoding': null,
authorization: 'example-value'
})
})

test('handles array input', () => {
const result = parseVaryHeader(['Accept-Encoding', 'Authorization'], {
'accept-encoding': 'gzip'
})
deepStrictEqual(result, {
'accept-encoding': 'gzip',
authorization: null
})
})

test('preserves existing * behavior', () => {
const headers = { accept: 'text/html' }
const result = parseVaryHeader('*', headers)
deepStrictEqual(result, headers)
})
})

describe('isEtagUsable', () => {
65 changes: 65 additions & 0 deletions test/issue-3959.js
@@ -0,0 +1,65 @@
const { describe, test, after } = require('node:test')
const assert = require('node:assert')
const { createServer } = require('node:http')
const MemoryCacheStore = require('../lib/cache/memory-cache-store.js')
const { request, Agent, setGlobalDispatcher } = require('..')
const { interceptors } = require('..')

describe('Cache with Vary headers', () => {
async function runCacheTest (store) {
let requestCount = 0
const server = createServer((req, res) => {
requestCount++
res.setHeader('Vary', 'Accept-Encoding')
res.setHeader('Cache-Control', 'max-age=60')
res.end(`Request count: ${requestCount}`)
})

await new Promise(resolve => server.listen(0, resolve))
const port = server.address().port
const url = `http://localhost:${port}`

const agent = new Agent()
setGlobalDispatcher(
agent.compose(
interceptors.cache({
store,
cacheByDefault: 1000,
methods: ['GET']
})
)
)

const res1 = await request(url)
const body1 = await res1.body.text()
assert.strictEqual(body1, 'Request count: 1')
assert.strictEqual(requestCount, 1)

const res2 = await request(url)
const body2 = await res2.body.text()
assert.strictEqual(body2, 'Request count: 1')
assert.strictEqual(requestCount, 1)

const res3 = await request(url, {
headers: {
'Accept-Encoding': 'gzip'
}
})
const body3 = await res3.body.text()
assert.strictEqual(body3, 'Request count: 2')
assert.strictEqual(requestCount, 2)

await new Promise(resolve => server.close(resolve))
}

test('should cache response with MemoryCacheStore when Vary header exists but request header is missing', async () => {
await runCacheTest(new MemoryCacheStore())
})

test('should cache response with SqliteCacheStore when Vary header exists but request header is missing', { skip: process.versions.node < '22' }, async () => {
const SqliteCacheStore = require('../lib/cache/sqlite-cache-store.js')
const sqliteStore = new SqliteCacheStore()
await runCacheTest(sqliteStore)
after(() => sqliteStore.close())
})
})
39 changes: 39 additions & 0 deletions test/types/cache-interceptor.test-d.ts
@@ -78,6 +78,45 @@ expectNotAssignable<CacheInterceptor.CacheValue>({
deleteAt: ''
})

expectAssignable<CacheInterceptor.CacheValue>({
statusCode: 200,
statusMessage: 'OK',
headers: {},
vary: {
'accept-encoding': null,
authorization: 'example-value'
},
cachedAt: 0,
staleAt: 0,
deleteAt: 0
})

expectAssignable<CacheInterceptor.CacheValue>({
statusCode: 200,
statusMessage: 'OK',
headers: {},
vary: {
'accept-encoding': null,
authorization: null
},
cachedAt: 0,
staleAt: 0,
deleteAt: 0
})

expectNotAssignable<CacheInterceptor.CacheValue>({
statusCode: 200,
statusMessage: 'OK',
headers: {},
vary: {
'accept-encoding': undefined,
authorization: 'example-value'
},
cachedAt: 0,
staleAt: 0,
deleteAt: 0
})

expectAssignable<CacheInterceptor.MemoryCacheStoreOpts>({})
expectAssignable<CacheInterceptor.MemoryCacheStoreOpts>({
maxSize: 0
4 changes: 2 additions & 2 deletions types/cache-interceptor.d.ts
@@ -70,7 +70,7 @@ declare namespace CacheHandler {
     statusCode: number
     statusMessage: string
     headers: Record<string, string | string[]>
-    vary?: Record<string, string | string[]>
+    vary?: Record<string, string | string[] | null>
     etag?: string
     cacheControlDirectives?: CacheControlDirectives
     cachedAt: number
@@ -88,7 +88,7 @@
     statusCode: number
     statusMessage: string
     headers: Record<string, string | string[]>
-    vary?: Record<string, string | string[]>
+    vary?: Record<string, string | string[] | null>
     etag?: string
     body?: Readable | Iterable<Buffer> | AsyncIterable<Buffer> | Buffer | Iterable<string> | AsyncIterable<string> | string
     cacheControlDirectives: CacheControlDirectives,
