
Commit

add resultCacheMaxSize configuration
Sofian Hnaide committed May 4, 2021
1 parent b43a60c commit 482f4d8
Showing 5 changed files with 158 additions and 70 deletions.
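
For reference, a minimal usage sketch of the option this commit adds (the option name comes from the diff below; InMemoryCache is the standard Apollo Client export, and 1000 is an arbitrary example value):

import { InMemoryCache } from '@apollo/client';

// resultCacheMaxSize bounds the internal memoization caches that
// InMemoryCache and StoreReader create via optimism's wrap().
// When omitted, optimism's own default limit applies.
const cache = new InMemoryCache({
  resultCacheMaxSize: 1000,
});
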
5 changes: 5 additions & 0 deletions src/cache/inmemory/__mocks__/optimism.ts
@@ -0,0 +1,5 @@
const optimism = jest.requireActual('optimism');
module.exports = {
...optimism,
wrap: jest.fn(optimism.wrap),
};
35 changes: 35 additions & 0 deletions src/cache/inmemory/__tests__/cache.ts
@@ -6,6 +6,9 @@ import { makeReference, Reference, makeVar, TypedDocumentNode, isReference, Docu
import { Cache } from '../../../cache';
import { InMemoryCache, InMemoryCacheConfig } from '../inMemoryCache';

jest.mock('optimism');
import { wrap } from 'optimism';

disableFragmentWarnings();

describe('Cache', () => {
@@ -1733,6 +1736,38 @@ describe('Cache', () => {
});
});

describe('resultCacheMaxSize', () => {
let wrapSpy: jest.Mock = wrap as jest.Mock;
beforeEach(() => {
wrapSpy.mockClear();
});

it("does not set max size on caches if resultCacheMaxSize is not configured", () => {
new InMemoryCache();
expect(wrapSpy).toHaveBeenCalled();
/*
* The first wrap call is for getFragmentQueryDocument which intentionally
* does not have a max set since it's not expected to grow.
*/
wrapSpy.mock.calls.splice(1).forEach(([, { max }]) => {
expect(max).toBeUndefined();
})
});

it("configures max size on caches when resultCacheMaxSize is set", () => {
const resultCacheMaxSize = 12345;
new InMemoryCache({ resultCacheMaxSize });
expect(wrapSpy).toHaveBeenCalled();
/*
* The first wrap call is for getFragmentQueryDocument which intentionally
* does not have a max set since it's not expected to grow.
*/
wrapSpy.mock.calls.splice(1).forEach(([, { max }]) => {
expect(max).toBe(resultCacheMaxSize);
})
});
});

describe("InMemoryCache#broadcastWatches", function () {
it("should keep distinct consumers distinct (issue #5733)", function () {
const cache = new InMemoryCache();
31 changes: 31 additions & 0 deletions src/cache/inmemory/__tests__/readFromStore.ts
@@ -19,6 +19,37 @@ import {
TypedDocumentNode,
} from '../../../core';

jest.mock('optimism');
import { wrap } from 'optimism';

describe('resultCacheMaxSize', () => {
const cache = new InMemoryCache();
let wrapSpy: jest.Mock = wrap as jest.Mock;
beforeEach(() => {
wrapSpy.mockClear();
});

it("does not set max size on caches if resultCacheMaxSize is not configured", () => {
new StoreReader({ cache });
expect(wrapSpy).toHaveBeenCalled();

wrapSpy.mock.calls.forEach(([, { max }]) => {
expect(max).toBeUndefined();
})
});

it("configures max size on caches when resultCacheMaxSize is set", () => {
const resultCacheMaxSize = 12345;
new StoreReader({ cache, resultCacheMaxSize });
expect(wrapSpy).toHaveBeenCalled();

wrapSpy.mock.calls.forEach(([, { max }]) => {
expect(max).toBe(resultCacheMaxSize);
})
});
});


describe('reading from the store', () => {
const reader = new StoreReader({
cache: new InMemoryCache(),
64 changes: 36 additions & 28 deletions src/cache/inmemory/inMemoryCache.ts
@@ -2,7 +2,7 @@
import './fixPolyfills';

import { DocumentNode } from 'graphql';
import { wrap } from 'optimism';
import { OptimisticWrapperFunction, wrap } from 'optimism';

import { ApolloCache, BatchOptions } from '../core/cache';
import { Cache } from '../core/types/Cache';
@@ -33,6 +33,7 @@ export interface InMemoryCacheConfig extends ApolloReducerConfig {
resultCaching?: boolean;
possibleTypes?: PossibleTypesMap;
typePolicies?: TypePolicies;
resultCacheMaxSize?: number;
}

type BroadcastOptions = Pick<
@@ -60,6 +61,11 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
private storeReader: StoreReader;
private storeWriter: StoreWriter;

private maybeBroadcastWatch: OptimisticWrapperFunction<
[Cache.WatchOptions, BroadcastOptions?],
any,
[Cache.WatchOptions]>;

// Dynamically imported code can augment existing typePolicies or
// possibleTypes by calling cache.policies.addTypePolicies or
// cache.policies.addPossibleTypes.
@@ -99,8 +105,37 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
this.storeReader = new StoreReader({
cache: this,
addTypename: this.addTypename,
resultCacheMaxSize: this.config.resultCacheMaxSize,
}),
);

this.maybeBroadcastWatch = wrap((
c: Cache.WatchOptions,
options?: BroadcastOptions,
) => {
return this.broadcastWatch(c, options);
}, {
max: this.config.resultCacheMaxSize,
makeCacheKey: (c: Cache.WatchOptions) => {
// Return a cache key (thus enabling result caching) only if we're
// currently using a data store that can track cache dependencies.
const store = c.optimistic ? this.optimisticData : this.data;
if (supportsResultCaching(store)) {
const { optimistic, rootId, variables } = c;
return store.makeCacheKey(
c.query,
// Different watches can have the same query, optimistic
// status, rootId, and variables, but if their callbacks are
// different, the (identical) result needs to be delivered to
// each distinct callback. The easiest way to achieve that
// separation is to include c.callback in the cache key for
// maybeBroadcastWatch calls. See issue #5733.
c.callback,
JSON.stringify({ optimistic, rootId, variables }),
);
}
}
});
}

public restore(data: NormalizedCacheObject): this {
@@ -423,33 +458,6 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
}
}

private maybeBroadcastWatch = wrap((
c: Cache.WatchOptions,
options?: BroadcastOptions,
) => {
return this.broadcastWatch(c, options);
}, {
makeCacheKey: (c: Cache.WatchOptions) => {
// Return a cache key (thus enabling result caching) only if we're
// currently using a data store that can track cache dependencies.
const store = c.optimistic ? this.optimisticData : this.data;
if (supportsResultCaching(store)) {
const { optimistic, rootId, variables } = c;
return store.makeCacheKey(
c.query,
// Different watches can have the same query, optimistic
// status, rootId, and variables, but if their callbacks are
// different, the (identical) result needs to be delivered to
// each distinct callback. The easiest way to achieve that
// separation is to include c.callback in the cache key for
// maybeBroadcastWatch calls. See issue #5733.
c.callback,
JSON.stringify({ optimistic, rootId, variables }),
);
}
}
});

// This method is wrapped by maybeBroadcastWatch, which is called by
// broadcastWatches, so that we compute and broadcast results only when
// the data that would be broadcast might have changed. It would be
93 changes: 51 additions & 42 deletions src/cache/inmemory/readFromStore.ts
@@ -80,11 +80,62 @@ type ExecSubSelectedArrayOptions = {
export interface StoreReaderConfig {
cache: InMemoryCache,
addTypename?: boolean;
resultCacheMaxSize?: number;
}

export class StoreReader {
// Cached version of execSelectionSetImpl.
private executeSelectionSet: OptimisticWrapperFunction<
[ExecSelectionSetOptions], // Actual arguments tuple type.
ExecResult, // Actual return type.
// Arguments type after keyArgs translation.
[SelectionSetNode, StoreObject | Reference, ReadMergeModifyContext]>;

// Cached version of execSubSelectedArrayImpl.
private executeSubSelectedArray: OptimisticWrapperFunction<
[ExecSubSelectedArrayOptions],
ExecResult<any>,
[ExecSubSelectedArrayOptions]>;

constructor(private config: StoreReaderConfig) {
this.config = { addTypename: true, ...config };

this.executeSelectionSet = wrap(options => this.execSelectionSetImpl(options), {
keyArgs(options) {
return [
options.selectionSet,
options.objectOrReference,
options.context,
];
},
max: this.config.resultCacheMaxSize,
// Note that the parameters of makeCacheKey are determined by the
// array returned by keyArgs.
makeCacheKey(selectionSet, parent, context) {
if (supportsResultCaching(context.store)) {
return context.store.makeCacheKey(
selectionSet,
isReference(parent) ? parent.__ref : parent,
context.varString,
);
}
}
});

this.executeSubSelectedArray = wrap((options: ExecSubSelectedArrayOptions) => {
return this.execSubSelectedArrayImpl(options);
}, {
max: this.config.resultCacheMaxSize,
makeCacheKey({ field, array, context }) {
if (supportsResultCaching(context.store)) {
return context.store.makeCacheKey(
field,
array,
context.varString,
);
}
}
});
}

/**
@@ -152,33 +203,6 @@ export class StoreReader {
return false;
}

// Cached version of execSelectionSetImpl.
private executeSelectionSet: OptimisticWrapperFunction<
[ExecSelectionSetOptions], // Actual arguments tuple type.
ExecResult, // Actual return type.
// Arguments type after keyArgs translation.
[SelectionSetNode, StoreObject | Reference, ReadMergeModifyContext]
> = wrap(options => this.execSelectionSetImpl(options), {
keyArgs(options) {
return [
options.selectionSet,
options.objectOrReference,
options.context,
];
},
// Note that the parameters of makeCacheKey are determined by the
// array returned by keyArgs.
makeCacheKey(selectionSet, parent, context) {
if (supportsResultCaching(context.store)) {
return context.store.makeCacheKey(
selectionSet,
isReference(parent) ? parent.__ref : parent,
context.varString,
);
}
}
});

// Uncached version of executeSelectionSet.
private execSelectionSetImpl({
selectionSet,
@@ -330,21 +354,6 @@ export class StoreReader {

private knownResults = new WeakMap<Record<string, any>, SelectionSetNode>();

// Cached version of execSubSelectedArrayImpl.
private executeSubSelectedArray = wrap((options: ExecSubSelectedArrayOptions) => {
return this.execSubSelectedArrayImpl(options);
}, {
makeCacheKey({ field, array, context }) {
if (supportsResultCaching(context.store)) {
return context.store.makeCacheKey(
field,
array,
context.varString,
);
}
}
});

// Uncached version of executeSubSelectedArray.
private execSubSelectedArrayImpl({
field,
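
For context, a rough sketch (not part of this commit) of how optimism's wrap() honors the max option that these changes now thread through, assuming optimism's documented behavior of evicting least-recently-used entries once the number of cached results exceeds max:

import { wrap } from 'optimism';

// A hypothetical expensive function, memoized with a bounded result cache.
function slowSquare(n: number): number {
  return n * n;
}

// max caps how many distinct results the wrapper retains; the
// least-recently-used entry is evicted once the limit is exceeded.
const memoizedSquare = wrap(slowSquare, { max: 2 });

memoizedSquare(1); // computed and cached
memoizedSquare(2); // computed and cached
memoizedSquare(3); // computed; the entry for 1 is evicted
memoizedSquare(1); // recomputed, because it was evicted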
