add resultCacheMaxSize #8107

Merged
Changes from 3 commits
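For context, a minimal usage sketch of the option this PR adds (not part of the diff; the limit value is only illustrative), assuming the InMemoryCacheConfig change shown in inMemoryCache.ts below:

import { InMemoryCache } from '@apollo/client';

// Cap each memoized result cache (the optimism `wrap` caches) at roughly
// 2^16 entries instead of letting them grow without bound.
const cache = new InMemoryCache({
  resultCacheMaxSize: Math.pow(2, 16),
});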
5 changes: 5 additions & 0 deletions src/cache/inmemory/__mocks__/optimism.ts
@@ -0,0 +1,5 @@
// Manual Jest mock for the "optimism" package: re-export the real module,
// but replace `wrap` with a jest.fn wrapper so tests can inspect the
// options (notably `max`) that InMemoryCache and StoreReader pass to it.
const optimism = jest.requireActual('optimism');

module.exports = {
  ...optimism,
  wrap: jest.fn(optimism.wrap),
};
33 changes: 33 additions & 0 deletions src/cache/inmemory/__tests__/cache.ts
@@ -6,6 +6,9 @@ import { makeReference, Reference, makeVar, TypedDocumentNode, isReference, Docu
import { Cache } from '../../../cache';
import { InMemoryCache, InMemoryCacheConfig } from '../inMemoryCache';

jest.mock('optimism');
import { wrap } from 'optimism';

disableFragmentWarnings();

describe('Cache', () => {
@@ -1733,6 +1736,36 @@ describe('Cache', () => {
});
});

describe('resultCacheMaxSize', () => {
let wrapSpy: jest.Mock = wrap as jest.Mock;
beforeEach(() => {
wrapSpy.mockClear();
});

it("does not set max size on caches if resultCacheMaxSize is not configured", () => {
new InMemoryCache();
expect(wrapSpy).toHaveBeenCalled();

// The first wrap call is for getFragmentQueryDocument which intentionally
// does not have a max set since it's not expected to grow.
wrapSpy.mock.calls.splice(1).forEach(([, { max }]) => {
expect(max).toBeUndefined();
})
});

it("configures max size on caches when resultCacheMaxSize is set", () => {
const resultCacheMaxSize = 12345;
new InMemoryCache({ resultCacheMaxSize });
expect(wrapSpy).toHaveBeenCalled();

// The first wrap call is for getFragmentQueryDocument which intentionally
// does not have a max set since it's not expected to grow.
wrapSpy.mock.calls.splice(1).forEach(([, { max }]) => {
expect(max).toBe(resultCacheMaxSize);
})
});
});

describe("InMemoryCache#broadcastWatches", function () {
it("should keep distinct consumers distinct (issue #5733)", function () {
const cache = new InMemoryCache();
31 changes: 31 additions & 0 deletions src/cache/inmemory/__tests__/readFromStore.ts
@@ -19,6 +19,37 @@ import {
TypedDocumentNode,
} from '../../../core';

jest.mock('optimism');
import { wrap } from 'optimism';

describe('resultCacheMaxSize', () => {
const cache = new InMemoryCache();
let wrapSpy: jest.Mock = wrap as jest.Mock;
beforeEach(() => {
wrapSpy.mockClear();
});

it("does not set max size on caches if resultCacheMaxSize is not configured", () => {
new StoreReader({ cache });
expect(wrapSpy).toHaveBeenCalled();

wrapSpy.mock.calls.forEach(([, { max }]) => {
expect(max).toBeUndefined();
})
});

it("configures max size on caches when resultCacheMaxSize is set", () => {
const resultCacheMaxSize = 12345;
new StoreReader({ cache, resultCacheMaxSize });
expect(wrapSpy).toHaveBeenCalled();

wrapSpy.mock.calls.forEach(([, { max }]) => {
expect(max).toBe(resultCacheMaxSize);
})
});
});


describe('reading from the store', () => {
const reader = new StoreReader({
cache: new InMemoryCache(),
11 changes: 0 additions & 11 deletions src/cache/inmemory/entityStore.ts
@@ -341,17 +341,6 @@ export abstract class EntityStore implements NormalizedCache {
}
}

// Remove every Layer, leaving behind only the Root and the Stump.
public prune(): EntityStore {
if (this instanceof Layer) {
const parent = this.removeLayer(this.id);
if (parent !== this) {
return parent.prune();
}
}
return this;
}

public abstract getStorage(
idOrObj: string | StoreObject,
...storeFieldNames: (string | number)[]
71 changes: 41 additions & 30 deletions src/cache/inmemory/inMemoryCache.ts
@@ -2,7 +2,7 @@
import './fixPolyfills';

import { DocumentNode } from 'graphql';
import { wrap } from 'optimism';
import { OptimisticWrapperFunction, wrap } from 'optimism';

import { ApolloCache, BatchOptions } from '../core/cache';
import { Cache } from '../core/types/Cache';
@@ -33,6 +33,7 @@ export interface InMemoryCacheConfig extends ApolloReducerConfig {
resultCaching?: boolean;
possibleTypes?: PossibleTypesMap;
typePolicies?: TypePolicies;
resultCacheMaxSize?: number;
}

type BroadcastOptions = Pick<
@@ -60,6 +61,11 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
private storeReader: StoreReader;
private storeWriter: StoreWriter;

private maybeBroadcastWatch: OptimisticWrapperFunction<
[Cache.WatchOptions, BroadcastOptions?],
any,
[Cache.WatchOptions]>;

// Dynamically imported code can augment existing typePolicies or
// possibleTypes by calling cache.policies.addTypePolicies or
// cache.policies.addPossibletypes.
@@ -79,6 +85,10 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
typePolicies: this.config.typePolicies,
});

this.init();
}

private init() {
// Passing { resultCaching: false } in the InMemoryCache constructor options
// will completely disable dependency tracking, which will improve memory
// usage but worsen the performance of repeated reads.
@@ -99,8 +109,37 @@
this.storeReader = new StoreReader({
cache: this,
addTypename: this.addTypename,
resultCacheMaxSize: this.config.resultCacheMaxSize,
}),
);

this.maybeBroadcastWatch = wrap((
c: Cache.WatchOptions,
options?: BroadcastOptions,
) => {
return this.broadcastWatch(c, options);
}, {
max: this.config.resultCacheMaxSize,
makeCacheKey: (c: Cache.WatchOptions) => {
// Return a cache key (thus enabling result caching) only if we're
// currently using a data store that can track cache dependencies.
const store = c.optimistic ? this.optimisticData : this.data;
if (supportsResultCaching(store)) {
const { optimistic, rootId, variables } = c;
return store.makeCacheKey(
c.query,
// Different watches can have the same query, optimistic
// status, rootId, and variables, but if their callbacks are
// different, the (identical) result needs to be delivered to
// each distinct callback. The easiest way to achieve that
// separation is to include c.callback in the cache key for
// maybeBroadcastWatch calls. See issue #5733.
c.callback,
JSON.stringify({ optimistic, rootId, variables }),
);
}
}
});
}

public restore(data: NormalizedCacheObject): this {
@@ -285,8 +324,7 @@
}

public reset(): Promise<void> {
this.optimisticData = this.optimisticData.prune();
this.data.clear();
this.init();
Reviewer comment (Member): Thanks to this change, resetting the cache now dumps this.storeReader and all other result-caching-related data, and replaces them with empty data structures. (A short illustrative sketch follows this method.)

this.broadcastWatches();
return Promise.resolve();
}
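
Editor's sketch (hypothetical usage, not part of the diff) of the effect described in the reviewer comment above: reset() now goes through init(), which rebuilds storeReader and maybeBroadcastWatch, so previously memoized results become garbage-collectable.

import { InMemoryCache } from '@apollo/client';

async function demo() {
  const cache = new InMemoryCache({ resultCacheMaxSize: 1024 });

  // ...reads and watches populate the bounded result caches here...

  await cache.reset();
  // reset() calls init(), which replaces storeReader and
  // maybeBroadcastWatch, dropping every previously memoized result.
}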
@@ -423,33 +461,6 @@
}
}

private maybeBroadcastWatch = wrap((
c: Cache.WatchOptions,
options?: BroadcastOptions,
) => {
return this.broadcastWatch(c, options);
}, {
makeCacheKey: (c: Cache.WatchOptions) => {
// Return a cache key (thus enabling result caching) only if we're
// currently using a data store that can track cache dependencies.
const store = c.optimistic ? this.optimisticData : this.data;
if (supportsResultCaching(store)) {
const { optimistic, rootId, variables } = c;
return store.makeCacheKey(
c.query,
// Different watches can have the same query, optimistic
// status, rootId, and variables, but if their callbacks are
// different, the (identical) result needs to be delivered to
// each distinct callback. The easiest way to achieve that
// separation is to include c.callback in the cache key for
// maybeBroadcastWatch calls. See issue #5733.
c.callback,
JSON.stringify({ optimistic, rootId, variables }),
);
}
}
});

// This method is wrapped by maybeBroadcastWatch, which is called by
// broadcastWatches, so that we compute and broadcast results only when
// the data that would be broadcast might have changed. It would be
93 changes: 51 additions & 42 deletions src/cache/inmemory/readFromStore.ts
@@ -80,11 +80,62 @@ type ExecSubSelectedArrayOptions = {
export interface StoreReaderConfig {
cache: InMemoryCache,
addTypename?: boolean;
resultCacheMaxSize?: number;
}

export class StoreReader {
// Cached version of execSelectionSetImpl.
private executeSelectionSet: OptimisticWrapperFunction<
[ExecSelectionSetOptions], // Actual arguments tuple type.
ExecResult, // Actual return type.
// Arguments type after keyArgs translation.
[SelectionSetNode, StoreObject | Reference, ReadMergeModifyContext]>;

// Cached version of execSubSelectedArrayImpl.
private executeSubSelectedArray: OptimisticWrapperFunction<
[ExecSubSelectedArrayOptions],
ExecResult<any>,
[ExecSubSelectedArrayOptions]>;

constructor(private config: StoreReaderConfig) {
this.config = { addTypename: true, ...config };

this.executeSelectionSet = wrap(options => this.execSelectionSetImpl(options), {
Reviewer comment (Member) on lines 100 to 103: I don't think these initializations make sense to move to an init or reset method (like I did in InMemoryCache), since it's simpler to dump/replace the whole StoreReader object, and let the original one be garbage collected. (See the sketch after this constructor.)

keyArgs(options) {
return [
options.selectionSet,
options.objectOrReference,
options.context,
];
},
max: this.config.resultCacheMaxSize,
// Note that the parameters of makeCacheKey are determined by the
// array returned by keyArgs.
makeCacheKey(selectionSet, parent, context) {
if (supportsResultCaching(context.store)) {
return context.store.makeCacheKey(
selectionSet,
isReference(parent) ? parent.__ref : parent,
context.varString,
);
}
}
});

this.executeSubSelectedArray = wrap((options: ExecSubSelectedArrayOptions) => {
return this.execSubSelectedArrayImpl(options);
}, {
max: this.config.resultCacheMaxSize,
makeCacheKey({ field, array, context }) {
if (supportsResultCaching(context.store)) {
return context.store.makeCacheKey(
field,
array,
context.varString,
);
}
}
});
}
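
To illustrate the reviewer comment above, here is a self-contained, hypothetical sketch (invented names, not Apollo APIs) of the "replace rather than reset" pattern: the owner constructs a fresh memoizing object and lets the old one be garbage collected.

// Stands in for StoreReader and its wrapped, size-limited result caches.
class MemoizingHelper {
  private results = new Map<string, unknown>();
  constructor(readonly maxSize?: number) {}
}

class Owner {
  private helper: MemoizingHelper;

  constructor(private maxSize?: number) {
    this.helper = new MemoizingHelper(maxSize);
  }

  reset() {
    // Instead of clearing the helper's internal caches one by one,
    // replace the whole object; the previous instance and everything
    // it memoized become unreachable and eligible for garbage collection.
    this.helper = new MemoizingHelper(this.maxSize);
  }
}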

/**
@@ -152,33 +203,6 @@ export class StoreReader {
return false;
}

// Cached version of execSelectionSetImpl.
private executeSelectionSet: OptimisticWrapperFunction<
[ExecSelectionSetOptions], // Actual arguments tuple type.
ExecResult, // Actual return type.
// Arguments type after keyArgs translation.
[SelectionSetNode, StoreObject | Reference, ReadMergeModifyContext]
> = wrap(options => this.execSelectionSetImpl(options), {
keyArgs(options) {
return [
options.selectionSet,
options.objectOrReference,
options.context,
];
},
// Note that the parameters of makeCacheKey are determined by the
// array returned by keyArgs.
makeCacheKey(selectionSet, parent, context) {
if (supportsResultCaching(context.store)) {
return context.store.makeCacheKey(
selectionSet,
isReference(parent) ? parent.__ref : parent,
context.varString,
);
}
}
});

// Uncached version of executeSelectionSet.
private execSelectionSetImpl({
selectionSet,
@@ -330,21 +354,6 @@

private knownResults = new WeakMap<Record<string, any>, SelectionSetNode>();

// Cached version of execSubSelectedArrayImpl.
private executeSubSelectedArray = wrap((options: ExecSubSelectedArrayOptions) => {
return this.execSubSelectedArrayImpl(options);
}, {
makeCacheKey({ field, array, context }) {
if (supportsResultCaching(context.store)) {
return context.store.makeCacheKey(
field,
array,
context.varString,
);
}
}
});

// Uncached version of executeSubSelectedArray.
private execSubSelectedArrayImpl({
field,