From a9d29202d9e41dbc51f3ea14882b47548a1f092c Mon Sep 17 00:00:00 2001 From: James Baxley Date: Fri, 14 Jul 2017 13:03:12 -0700 Subject: [PATCH 1/4] run prettier on all files --- benchmark/index.ts | 254 +-- benchmark/util.ts | 109 +- package.json | 12 + src/ApolloClient.ts | 199 ++- src/actions.ts | 97 +- src/core/ObservableQuery.ts | 232 ++- src/core/QueryManager.ts | 521 +++--- src/core/types.ts | 4 +- src/core/watchQueryOptions.ts | 39 +- src/data/fragmentMatcher.ts | 98 +- src/data/mutationResults.ts | 29 +- src/data/proxy.ts | 65 +- src/data/readFromStore.ts | 145 +- src/data/replaceQueryResults.ts | 38 +- src/data/resultReducers.ts | 31 +- src/data/store.ts | 164 +- src/data/storeUtils.ts | 108 +- src/data/writeToStore.ts | 157 +- src/errors/ApolloError.ts | 61 +- src/index.ts | 50 +- src/mutations/store.ts | 11 +- src/optimistic-data/store.ts | 43 +- src/queries/directives.ts | 27 +- src/queries/getFromAST.ts | 92 +- src/queries/networkStatus.ts | 4 +- src/queries/queryTransform.ts | 58 +- src/queries/store.ts | 80 +- src/scheduler/scheduler.ts | 69 +- src/store.ts | 94 +- src/transport/Deduplicator.ts | 29 +- src/transport/afterware.ts | 12 +- src/transport/batchedNetworkInterface.ts | 167 +- src/transport/batching.ts | 40 +- src/transport/middleware.ts | 13 +- src/transport/networkInterface.ts | 94 +- src/util/Observable.ts | 8 +- src/util/assign.ts | 33 +- src/util/cloneDeep.ts | 2 +- src/util/errorHandling.ts | 2 +- src/util/isEqual.ts | 9 +- src/util/maybeDeepFreeze.ts | 19 +- test/ApolloClient.ts | 1218 +++++++++++-- test/ObservableQuery.ts | 830 +++++---- test/QueryManager.ts | 1564 +++++++++------- test/assign.ts | 28 +- test/batchedNetworkInterface.ts | 309 ++-- test/batching.ts | 109 +- test/client.ts | 1469 ++++++++------- test/customResolvers.ts | 63 +- test/deduplicator.ts | 139 +- test/diffAgainstStore.ts | 203 ++- test/directives.ts | 58 +- test/errors.ts | 10 +- test/fetchMore.ts | 169 +- test/fixtures/redux-todomvc/index.ts | 8 +- test/fixtures/redux-todomvc/reducers.ts | 54 +- test/fixtures/redux-todomvc/types.ts | 6 +- test/fragmentMatcher.ts | 51 +- test/getFromAST.ts | 391 +++- test/graphqlSubscriptions.ts | 75 +- test/mockNetworkInterface.ts | 45 +- test/mocks/mockFetch.ts | 32 +- test/mocks/mockNetworkInterface.ts | 152 +- test/mocks/mockQueryManager.ts | 12 +- test/mutationResults.ts | 1350 ++++++++------ test/networkInterface.ts | 156 +- test/optimistic.ts | 2089 +++++++++++++--------- test/proxy.ts | 1574 ++++++++++++---- test/queryTransform.ts | 224 +-- test/readFromStore.ts | 261 +-- test/roundtrip.ts | 664 +++---- test/scheduler.ts | 266 +-- test/store.ts | 90 +- test/subscribeToMore.ts | 82 +- test/tests.ts | 11 +- test/util/observableToPromise.ts | 23 +- test/util/subscribeAndCount.ts | 7 +- test/util/wrap.ts | 11 +- test/writeToStore.ts | 764 ++++---- tslint.json | 64 - 80 files changed, 10939 insertions(+), 7011 deletions(-) diff --git a/benchmark/index.ts b/benchmark/index.ts index e8b6ae7f713..eb679aa2281 100644 --- a/benchmark/index.ts +++ b/benchmark/index.ts @@ -14,28 +14,17 @@ import { dataIdFromObject, } from './util'; -import { - ApolloClient, - ApolloQueryResult, - ObservableQuery, -} from '../src/index'; +import { ApolloClient, ApolloQueryResult, ObservableQuery } from '../src/index'; -import { - diffQueryAgainstStore, -} from '../src/data/readFromStore'; +import { diffQueryAgainstStore } from '../src/data/readFromStore'; import mockNetworkInterface, { MockedResponse, } from '../test/mocks/mockNetworkInterface'; -import { - Deferred, -} 
from 'benchmark'; +import { Deferred } from 'benchmark'; -import { - times, - cloneDeep, -} from 'lodash'; +import { times, cloneDeep } from 'lodash'; const simpleQuery = gql` query { @@ -43,7 +32,8 @@ const simpleQuery = gql` firstName lastName } -}`; + } +`; const simpleResult = { data: { @@ -70,11 +60,11 @@ const getClientInstance = () => { }; const createReservations = (count: number) => { - const reservations: { - name: string, - id: string, - }[] = []; - times(count, (reservationIndex) => { + const reservations: { + name: string; + id: string; + }[] = []; + times(count, reservationIndex => { reservations.push({ name: 'Fake Reservation', id: reservationIndex.toString(), @@ -83,18 +73,18 @@ const createReservations = (count: number) => { return reservations; }; -group((end) => { - benchmark('constructing an instance', (done) => { +group(end => { + benchmark('constructing an instance', done => { const c = new ApolloClient({}); done(); }); end(); }); -group((end) => { - benchmark('fetching a query result from mocked server', (done) => { +group(end => { + benchmark('fetching a query result from mocked server', done => { const client = getClientInstance(); - client.query({ query: simpleQuery }).then((result) => { + client.query({ query: simpleQuery }).then(result => { done(); }); }); @@ -102,8 +92,8 @@ group((end) => { end(); }); -group((end) => { - benchmark('write data and receive update from the cache', (done) => { +group(end => { + benchmark('write data and receive update from the cache', done => { const client = getClientInstance(); const observable = client.watchQuery({ query: simpleQuery, @@ -125,42 +115,49 @@ group((end) => { end(); }); -group((end) => { +group(end => { // This benchmark is supposed to check whether the time // taken to deliver updates is linear in the number of subscribers or not. // (Should be linear). When plotting the results from this benchmark, // the `meanTimes` structure can be used. 
const meanTimes: { [subscriberCount: string]: number } = {}; - times(50, (countR) => { + times(50, countR => { const count = countR * 5; - benchmark({ - name: `write data and deliver update to ${count} subscribers`, - count, - }, (done) => { - const promises: Promise[] = []; - const client = getClientInstance(); - - times(count, () => { - promises.push(new Promise((resolve, reject) => { - client.watchQuery({ - query: simpleQuery, - fetchPolicy: 'cache-only', - }).subscribe({ - next(res: ApolloQueryResult) { - if (Object.keys(res.data).length > 0) { - resolve(); - } - }, - }); - })); - }); + benchmark( + { + name: `write data and deliver update to ${count} subscribers`, + count, + }, + done => { + const promises: Promise[] = []; + const client = getClientInstance(); + + times(count, () => { + promises.push( + new Promise((resolve, reject) => { + client + .watchQuery({ + query: simpleQuery, + fetchPolicy: 'cache-only', + }) + .subscribe({ + next(res: ApolloQueryResult) { + if (Object.keys(res.data).length > 0) { + resolve(); + } + }, + }); + }), + ); + }); - client.query({ query: simpleQuery }); - Promise.all(promises).then(() => { - done(); - }); - }); + client.query({ query: simpleQuery }); + Promise.all(promises).then(() => { + done(); + }); + }, + ); afterEach((description: DescriptionObject, event: any) => { const iterCount = description['count'] as number; @@ -180,8 +177,9 @@ times(25, (countR: number) => { id __typename } - }`; - const originalVariables = {id: 1}; + } + `; + const originalVariables = { id: 1 }; const originalResult = { data: { author: { @@ -192,12 +190,12 @@ times(25, (countR: number) => { }, }; - group((end) => { + group(end => { // construct a set of mocked responses that each // returns an author with a different id (but the // same name) so we can populate the cache. - const mockedResponses: MockedResponse[] = []; - times(count, (index) => { + const mockedResponses: MockedResponse[] = []; + times(count, index => { const result = cloneDeep(originalResult); result.data.author.id = index; @@ -217,30 +215,37 @@ times(25, (countR: number) => { }); // insert a bunch of stuff into the cache - const promises = times(count, (index) => { - return client.query({ - query, - variables: { id: index }, - }).then((result) => { - return Promise.resolve({}); - }); + const promises = times(count, index => { + return client + .query({ + query, + variables: { id: index }, + }) + .then(result => { + return Promise.resolve({}); + }); }); const myBenchmark = benchmark; const myAfterEach = afterEach; Promise.all(promises).then(() => { - myBenchmark({ - name: `read single item from cache with ${count} items in cache`, - count, - }, (done) => { - const randomIndex = Math.floor(Math.random() * count); - client.query({ - query, - variables: { id: randomIndex }, - }).then((result) => { - done(); - }); - }); + myBenchmark( + { + name: `read single item from cache with ${count} items in cache`, + count, + }, + done => { + const randomIndex = Math.floor(Math.random() * count); + client + .query({ + query, + variables: { id: randomIndex }, + }) + .then(result => { + done(); + }); + }, + ); end(); }); @@ -249,8 +254,8 @@ times(25, (countR: number) => { // Measure the amount of time it takes to read a bunch of // objects from the cache. 
-times(50, (index) => { - group((end) => { +times(50, index => { + group(end => { const query = gql` query($id: String) { house(id: $id) { @@ -259,12 +264,13 @@ times(50, (index) => { id } } - }`; + } + `; const houseId = '12'; const reservationCount = index + 1; const reservations = createReservations(reservationCount); - const variables = {id: houseId }; + const variables = { id: houseId }; const result = { data: { house: { @@ -283,22 +289,29 @@ times(50, (index) => { }); const myBenchmark = benchmark; - client.query({ - query, - variables, - }).then(() => { - myBenchmark(`read result with ${reservationCount} items associated with the result`, (done) => { - client.query({ - query, - variables, - fetchPolicy: 'cache-only', - }).then(() => { - done(); - }); + client + .query({ + query, + variables, + }) + .then(() => { + myBenchmark( + `read result with ${reservationCount} items associated with the result`, + done => { + client + .query({ + query, + variables, + fetchPolicy: 'cache-only', + }) + .then(() => { + done(); + }); + }, + ); + + end(); }); - - end(); - }); }); }); @@ -306,8 +319,8 @@ times(50, (index) => { // // This test allows us to differentiate between the fixed cost of .query() and the fixed cost // of actually reading from the store. -times(50, (index) => { - group((end) => { +times(50, index => { + group(end => { const reservationCount = index + 1; // Prime the cache. @@ -319,7 +332,8 @@ times(50, (index) => { id } } - }`; + } + `; const variables = { id: '7' }; const reservations = createReservations(reservationCount); const result = { @@ -341,22 +355,28 @@ times(50, (index) => { // We only keep track of the results so that V8 doesn't decide to just throw // away our cache read code. const results: any[] = []; - client.query({ - query, - variables, - }).then(() => { - myBenchmark(`diff query against store with ${reservationCount} items`, (done) => { - - results.push(diffQueryAgainstStore({ - query, - variables, - store: client.store.getState()['apollo'].data, - })); - done(); + client + .query({ + query, + variables, + }) + .then(() => { + myBenchmark( + `diff query against store with ${reservationCount} items`, + done => { + results.push( + diffQueryAgainstStore({ + query, + variables, + store: client.store.getState()['apollo'].data, + }), + ); + done(); + }, + ); + + end(); }); - - end(); - }); }); }); diff --git a/benchmark/util.ts b/benchmark/util.ts index 029c96d5797..4a75e37ade1 100644 --- a/benchmark/util.ts +++ b/benchmark/util.ts @@ -1,10 +1,6 @@ import * as Benchmark from 'benchmark'; -import { - times, - cloneDeep, - merge, -} from 'lodash'; +import { times, cloneDeep, merge } from 'lodash'; // This file implements utilities around benchmark.js that make it // easier to use for our benchmarking needs. 
@@ -29,10 +25,18 @@ export interface DescriptionObject { export type Nullable = T | undefined; export type Description = DescriptionObject | string; export type CycleFunction = (doneFn: DoneFunction) => void; -export type BenchmarkFunction = (description: Description, cycleFn: CycleFunction) => void; +export type BenchmarkFunction = ( + description: Description, + cycleFn: CycleFunction, +) => void; export type GroupFunction = (done: DoneFunction) => void; -export type AfterEachCallbackFunction = (descr: Description, event: any) => void; -export type AfterEachFunction = (afterEachFnArg: AfterEachCallbackFunction) => void; +export type AfterEachCallbackFunction = ( + descr: Description, + event: any, +) => void; +export type AfterEachFunction = ( + afterEachFnArg: AfterEachCallbackFunction, +) => void; export type AfterAllCallbackFunction = () => void; export type AfterAllFunction = (afterAllFn: AfterAllCallbackFunction) => void; @@ -84,9 +88,9 @@ export const groupPromises: Promise[] = []; export const group = (groupFn: GroupFunction) => { const oldScope = currentScope(); const scope: { - benchmark?: BenchmarkFunction, - afterEach?: AfterEachFunction, - afterAll?: AfterAllFunction, + benchmark?: BenchmarkFunction; + afterEach?: AfterEachFunction; + afterAll?: AfterAllFunction; } = {}; let afterEachFn: Nullable = undefined; @@ -101,49 +105,56 @@ export const group = (groupFn: GroupFunction) => { const benchmarkPromises: Promise[] = []; - scope.benchmark = (description: string | Description, benchmarkFn: CycleFunction) => { - const name = (description as DescriptionObject).name || (description as string); + scope.benchmark = ( + description: string | Description, + benchmarkFn: CycleFunction, + ) => { + const name = + (description as DescriptionObject).name || (description as string); log('Adding benchmark: ', name); const scopes: Object[] = []; let cycleCount = 0; - benchmarkPromises.push(new Promise((resolve, reject) => { - bsuite.add(name, { - defer: true, - fn: (deferred: any) => { - const done = () => { - cycleCount++; - deferred.resolve(); - }; - - benchmarkFn(done); - }, - - onComplete: (event: any) => { - if (afterEachFn) { - afterEachFn(description, event); - } - resolve(); - }, - }); - })); + benchmarkPromises.push( + new Promise((resolve, reject) => { + bsuite.add(name, { + defer: true, + fn: (deferred: any) => { + const done = () => { + cycleCount++; + deferred.resolve(); + }; + + benchmarkFn(done); + }, + + onComplete: (event: any) => { + if (afterEachFn) { + afterEachFn(description, event); + } + resolve(); + }, + }); + }), + ); }; - - groupPromises.push(new Promise((resolve, reject) => { - const groupDone = () => { - Promise.all(benchmarkPromises).then(() => { - if (afterAllFn) { - afterAllFn(); - } - }); - resolve(); - }; - - setScope(scope); - groupFn(groupDone); - setScope(oldScope); - })); + groupPromises.push( + new Promise((resolve, reject) => { + const groupDone = () => { + Promise.all(benchmarkPromises).then(() => { + if (afterAllFn) { + afterAllFn(); + } + }); + resolve(); + }; + + setScope(scope); + groupFn(groupDone); + setScope(oldScope); + }), + ); }; export function runBenchmarks() { @@ -157,6 +168,6 @@ export function runBenchmarks() { log('Mean time in ms: ', event.target.stats.mean * 1000); log(String(event.target)); }) - .run({'async': false}); + .run({ async: false }); }); } diff --git a/package.json b/package.json index c643866fdec..92b1b154fa0 100644 --- a/package.json +++ b/package.json @@ -28,7 +28,9 @@ "bundle": "rollup -c", "postcompile": "npm run 
bundle", "prepublish": "npm run compile", + "lint-fix": "prettier --trailing-comma all --single-quote --write \"{src,test,benchmark}/**/*.ts*\"", "lint": "grunt tslint", + "lint-staged": "lint-staged", "precoverage": "npm run compile:coverage", "coverage": "COV=1 istanbul cover ./node_modules/mocha/bin/_mocha -- --reporter dot --full-trace lib/test/tests.js", "postcoverage": "remap-istanbul --input coverage/coverage.json --type lcovonly --output coverage/lcov.info", @@ -51,6 +53,13 @@ ".(ts|tsx)": "/node_modules/ts-jest/preprocessor.js" } }, + "lint-staged": { + "*.ts*": [ + "prettier --trailing-comma all --single-quote --write", + "git add" + ] + }, + "pre-commit": "lint-staged", "keywords": [ "ecmascript", "es2015", @@ -95,10 +104,13 @@ "gzip-size": "^3.0.0", "isomorphic-fetch": "^2.2.1", "istanbul": "^0.4.5", + "lint-staged": "^4.0.1", "lodash": "^4.17.1", "minimist": "^1.2.0", "mocha": "^3.0.0", "nodemon": "^1.11.0", + "pre-commit": "^1.2.2", + "prettier": "^1.5.2", "pretty-bytes": "^4.0.0", "remap-istanbul": "0.8.0", "request": "^2.75.0", diff --git a/src/ApolloClient.ts b/src/ApolloClient.ts index 865537c54b2..1ff9d674a61 100644 --- a/src/ApolloClient.ts +++ b/src/ApolloClient.ts @@ -12,7 +12,6 @@ import { /* tslint:disable */ SelectionSetNode, /* tslint:enable */ - DocumentNode, FragmentDefinitionNode, } from 'graphql'; @@ -30,17 +29,11 @@ import { Store, } from './store'; -import { - ApolloAction, -} from './actions'; +import { ApolloAction } from './actions'; -import { - CustomResolverMap, -} from './data/readFromStore'; +import { CustomResolverMap } from './data/readFromStore'; -import { - QueryManager, -} from './core/QueryManager'; +import { QueryManager } from './core/QueryManager'; import { ApolloQueryResult, @@ -48,17 +41,11 @@ import { IdGetter, } from './core/types'; -import { - ObservableQuery, -} from './core/ObservableQuery'; +import { ObservableQuery } from './core/ObservableQuery'; -import { - Observable, -} from './util/Observable'; +import { Observable } from './util/Observable'; -import { - isProduction, -} from './util/environment'; +import { isProduction } from './util/environment'; import { WatchQueryOptions, @@ -66,13 +53,9 @@ import { MutationOptions, } from './core/watchQueryOptions'; -import { - getStoreKeyName, -} from './data/storeUtils'; +import { getStoreKeyName } from './data/storeUtils'; -import { - getFragmentQueryDocument, -} from './queries/getFromAST'; +import { getFragmentQueryDocument } from './queries/getFromAST'; import { DataProxy, @@ -83,9 +66,7 @@ import { ReduxDataProxy, } from './data/proxy'; -import { - version, -} from './version'; +import { version } from './version'; /** * This type defines a "selector" function that receives state from the Redux store @@ -101,7 +82,7 @@ function defaultReduxRootSelector(state: any) { return state[DEFAULT_REDUX_ROOT_KEY]; } -function defaultDataIdFromObject (result: any): string | null { +function defaultDataIdFromObject(result: any): string | null { if (result.__typename) { if (result.id !== undefined) { return `${result.__typename}:${result.id}`; @@ -176,22 +157,22 @@ export default class ApolloClient implements DataProxy { * @param fragmentMatcher A function to use for matching fragment conditions in GraphQL documents */ - constructor(options: { - networkInterface?: NetworkInterface | ObservableNetworkInterface, - reduxRootSelector?: ApolloStateSelector, - initialState?: any, - dataIdFromObject?: IdGetter, - ssrMode?: boolean, - ssrForceFetchDelay?: number - addTypename?: boolean, - 
customResolvers?: CustomResolverMap, - connectToDevTools?: boolean, - queryDeduplication?: boolean, - fragmentMatcher?: FragmentMatcherInterface, - } = {}) { - let { - dataIdFromObject, - } = options; + constructor( + options: { + networkInterface?: NetworkInterface | ObservableNetworkInterface; + reduxRootSelector?: ApolloStateSelector; + initialState?: any; + dataIdFromObject?: IdGetter; + ssrMode?: boolean; + ssrForceFetchDelay?: number; + addTypename?: boolean; + customResolvers?: CustomResolverMap; + connectToDevTools?: boolean; + queryDeduplication?: boolean; + fragmentMatcher?: FragmentMatcherInterface; + } = {}, + ) { + let { dataIdFromObject } = options; const { networkInterface, reduxRootSelector, @@ -217,35 +198,45 @@ export default class ApolloClient implements DataProxy { this.fragmentMatcher = fragmentMatcher; } - if ( networkInterface && typeof (networkInterface).request === 'function') { + if ( + networkInterface && + typeof (networkInterface).request === + 'function' + ) { this.networkInterface = { ...networkInterface, - query: (request) => new Promise((resolve, reject) => { - const subscription = (networkInterface as ObservableNetworkInterface) - .request(request) - .subscribe({ - next: resolve, - error: reject, - complete: () => subscription.unsubscribe(), - }); - }), + query: request => + new Promise((resolve, reject) => { + const subscription = (networkInterface as ObservableNetworkInterface) + .request(request) + .subscribe({ + next: resolve, + error: reject, + complete: () => subscription.unsubscribe(), + }); + }), }; } else { - this.networkInterface = networkInterface ? networkInterface : - createNetworkInterface({ uri: '/graphql' }); + this.networkInterface = networkInterface + ? networkInterface + : createNetworkInterface({ uri: '/graphql' }); } this.initialState = initialState ? initialState : {}; this.addTypename = addTypename; this.disableNetworkFetches = ssrMode || ssrForceFetchDelay > 0; - this.dataId = dataIdFromObject = dataIdFromObject || defaultDataIdFromObject; + this.dataId = dataIdFromObject = + dataIdFromObject || defaultDataIdFromObject; this.dataIdFromObject = this.dataId; this.fieldWithArgs = getStoreKeyName; this.queryDeduplication = queryDeduplication; this.ssrMode = ssrMode; if (ssrForceFetchDelay) { - setTimeout(() => this.disableNetworkFetches = false, ssrForceFetchDelay); + setTimeout( + () => (this.disableNetworkFetches = false), + ssrForceFetchDelay, + ); } this.reducerConfig = { @@ -265,9 +256,14 @@ export default class ApolloClient implements DataProxy { // development mode const defaultConnectToDevTools = !isProduction() && - typeof window !== 'undefined' && (!(window as any).__APOLLO_CLIENT__); - - if (typeof connectToDevTools === 'undefined' ? defaultConnectToDevTools : connectToDevTools) { + typeof window !== 'undefined' && + !(window as any).__APOLLO_CLIENT__; + + if ( + typeof connectToDevTools === 'undefined' + ? 
defaultConnectToDevTools + : connectToDevTools + ) { (window as any).__APOLLO_CLIENT__ = this; } @@ -276,16 +272,23 @@ export default class ApolloClient implements DataProxy { */ if (!hasSuggestedDevtools && !isProduction()) { hasSuggestedDevtools = true; - if ( typeof window !== 'undefined' && window.document && window.top === window.self) { - + if ( + typeof window !== 'undefined' && + window.document && + window.top === window.self + ) { // First check if devtools is not installed - if (typeof (window as any).__APOLLO_DEVTOOLS_GLOBAL_HOOK__ === 'undefined') { + if ( + typeof (window as any).__APOLLO_DEVTOOLS_GLOBAL_HOOK__ === 'undefined' + ) { // Only for Chrome if (navigator.userAgent.indexOf('Chrome') > -1) { // tslint:disable-next-line - console.debug('Download the Apollo DevTools ' + - 'for a better development experience: ' + - 'https://chrome.google.com/webstore/detail/apollo-client-developer-t/jdkknkkbebbapilgoeccciglkfbmbnfm'); + console.debug( + 'Download the Apollo DevTools ' + + 'for a better development experience: ' + + 'https://chrome.google.com/webstore/detail/apollo-client-developer-t/jdkknkkbebbapilgoeccciglkfbmbnfm', + ); } } } @@ -337,7 +340,9 @@ export default class ApolloClient implements DataProxy { this.initStore(); if (options.fetchPolicy === 'cache-and-network') { - throw new Error('cache-and-network fetchPolicy can only be used with watchQuery'); + throw new Error( + 'cache-and-network fetchPolicy can only be used with watchQuery', + ); } // XXX Overwriting options is probably not the best way to do this long term... @@ -358,7 +363,9 @@ export default class ApolloClient implements DataProxy { * * It takes options as an object with the following keys and values: */ - public mutate(options: MutationOptions): Promise> { + public mutate( + options: MutationOptions, + ): Promise> { this.initStore(); return this.queryManager.mutate(options); @@ -422,7 +429,7 @@ export default class ApolloClient implements DataProxy { /** * Returns a reducer function configured according to the `reducerConfig` instance variable. */ - public reducer(): (state: Store, action: ApolloAction) => Store { + public reducer(): (state: Store, action: ApolloAction) => Store { return createApolloReducer(this.reducerConfig); } @@ -457,7 +464,7 @@ export default class ApolloClient implements DataProxy { return returnValue; }; }; - } + }; /** * This initializes the Redux store that we use as a reactive cache. @@ -470,7 +477,7 @@ export default class ApolloClient implements DataProxy { if (this.reduxRootSelector) { throw new Error( - 'Cannot initialize the store because "reduxRootSelector" is provided. ' + + 'Cannot initialize the store because "reduxRootSelector" is provided. ' + 'reduxRootSelector should only be used when the store is created outside of the client. ' + 'This may lead to unexpected results when querying the store internally. 
' + `Please remove that option from ApolloClient constructor.`, @@ -478,27 +485,29 @@ export default class ApolloClient implements DataProxy { } // If we don't have a store already, initialize a default one - this.setStore(createApolloStore({ - reduxRootKey: DEFAULT_REDUX_ROOT_KEY, - initialState: this.initialState, - config: this.reducerConfig, - logger: (store: any) => (next: any) => (action: any) => { - const result = next(action); - - if (this.devToolsHookCb) { - this.devToolsHookCb({ - action, - state: { - queries: this.queryManager.queryStore.getStore(), - mutations: this.queryManager.mutationStore.getStore(), - }, - dataWithOptimisticResults: this.queryManager.getDataWithOptimisticResults(), - }); - } + this.setStore( + createApolloStore({ + reduxRootKey: DEFAULT_REDUX_ROOT_KEY, + initialState: this.initialState, + config: this.reducerConfig, + logger: (store: any) => (next: any) => (action: any) => { + const result = next(action); + + if (this.devToolsHookCb) { + this.devToolsHookCb({ + action, + state: { + queries: this.queryManager.queryStore.getStore(), + mutations: this.queryManager.mutationStore.getStore(), + }, + dataWithOptimisticResults: this.queryManager.getDataWithOptimisticResults(), + }); + } - return result; - }, - })); + return result; + }, + }), + ); } /** @@ -517,7 +526,7 @@ export default class ApolloClient implements DataProxy { * re-execute any queries then you should make sure to stop watching any * active queries. */ - public resetStore(): Promise[]>|null { + public resetStore(): Promise[]> | null { return this.queryManager ? this.queryManager.resetStore() : null; } @@ -537,7 +546,7 @@ export default class ApolloClient implements DataProxy { // ensure existing store has apolloReducer if (typeof reduxRootSelector(store.getState()) === 'undefined') { throw new Error( - 'Existing store does not use apolloReducer. Please make sure the store ' + + 'Existing store does not use apolloReducer. 
Please make sure the store ' + 'is properly configured and "reduxRootSelector" is correctly specified.', ); } diff --git a/src/actions.ts b/src/actions.ts index 53d1843836d..38efbcf15b1 100644 --- a/src/actions.ts +++ b/src/actions.ts @@ -1,27 +1,14 @@ -import { - DocumentNode, - ExecutionResult, -} from 'graphql'; +import { DocumentNode, ExecutionResult } from 'graphql'; -import { - MutationQueryReducer, -} from './data/mutationResults'; +import { MutationQueryReducer } from './data/mutationResults'; -import { - DataProxy, -} from './data/proxy'; +import { DataProxy } from './data/proxy'; -import { - ApolloReducer, -} from './store'; +import { ApolloReducer } from './store'; -import { - FetchPolicy, -} from './core/watchQueryOptions'; +import { FetchPolicy } from './core/watchQueryOptions'; -import { - QueryStoreValue, -} from './queries/store'; +import { QueryStoreValue } from './queries/store'; export type QueryResultAction = { type: 'APOLLO_QUERY_RESULT'; @@ -35,7 +22,9 @@ export type QueryResultAction = { extraReducers?: ApolloReducer[]; }; -export function isQueryResultAction(action: ApolloAction): action is QueryResultAction { +export function isQueryResultAction( + action: ApolloAction, +): action is QueryResultAction { return action.type === 'APOLLO_QUERY_RESULT'; } @@ -47,7 +36,9 @@ export interface QueryErrorAction { fetchMoreForQueryId?: string; } -export function isQueryErrorAction(action: ApolloAction): action is QueryErrorAction { +export function isQueryErrorAction( + action: ApolloAction, +): action is QueryErrorAction { return action.type === 'APOLLO_QUERY_ERROR'; } @@ -67,7 +58,9 @@ export interface QueryInitAction { metadata: any; } -export function isQueryInitAction(action: ApolloAction): action is QueryInitAction { +export function isQueryInitAction( + action: ApolloAction, +): action is QueryInitAction { return action.type === 'APOLLO_QUERY_INIT'; } @@ -80,7 +73,9 @@ export interface QueryResultClientAction { requestId: number; } -export function isQueryResultClientAction(action: ApolloAction): action is QueryResultClientAction { +export function isQueryResultClientAction( + action: ApolloAction, +): action is QueryResultClientAction { return action.type === 'APOLLO_QUERY_RESULT_CLIENT'; } @@ -89,15 +84,17 @@ export interface QueryStopAction { queryId: string; } -export function isQueryStopAction(action: ApolloAction): action is QueryStopAction { +export function isQueryStopAction( + action: ApolloAction, +): action is QueryStopAction { return action.type === 'APOLLO_QUERY_STOP'; } // contains both the original value of a query and a reducer to transform // the query during an update export type QueryWithUpdater = { - reducer: MutationQueryReducer, - query: QueryStoreValue, + reducer: MutationQueryReducer; + query: QueryStoreValue; }; export interface MutationInitAction { @@ -113,7 +110,9 @@ export interface MutationInitAction { update?: (proxy: DataProxy, mutationResult: Object) => void; } -export function isMutationInitAction(action: ApolloAction): action is MutationInitAction { +export function isMutationInitAction( + action: ApolloAction, +): action is MutationInitAction { return action.type === 'APOLLO_MUTATION_INIT'; } @@ -130,7 +129,9 @@ export interface MutationResultAction { update?: (proxy: DataProxy, mutationResult: Object) => void; } -export function isMutationResultAction(action: ApolloAction): action is MutationResultAction { +export function isMutationResultAction( + action: ApolloAction, +): action is MutationResultAction { return action.type === 
'APOLLO_MUTATION_RESULT'; } @@ -140,7 +141,9 @@ export interface MutationErrorAction { mutationId: string; } -export function isMutationErrorAction(action: ApolloAction): action is MutationErrorAction { +export function isMutationErrorAction( + action: ApolloAction, +): action is MutationErrorAction { return action.type === 'APOLLO_MUTATION_ERROR'; } @@ -152,7 +155,9 @@ export interface UpdateQueryResultAction { newResult: Object; } -export function isUpdateQueryResultAction(action: ApolloAction): action is UpdateQueryResultAction { +export function isUpdateQueryResultAction( + action: ApolloAction, +): action is UpdateQueryResultAction { return action.type === 'APOLLO_UPDATE_QUERY_RESULT'; } @@ -161,7 +166,9 @@ export interface StoreResetAction { observableQueryIds: string[]; } -export function isStoreResetAction(action: ApolloAction): action is StoreResetAction { +export function isStoreResetAction( + action: ApolloAction, +): action is StoreResetAction { return action.type === 'APOLLO_STORE_RESET'; } @@ -175,7 +182,9 @@ export interface SubscriptionResultAction { extraReducers?: ApolloReducer[]; } -export function isSubscriptionResultAction(action: ApolloAction): action is SubscriptionResultAction { +export function isSubscriptionResultAction( + action: ApolloAction, +): action is SubscriptionResultAction { return action.type === 'APOLLO_SUBSCRIPTION_RESULT'; } @@ -197,15 +206,15 @@ export function isWriteAction(action: ApolloAction): action is WriteAction { } export type ApolloAction = - QueryResultAction | - QueryErrorAction | - QueryInitAction | - QueryResultClientAction | - QueryStopAction | - MutationInitAction | - MutationResultAction | - MutationErrorAction | - UpdateQueryResultAction | - StoreResetAction | - SubscriptionResultAction | - WriteAction; + | QueryResultAction + | QueryErrorAction + | QueryInitAction + | QueryResultClientAction + | QueryStopAction + | MutationInitAction + | MutationResultAction + | MutationErrorAction + | UpdateQueryResultAction + | StoreResetAction + | SubscriptionResultAction + | WriteAction; diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index dd804dcd92c..c02563771a3 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -7,22 +7,13 @@ import { import { Observable, Observer, Subscription } from '../util/Observable'; -import { - QueryScheduler, -} from '../scheduler/scheduler'; +import { QueryScheduler } from '../scheduler/scheduler'; -import { - ApolloError, -} from '../errors/ApolloError'; +import { ApolloError } from '../errors/ApolloError'; -import { - QueryManager, -} from './QueryManager'; +import { QueryManager } from './QueryManager'; -import { - ApolloQueryResult, - FetchType, -} from './types'; +import { ApolloQueryResult, FetchType } from './types'; import { tryFunctionOrLogError } from '../util/errorHandling'; @@ -32,11 +23,9 @@ import maybeDeepFreeze from '../util/maybeDeepFreeze'; import { NetworkStatus, isNetworkRequestInFlight, - } from '../queries/networkStatus'; +} from '../queries/networkStatus'; -import { - getOperationName, -} from '../queries/getFromAST'; +import { getOperationName } from '../queries/getFromAST'; export type ApolloCurrentResult = { data: T | {}; @@ -47,10 +36,13 @@ export type ApolloCurrentResult = { }; export interface FetchMoreOptions { - updateQuery: (previousQueryResult: Object, options: { - fetchMoreResult: Object, - queryVariables: Object, - }) => Object; + updateQuery: ( + previousQueryResult: Object, + options: { + fetchMoreResult: Object; + 
queryVariables: Object; + }, + ) => Object; } export interface UpdateQueryOptions { @@ -81,9 +73,9 @@ export class ObservableQuery extends Observable> { options, shouldSubscribe = true, }: { - scheduler: QueryScheduler, - options: WatchQueryOptions, - shouldSubscribe?: boolean, + scheduler: QueryScheduler; + options: WatchQueryOptions; + shouldSubscribe?: boolean; }) { const queryManager = scheduler.queryManager; const queryId = queryManager.generateQueryId(); @@ -108,7 +100,7 @@ export class ObservableQuery extends Observable> { public result(): Promise> { const that = this; return new Promise((resolve, reject) => { - let subscription: (Subscription | null) = null; + let subscription: Subscription | null = null; const observer: Observer> = { next(result) { resolve(result); @@ -123,7 +115,9 @@ export class ObservableQuery extends Observable> { // are fired in the meantime, observers that should have been removed // from the QueryManager will continue to fire, causing an unnecessary // performance hit. - const selectedObservers = that.observers.filter((obs: Observer>) => obs !== observer); + const selectedObservers = that.observers.filter( + (obs: Observer>) => obs !== observer, + ); if (selectedObservers.length === 0) { that.queryManager.removeQuery(that.queryId); } @@ -147,21 +141,33 @@ export class ObservableQuery extends Observable> { * @return {result: Object, loading: boolean, networkStatus: number, partial: boolean} */ public currentResult(): ApolloCurrentResult { - const { data, partial } = this.queryManager.getCurrentQueryResult(this, true); + const { data, partial } = this.queryManager.getCurrentQueryResult( + this, + true, + ); const queryStoreValue = this.queryManager.queryStore.get(this.queryId); - if (queryStoreValue && ( - (queryStoreValue.graphQLErrors && queryStoreValue.graphQLErrors.length > 0) || - queryStoreValue.networkError - )) { + if ( + queryStoreValue && + ((queryStoreValue.graphQLErrors && + queryStoreValue.graphQLErrors.length > 0) || + queryStoreValue.networkError) + ) { const error = new ApolloError({ graphQLErrors: queryStoreValue.graphQLErrors, networkError: queryStoreValue.networkError, }); - return { data: {}, loading: false, networkStatus: queryStoreValue.networkStatus, error }; + return { + data: {}, + loading: false, + networkStatus: queryStoreValue.networkStatus, + error, + }; } - const queryLoading = !queryStoreValue || queryStoreValue.networkStatus === NetworkStatus.loading; + const queryLoading = + !queryStoreValue || + queryStoreValue.networkStatus === NetworkStatus.loading; // We need to be careful about the loading state we show to the user, to try // and be vaguely in line with what the user would have seen from .subscribe() @@ -169,15 +175,16 @@ export class ObservableQuery extends Observable> { // will not end up hitting the server. // See more: https://github.com/apollostack/apollo-client/issues/707 // Basically: is there a query in flight right now (modolo the next tick)? - const loading = (this.options.fetchPolicy === 'network-only' && queryLoading) - || (partial && this.options.fetchPolicy !== 'cache-only'); + const loading = + (this.options.fetchPolicy === 'network-only' && queryLoading) || + (partial && this.options.fetchPolicy !== 'cache-only'); // if there is nothing in the query store, it means this query hasn't fired yet. Therefore the // network status is dependent on queryLoading. // XXX querying the currentResult before having fired the query is kind of weird and makes our code a lot more complicated. 
let networkStatus: NetworkStatus; if (queryStoreValue) { - networkStatus = queryStoreValue.networkStatus; + networkStatus = queryStoreValue.networkStatus; } else { networkStatus = loading ? NetworkStatus.loading : NetworkStatus.ready; } @@ -203,7 +210,11 @@ export class ObservableQuery extends Observable> { }; if (this.options.fetchPolicy === 'cache-only') { - return Promise.reject(new Error('cache-only fetchPolicy option should not be used together with query refetch.')); + return Promise.reject( + new Error( + 'cache-only fetchPolicy option should not be used together with query refetch.', + ), + ); } // Update the existing options with new variables @@ -218,15 +229,18 @@ export class ObservableQuery extends Observable> { fetchPolicy: 'network-only', }; - return this.queryManager.fetchQuery(this.queryId, combinedOptions, FetchType.refetch) - .then(result => maybeDeepFreeze(result)); + return this.queryManager + .fetchQuery(this.queryId, combinedOptions, FetchType.refetch) + .then(result => maybeDeepFreeze(result)); } public fetchMore( fetchMoreOptions: FetchMoreQueryOptions & FetchMoreOptions, ): Promise> { if (!fetchMoreOptions.updateQuery) { - throw new Error('updateQuery option is required. This function defines how to update the query data with the new results.'); + throw new Error( + 'updateQuery option is required. This function defines how to update the query data with the new results.', + ); } return Promise.resolve() .then(() => { @@ -255,21 +269,27 @@ export class ObservableQuery extends Observable> { query: combinedOptions.query, fetchPolicy: 'network-only', } as WatchQueryOptions; - return this.queryManager.fetchQuery(qid, combinedOptions, FetchType.normal, this.queryId); + return this.queryManager.fetchQuery( + qid, + combinedOptions, + FetchType.normal, + this.queryId, + ); }) - .then((fetchMoreResult) => { + .then(fetchMoreResult => { const { data } = fetchMoreResult; const reducer = fetchMoreOptions.updateQuery; - const mapFn = (previousResult: any, { variables }: {variables: any }) => { - + const mapFn = ( + previousResult: any, + { variables }: { variables: any }, + ) => { // TODO REFACTOR: reached max recursion depth (figuratively) when renaming queryVariables. // Continue renaming to variables further down when we have time. const queryVariables = variables; - return reducer( - previousResult, { - fetchMoreResult: data as Object, - queryVariables, - }); + return reducer(previousResult, { + fetchMoreResult: data as Object, + queryVariables, + }); }; this.updateQuery(mapFn); return fetchMoreResult as ApolloQueryResult; @@ -279,30 +299,29 @@ export class ObservableQuery extends Observable> { // XXX the subscription variables are separate from the query variables. // if you want to update subscription variables, right now you have to do that separately, // and you can only do it by stopping the subscription and then subscribing again with new variables. 
- public subscribeToMore( - options: SubscribeToMoreOptions, - ): () => void { + public subscribeToMore(options: SubscribeToMoreOptions): () => void { const observable = this.queryManager.startGraphQLSubscription({ query: options.document, variables: options.variables, }); const subscription = observable.subscribe({ - next: (data) => { + next: data => { if (options.updateQuery) { const reducer = options.updateQuery; - const mapFn = (previousResult: Object, { variables }: { variables: Object }) => { - return reducer( - previousResult, { - subscriptionData: { data }, - variables, - }, - ); + const mapFn = ( + previousResult: Object, + { variables }: { variables: Object }, + ) => { + return reducer(previousResult, { + subscriptionData: { data }, + variables, + }); }; this.updateQuery(mapFn); } }, - error: (err) => { + error: err => { if (options.onError) { options.onError(err); } else { @@ -324,7 +343,9 @@ export class ObservableQuery extends Observable> { // Note: if the query is not active (there are no subscribers), the promise // will return null immediately. - public setOptions(opts: ModifiableWatchQueryOptions): Promise> { + public setOptions( + opts: ModifiableWatchQueryOptions, + ): Promise> { const oldOptions = this.options; this.options = { ...this.options, @@ -338,12 +359,20 @@ export class ObservableQuery extends Observable> { } // If fetchPolicy went from cache-only to something else, or from something else to network-only - const tryFetch: boolean = (oldOptions.fetchPolicy !== 'network-only' && opts.fetchPolicy === 'network-only') - || (oldOptions.fetchPolicy === 'cache-only' && opts.fetchPolicy !== 'cache-only') - || (oldOptions.fetchPolicy === 'standby' && opts.fetchPolicy !== 'standby') - || false; - - return this.setVariables(this.options.variables, tryFetch, opts.fetchResults); + const tryFetch: boolean = + (oldOptions.fetchPolicy !== 'network-only' && + opts.fetchPolicy === 'network-only') || + (oldOptions.fetchPolicy === 'cache-only' && + opts.fetchPolicy !== 'cache-only') || + (oldOptions.fetchPolicy === 'standby' && + opts.fetchPolicy !== 'standby') || + false; + + return this.setVariables( + this.options.variables, + tryFetch, + opts.fetchResults, + ); } /** @@ -366,7 +395,11 @@ export class ObservableQuery extends Observable> { * @param fetchResults: Option to ignore fetching results when updating variables * */ - public setVariables(variables: any, tryFetch: boolean = false, fetchResults = true): Promise> { + public setVariables( + variables: any, + tryFetch: boolean = false, + fetchResults = true, + ): Promise> { const newVariables = { ...this.variables, ...variables, @@ -377,7 +410,7 @@ export class ObservableQuery extends Observable> { // request. As soon as someone observes the query, the request will kick // off. For now, we just store any changes. 
(See #1077) if (this.observers.length === 0 || !fetchResults) { - return new Promise((resolve) => resolve()); + return new Promise(resolve => resolve()); } return this.result(); @@ -387,15 +420,19 @@ export class ObservableQuery extends Observable> { // See comment above if (this.observers.length === 0) { - return new Promise((resolve) => resolve()); + return new Promise(resolve => resolve()); } // Use the same options as before, but with new variables - return this.queryManager.fetchQuery(this.queryId, { - ...this.options, - variables: this.variables, - } as WatchQueryOptions) - .then(result => maybeDeepFreeze(result)); + return this.queryManager + .fetchQuery( + this.queryId, + { + ...this.options, + variables: this.variables, + } as WatchQueryOptions, + ) + .then(result => maybeDeepFreeze(result)); } } @@ -408,8 +445,9 @@ export class ObservableQuery extends Observable> { document, } = this.queryManager.getQueryWithPreviousResult(this.queryId); - const newResult = tryFunctionOrLogError( - () => mapFn(previousResult, { variables })); + const newResult = tryFunctionOrLogError(() => + mapFn(previousResult, { variables }), + ); if (newResult) { this.queryManager.store.dispatch({ @@ -431,8 +469,13 @@ export class ObservableQuery extends Observable> { } public startPolling(pollInterval: number) { - if (this.options.fetchPolicy === 'cache-first' || (this.options.fetchPolicy === 'cache-only')) { - throw new Error('Queries that specify the cache-first and cache-only fetchPolicies cannot also be polling queries.'); + if ( + this.options.fetchPolicy === 'cache-first' || + this.options.fetchPolicy === 'cache-only' + ) { + throw new Error( + 'Queries that specify the cache-first and cache-only fetchPolicies cannot also be polling queries.', + ); } if (this.isCurrentlyPolling) { @@ -467,7 +510,7 @@ export class ObservableQuery extends Observable> { // for some reason unsubscribe gets called multiple times by some of the tests return; } - this.observers = this.observers.filter((obs) => obs !== observer); + this.observers = this.observers.filter(obs => obs !== observer); if (this.observers.length === 0) { this.tearDownQuery(); @@ -484,28 +527,30 @@ export class ObservableQuery extends Observable> { } if (!!this.options.pollInterval) { - if (this.options.fetchPolicy === 'cache-first' || (this.options.fetchPolicy === 'cache-only')) { - throw new Error('Queries that specify the cache-first and cache-only fetchPolicies cannot also be polling queries.'); + if ( + this.options.fetchPolicy === 'cache-first' || + this.options.fetchPolicy === 'cache-only' + ) { + throw new Error( + 'Queries that specify the cache-first and cache-only fetchPolicies cannot also be polling queries.', + ); } this.isCurrentlyPolling = true; - this.scheduler.startPollingQuery( - this.options, - this.queryId, - ); + this.scheduler.startPollingQuery(this.options, this.queryId); } const observer: Observer> = { next: (result: ApolloQueryResult) => { this.lastResult = result; - this.observers.forEach((obs) => { + this.observers.forEach(obs => { if (obs.next) { obs.next(result); } }); }, error: (error: ApolloError) => { - this.observers.forEach((obs) => { + this.observers.forEach(obs => { if (obs.error) { obs.error(error); } else { @@ -517,11 +562,14 @@ export class ObservableQuery extends Observable> { }, }; - this.queryManager.startQuery( this.queryId, this.options, - this.queryManager.queryListenerForObserver(this.queryId, this.options, observer), + this.queryManager.queryListenerForObserver( + this.queryId, + this.options, + observer, 
+ ), ); } @@ -532,7 +580,7 @@ export class ObservableQuery extends Observable> { } // stop all active GraphQL subscriptions - this.subscriptionHandles.forEach( sub => sub.unsubscribe() ); + this.subscriptionHandles.forEach(sub => sub.unsubscribe()); this.subscriptionHandles = []; this.queryManager.stopQuery(this.queryId); diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts index ddd942a46c6..22cc6cf2c52 100644 --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -4,9 +4,7 @@ import { Request, } from '../transport/networkInterface'; -import { - Deduplicator, -} from '../transport/Deduplicator'; +import { Deduplicator } from '../transport/Deduplicator'; import { isEqual } from '../util/isEqual'; import { assign } from '../util/assign'; @@ -19,10 +17,7 @@ import { FetchType, } from './types'; -import { - QueryStore, - QueryStoreValue, -} from '../queries/store'; +import { QueryStore, QueryStoreValue } from '../queries/store'; import { NetworkStatus, @@ -46,30 +41,20 @@ import { getMutationDefinition, } from '../queries/getFromAST'; -import { - addTypenameToDocument, -} from '../queries/queryTransform'; +import { addTypenameToDocument } from '../queries/queryTransform'; -import { - NormalizedCache, -} from '../data/storeUtils'; +import { NormalizedCache } from '../data/storeUtils'; -import { - createStoreReducer, -} from '../data/resultReducers'; +import { createStoreReducer } from '../data/resultReducers'; -import { - DataProxy, -} from '../data/proxy'; +import { DataProxy } from '../data/proxy'; import { FragmentMatcherInterface, HeuristicFragmentMatcher, } from '../data/fragmentMatcher'; -import { - isProduction, -} from '../util/environment'; +import { isProduction } from '../util/environment'; import maybeDeepFreeze from '../util/maybeDeepFreeze'; @@ -87,60 +72,37 @@ import { import { print } from 'graphql/language/printer'; -import { - readQueryFromStore, - ReadQueryOptions, -} from '../data/readFromStore'; +import { readQueryFromStore, ReadQueryOptions } from '../data/readFromStore'; -import { - diffQueryAgainstStore, -} from '../data/readFromStore'; +import { diffQueryAgainstStore } from '../data/readFromStore'; -import { - QueryWithUpdater, -} from '../actions'; +import { QueryWithUpdater } from '../actions'; import { MutationQueryReducersMap, MutationQueryReducer, } from '../data/mutationResults'; -import { - MutationStore, -} from '../mutations/store'; +import { MutationStore } from '../mutations/store'; -import { - QueryScheduler, -} from '../scheduler/scheduler'; +import { QueryScheduler } from '../scheduler/scheduler'; -import { - ApolloStateSelector, -} from '../ApolloClient'; +import { ApolloStateSelector } from '../ApolloClient'; -import { - Observer, - Subscription, - Observable, -} from '../util/Observable'; +import { Observer, Subscription, Observable } from '../util/Observable'; import { tryFunctionOrLogError } from '../util/errorHandling'; -import { - isApolloError, - ApolloError, -} from '../errors/ApolloError'; +import { isApolloError, ApolloError } from '../errors/ApolloError'; -import { - WatchQueryOptions, - SubscriptionOptions, -} from './watchQueryOptions'; +import { WatchQueryOptions, SubscriptionOptions } from './watchQueryOptions'; import { ObservableQuery } from './ObservableQuery'; export class QueryManager { public static EMIT_REDUX_ACTIONS = true; - public pollingTimers: {[queryId: string]: any}; + public pollingTimers: { [queryId: string]: any }; public scheduler: QueryScheduler; public store: ApolloStore; public networkInterface: 
NetworkInterface; @@ -166,18 +128,22 @@ export class QueryManager { // A map going from a requestId to a promise that has not yet been resolved. We use this to keep // track of queries that are inflight and reject them in case some // destabalizing action occurs (e.g. reset of the Apollo store). - private fetchQueryPromises: { [requestId: string]: { - promise: Promise>; - resolve: (result: ApolloQueryResult) => void; - reject: (error: Error) => void; - } }; + private fetchQueryPromises: { + [requestId: string]: { + promise: Promise>; + resolve: (result: ApolloQueryResult) => void; + reject: (error: Error) => void; + }; + }; // A map going from queryId to an observer for a query issued by watchQuery. We use // these to keep track of queries that are inflight and error on the observers associated // with them in case of some destabalizing action (e.g. reset of the Apollo store). - private observableQueries: { [queryId: string]: { - observableQuery: ObservableQuery; - } }; + private observableQueries: { + [queryId: string]: { + observableQuery: ObservableQuery; + }; + }; // A map going from the name of a query to an observer issued for it by watchQuery. This is // generally used to refetches for refetchQueries and to update mutation results through @@ -198,14 +164,14 @@ export class QueryManager { queryDeduplication = false, ssrMode = false, }: { - networkInterface: NetworkInterface, - store: ApolloStore, - reduxRootSelector: ApolloStateSelector, - fragmentMatcher?: FragmentMatcherInterface, - reducerConfig?: ApolloReducerConfig, - addTypename?: boolean, - queryDeduplication?: boolean, - ssrMode?: boolean, + networkInterface: NetworkInterface; + store: ApolloStore; + reduxRootSelector: ApolloStateSelector; + fragmentMatcher?: FragmentMatcherInterface; + reducerConfig?: ApolloReducerConfig; + addTypename?: boolean; + queryDeduplication?: boolean; + ssrMode?: boolean; }) { // XXX this might be the place to do introspection for inserting the `id` into the query? or // is that the network interface? @@ -247,7 +213,10 @@ export class QueryManager { let previousStoreData = currentStoreData || {}; const previousStoreHasData = Object.keys(previousStoreData).length; currentStoreData = this.getApolloState(); - if (isEqual(previousStoreData, currentStoreData) && previousStoreHasData) { + if ( + isEqual(previousStoreData, currentStoreData) && + previousStoreHasData + ) { return; } this.broadcastQueries(); @@ -268,15 +237,17 @@ export class QueryManager { refetchQueries = [], update: updateWithProxyFn, }: { - mutation: DocumentNode, - variables?: Object, - optimisticResponse?: Object | Function, - updateQueries?: MutationQueryReducersMap, - refetchQueries?: string[] | PureQueryOptions[], - update?: (proxy: DataProxy, mutationResult: Object) => void, + mutation: DocumentNode; + variables?: Object; + optimisticResponse?: Object | Function; + updateQueries?: MutationQueryReducersMap; + refetchQueries?: string[] | PureQueryOptions[]; + update?: (proxy: DataProxy, mutationResult: Object) => void; }): Promise> { if (!mutation) { - throw new Error('mutation option is required. You must specify your GraphQL document in the mutation option.'); + throw new Error( + 'mutation option is required. 
You must specify your GraphQL document in the mutation option.', + ); } const mutationId = this.generateQueryId(); @@ -285,7 +256,11 @@ export class QueryManager { mutation = addTypenameToDocument(mutation); } - variables = assign({}, getDefaultValues(getMutationDefinition(mutation)), variables); + variables = assign( + {}, + getDefaultValues(getMutationDefinition(mutation)), + variables, + ); const mutationString = print(mutation); const request = { @@ -297,16 +272,20 @@ export class QueryManager { this.queryDocuments[mutationId] = mutation; // Create a map of update queries by id to the query instead of by name. - const generateUpdateQueriesInfo: () => { [queryId: string]: QueryWithUpdater } = () => { - const ret: { [queryId: string]: QueryWithUpdater } = {}; + const generateUpdateQueriesInfo: () => { + [queryId: string]: QueryWithUpdater; + } = () => { + const ret: { [queryId: string]: QueryWithUpdater } = {}; if (updateQueriesByName) { - Object.keys(updateQueriesByName).forEach(queryName => (this.queryIdsByName[queryName] || []).forEach(queryId => { - ret[queryId] = { - reducer: updateQueriesByName[queryName], - query: this.queryStore.get(queryId), - }; - })); + Object.keys(updateQueriesByName).forEach(queryName => + (this.queryIdsByName[queryName] || []).forEach(queryId => { + ret[queryId] = { + reducer: updateQueriesByName[queryName], + query: this.queryStore.get(queryId), + }; + }), + ); } return ret; @@ -328,8 +307,9 @@ export class QueryManager { this.mutationStore.initMutation(mutationId, mutationString, variables); return new Promise>((resolve, reject) => { - this.networkInterface.query(request) - .then((result) => { + this.networkInterface + .query(request) + .then(result => { if (result.errors) { const error = new ApolloError({ graphQLErrors: result.errors, @@ -369,9 +349,11 @@ export class QueryManager { } if (typeof refetchQueries[0] === 'string') { - (refetchQueries as string[]).forEach((name) => { this.refetchQueryByName(name); }); + (refetchQueries as string[]).forEach(name => { + this.refetchQueryByName(name); + }); } else { - (refetchQueries as PureQueryOptions[]).forEach( pureQuery => { + (refetchQueries as PureQueryOptions[]).forEach(pureQuery => { this.query({ query: pureQuery.query, variables: pureQuery.variables, @@ -380,11 +362,10 @@ export class QueryManager { }); } - delete this.queryDocuments[mutationId]; resolve(result as ApolloExecutionResult); }) - .catch((err) => { + .catch(err => { this.store.dispatch({ type: 'APOLLO_MUTATION_ERROR', error: err, @@ -392,34 +373,31 @@ export class QueryManager { }); delete this.queryDocuments[mutationId]; - reject(new ApolloError({ - networkError: err, - })); + reject( + new ApolloError({ + networkError: err, + }), + ); }); }); } - public fetchQuery( queryId: string, options: WatchQueryOptions, fetchType?: FetchType, - // This allows us to track if this is a query spawned by a `fetchMore` // call for another query. We need this data to compute the `fetchMore` // network status for the query this is fetching for. fetchMoreForQueryId?: string, ): Promise { - const { variables = {}, metadata = null, fetchPolicy = 'cache-first', // cache-first is the default fetch policy. } = options; - const { - queryDoc, - } = this.transformQueryDocument(options); + const { queryDoc } = this.transformQueryDocument(options); const queryString = print(queryDoc); @@ -429,7 +407,7 @@ export class QueryManager { // If this is not a force fetch, we want to diff the query against the // store before we fetch it from the network interface. 
// TODO we hit the cache even if the policy is network-first. This could be unnecessary if the network is up. - if ( (fetchType !== FetchType.refetch && fetchPolicy !== 'network-only')) { + if (fetchType !== FetchType.refetch && fetchPolicy !== 'network-only') { const { isMissing, result } = diffQueryAgainstStore({ query: queryDoc, store: this.reduxRootSelector(this.store.getState()).data, @@ -445,11 +423,11 @@ export class QueryManager { storeResult = result; } - const shouldFetch = needToFetch && fetchPolicy !== 'cache-only' && fetchPolicy !== 'standby'; + const shouldFetch = + needToFetch && fetchPolicy !== 'cache-only' && fetchPolicy !== 'standby'; const requestId = this.generateRequestId(); - // Initialize query in store with unique requestId this.queryDocuments[queryId] = queryDoc; @@ -491,7 +469,8 @@ export class QueryManager { // If there is no part of the query we need to fetch from the server (or, // fetchPolicy is cache-only), we just write the store result as the final result. - const shouldDispatchClientResult = !shouldFetch || fetchPolicy === 'cache-and-network'; + const shouldDispatchClientResult = + !shouldFetch || fetchPolicy === 'cache-and-network'; if (shouldDispatchClientResult) { this.queryStore.markQueryResultClient(queryId, !shouldFetch); this.broadcastQueries(); @@ -517,7 +496,7 @@ export class QueryManager { document: queryDoc, options, fetchMoreForQueryId, - }).catch( error => { + }).catch(error => { // This is for the benefit of `refetch` promises, which currently don't get their errors // through the store like watchQuery observers do if (isApolloError(error)) { @@ -577,28 +556,37 @@ export class QueryManager { const storedQuery = this.observableQueries[queryId]; - const fetchPolicy = storedQuery ? storedQuery.observableQuery.options.fetchPolicy : options.fetchPolicy; + const fetchPolicy = storedQuery + ? storedQuery.observableQuery.options.fetchPolicy + : options.fetchPolicy; if (fetchPolicy === 'standby') { // don't watch the store for queries on standby return; } - const shouldNotifyIfLoading = queryStoreValue.previousVariables || - fetchPolicy === 'cache-only' || fetchPolicy === 'cache-and-network'; + const shouldNotifyIfLoading = + queryStoreValue.previousVariables || + fetchPolicy === 'cache-only' || + fetchPolicy === 'cache-and-network'; - const networkStatusChanged = lastResult && queryStoreValue.networkStatus !== lastResult.networkStatus; + const networkStatusChanged = + lastResult && + queryStoreValue.networkStatus !== lastResult.networkStatus; - if (!isNetworkRequestInFlight(queryStoreValue.networkStatus) || - ( networkStatusChanged && options.notifyOnNetworkStatusChange ) || - shouldNotifyIfLoading) { + if ( + !isNetworkRequestInFlight(queryStoreValue.networkStatus) || + (networkStatusChanged && options.notifyOnNetworkStatusChange) || + shouldNotifyIfLoading + ) { // XXX Currently, returning errors and data is exclusive because we // don't handle partial results // If we have either a GraphQL error or a network error, we create // an error and tell the observer about it. 
if ( - (queryStoreValue.graphQLErrors && queryStoreValue.graphQLErrors.length > 0) || + (queryStoreValue.graphQLErrors && + queryStoreValue.graphQLErrors.length > 0) || queryStoreValue.networkError ) { const apolloError = new ApolloError({ @@ -611,16 +599,21 @@ export class QueryManager { observer.error(apolloError); } catch (e) { // Throw error outside this control flow to avoid breaking Apollo's state - setTimeout(() => { throw e; }, 0); + setTimeout(() => { + throw e; + }, 0); } } else { // Throw error outside this control flow to avoid breaking Apollo's state - setTimeout(() => { throw apolloError; }, 0); + setTimeout(() => { + throw apolloError; + }, 0); if (!isProduction()) { /* tslint:disable-next-line */ console.info( 'An unhandled error was thrown because no error handler is registered ' + - 'for the query ' + queryStoreValue.queryString, + 'for the query ' + + queryStoreValue.queryString, ); } } @@ -629,7 +622,8 @@ export class QueryManager { const { result: data, isMissing } = diffQueryAgainstStore({ store: this.getDataWithOptimisticResults(), query: this.queryDocuments[queryId], - variables: queryStoreValue.previousVariables || queryStoreValue.variables, + variables: + queryStoreValue.previousVariables || queryStoreValue.variables, config: this.reducerConfig, fragmentMatcherFunction: this.fragmentMatcher.match, previousResult: lastResult && lastResult.data, @@ -643,14 +637,18 @@ export class QueryManager { if (isMissing && fetchPolicy !== 'cache-only') { resultFromStore = { data: lastResult && lastResult.data, - loading: isNetworkRequestInFlight(queryStoreValue.networkStatus), + loading: isNetworkRequestInFlight( + queryStoreValue.networkStatus, + ), networkStatus: queryStoreValue.networkStatus, stale: true, }; } else { resultFromStore = { data, - loading: isNetworkRequestInFlight(queryStoreValue.networkStatus), + loading: isNetworkRequestInFlight( + queryStoreValue.networkStatus, + ), networkStatus: queryStoreValue.networkStatus, stale: false, }; @@ -658,16 +656,15 @@ export class QueryManager { if (observer.next) { const isDifferentResult = !( - lastResult && - resultFromStore && - lastResult.networkStatus === resultFromStore.networkStatus && - lastResult.stale === resultFromStore.stale && - - // We can do a strict equality check here because we include a `previousResult` - // with `readQueryFromStore`. So if the results are the same they will be - // referentially equal. - lastResult.data === resultFromStore.data - ); + lastResult && + resultFromStore && + lastResult.networkStatus === resultFromStore.networkStatus && + lastResult.stale === resultFromStore.stale && + // We can do a strict equality check here because we include a `previousResult` + // with `readQueryFromStore`. So if the results are the same they will be + // referentially equal. 
+ lastResult.data === resultFromStore.data + ); if (isDifferentResult || previouslyHadError) { lastResult = resultFromStore; @@ -675,7 +672,9 @@ export class QueryManager { observer.next(maybeDeepFreeze(resultFromStore)); } catch (e) { // Throw error outside this control flow to avoid breaking Apollo's state - setTimeout(() => { throw e; }, 0); + setTimeout(() => { + throw e; + }, 0); } } } @@ -683,9 +682,11 @@ export class QueryManager { } catch (error) { previouslyHadError = true; if (observer.error) { - observer.error(new ApolloError({ - networkError: error, - })); + observer.error( + new ApolloError({ + networkError: error, + }), + ); } return; } @@ -701,28 +702,42 @@ export class QueryManager { // supposed to be refetched in the event of a store reset. Once we unify error handling for // network errors and non-network errors, the shouldSubscribe option will go away. - public watchQuery(options: WatchQueryOptions, shouldSubscribe = true): ObservableQuery { + public watchQuery( + options: WatchQueryOptions, + shouldSubscribe = true, + ): ObservableQuery { if ((options as any).returnPartialData) { - throw new Error('returnPartialData option is no longer supported since Apollo Client 1.0.'); + throw new Error( + 'returnPartialData option is no longer supported since Apollo Client 1.0.', + ); } if ((options as any).forceFetch) { - throw new Error('forceFetch option is no longer supported since Apollo Client 1.0. Use fetchPolicy instead.'); + throw new Error( + 'forceFetch option is no longer supported since Apollo Client 1.0. Use fetchPolicy instead.', + ); } if ((options as any).noFetch) { - throw new Error('noFetch option is no longer supported since Apollo Client 1.0. Use fetchPolicy instead.'); + throw new Error( + 'noFetch option is no longer supported since Apollo Client 1.0. Use fetchPolicy instead.', + ); } if (options.fetchPolicy === 'standby') { - throw new Error('client.watchQuery cannot be called with fetchPolicy set to "standby"'); + throw new Error( + 'client.watchQuery cannot be called with fetchPolicy set to "standby"', + ); } // get errors synchronously const queryDefinition = getQueryDefinition(options.query); // assign variable default values if supplied - if (queryDefinition.variableDefinitions && queryDefinition.variableDefinitions.length) { + if ( + queryDefinition.variableDefinitions && + queryDefinition.variableDefinitions.length + ) { const defaultValues = getDefaultValues(queryDefinition); options.variables = assign({}, defaultValues, options.variables); @@ -748,7 +763,9 @@ export class QueryManager { public query(options: WatchQueryOptions): Promise> { if (!options.query) { - throw new Error('query option is required. You must specify your GraphQL document in the query option.'); + throw new Error( + 'query option is required. You must specify your GraphQL document in the query option.', + ); } if (options.query.kind !== 'Document') { @@ -764,15 +781,21 @@ export class QueryManager { } if ((options as any).forceFetch) { - throw new Error('forceFetch option is no longer supported since Apollo Client 1.0. Use fetchPolicy instead.'); + throw new Error( + 'forceFetch option is no longer supported since Apollo Client 1.0. Use fetchPolicy instead.', + ); } if ((options as any).noFetch) { - throw new Error('noFetch option is no longer supported since Apollo Client 1.0. Use fetchPolicy instead.'); + throw new Error( + 'noFetch option is no longer supported since Apollo Client 1.0. 
Use fetchPolicy instead.', + ); } - if (typeof options.notifyOnNetworkStatusChange !== 'undefined' ) { - throw new Error('Cannot call "query" with "notifyOnNetworkStatusChange" option. Only "watchQuery" has that option.'); + if (typeof options.notifyOnNetworkStatusChange !== 'undefined') { + throw new Error( + 'Cannot call "query" with "notifyOnNetworkStatusChange" option. Only "watchQuery" has that option.', + ); } options.notifyOnNetworkStatusChange = false; @@ -780,13 +803,16 @@ export class QueryManager { const resPromise = new Promise>((resolve, reject) => { this.addFetchQueryPromise(requestId, resPromise, resolve, reject); - return this.watchQuery(options, false).result().then((result) => { - this.removeFetchQueryPromise(requestId); - resolve(result); - }).catch((error) => { - this.removeFetchQueryPromise(requestId); - reject(error); - }); + return this.watchQuery(options, false) + .result() + .then(result => { + this.removeFetchQueryPromise(requestId); + resolve(result); + }) + .catch(error => { + this.removeFetchQueryPromise(requestId); + reject(error); + }); }); return resPromise; @@ -832,10 +858,17 @@ export class QueryManager { } // Adds a promise to this.fetchQueryPromises for a given request ID. - public addFetchQueryPromise(requestId: number, promise: Promise>, + public addFetchQueryPromise( + requestId: number, + promise: Promise>, resolve: (result: ApolloQueryResult) => void, - reject: (error: Error) => void) { - this.fetchQueryPromises[requestId.toString()] = { promise, resolve, reject }; + reject: (error: Error) => void, + ) { + this.fetchQueryPromises[requestId.toString()] = { + promise, + resolve, + reject, + }; } // Removes the promise in this.fetchQueryPromises for a particular request ID. @@ -844,7 +877,10 @@ export class QueryManager { } // Adds an ObservableQuery to this.observableQueries and to this.observableQueriesByName. - public addObservableQuery(queryId: string, observableQuery: ObservableQuery) { + public addObservableQuery( + queryId: string, + observableQuery: ObservableQuery, + ) { this.observableQueries[queryId] = { observableQuery }; // Insert the ObservableQuery into this.observableQueriesByName if the query has a name @@ -864,7 +900,9 @@ export class QueryManager { const queryName = definition.name ? definition.name.value : null; delete this.observableQueries[queryId]; if (queryName) { - this.queryIdsByName[queryName] = this.queryIdsByName[queryName].filter((val) => { + this.queryIdsByName[queryName] = this.queryIdsByName[ + queryName + ].filter(val => { return !(observableQuery.queryId === val); }); } @@ -877,7 +915,7 @@ export class QueryManager { // in the data portion of the store. So, we cancel the promises and observers // that we have issued so far and not yet resolved (in the case of // queries). - Object.keys(this.fetchQueryPromises).forEach((key) => { + Object.keys(this.fetchQueryPromises).forEach(key => { const { reject } = this.fetchQueryPromises[key]; reject(new Error('Store reset while query was in flight.')); }); @@ -898,26 +936,33 @@ export class QueryManager { // the promise for it will be rejected and its results will not be written to the // store. 
const observableQueryPromises: Promise>[] = []; - Object.keys(this.observableQueries).forEach((queryId) => { + Object.keys(this.observableQueries).forEach(queryId => { const storeQuery = this.queryStore.get(queryId); - const fetchPolicy = this.observableQueries[queryId].observableQuery.options.fetchPolicy; + const fetchPolicy = this.observableQueries[queryId].observableQuery + .options.fetchPolicy; if (fetchPolicy !== 'cache-only' && fetchPolicy !== 'standby') { - observableQueryPromises.push(this.observableQueries[queryId].observableQuery.refetch()); + observableQueryPromises.push( + this.observableQueries[queryId].observableQuery.refetch(), + ); } }); return Promise.all(observableQueryPromises); } - public startQuery(queryId: string, options: WatchQueryOptions, listener: QueryListener) { + public startQuery( + queryId: string, + options: WatchQueryOptions, + listener: QueryListener, + ) { this.addQueryListener(queryId, listener); this.fetchQuery(queryId, options) - // `fetchQuery` returns a Promise. In case of a failure it should be caucht or else the - // console will show an `Uncaught (in promise)` message. Ignore the error for now. - .catch((error: Error) => undefined); + // `fetchQuery` returns a Promise. In case of a failure it should be caucht or else the + // console will show an `Uncaught (in promise)` message. Ignore the error for now. + .catch((error: Error) => undefined); return queryId; } @@ -925,16 +970,18 @@ export class QueryManager { public startGraphQLSubscription( options: SubscriptionOptions, ): Observable { - const { - query, - } = options; + const { query } = options; let transformedDoc = query; // Apply the query transformer if one has been provided. if (this.addTypename) { transformedDoc = addTypenameToDocument(transformedDoc); } - const variables = assign({}, getDefaultValues(getOperationDefinition(query)), options.variables); + const variables = assign( + {}, + getDefaultValues(getOperationDefinition(query)), + options.variables, + ); const request: Request = { query: transformedDoc, @@ -945,7 +992,7 @@ export class QueryManager { let subId: number; let observers: Observer[] = []; - return new Observable((observer) => { + return new Observable(observer => { observers.push(observer); // TODO REFACTOR: the result here is not a normal GraphQL result. @@ -954,7 +1001,7 @@ export class QueryManager { if (observers.length === 1) { const handler = (error: Error, result: any) => { if (error) { - observers.forEach((obs) => { + observers.forEach(obs => { if (obs.error) { obs.error(error); } @@ -970,7 +1017,7 @@ export class QueryManager { extraReducers: this.getExtraReducers(), }); // It's slightly awkward that the data for subscriptions doesn't come from the store. - observers.forEach((obs) => { + observers.forEach(obs => { if (obs.next) { obs.next(result); } @@ -980,17 +1027,22 @@ export class QueryManager { // QueryManager sets up the handler so the query can be transformed. Alternatively, // pass in the transformer to the ObservableQuery. 
- subId = (this.networkInterface as SubscriptionNetworkInterface).subscribe( - request, handler); + subId = (this + .networkInterface as SubscriptionNetworkInterface).subscribe( + request, + handler, + ); } return { unsubscribe: () => { - observers = observers.filter((obs) => obs !== observer); + observers = observers.filter(obs => obs !== observer); // If we removed the last observer, tear down the network subscription if (observers.length === 0) { - (this.networkInterface as SubscriptionNetworkInterface).unsubscribe(subId); + (this.networkInterface as SubscriptionNetworkInterface).unsubscribe( + subId, + ); } }, // Used in tests... @@ -1011,10 +1063,11 @@ export class QueryManager { this.stopQueryInStore(queryId); } - public getCurrentQueryResult(observableQuery: ObservableQuery, isOptimistic = false) { - const { - variables, - document } = this.getQueryParts(observableQuery); + public getCurrentQueryResult( + observableQuery: ObservableQuery, + isOptimistic = false, + ) { + const { variables, document } = this.getQueryParts(observableQuery); const lastResult = observableQuery.getLastResult(); @@ -1023,7 +1076,9 @@ export class QueryManager { // In case of an optimistic change, apply reducer on top of the // results including previous optimistic updates. Otherwise, apply it // on top of the real data only. - store: isOptimistic ? this.getDataWithOptimisticResults() : this.getApolloState().data, + store: isOptimistic + ? this.getDataWithOptimisticResults() + : this.getApolloState().data, query: document, variables, config: this.reducerConfig, @@ -1040,21 +1095,25 @@ export class QueryManager { } } - public getQueryWithPreviousResult(queryIdOrObservable: string | ObservableQuery, isOptimistic = false) { + public getQueryWithPreviousResult( + queryIdOrObservable: string | ObservableQuery, + isOptimistic = false, + ) { let observableQuery: ObservableQuery; if (typeof queryIdOrObservable === 'string') { if (!this.observableQueries[queryIdOrObservable]) { - throw new Error(`ObservableQuery with this id doesn't exist: ${queryIdOrObservable}`); + throw new Error( + `ObservableQuery with this id doesn't exist: ${queryIdOrObservable}`, + ); } - observableQuery = this.observableQueries[queryIdOrObservable].observableQuery; + observableQuery = this.observableQueries[queryIdOrObservable] + .observableQuery; } else { observableQuery = queryIdOrObservable; } - const { - variables, - document } = this.getQueryParts(observableQuery); + const { variables, document } = this.getQueryParts(observableQuery); const { data } = this.getCurrentQueryResult(observableQuery, isOptimistic); @@ -1085,8 +1144,10 @@ export class QueryManager { // Takes a set of WatchQueryOptions and transforms the query document // accordingly. Specifically, it applies the queryTransformer (if there is one defined) - private transformQueryDocument(options: WatchQueryOptions): { - queryDoc: DocumentNode, + private transformQueryDocument( + options: WatchQueryOptions, + ): { + queryDoc: DocumentNode; } { let queryDoc = options.query; @@ -1101,20 +1162,24 @@ export class QueryManager { } private getExtraReducers(): ApolloReducer[] { - return Object.keys(this.observableQueries).map( obsQueryId => { - const query = this.observableQueries[obsQueryId].observableQuery; - const queryOptions = query.options; - - if (queryOptions.reducer) { - return createStoreReducer( - queryOptions.reducer, - this.addTypename ? 
addTypenameToDocument(queryOptions.query) : queryOptions.query, - query.variables || {}, - this.reducerConfig, - ); - } - return null as never; - }).filter( reducer => reducer !== null ); + return Object.keys(this.observableQueries) + .map(obsQueryId => { + const query = this.observableQueries[obsQueryId].observableQuery; + const queryOptions = query.options; + + if (queryOptions.reducer) { + return createStoreReducer( + queryOptions.reducer, + this.addTypename + ? addTypenameToDocument(queryOptions.query) + : queryOptions.query, + query.variables || {}, + this.reducerConfig, + ); + } + return null as never; + }) + .filter(reducer => reducer !== null); } // Takes a request id, query id, a query document and information associated with the query @@ -1127,15 +1192,13 @@ export class QueryManager { options, fetchMoreForQueryId, }: { - requestId: number, - queryId: string, - document: DocumentNode, - options: WatchQueryOptions, - fetchMoreForQueryId?: string, + requestId: number; + queryId: string; + document: DocumentNode; + options: WatchQueryOptions; + fetchMoreForQueryId?: string; }): Promise { - const { - variables, - } = options; + const { variables } = options; const request: Request = { query: document, variables, @@ -1145,7 +1208,8 @@ export class QueryManager { const retPromise = new Promise>((resolve, reject) => { this.addFetchQueryPromise(requestId, retPromise, resolve, reject); - this.deduplicator.query(request, this.queryDeduplication) + this.deduplicator + .query(request, this.queryDeduplication) .then((result: ExecutionResult) => { const extraReducers = this.getExtraReducers(); @@ -1170,7 +1234,11 @@ export class QueryManager { const { reducerError } = this.getApolloState(); if (!reducerError || reducerError.queryId !== queryId) { - this.queryStore.markQueryResult(queryId, result, fetchMoreForQueryId); + this.queryStore.markQueryResult( + queryId, + result, + fetchMoreForQueryId, + ); this.broadcastQueries(); } } @@ -1185,7 +1253,8 @@ export class QueryManager { } return result; - }).then((result) => { + }) + .then(result => { let resultFromStore: any; if (fetchMoreForQueryId) { @@ -1217,9 +1286,15 @@ export class QueryManager { // return a chainable promise this.removeFetchQueryPromise(requestId); - resolve({ data: resultFromStore, loading: false, networkStatus: NetworkStatus.ready, stale: false }); + resolve({ + data: resultFromStore, + loading: false, + networkStatus: NetworkStatus.ready, + stale: false, + }); return Promise.resolve(); - }).catch((error: Error) => { + }) + .catch((error: Error) => { reject(error); }); }); @@ -1233,10 +1308,16 @@ export class QueryManager { const refetchedQueries = this.queryIdsByName[queryName]; // Warn if the query named does not exist (misnamed, or merely not yet fetched) if (refetchedQueries === undefined) { - console.warn(`Warning: unknown query with name ${queryName} asked to refetch`); + console.warn( + `Warning: unknown query with name ${queryName} asked to refetch`, + ); return; } else { - return Promise.all(refetchedQueries.map((queryId) => this.observableQueries[queryId].observableQuery.refetch())); + return Promise.all( + refetchedQueries.map(queryId => + this.observableQueries[queryId].observableQuery.refetch(), + ), + ); } } diff --git a/src/core/types.ts b/src/core/types.ts index f7e1fdd2cca..eb22c0f930e 100644 --- a/src/core/types.ts +++ b/src/core/types.ts @@ -5,8 +5,8 @@ import { NetworkStatus } from '../queries/networkStatus'; export type QueryListener = (queryStoreValue: QueryStoreValue) => void; export type 
PureQueryOptions = { - query: DocumentNode, - variables?: { [key: string]: any}; + query: DocumentNode; + variables?: { [key: string]: any }; }; export type ApolloQueryResult = { diff --git a/src/core/watchQueryOptions.ts b/src/core/watchQueryOptions.ts index 41d2094a8c3..6d06c46c615 100644 --- a/src/core/watchQueryOptions.ts +++ b/src/core/watchQueryOptions.ts @@ -1,21 +1,13 @@ -import { - DocumentNode, - FragmentDefinitionNode, -} from 'graphql'; +import { DocumentNode, FragmentDefinitionNode } from 'graphql'; import { OperationResultReducer, MutationQueryReducersMap, } from '../data/mutationResults'; -import { - DataProxy, -} from '../data/proxy'; +import { DataProxy } from '../data/proxy'; -import { - PureQueryOptions, - ApolloExecutionResult, -} from './types'; +import { PureQueryOptions, ApolloExecutionResult } from './types'; /** * fetchPolicy determines where the client may return a result from. The options are: @@ -26,7 +18,12 @@ import { * - standby: only for queries that aren't actively watched, but should be available for refetch and updateQueries. */ -export type FetchPolicy = 'cache-first' | 'cache-and-network' | 'network-only' | 'cache-only' | 'standby'; +export type FetchPolicy = + | 'cache-first' + | 'cache-and-network' + | 'network-only' + | 'cache-only' + | 'standby'; /** * We can change these options to an ObservableQuery @@ -91,10 +88,13 @@ export interface FetchMoreQueryOptions { export type SubscribeToMoreOptions = { document: DocumentNode; variables?: { [key: string]: any }; - updateQuery?: (previousQueryResult: Object, options: { - subscriptionData: { data: any }, - variables: { [key: string]: any }, - }) => Object; + updateQuery?: ( + previousQueryResult: Object, + options: { + subscriptionData: { data: any }; + variables: { [key: string]: any }; + }, + ) => Object; onError?: (error: Error) => void; }; @@ -103,7 +103,7 @@ export interface SubscriptionOptions { variables?: { [key: string]: any }; } -export interface MutationOptions { +export interface MutationOptions { /** * A GraphQL document, often created with `gql` from the `graphql-tag` * package, that contains a single mutation inside of it. @@ -164,4 +164,7 @@ export interface MutationOptions { } // Add a level of indirection for `typedoc`. 
-export type MutationUpdaterFn = (proxy: DataProxy, mutationResult: ApolloExecutionResult) => void; +export type MutationUpdaterFn = ( + proxy: DataProxy, + mutationResult: ApolloExecutionResult, +) => void; diff --git a/src/data/fragmentMatcher.ts b/src/data/fragmentMatcher.ts index 6c46877cecb..2a3d329b383 100644 --- a/src/data/fragmentMatcher.ts +++ b/src/data/fragmentMatcher.ts @@ -6,43 +6,46 @@ import { IdValue, isIdValue } from './storeUtils'; import { ReadStoreContext } from './readFromStore'; import introspectionQuery from './fragmentMatcherIntrospectionQuery'; -import { - isTest, -} from '../util/environment'; +import { isTest } from '../util/environment'; -import { - warnOnceInDevelopment, -} from '../util/warnOnce'; +import { warnOnceInDevelopment } from '../util/warnOnce'; export interface FragmentMatcherInterface { - match(idValue: IdValue, typeCondition: string, context: ReadStoreContext): boolean; + match( + idValue: IdValue, + typeCondition: string, + context: ReadStoreContext, + ): boolean; } -type PossibleTypesMap = {[key: string]: string[]}; +type PossibleTypesMap = { [key: string]: string[] }; export type IntrospectionResultData = { __schema: { - types: [{ - kind: string, - name: string, - possibleTypes: { - name: string, - }[], - }], - }, + types: [ + { + kind: string; + name: string; + possibleTypes: { + name: string; + }[]; + } + ]; + }; }; export class IntrospectionFragmentMatcher implements FragmentMatcherInterface { - private isReady: boolean; private readyPromise: Promise | null; private possibleTypesMap: PossibleTypesMap; constructor(options?: { - introspectionQueryResultData?: IntrospectionResultData, + introspectionQueryResultData?: IntrospectionResultData; }) { if (options && options.introspectionQueryResultData) { - this.possibleTypesMap = this.parseIntrospectionResult(options.introspectionQueryResultData); + this.possibleTypesMap = this.parseIntrospectionResult( + options.introspectionQueryResultData, + ); this.isReady = true; } else { this.isReady = false; @@ -51,20 +54,30 @@ export class IntrospectionFragmentMatcher implements FragmentMatcherInterface { this.match = this.match.bind(this); } - public match(idValue: IdValue, typeCondition: string, context: ReadStoreContext) { + public match( + idValue: IdValue, + typeCondition: string, + context: ReadStoreContext, + ) { if (!this.isReady) { // this should basically never happen in proper use. - throw new Error('FragmentMatcher.match() was called before FragmentMatcher.init()'); + throw new Error( + 'FragmentMatcher.match() was called before FragmentMatcher.init()', + ); } const obj = context.store[idValue.id]; - if (! 
obj) { + if (!obj) { return false; } if (!obj.__typename) { - throw new Error(`Cannot match fragment because __typename property is missing: ${JSON.stringify(obj)}`); + throw new Error( + `Cannot match fragment because __typename property is missing: ${JSON.stringify( + obj, + )}`, + ); } if (obj.__typename === typeCondition) { @@ -79,18 +92,21 @@ export class IntrospectionFragmentMatcher implements FragmentMatcherInterface { return false; } - private parseIntrospectionResult(introspectionResultData: IntrospectionResultData): PossibleTypesMap { + private parseIntrospectionResult( + introspectionResultData: IntrospectionResultData, + ): PossibleTypesMap { const typeMap: PossibleTypesMap = {}; - introspectionResultData.__schema.types.forEach( type => { + introspectionResultData.__schema.types.forEach(type => { if (type.kind === 'UNION' || type.kind === 'INTERFACE') { - typeMap[type.name] = type.possibleTypes.map( implementingType => implementingType.name ); + typeMap[type.name] = type.possibleTypes.map( + implementingType => implementingType.name, + ); } }); return typeMap; } } - let haveWarned = false; /** @@ -114,22 +130,27 @@ export class HeuristicFragmentMatcher implements FragmentMatcherInterface { typeCondition: string, context: ReadStoreContext, ): boolean { - const obj = context.store[idValue.id]; - if (! obj) { + if (!obj) { return false; } - if (! obj.__typename) { - if (! haveWarned) { + if (!obj.__typename) { + if (!haveWarned) { console.warn(`You're using fragments in your queries, but either don't have the addTypename: true option set in Apollo Client, or you are trying to write a fragment to the store without the __typename. Please turn on the addTypename option and include __typename when writing fragments so that Apollo Client can accurately match fragments.`); - console.warn('Could not find __typename on Fragment ', typeCondition, obj); - console.warn(`DEPRECATION WARNING: using fragments without __typename is unsupported behavior ` + - `and will be removed in future versions of Apollo client. You should fix this and set addTypename to true now.`); + console.warn( + 'Could not find __typename on Fragment ', + typeCondition, + obj, + ); + console.warn( + `DEPRECATION WARNING: using fragments without __typename is unsupported behavior ` + + `and will be removed in future versions of Apollo client. You should fix this and set addTypename to true now.`, + ); /* istanbul ignore if */ if (!isTest()) { @@ -151,10 +172,13 @@ export class HeuristicFragmentMatcher implements FragmentMatcherInterface { // 2. A fragment on a matching interface or union // If it's 1, we don't want to return anything, if it's 2 we want to match. We can't tell the // difference, so we warn the user, but still try to match it (backcompat). - warnOnceInDevelopment(`You are using the simple (heuristic) fragment matcher, but your queries contain union or interface types. + warnOnceInDevelopment( + `You are using the simple (heuristic) fragment matcher, but your queries contain union or interface types. 
Apollo Client will not be able to able to accurately map fragments.` + - `To make this error go away, use the IntrospectionFragmentMatcher as described in the docs: ` + - `http://dev.apollodata.com/react/initialization.html#fragment-matcher`, 'error'); + `To make this error go away, use the IntrospectionFragmentMatcher as described in the docs: ` + + `http://dev.apollodata.com/react/initialization.html#fragment-matcher`, + 'error', + ); context.returnPartialData = true; return true; diff --git a/src/data/mutationResults.ts b/src/data/mutationResults.ts index 4619ed164d4..8837848e87b 100644 --- a/src/data/mutationResults.ts +++ b/src/data/mutationResults.ts @@ -1,23 +1,26 @@ -import { - ApolloAction, -} from '../actions'; +import { ApolloAction } from '../actions'; -import { - ApolloExecutionResult, -} from '../core/types'; +import { ApolloExecutionResult } from '../core/types'; // This is part of the public API, people write these functions in `updateQueries`. -export type MutationQueryReducer = (previousResult: Object, options: { - mutationResult: ApolloExecutionResult, - queryName: string | null, - queryVariables: Object, -}) => Object; +export type MutationQueryReducer = ( + previousResult: Object, + options: { + mutationResult: ApolloExecutionResult; + queryName: string | null; + queryVariables: Object; + }, +) => Object; -export type MutationQueryReducersMap = { +export type MutationQueryReducersMap = { [queryName: string]: MutationQueryReducer; }; -export type OperationResultReducer = (previousResult: Object, action: ApolloAction, variables: Object) => Object; +export type OperationResultReducer = ( + previousResult: Object, + action: ApolloAction, + variables: Object, +) => Object; export type OperationResultReducerMap = { [queryId: string]: OperationResultReducer; diff --git a/src/data/proxy.ts b/src/data/proxy.ts index 818868f9614..bde100eda21 100644 --- a/src/data/proxy.ts +++ b/src/data/proxy.ts @@ -3,7 +3,10 @@ import { ApolloStore, Store, ApolloReducerConfig } from '../store'; import { DataWrite } from '../actions'; import { IdGetter } from '../core/types'; import { NormalizedCache } from '../data/storeUtils'; -import {getFragmentQueryDocument, getOperationName} from '../queries/getFromAST'; +import { + getFragmentQueryDocument, + getOperationName, +} from '../queries/getFromAST'; import { getDataWithOptimisticResults } from '../optimistic-data/store'; import { readQueryFromStore } from './readFromStore'; import { writeResultToStore } from './writeToStore'; @@ -122,7 +125,9 @@ export interface DataProxy { * one fragments in the provided document then a `fragmentName` must be * provided to select the correct fragment. */ - readFragment(options: DataProxyReadFragmentOptions): FragmentType | null; + readFragment( + options: DataProxyReadFragmentOptions, + ): FragmentType | null; /** * Writes a GraphQL query to the root query id. 
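As an aside on the DataProxy signatures reflowed in the hunk above, here is a minimal sketch of how readFragment and writeFragment are typically called. The Todo fragment, the store id 'Todo:5', and the import path are hypothetical and are not part of this patch; they only illustrate the call shape of the interface being reformatted.

import gql from 'graphql-tag';
import { DataProxy } from '../src/data/proxy';

// Hypothetical fragment, used only to illustrate the call shape.
const todoFragment = gql`
  fragment todoText on Todo {
    text
  }
`;

// `proxy` could be, for example, the DataProxy handed to a mutation's `update` callback.
function upperCaseTodoText(proxy: DataProxy) {
  // Read one normalized object out of the store by its id.
  const todo = proxy.readFragment<{ text: string }>({
    id: 'Todo:5',
    fragment: todoFragment,
  });

  if (todo) {
    // Write a modified copy back under the same id.
    proxy.writeFragment({
      id: 'Todo:5',
      fragment: todoFragment,
      data: { __typename: 'Todo', text: todo.text.toUpperCase() },
    });
  }
}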
@@ -179,14 +184,15 @@ export class ReduxDataProxy implements DataProxy { query, variables, }: DataProxyReadQueryOptions): QueryType { - if (this.reducerConfig.addTypename) { query = addTypenameToDocument(query); } return readQueryFromStore({ rootId: 'ROOT_QUERY', - store: getDataWithOptimisticResults(this.reduxRootSelector(this.store.getState())), + store: getDataWithOptimisticResults( + this.reduxRootSelector(this.store.getState()), + ), query, variables, fragmentMatcherFunction: this.fragmentMatcher.match, @@ -204,7 +210,9 @@ export class ReduxDataProxy implements DataProxy { variables, }: DataProxyReadFragmentOptions): FragmentType | null { let query = getFragmentQueryDocument(fragment, fragmentName); - const data = getDataWithOptimisticResults(this.reduxRootSelector(this.store.getState())); + const data = getDataWithOptimisticResults( + this.reduxRootSelector(this.store.getState()), + ); // If we could not find an item in the store with the provided id then we // just return `null`. @@ -234,20 +242,21 @@ export class ReduxDataProxy implements DataProxy { query, variables, }: DataProxyWriteQueryOptions): void { - if (this.reducerConfig.addTypename) { query = addTypenameToDocument(query); } this.store.dispatch({ type: 'APOLLO_WRITE', - writes: [{ - rootId: 'ROOT_QUERY', - result: data, - document: query, - operationName: getOperationName(query), - variables: variables || {}, - }], + writes: [ + { + rootId: 'ROOT_QUERY', + result: data, + document: query, + operationName: getOperationName(query), + variables: variables || {}, + }, + ], }); } @@ -261,7 +270,6 @@ export class ReduxDataProxy implements DataProxy { fragmentName, variables, }: DataProxyWriteFragmentOptions): void { - let document = getFragmentQueryDocument(fragment, fragmentName); if (this.reducerConfig.addTypename) { @@ -270,13 +278,15 @@ export class ReduxDataProxy implements DataProxy { this.store.dispatch({ type: 'APOLLO_WRITE', - writes: [{ - rootId: id, - result: data, - document, - operationName: getOperationName(document), - variables: variables || {}, - }], + writes: [ + { + rootId: id, + result: data, + document, + operationName: getOperationName(document), + variables: variables || {}, + }, + ], }); } } @@ -341,7 +351,6 @@ export class TransactionDataProxy implements DataProxy { }: DataProxyReadQueryOptions): QueryType { this.assertNotFinished(); - if (this.reducerConfig.addTypename) { query = addTypenameToDocument(query); } @@ -370,7 +379,9 @@ export class TransactionDataProxy implements DataProxy { this.assertNotFinished(); if (!fragment) { - throw new Error('fragment option is required. Please pass a GraphQL fragment to readFragment.'); + throw new Error( + 'fragment option is required. Please pass a GraphQL fragment to readFragment.', + ); } const { data } = this; @@ -436,7 +447,9 @@ export class TransactionDataProxy implements DataProxy { this.assertNotFinished(); if (!fragment) { - throw new Error('fragment option is required. Please pass a GraphQL fragment to writeFragment.'); + throw new Error( + 'fragment option is required. 
Please pass a GraphQL fragment to writeFragment.', + ); } let query = getFragmentQueryDocument(fragment, fragmentName); @@ -460,7 +473,9 @@ export class TransactionDataProxy implements DataProxy { */ private assertNotFinished() { if (this.isFinished) { - throw new Error('Cannot call transaction methods after the transaction has finished.'); + throw new Error( + 'Cannot call transaction methods after the transaction has finished.', + ); } } diff --git a/src/data/readFromStore.ts b/src/data/readFromStore.ts index 54934ccd418..4f999ca1d9d 100644 --- a/src/data/readFromStore.ts +++ b/src/data/readFromStore.ts @@ -1,6 +1,4 @@ -import { - DocumentNode, -} from 'graphql'; +import { DocumentNode } from 'graphql'; import graphqlAnywhere, { Resolver, @@ -8,37 +6,19 @@ import graphqlAnywhere, { ExecInfo, } from 'graphql-anywhere'; -import { - NormalizedCache, - isJsonValue, - isIdValue, - IdValue, -} from './storeUtils'; +import { NormalizedCache, isJsonValue, isIdValue, IdValue } from './storeUtils'; -import { - getStoreKeyName, -} from './storeUtils'; +import { getStoreKeyName } from './storeUtils'; -import { - getDefaultValues, - getQueryDefinition, -} from '../queries/getFromAST'; +import { getDefaultValues, getQueryDefinition } from '../queries/getFromAST'; -import { - ApolloReducerConfig, -} from '../store'; +import { ApolloReducerConfig } from '../store'; -import { - isEqual, -} from '../util/isEqual'; +import { isEqual } from '../util/isEqual'; -import { - assign, -} from '../util/assign'; +import { assign } from '../util/assign'; -import { - isTest, -} from '../util/environment'; +import { isTest } from '../util/environment'; /** * The key which the cache id for a given value is stored in the result object. This key is private @@ -56,25 +36,28 @@ export type DiffResult = { }; export type ReadQueryOptions = { - store: NormalizedCache, - query: DocumentNode, - fragmentMatcherFunction?: FragmentMatcher, // TODO make this required to prevent bugs - variables?: Object, - previousResult?: any, - rootId?: string, - config?: ApolloReducerConfig, + store: NormalizedCache; + query: DocumentNode; + fragmentMatcherFunction?: FragmentMatcher; // TODO make this required to prevent bugs + variables?: Object; + previousResult?: any; + rootId?: string; + config?: ApolloReducerConfig; }; export type DiffQueryAgainstStoreOptions = ReadQueryOptions & { - returnPartialData?: boolean, + returnPartialData?: boolean; }; -export type CustomResolver = (rootValue: any, args: { [argName: string]: any }) => any; +export type CustomResolver = ( + rootValue: any, + args: { [argName: string]: any }, +) => any; export type CustomResolverMap = { [typeName: string]: { - [fieldName: string]: CustomResolver, - }, + [fieldName: string]: CustomResolver; + }; }; /** @@ -111,12 +94,14 @@ interface IdValueWithPreviousResult extends IdValue { * If nothing in the store changed since that previous result then values from the previous result * will be returned to preserve referential equality. */ -export function readQueryFromStore(options: ReadQueryOptions): QueryType { +export function readQueryFromStore( + options: ReadQueryOptions, +): QueryType { const optsPatch = { returnPartialData: false }; return diffQueryAgainstStore({ - ... options, - ... 
optsPatch, + ...options, + ...optsPatch, }).result; } @@ -142,7 +127,11 @@ const readStoreResolver: Resolver = ( let fieldValue = (obj || {})[storeKeyName]; if (typeof fieldValue === 'undefined') { - if (context.customResolvers && obj && (obj.__typename || objId === 'ROOT_QUERY')) { + if ( + context.customResolvers && + obj && + (obj.__typename || objId === 'ROOT_QUERY') + ) { const typename = obj.__typename || 'Query'; // Look for the type in the custom resolver map @@ -156,8 +145,14 @@ const readStoreResolver: Resolver = ( } } - if (! context.returnPartialData) { - throw new Error(`Can't find field ${storeKeyName} on object (${objId}) ${JSON.stringify(obj, null, 2)}.`); + if (!context.returnPartialData) { + throw new Error( + `Can't find field ${storeKeyName} on object (${objId}) ${JSON.stringify( + obj, + null, + 2, + )}.`, + ); } context.hasMissingField = true; @@ -173,7 +168,10 @@ const readStoreResolver: Resolver = ( // `isEqual` will first perform a referential equality check (with `===`) in case the JSON // value has not changed in the store, and then a deep equality check if that fails in case a // new JSON object was returned by the API but that object may still be the same. - if (idValue.previousResult && isEqual(idValue.previousResult[resultKey], fieldValue.json)) { + if ( + idValue.previousResult && + isEqual(idValue.previousResult[resultKey], fieldValue.json) + ) { return idValue.previousResult[resultKey]; } return fieldValue.json; @@ -182,7 +180,10 @@ const readStoreResolver: Resolver = ( // If we had a previous result, try adding that previous result value for this field to our field // value. This will create a new value without mutating the old one. if (idValue.previousResult) { - fieldValue = addPreviousResultToIdValues(fieldValue, idValue.previousResult[resultKey]); + fieldValue = addPreviousResultToIdValues( + fieldValue, + idValue.previousResult[resultKey], + ); } return fieldValue; @@ -227,10 +228,17 @@ export function diffQueryAgainstStore({ previousResult, }; - const result = graphqlAnywhere(readStoreResolver, query, rootIdValue, context, variables, { - fragmentMatcher: fragmentMatcherFunction, - resultMapper, - }); + const result = graphqlAnywhere( + readStoreResolver, + query, + rootIdValue, + context, + variables, + { + fragmentMatcher: fragmentMatcherFunction, + resultMapper, + }, + ); return { result, @@ -239,7 +247,7 @@ export function diffQueryAgainstStore({ } export function assertIdValue(idValue: IdValue) { - if (! isIdValue(idValue)) { + if (!isIdValue(idValue)) { throw new Error(`Encountered a sub-selection on the query, but the store doesn't have \ an object reference. This should never happen during normal use unless you have custom code \ that is directly manipulating the store; please file an issue.`); @@ -257,7 +265,7 @@ that is directly manipulating the store; please file an issue.`); * * @private */ -function addPreviousResultToIdValues (value: any, previousResult: any): any { +function addPreviousResultToIdValues(value: any, previousResult: any): any { // If the value is an `IdValue`, add the previous result to it whether or not that // `previousResult` is undefined. 
// @@ -311,24 +319,31 @@ function addPreviousResultToIdValues (value: any, previousResult: any): any { * * @private */ -function resultMapper (resultFields: any, idValue: IdValueWithPreviousResult) { +function resultMapper(resultFields: any, idValue: IdValueWithPreviousResult) { // If we had a previous result, we may be able to return that and preserve referential equality if (idValue.previousResult) { const currentResultKeys = Object.keys(resultFields); const sameAsPreviousResult = // Confirm that we have the same keys in both the current result and the previous result. - Object.keys(idValue.previousResult) - .reduce((sameKeys, key) => sameKeys && currentResultKeys.indexOf(key) > -1, true) && - + Object.keys(idValue.previousResult).reduce( + (sameKeys, key) => sameKeys && currentResultKeys.indexOf(key) > -1, + true, + ) && // Perform a shallow comparison of the result fields with the previous result. If all of // the shallow fields are referentially equal to the fields of the previous result we can // just return the previous result. // // While we do a shallow comparison of objects, but we do a deep comparison of arrays. - currentResultKeys.reduce((same, key) => ( - same && areNestedArrayItemsStrictlyEqual(resultFields[key], idValue.previousResult[key]) - ), true); + currentResultKeys.reduce( + (same, key) => + same && + areNestedArrayItemsStrictlyEqual( + resultFields[key], + idValue.previousResult[key], + ), + true, + ); if (sameAsPreviousResult) { return idValue.previousResult; @@ -355,7 +370,10 @@ type NestedArray = T | Array>>; * * @private */ -function areNestedArrayItemsStrictlyEqual (a: NestedArray, b: NestedArray): boolean { +function areNestedArrayItemsStrictlyEqual( + a: NestedArray, + b: NestedArray, +): boolean { // If `a` and `b` are referentially equal, return true. 
if (a === b) { return true; @@ -367,5 +385,8 @@ function areNestedArrayItemsStrictlyEqual (a: NestedArray, b: NestedArray same && areNestedArrayItemsStrictlyEqual(item, b[i]), true); + return a.reduce( + (same, item, i) => same && areNestedArrayItemsStrictlyEqual(item, b[i]), + true, + ); } diff --git a/src/data/replaceQueryResults.ts b/src/data/replaceQueryResults.ts index 257d2061c83..acbdd96a9cf 100644 --- a/src/data/replaceQueryResults.ts +++ b/src/data/replaceQueryResults.ts @@ -1,28 +1,24 @@ -import { - NormalizedCache, -} from './storeUtils'; +import { NormalizedCache } from './storeUtils'; -import { - writeResultToStore, -} from './writeToStore'; +import { writeResultToStore } from './writeToStore'; -import { - ApolloReducerConfig, -} from '../store'; +import { ApolloReducerConfig } from '../store'; -import { - DocumentNode, -} from 'graphql'; +import { DocumentNode } from 'graphql'; -export function replaceQueryResults(state: NormalizedCache, { - variables, - document, - newResult, -}: { - variables: any; - document: DocumentNode; - newResult: Object; -}, config: ApolloReducerConfig) { +export function replaceQueryResults( + state: NormalizedCache, + { + variables, + document, + newResult, + }: { + variables: any; + document: DocumentNode; + newResult: Object; + }, + config: ApolloReducerConfig, +) { const clonedState = { ...state } as NormalizedCache; return writeResultToStore({ diff --git a/src/data/resultReducers.ts b/src/data/resultReducers.ts index 82cc4ccbab1..10bd4844fd5 100644 --- a/src/data/resultReducers.ts +++ b/src/data/resultReducers.ts @@ -1,31 +1,16 @@ -import { - DocumentNode, -} from 'graphql'; +import { DocumentNode } from 'graphql'; -import { - diffQueryAgainstStore, -} from './readFromStore'; +import { diffQueryAgainstStore } from './readFromStore'; -import { - writeResultToStore, -} from './writeToStore'; +import { writeResultToStore } from './writeToStore'; -import { - NormalizedCache, -} from './storeUtils'; +import { NormalizedCache } from './storeUtils'; -import { - ApolloReducer, - ApolloReducerConfig, -} from '../store'; +import { ApolloReducer, ApolloReducerConfig } from '../store'; -import { - ApolloAction, -} from '../actions'; +import { ApolloAction } from '../actions'; -import { - OperationResultReducer, -} from './mutationResults'; +import { OperationResultReducer } from './mutationResults'; /** * This function takes a result reducer and all other necessary information to obtain a proper @@ -38,9 +23,7 @@ export function createStoreReducer( variables: Object, config: ApolloReducerConfig, ): ApolloReducer { - return (store: NormalizedCache, action: ApolloAction) => { - const { result, isMissing } = diffQueryAgainstStore({ store, query: document, diff --git a/src/data/store.ts b/src/data/store.ts index 5ac1f7479d8..7aa2fe1db3d 100644 --- a/src/data/store.ts +++ b/src/data/store.ts @@ -8,47 +8,25 @@ import { isWriteAction, } from '../actions'; -import { - writeResultToStore, -} from './writeToStore'; +import { writeResultToStore } from './writeToStore'; -import { - TransactionDataProxy, -} from '../data/proxy'; +import { TransactionDataProxy } from '../data/proxy'; -import { - QueryStore, -} from '../queries/store'; +import { QueryStore } from '../queries/store'; -import { - getOperationName, -} from '../queries/getFromAST'; +import { getOperationName } from '../queries/getFromAST'; -import { - MutationStore, -} from '../mutations/store'; +import { MutationStore } from '../mutations/store'; -import { - ApolloReducerConfig, -} from '../store'; +import 
{ ApolloReducerConfig } from '../store'; -import { - graphQLResultHasError, - NormalizedCache, -} from './storeUtils'; +import { graphQLResultHasError, NormalizedCache } from './storeUtils'; -import { - replaceQueryResults, -} from './replaceQueryResults'; +import { replaceQueryResults } from './replaceQueryResults'; -import { - readQueryFromStore, - diffQueryAgainstStore, -} from './readFromStore'; +import { readQueryFromStore, diffQueryAgainstStore } from './readFromStore'; -import { - tryFunctionOrLogError, -} from '../util/errorHandling'; +import { tryFunctionOrLogError } from '../util/errorHandling'; export function data( previousState: NormalizedCache = {}, @@ -61,7 +39,7 @@ export function data( if (isQueryResultAction(action)) { // XXX handle partial result due to errors - if (! graphQLResultHasError(action.result)) { + if (!graphQLResultHasError(action.result)) { // XXX use immutablejs instead of cloning const clonedState = { ...previousState } as NormalizedCache; @@ -80,7 +58,7 @@ export function data( // XXX each reducer gets the state from the previous reducer. // Maybe they should all get a clone instead and then compare at the end to make sure it's consistent. if (action.extraReducers) { - action.extraReducers.forEach( reducer => { + action.extraReducers.forEach(reducer => { newState = reducer(newState, constAction); }); } @@ -90,8 +68,7 @@ export function data( } else if (isSubscriptionResultAction(action)) { // the subscription interface should handle not sending us results we no longer subscribe to. // XXX I don't think we ever send in an object with errors, but we might in the future... - if (! graphQLResultHasError(action.result)) { - + if (!graphQLResultHasError(action.result)) { // XXX use immutablejs instead of cloning const clonedState = { ...previousState } as NormalizedCache; @@ -110,7 +87,7 @@ export function data( // XXX each reducer gets the state from the previous reducer. // Maybe they should all get a clone instead and then compare at the end to make sure it's consistent. if (action.extraReducers) { - action.extraReducers.forEach( reducer => { + action.extraReducers.forEach(reducer => { newState = reducer(newState, constAction); }); } @@ -136,43 +113,50 @@ export function data( // If this action wants us to update certain queries. Let’s do it! const { updateQueries } = constAction; if (updateQueries) { - Object.keys(updateQueries).filter(id => updateQueries[id]).forEach(queryId => { - const { query, reducer } = updateQueries[queryId]; - - // Read the current query result from the store. - const { result: currentQueryResult, isMissing } = diffQueryAgainstStore({ - store: previousState, - query: query.document, - variables: query.variables, - returnPartialData: true, - fragmentMatcherFunction: config.fragmentMatcher, - config, - }); - - if (isMissing) { - return; - } - - // Run our reducer using the current query result and the mutation result. - const nextQueryResult = tryFunctionOrLogError(() => reducer(currentQueryResult, { - mutationResult: constAction.result, - queryName: getOperationName(query.document), - queryVariables: query.variables, - })); - - // Write the modified result back into the store if we got a new result. - if (nextQueryResult) { - newState = writeResultToStore({ - result: nextQueryResult, - dataId: 'ROOT_QUERY', - document: query.document, + Object.keys(updateQueries) + .filter(id => updateQueries[id]) + .forEach(queryId => { + const { query, reducer } = updateQueries[queryId]; + + // Read the current query result from the store. 
+ const { + result: currentQueryResult, + isMissing, + } = diffQueryAgainstStore({ + store: previousState, + query: query.document, variables: query.variables, - store: newState, - dataIdFromObject: config.dataIdFromObject, + returnPartialData: true, fragmentMatcherFunction: config.fragmentMatcher, + config, }); - } - }); + + if (isMissing) { + return; + } + + // Run our reducer using the current query result and the mutation result. + const nextQueryResult = tryFunctionOrLogError(() => + reducer(currentQueryResult, { + mutationResult: constAction.result, + queryName: getOperationName(query.document), + queryVariables: query.variables, + }), + ); + + // Write the modified result back into the store if we got a new result. + if (nextQueryResult) { + newState = writeResultToStore({ + result: nextQueryResult, + dataId: 'ROOT_QUERY', + document: query.document, + variables: query.variables, + store: newState, + dataIdFromObject: config.dataIdFromObject, + fragmentMatcherFunction: config.fragmentMatcher, + }); + } + }); } // If the mutation has some writes associated with it then we need to @@ -180,23 +164,16 @@ export function data( // write action. if (constAction.update) { const update = constAction.update; - const proxy = new TransactionDataProxy( - newState, - config, - ); + const proxy = new TransactionDataProxy(newState, config); tryFunctionOrLogError(() => update(proxy, constAction.result)); const writes = proxy.finish(); - newState = data( - newState, - { type: 'APOLLO_WRITE', writes }, - config, - ); + newState = data(newState, { type: 'APOLLO_WRITE', writes }, config); } // XXX each reducer gets the state from the previous reducer. // Maybe they should all get a clone instead and then compare at the end to make sure it's consistent. if (constAction.extraReducers) { - constAction.extraReducers.forEach( reducer => { + constAction.extraReducers.forEach(reducer => { newState = reducer(newState, constAction); }); } @@ -204,7 +181,11 @@ export function data( return newState; } } else if (isUpdateQueryResultAction(constAction)) { - return replaceQueryResults(previousState, constAction, config) as NormalizedCache; + return replaceQueryResults( + previousState, + constAction, + config, + ) as NormalizedCache; } else if (isStoreResetAction(action)) { // If we are resetting the store, we no longer need any of the data that is currently in // the store so we can just throw it all away. @@ -213,15 +194,16 @@ export function data( // Simply write our result to the store for this action for all of the // writes that were specified. 
return action.writes.reduce( - (currentState, write) => writeResultToStore({ - result: write.result, - dataId: write.rootId, - document: write.document, - variables: write.variables, - store: currentState, - dataIdFromObject: config.dataIdFromObject, - fragmentMatcherFunction: config.fragmentMatcher, - }), + (currentState, write) => + writeResultToStore({ + result: write.result, + dataId: write.rootId, + document: write.document, + variables: write.variables, + store: currentState, + dataIdFromObject: config.dataIdFromObject, + fragmentMatcherFunction: config.fragmentMatcher, + }), { ...previousState } as NormalizedCache, ); } diff --git a/src/data/storeUtils.ts b/src/data/storeUtils.ts index c3e2b4fb134..6cec191449d 100644 --- a/src/data/storeUtils.ts +++ b/src/data/storeUtils.ts @@ -47,42 +47,64 @@ function isEnumValue(value: ValueNode): value is EnumValueNode { return value.kind === 'EnumValue'; } -export function valueToObjectRepresentation(argObj: any, name: NameNode, value: ValueNode, variables?: Object) { +export function valueToObjectRepresentation( + argObj: any, + name: NameNode, + value: ValueNode, + variables?: Object, +) { if (isIntValue(value) || isFloatValue(value)) { argObj[name.value] = Number(value.value); } else if (isBooleanValue(value) || isStringValue(value)) { argObj[name.value] = value.value; } else if (isObjectValue(value)) { const nestedArgObj = {}; - value.fields.map((obj) => valueToObjectRepresentation(nestedArgObj, obj.name, obj.value, variables)); + value.fields.map(obj => + valueToObjectRepresentation(nestedArgObj, obj.name, obj.value, variables), + ); argObj[name.value] = nestedArgObj; } else if (isVariable(value)) { - const variableValue = (variables || {} as any)[value.name.value]; + const variableValue = (variables || ({} as any))[value.name.value]; argObj[name.value] = variableValue; } else if (isListValue(value)) { - argObj[name.value] = value.values.map((listValue) => { + argObj[name.value] = value.values.map(listValue => { const nestedArgArrayObj = {}; - valueToObjectRepresentation(nestedArgArrayObj, name, listValue, variables); + valueToObjectRepresentation( + nestedArgArrayObj, + name, + listValue, + variables, + ); return (nestedArgArrayObj as any)[name.value]; }); } else if (isEnumValue(value)) { argObj[name.value] = (value as EnumValueNode).value; } else { - throw new Error(`The inline argument "${name.value}" of kind "${(value as any).kind}" is not supported. + throw new Error(`The inline argument "${name.value}" of kind "${(value as any) + .kind}" is not supported. 
Use variables instead of inline arguments to overcome this limitation.`); } } -export function storeKeyNameFromField(field: FieldNode, variables?: Object): string { +export function storeKeyNameFromField( + field: FieldNode, + variables?: Object, +): string { let directivesObj: any = null; if (field.directives) { directivesObj = {}; - field.directives.forEach((directive) => { + field.directives.forEach(directive => { directivesObj[directive.name.value] = {}; if (directive.arguments) { - directive.arguments.forEach((({name, value}) => valueToObjectRepresentation( - directivesObj[directive.name.value], name, value, variables))); + directive.arguments.forEach(({ name, value }) => + valueToObjectRepresentation( + directivesObj[directive.name.value], + name, + value, + variables, + ), + ); } }); } @@ -90,32 +112,48 @@ export function storeKeyNameFromField(field: FieldNode, variables?: Object): str let argObj: any = null; if (field.arguments && field.arguments.length) { argObj = {}; - field.arguments.forEach(({name, value}) => valueToObjectRepresentation( - argObj, name, value, variables)); + field.arguments.forEach(({ name, value }) => + valueToObjectRepresentation(argObj, name, value, variables), + ); } return getStoreKeyName(field.name.value, argObj, directivesObj); } export type Directives = { - [directiveName: string]: { - [argName: string]: any; - }; + [directiveName: string]: { + [argName: string]: any; + }; }; -export function getStoreKeyName(fieldName: string, args?: Object, directives?: Directives): string { - if (directives && directives['connection'] && directives['connection']['key']) { - if (directives['connection']['filter'] && (directives['connection']['filter'] as string[]).length > 0) { - const filterKeys = directives['connection']['filter'] ? (directives['connection']['filter'] as string[]) : []; +export function getStoreKeyName( + fieldName: string, + args?: Object, + directives?: Directives, +): string { + if ( + directives && + directives['connection'] && + directives['connection']['key'] + ) { + if ( + directives['connection']['filter'] && + (directives['connection']['filter'] as string[]).length > 0 + ) { + const filterKeys = directives['connection']['filter'] + ? directives['connection']['filter'] as string[] + : []; filterKeys.sort(); - const queryArgs = args as {[key: string]: any}; - const filteredArgs = {} as {[key: string]: any}; - filterKeys.forEach((key) => { + const queryArgs = args as { [key: string]: any }; + const filteredArgs = {} as { [key: string]: any }; + filterKeys.forEach(key => { filteredArgs[key] = queryArgs[key]; }); - return `${directives['connection']['key']}(${JSON.stringify(filteredArgs)})`; + return `${directives['connection']['key']}(${JSON.stringify( + filteredArgs, + )})`; } else { return directives['connection']['key']; } @@ -131,16 +169,16 @@ export function getStoreKeyName(fieldName: string, args?: Object, directives?: D } export function resultKeyNameFromField(field: FieldNode): string { - return field.alias ? - field.alias.value : - field.name.value; + return field.alias ? 
field.alias.value : field.name.value; } export function isField(selection: SelectionNode): selection is FieldNode { return selection.kind === 'Field'; } -export function isInlineFragment(selection: SelectionNode): selection is InlineFragmentNode { +export function isInlineFragment( + selection: SelectionNode, +): selection is InlineFragmentNode { return selection.kind === 'InlineFragment'; } @@ -174,13 +212,23 @@ export interface JsonValue { export type ListValue = Array; -export type StoreValue = number | string | string[] | IdValue | ListValue | JsonValue | null | undefined | void | Object; +export type StoreValue = + | number + | string + | string[] + | IdValue + | ListValue + | JsonValue + | null + | undefined + | void + | Object; export function isIdValue(idObject: StoreValue): idObject is IdValue { return ( idObject != null && typeof idObject === 'object' && - (idObject as (IdValue | JsonValue)).type === 'id' + (idObject as IdValue | JsonValue).type === 'id' ); } @@ -196,6 +244,6 @@ export function isJsonValue(jsonObject: StoreValue): jsonObject is JsonValue { return ( jsonObject != null && typeof jsonObject === 'object' && - (jsonObject as (IdValue | JsonValue)).type === 'json' + (jsonObject as IdValue | JsonValue).type === 'json' ); } diff --git a/src/data/writeToStore.ts b/src/data/writeToStore.ts index 2570444e991..e3e4fac328c 100644 --- a/src/data/writeToStore.ts +++ b/src/data/writeToStore.ts @@ -1,4 +1,3 @@ - import { getDefaultValues, getOperationDefinition, @@ -15,9 +14,7 @@ import { isInlineFragment, } from './storeUtils'; -import { - ReadStoreContext, -} from '../data/readFromStore'; +import { ReadStoreContext } from '../data/readFromStore'; import { OperationDefinitionNode, @@ -28,32 +25,17 @@ import { FragmentDefinitionNode, } from 'graphql'; -import { - FragmentMatcher, -} from 'graphql-anywhere'; +import { FragmentMatcher } from 'graphql-anywhere'; -import { - NormalizedCache, - StoreObject, - IdValue, - isIdValue, -} from './storeUtils'; +import { NormalizedCache, StoreObject, IdValue, isIdValue } from './storeUtils'; -import { - IdGetter, -} from '../core/types'; +import { IdGetter } from '../core/types'; -import { - shouldInclude, -} from '../queries/directives'; +import { shouldInclude } from '../queries/directives'; -import { - isProduction, -} from '../util/environment'; +import { isProduction } from '../util/environment'; -import { - assign, -} from '../util/assign'; +import { assign } from '../util/assign'; class WriteError extends Error { public type = 'WriteError'; @@ -61,9 +43,11 @@ class WriteError extends Error { function enhanceErrorWithDocument(error: Error, document: DocumentNode) { // XXX A bit hacky maybe ... 
- const enhancedError = new WriteError(`Error writing result to store for query ${ - document.loc && document.loc.source && document.loc.source.body - }`); + const enhancedError = new WriteError( + `Error writing result to store for query ${document.loc && + document.loc.source && + document.loc.source.body}`, + ); enhancedError.message += '/n' + error.message; enhancedError.stack = error.stack; return enhancedError; @@ -98,13 +82,13 @@ export function writeQueryToStore({ fragmentMap = {} as FragmentMap, fragmentMatcherFunction, }: { - result: Object, - query: DocumentNode, - store?: NormalizedCache, - variables?: Object, - dataIdFromObject?: IdGetter, - fragmentMap?: FragmentMap, - fragmentMatcherFunction?: FragmentMatcher, + result: Object; + query: DocumentNode; + store?: NormalizedCache; + variables?: Object; + dataIdFromObject?: IdGetter; + fragmentMap?: FragmentMap; + fragmentMatcherFunction?: FragmentMatcher; }): NormalizedCache { const queryDefinition: OperationDefinitionNode = getQueryDefinition(query); @@ -131,7 +115,7 @@ export function writeQueryToStore({ export type WriteContext = { store: NormalizedCache; - processedData?: { [x: string]: FieldNode[] }, + processedData?: { [x: string]: FieldNode[] }; variables?: any; dataIdFromObject?: IdGetter; fragmentMap?: FragmentMap; @@ -147,15 +131,14 @@ export function writeResultToStore({ dataIdFromObject, fragmentMatcherFunction, }: { - dataId: string, - result: any, - document: DocumentNode, - store?: NormalizedCache, - variables?: Object, - dataIdFromObject?: IdGetter, - fragmentMatcherFunction?: FragmentMatcher, + dataId: string; + result: any; + document: DocumentNode; + store?: NormalizedCache; + variables?: Object; + dataIdFromObject?: IdGetter; + fragmentMatcherFunction?: FragmentMatcher; }): NormalizedCache { - // XXX TODO REFACTOR: this is a temporary workaround until query normalization is made to work with documents. const operationDefinition = getOperationDefinition(document); const selectionSet = operationDefinition.selectionSet; @@ -188,14 +171,14 @@ export function writeSelectionSetToStore({ selectionSet, context, }: { - dataId: string, - result: any, - selectionSet: SelectionSetNode, - context: WriteContext, + dataId: string; + result: any; + selectionSet: SelectionSetNode; + context: WriteContext; }): NormalizedCache { const { variables, store, dataIdFromObject, fragmentMap } = context; - selectionSet.selections.forEach((selection) => { + selectionSet.selections.forEach(selection => { const included = shouldInclude(selection, variables); if (isField(selection)) { @@ -216,7 +199,13 @@ export function writeSelectionSetToStore({ // we just print a warning for the time being. //throw new WriteError(`Missing field ${resultFieldKey} in ${JSON.stringify(result, null, 2).substring(0, 100)}`); if (!isProduction()) { - console.warn(`Missing field ${resultFieldKey} in ${JSON.stringify(result, null, 2).substring(0, 100)}`); + console.warn( + `Missing field ${resultFieldKey} in ${JSON.stringify( + result, + null, + 2, + ).substring(0, 100)}`, + ); } } } @@ -243,7 +232,7 @@ export function writeSelectionSetToStore({ // on the context. 
const idValue: IdValue = { type: 'id', id: 'self', generated: false }; const fakeContext: ReadStoreContext = { - store: { 'self': result }, + store: { self: result }, returnPartialData: false, hasMissingField: false, customResolvers: {}, @@ -272,23 +261,24 @@ export function writeSelectionSetToStore({ return store; } - // Checks if the id given is an id that was generated by Apollo // rather than by dataIdFromObject. function isGeneratedId(id: string): boolean { - return (id[0] === '$'); + return id[0] === '$'; } -function mergeWithGenerated(generatedKey: string, realKey: string, cache: NormalizedCache) { +function mergeWithGenerated( + generatedKey: string, + realKey: string, + cache: NormalizedCache, +) { const generated = cache[generatedKey]; const real = cache[realKey]; - Object.keys(generated).forEach((key) => { + Object.keys(generated).forEach(key => { const value = generated[key]; const realValue = real[key]; - if (isIdValue(value) - && isGeneratedId(value.id) - && isIdValue(realValue)) { + if (isIdValue(value) && isGeneratedId(value.id) && isIdValue(realValue)) { mergeWithGenerated(value.id, realValue.id, cache); } delete cache[generatedKey]; @@ -296,9 +286,11 @@ function mergeWithGenerated(generatedKey: string, realKey: string, cache: Normal }); } -function isDataProcessed(dataId: string, - field: FieldNode|SelectionSetNode, - processedData?: {[x: string]: (FieldNode|SelectionSetNode)[]}): boolean { +function isDataProcessed( + dataId: string, + field: FieldNode | SelectionSetNode, + processedData?: { [x: string]: (FieldNode | SelectionSetNode)[] }, +): boolean { if (!processedData) { return false; } @@ -322,10 +314,10 @@ function writeFieldToStore({ dataId, context, }: { - field: FieldNode, - value: any, - dataId: string, - context: WriteContext, + field: FieldNode; + value: any; + dataId: string; + context: WriteContext; }) { const { variables, dataIdFromObject, store, fragmentMap } = context; @@ -341,15 +333,20 @@ function writeFieldToStore({ if (!field.selectionSet || value === null) { storeValue = value != null && typeof value === 'object' - // If the scalar value is a JSON blob, we have to "escape" it so it can’t pretend to be - // an id. - ? { type: 'json', json: value } - // Otherwise, just store the scalar directly in the store. - : value; + ? // If the scalar value is a JSON blob, we have to "escape" it so it can’t pretend to be + // an id. + { type: 'json', json: value } + : // Otherwise, just store the scalar directly in the store. + value; } else if (Array.isArray(value)) { const generatedId = `${dataId}.${storeFieldName}`; - storeValue = processArrayValue(value, generatedId, field.selectionSet, context); + storeValue = processArrayValue( + value, + generatedId, + field.selectionSet, + context, + ); } else { // It's an object let valueDataId = `${dataId}.${storeFieldName}`; @@ -369,7 +366,9 @@ function writeFieldToStore({ // and we use the distinction between user-desiginated and application-provided // ids when managing overwrites. if (semanticId && isGeneratedId(semanticId)) { - throw new Error('IDs returned by dataIdFromObject cannot begin with the "$" character.'); + throw new Error( + 'IDs returned by dataIdFromObject cannot begin with the "$" character.', + ); } if (semanticId) { @@ -403,10 +402,16 @@ function writeFieldToStore({ // If there is already a real id in the store and the current id we // are dealing with is generated, we throw an error. 
- if (isIdValue(storeValue) && storeValue.generated - && isIdValue(escapedId) && !escapedId.generated) { - throw new Error(`Store error: the application attempted to write an object with no provided id` + - ` but the store already contains an id of ${escapedId.id} for this object.`); + if ( + isIdValue(storeValue) && + storeValue.generated && + isIdValue(escapedId) && + !escapedId.generated + ) { + throw new Error( + `Store error: the application attempted to write an object with no provided id` + + ` but the store already contains an id of ${escapedId.id} for this object.`, + ); } if (isIdValue(escapedId) && escapedId.generated) { diff --git a/src/errors/ApolloError.ts b/src/errors/ApolloError.ts index 1619e146798..9a27cf9f9b4 100644 --- a/src/errors/ApolloError.ts +++ b/src/errors/ApolloError.ts @@ -1,34 +1,34 @@ import { GraphQLError } from 'graphql'; - // XXX some duck typing here because for some reason new ApolloError is not instanceof ApolloError - export function isApolloError(err: Error): err is ApolloError { - return err.hasOwnProperty('graphQLErrors'); - } - - // Sets the error message on this error according to the - // the GraphQL and network errors that are present. - // If the error message has already been set through the - // constructor or otherwise, this function is a nop. - const generateErrorMessage = (err: ApolloError) => { - +// XXX some duck typing here because for some reason new ApolloError is not instanceof ApolloError +export function isApolloError(err: Error): err is ApolloError { + return err.hasOwnProperty('graphQLErrors'); +} - let message = ''; - // If we have GraphQL errors present, add that to the error message. - if (Array.isArray(err.graphQLErrors) && err.graphQLErrors.length !== 0) { - err.graphQLErrors.forEach((graphQLError: GraphQLError) => { - const errorMessage = graphQLError ? graphQLError.message : 'Error message not found.'; - message += `GraphQL error: ${errorMessage}\n`; - }); - } +// Sets the error message on this error according to the +// the GraphQL and network errors that are present. +// If the error message has already been set through the +// constructor or otherwise, this function is a nop. +const generateErrorMessage = (err: ApolloError) => { + let message = ''; + // If we have GraphQL errors present, add that to the error message. + if (Array.isArray(err.graphQLErrors) && err.graphQLErrors.length !== 0) { + err.graphQLErrors.forEach((graphQLError: GraphQLError) => { + const errorMessage = graphQLError + ? 
graphQLError.message + : 'Error message not found.'; + message += `GraphQL error: ${errorMessage}\n`; + }); + } - if (err.networkError) { - message += 'Network error: ' + err.networkError.message + '\n'; - } + if (err.networkError) { + message += 'Network error: ' + err.networkError.message + '\n'; + } - // strip newline from the end of the message - message = message.replace(/\n$/, ''); - return message; - }; + // strip newline from the end of the message + message = message.replace(/\n$/, ''); + return message; +}; export class ApolloError extends Error { public message: string; @@ -49,10 +49,10 @@ export class ApolloError extends Error { errorMessage, extraInfo, }: { - graphQLErrors?: GraphQLError[], - networkError?: Error | null, - errorMessage?: string, - extraInfo?: any, + graphQLErrors?: GraphQLError[]; + networkError?: Error | null; + errorMessage?: string; + extraInfo?: any; }) { super(errorMessage); this.graphQLErrors = graphQLErrors || []; @@ -66,5 +66,4 @@ export class ApolloError extends Error { this.extraInfo = extraInfo; } - } diff --git a/src/index.ts b/src/index.ts index 77399c22d3f..5dee2d6e39e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -11,15 +11,9 @@ import { HTTPBatchedNetworkInterface, } from './transport/batchedNetworkInterface'; -import { - print, -} from 'graphql/language/printer'; +import { print } from 'graphql/language/printer'; -import { - createApolloStore, - ApolloStore, - createApolloReducer, -} from './store'; +import { createApolloStore, ApolloStore, createApolloReducer } from './store'; import { ObservableQuery, @@ -27,9 +21,7 @@ import { UpdateQueryOptions, } from './core/ObservableQuery'; -import { - Subscription, -} from './util/Observable'; +import { Subscription } from './util/Observable'; import { WatchQueryOptions, @@ -40,17 +32,11 @@ import { SubscribeToMoreOptions, } from './core/watchQueryOptions'; -import { - readQueryFromStore, -} from './data/readFromStore'; +import { readQueryFromStore } from './data/readFromStore'; -import { - writeQueryToStore, -} from './data/writeToStore'; +import { writeQueryToStore } from './data/writeToStore'; -import { - MutationQueryReducersMap, -} from './data/mutationResults'; +import { MutationQueryReducersMap } from './data/mutationResults'; import { getQueryDefinition, @@ -60,28 +46,17 @@ import { createFragmentMap, } from './queries/getFromAST'; -import { - NetworkStatus, -} from './queries/networkStatus'; +import { NetworkStatus } from './queries/networkStatus'; -import { - addTypenameToDocument, -} from './queries/queryTransform'; +import { addTypenameToDocument } from './queries/queryTransform'; -import { - ApolloError, -} from './errors/ApolloError'; +import { ApolloError } from './errors/ApolloError'; import ApolloClient from './ApolloClient'; -import { - ApolloQueryResult, - ApolloExecutionResult, -} from './core/types'; +import { ApolloQueryResult, ApolloExecutionResult } from './core/types'; -import { - toIdValue, -} from './data/storeUtils'; +import { toIdValue } from './data/storeUtils'; import { IntrospectionFragmentMatcher, @@ -107,14 +82,11 @@ export { ApolloQueryResult, ApolloExecutionResult, toIdValue, - IntrospectionFragmentMatcher, FragmentMatcherInterface, - // Expose the print method from GraphQL so that people that implement // custom network interfaces can turn query ASTs into query strings as needed. 
print as printAST, - // Internal type definitions NetworkInterface, SubscriptionNetworkInterface, diff --git a/src/mutations/store.ts b/src/mutations/store.ts index 7e29df28918..13c54fa88b7 100644 --- a/src/mutations/store.ts +++ b/src/mutations/store.ts @@ -1,7 +1,7 @@ export class MutationStore { - private store: {[mutationId: string]: MutationStoreValue} = {}; + private store: { [mutationId: string]: MutationStoreValue } = {}; - public getStore(): {[mutationId: string]: MutationStoreValue} { + public getStore(): { [mutationId: string]: MutationStoreValue } { return this.store; } @@ -9,7 +9,11 @@ export class MutationStore { return this.store[mutationId]; } - public initMutation(mutationId: string, mutationString: string, variables: Object | undefined) { + public initMutation( + mutationId: string, + mutationString: string, + variables: Object | undefined, + ) { this.store[mutationId] = { mutationString: mutationString, variables: variables || {}, @@ -39,4 +43,3 @@ export interface MutationStoreValue { loading: boolean; error: Error | null; } - diff --git a/src/optimistic-data/store.ts b/src/optimistic-data/store.ts index 1c7ba8606e3..a6cc49ac534 100644 --- a/src/optimistic-data/store.ts +++ b/src/optimistic-data/store.ts @@ -6,32 +6,21 @@ import { isMutationErrorAction, } from '../actions'; -import { - data, -} from '../data/store'; +import { data } from '../data/store'; -import { - NormalizedCache, -} from '../data/storeUtils'; +import { NormalizedCache } from '../data/storeUtils'; -import { - QueryStore, -} from '../queries/store'; +import { QueryStore } from '../queries/store'; -import { - MutationStore, -} from '../mutations/store'; +import { MutationStore } from '../mutations/store'; -import { - Store, - ApolloReducerConfig, -} from '../store'; +import { Store, ApolloReducerConfig } from '../store'; - import { assign } from '../util/assign'; +import { assign } from '../util/assign'; export type OptimisticStoreItem = { - mutationId: string, - data: NormalizedCache, + mutationId: string; + data: NormalizedCache; }; // a stack of patches of new or changed documents @@ -94,8 +83,10 @@ export function optimistic( const newState = [...previousState, optimisticState]; return newState; - } else if ((isMutationErrorAction(action) || isMutationResultAction(action)) - && previousState.some(change => change.mutationId === action.mutationId)) { + } else if ( + (isMutationErrorAction(action) || isMutationResultAction(action)) && + previousState.some(change => change.mutationId === action.mutationId) + ) { return rollbackOptimisticData( change => change.mutationId === action.mutationId, previousState, @@ -107,18 +98,14 @@ export function optimistic( return previousState; } -function getOptimisticDataPatch ( +function getOptimisticDataPatch( previousData: NormalizedCache, optimisticAction: MutationResultAction | WriteAction, queries: QueryStore, mutations: MutationStore, config: ApolloReducerConfig, ): any { - const optimisticData = data( - previousData, - optimisticAction, - config, - ); + const optimisticData = data(previousData, optimisticAction, config); const patch: any = {}; @@ -139,7 +126,7 @@ function getOptimisticDataPatch ( * The filter function should return true for all items that we want to * rollback. 
*/ -function rollbackOptimisticData ( +function rollbackOptimisticData( filterFn: (item: OptimisticStoreItem) => boolean, previousState = optimisticDefaultState, store: any, diff --git a/src/queries/directives.ts b/src/queries/directives.ts index b2e034d9532..c322064a293 100644 --- a/src/queries/directives.ts +++ b/src/queries/directives.ts @@ -1,19 +1,17 @@ // Provides the methods that allow QueryManager to handle // the `skip` and `include` directives within GraphQL. -import { - SelectionNode, - VariableNode, - BooleanValueNode, -} from 'graphql'; +import { SelectionNode, VariableNode, BooleanValueNode } from 'graphql'; - -export function shouldInclude(selection: SelectionNode, variables: { [name: string]: any } = {}): boolean { +export function shouldInclude( + selection: SelectionNode, + variables: { [name: string]: any } = {}, +): boolean { if (!selection.directives) { return true; } let res: boolean = true; - selection.directives.forEach((directive) => { + selection.directives.forEach(directive => { // TODO should move this validation to GraphQL validation once that's implemented. if (directive.name.value !== 'skip' && directive.name.value !== 'include') { // Just don't worry about directives we don't understand @@ -24,10 +22,11 @@ export function shouldInclude(selection: SelectionNode, variables: { [name: stri const directiveArguments = directive.arguments || []; const directiveName = directive.name.value; if (directiveArguments.length !== 1) { - throw new Error(`Incorrect number of arguments for the @${directiveName} directive.`); + throw new Error( + `Incorrect number of arguments for the @${directiveName} directive.`, + ); } - const ifArgument = directiveArguments[0]; if (!ifArgument.name || ifArgument.name.value !== 'if') { throw new Error(`Invalid argument for the @${directiveName} directive.`); @@ -38,11 +37,15 @@ export function shouldInclude(selection: SelectionNode, variables: { [name: stri if (!ifValue || ifValue.kind !== 'BooleanValue') { // means it has to be a variable value if this is a valid @skip or @include directive if (ifValue.kind !== 'Variable') { - throw new Error(`Argument for the @${directiveName} directive must be a variable or a bool ean value.`); + throw new Error( + `Argument for the @${directiveName} directive must be a variable or a bool ean value.`, + ); } else { evaledValue = variables[(ifValue as VariableNode).name.value]; if (evaledValue === undefined) { - throw new Error(`Invalid variable referenced in @${directiveName} directive.`); + throw new Error( + `Invalid variable referenced in @${directiveName} directive.`, + ); } } } else { diff --git a/src/queries/getFromAST.ts b/src/queries/getFromAST.ts index c2e6c99bc42..df936bc6e5f 100644 --- a/src/queries/getFromAST.ts +++ b/src/queries/getFromAST.ts @@ -5,20 +5,21 @@ import { ValueNode, } from 'graphql'; - -import { - valueToObjectRepresentation, -} from '../data/storeUtils'; +import { valueToObjectRepresentation } from '../data/storeUtils'; import { assign } from '../util/assign'; -export function getMutationDefinition(doc: DocumentNode): OperationDefinitionNode { +export function getMutationDefinition( + doc: DocumentNode, +): OperationDefinitionNode { checkDocument(doc); let mutationDef: OperationDefinitionNode | null = null; - doc.definitions.forEach((definition) => { - if (definition.kind === 'OperationDefinition' - && (definition as OperationDefinitionNode).operation === 'mutation') { + doc.definitions.forEach(definition => { + if ( + definition.kind === 'OperationDefinition' && + (definition 
as OperationDefinitionNode).operation === 'mutation' + ) { mutationDef = definition as OperationDefinitionNode; } }); @@ -39,7 +40,7 @@ string in a "gql" tag? http://docs.apollostack.com/apollo-client/core.html#gql`) let foundOperation = false; - doc.definitions.forEach((definition) => { + doc.definitions.forEach(definition => { switch (definition.kind) { // If this is a fragment that’s fine. case 'FragmentDefinition': @@ -48,20 +49,24 @@ string in a "gql" tag? http://docs.apollostack.com/apollo-client/core.html#gql`) // encounter an operation definition we throw an error. case 'OperationDefinition': if (foundOperation) { - throw new Error('Queries must have exactly one operation definition.'); + throw new Error( + 'Queries must have exactly one operation definition.', + ); } foundOperation = true; break; // If this is any other operation kind, throw an error. default: - throw new Error(`Schema type definitions not allowed in queries. Found: "${definition.kind}"`); + throw new Error( + `Schema type definitions not allowed in queries. Found: "${definition.kind}"`, + ); } }); } export function getOperationName(doc: DocumentNode): string | null { let res: string | null = null; - doc.definitions.forEach((definition) => { + doc.definitions.forEach(definition => { if (definition.kind === 'OperationDefinition' && definition.name) { res = definition.name.value; } @@ -70,14 +75,18 @@ export function getOperationName(doc: DocumentNode): string | null { } // Returns the FragmentDefinitions from a particular document as an array -export function getFragmentDefinitions(doc: DocumentNode): FragmentDefinitionNode[] { - let fragmentDefinitions: FragmentDefinitionNode[] = doc.definitions.filter((definition) => { - if (definition.kind === 'FragmentDefinition') { - return true; - } else { - return false; - } - }) as FragmentDefinitionNode[]; +export function getFragmentDefinitions( + doc: DocumentNode, +): FragmentDefinitionNode[] { + let fragmentDefinitions: FragmentDefinitionNode[] = doc.definitions.filter( + definition => { + if (definition.kind === 'FragmentDefinition') { + return true; + } else { + return false; + } + }, + ) as FragmentDefinitionNode[]; return fragmentDefinitions; } @@ -86,9 +95,11 @@ export function getQueryDefinition(doc: DocumentNode): OperationDefinitionNode { checkDocument(doc); let queryDef: OperationDefinitionNode | null = null; - doc.definitions.map((definition) => { - if (definition.kind === 'OperationDefinition' - && (definition as OperationDefinitionNode).operation === 'query') { + doc.definitions.map(definition => { + if ( + definition.kind === 'OperationDefinition' && + (definition as OperationDefinitionNode).operation === 'query' + ) { queryDef = definition as OperationDefinitionNode; } }); @@ -101,11 +112,13 @@ export function getQueryDefinition(doc: DocumentNode): OperationDefinitionNode { } // TODO REFACTOR: fix this and query/mutation definition to not use map, please. 
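// ---------------------------------------------------------------------------
// A minimal sketch of how the getFromAST helpers being reformatted here are
// typically combined; not taken from this patch. It assumes `gql` from
// graphql-tag, and the query text itself is invented for the example.
import gql from 'graphql-tag';
import {
  getQueryDefinition,
  getOperationName,
  getFragmentDefinitions,
  createFragmentMap,
} from './getFromAST';

const exampleDoc = gql`
  query HeroQuery {
    hero {
      ...heroFields
    }
  }
  fragment heroFields on Hero {
    name
  }
`;

// getQueryDefinition validates the document and returns its single query operation.
const queryDef = getQueryDefinition(exampleDoc);
// getOperationName returns 'HeroQuery' for the document above.
const operationName = getOperationName(exampleDoc);
// createFragmentMap turns the fragment definitions into a { heroFields: FragmentDefinitionNode } lookup.
const fragmentMap = createFragmentMap(getFragmentDefinitions(exampleDoc));
// ---------------------------------------------------------------------------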
-export function getOperationDefinition(doc: DocumentNode): OperationDefinitionNode { +export function getOperationDefinition( + doc: DocumentNode, +): OperationDefinitionNode { checkDocument(doc); let opDef: OperationDefinitionNode | null = null; - doc.definitions.map((definition) => { + doc.definitions.map(definition => { if (definition.kind === 'OperationDefinition') { opDef = definition as OperationDefinitionNode; } @@ -118,7 +131,9 @@ export function getOperationDefinition(doc: DocumentNode): OperationDefinitionNo return opDef; } -export function getFragmentDefinition(doc: DocumentNode): FragmentDefinitionNode { +export function getFragmentDefinition( + doc: DocumentNode, +): FragmentDefinitionNode { if (doc.kind !== 'Document') { throw new Error(`Expecting a parsed GraphQL document. Perhaps you need to wrap the query \ string in a "gql" tag? http://docs.apollostack.com/apollo-client/core.html#gql`); @@ -146,9 +161,11 @@ export interface FragmentMap { // Utility function that takes a list of fragment definitions and makes a hash out of them // that maps the name of the fragment to the fragment definition. -export function createFragmentMap(fragments: FragmentDefinitionNode[] = []): FragmentMap { +export function createFragmentMap( + fragments: FragmentDefinitionNode[] = [], +): FragmentMap { const symTable: FragmentMap = {}; - fragments.forEach((fragment) => { + fragments.forEach(fragment => { symTable[fragment.name.value] = fragment; }); @@ -177,7 +194,10 @@ export function createFragmentMap(fragments: FragmentDefinitionNode[] = []): Fra * fragment specified by the provided `fragmentName`. If there is more then one * fragment, but a `fragmentName` was not defined then an error will be thrown. */ -export function getFragmentQueryDocument(document: DocumentNode, fragmentName?: string): DocumentNode { +export function getFragmentQueryDocument( + document: DocumentNode, + fragmentName?: string, +): DocumentNode { let actualFragmentName = fragmentName; // Build an array of all our fragment definitions that will be used for @@ -189,8 +209,10 @@ export function getFragmentQueryDocument(document: DocumentNode, fragmentName?: // define our own operation definition later on. if (definition.kind === 'OperationDefinition') { throw new Error( - `Found a ${definition.operation} operation${definition.name ? ` named '${definition.name.value}'` : ''}. ` + - 'No operations are allowed when using a fragment as a query. Only fragments are allowed.', + `Found a ${definition.operation} operation${definition.name + ? ` named '${definition.name.value}'` + : ''}. ` + + 'No operations are allowed when using a fragment as a query. Only fragments are allowed.', ); } // Add our definition to the fragments array if it is a fragment @@ -204,7 +226,9 @@ export function getFragmentQueryDocument(document: DocumentNode, fragmentName?: // name from a single fragment in the definition. if (typeof actualFragmentName === 'undefined') { if (fragments.length !== 1) { - throw new Error(`Found ${fragments.length} fragments. \`fragmentName\` must be provided when there is not exactly 1 fragment.`); + throw new Error( + `Found ${fragments.length} fragments. 
\`fragmentName\` must be provided when there is not exactly 1 fragment.`, + ); } actualFragmentName = fragments[0].name.value; } @@ -237,7 +261,9 @@ export function getFragmentQueryDocument(document: DocumentNode, fragmentName?: return query; } -export function getDefaultValues(definition: OperationDefinitionNode): { [key: string]: any } { +export function getDefaultValues( + definition: OperationDefinitionNode, +): { [key: string]: any } { if (definition.variableDefinitions && definition.variableDefinitions.length) { const defaultValues = definition.variableDefinitions .filter(({ defaultValue }) => defaultValue) diff --git a/src/queries/networkStatus.ts b/src/queries/networkStatus.ts index f01b08243f3..c49fab9882c 100644 --- a/src/queries/networkStatus.ts +++ b/src/queries/networkStatus.ts @@ -49,6 +49,8 @@ export enum NetworkStatus { * Returns true if there is currently a network request in flight according to a given network * status. */ -export function isNetworkRequestInFlight (networkStatus: NetworkStatus): boolean { +export function isNetworkRequestInFlight( + networkStatus: NetworkStatus, +): boolean { return networkStatus < 7; } diff --git a/src/queries/queryTransform.ts b/src/queries/queryTransform.ts index e1ac790ca61..baf8b66bbe7 100644 --- a/src/queries/queryTransform.ts +++ b/src/queries/queryTransform.ts @@ -7,9 +7,7 @@ import { InlineFragmentNode, } from 'graphql'; -import { - checkDocument, -} from './getFromAST'; +import { checkDocument } from './getFromAST'; import { cloneDeep } from '../util/cloneDeep'; @@ -26,20 +24,26 @@ function addTypenameToSelectionSet( isRoot = false, ) { if (selectionSet.selections) { - if (! isRoot) { - const alreadyHasThisField = selectionSet.selections.some((selection) => { - return selection.kind === 'Field' && (selection as FieldNode).name.value === '__typename'; + if (!isRoot) { + const alreadyHasThisField = selectionSet.selections.some(selection => { + return ( + selection.kind === 'Field' && + (selection as FieldNode).name.value === '__typename' + ); }); - if (! 
alreadyHasThisField) { + if (!alreadyHasThisField) { selectionSet.selections.push(TYPENAME_FIELD); } } - selectionSet.selections.forEach((selection) => { + selectionSet.selections.forEach(selection => { // Must not add __typename if we're inside an introspection query if (selection.kind === 'Field') { - if (selection.name.value.lastIndexOf('__', 0) !== 0 && selection.selectionSet) { + if ( + selection.name.value.lastIndexOf('__', 0) !== 0 && + selection.selectionSet + ) { addTypenameToSelectionSet(selection.selectionSet); } } else if (selection.kind === 'InlineFragment') { @@ -51,16 +55,27 @@ function addTypenameToSelectionSet( } } -function removeConnectionDirectiveFromSelectionSet(selectionSet: SelectionSetNode) { +function removeConnectionDirectiveFromSelectionSet( + selectionSet: SelectionSetNode, +) { if (selectionSet.selections) { - selectionSet.selections.forEach((selection) => { - if (selection.kind === 'Field' && selection as FieldNode && selection.directives) { - selection.directives = selection.directives.filter((directive) => { + selectionSet.selections.forEach(selection => { + if ( + selection.kind === 'Field' && + (selection as FieldNode) && + selection.directives + ) { + selection.directives = selection.directives.filter(directive => { const willRemove = directive.name.value === 'connection'; if (willRemove) { - if (!directive.arguments || !directive.arguments.some((arg) => arg.name.value === 'key')) { - console.warn('Removing an @connection directive even though it does not have a key. ' + - 'You may want to use the key parameter to specify a store key.'); + if ( + !directive.arguments || + !directive.arguments.some(arg => arg.name.value === 'key') + ) { + console.warn( + 'Removing an @connection directive even though it does not have a key. 
' + + 'You may want to use the key parameter to specify a store key.', + ); } } @@ -69,7 +84,7 @@ function removeConnectionDirectiveFromSelectionSet(selectionSet: SelectionSetNod } }); - selectionSet.selections.forEach((selection) => { + selectionSet.selections.forEach(selection => { if (selection.kind === 'Field') { if (selection.selectionSet) { removeConnectionDirectiveFromSelectionSet(selection.selectionSet); @@ -89,7 +104,10 @@ export function addTypenameToDocument(doc: DocumentNode) { docClone.definitions.forEach((definition: DefinitionNode) => { const isRoot = definition.kind === 'OperationDefinition'; - addTypenameToSelectionSet((definition as OperationDefinitionNode).selectionSet, isRoot); + addTypenameToSelectionSet( + (definition as OperationDefinitionNode).selectionSet, + isRoot, + ); }); return docClone; @@ -100,7 +118,9 @@ export function removeConnectionDirectiveFromDocument(doc: DocumentNode) { const docClone = cloneDeep(doc); docClone.definitions.forEach((definition: DefinitionNode) => { - removeConnectionDirectiveFromSelectionSet((definition as OperationDefinitionNode).selectionSet); + removeConnectionDirectiveFromSelectionSet( + (definition as OperationDefinitionNode).selectionSet, + ); }); return docClone; diff --git a/src/queries/store.ts b/src/queries/store.ts index d9f45097a3d..4f1edecfe8c 100644 --- a/src/queries/store.ts +++ b/src/queries/store.ts @@ -1,8 +1,4 @@ -import { - DocumentNode, - GraphQLError, - ExecutionResult, -} from 'graphql'; +import { DocumentNode, GraphQLError, ExecutionResult } from 'graphql'; import { isEqual } from '../util/isEqual'; @@ -20,9 +16,9 @@ export type QueryStoreValue = { }; export class QueryStore { - private store: {[queryId: string]: QueryStoreValue} = {}; + private store: { [queryId: string]: QueryStoreValue } = {}; - public getStore(): {[queryId: string]: QueryStoreValue} { + public getStore(): { [queryId: string]: QueryStoreValue } { return this.store; } @@ -31,23 +27,25 @@ export class QueryStore { } public initQuery(query: { - queryId: string, - queryString: string, - document: DocumentNode, - storePreviousVariables: boolean, - variables: Object, - isPoll: boolean, - isRefetch: boolean, - metadata: any, - fetchMoreForQueryId: string | undefined, - }) { + queryId: string; + queryString: string; + document: DocumentNode; + storePreviousVariables: boolean; + variables: Object; + isPoll: boolean; + isRefetch: boolean; + metadata: any; + fetchMoreForQueryId: string | undefined; + }) { const previousQuery = this.store[query.queryId]; if (previousQuery && previousQuery.queryString !== query.queryString) { // XXX we're throwing an error here to catch bugs where a query gets overwritten by a new one. // we should implement a separate action for refetching so that QUERY_INIT may never overwrite // an existing query (see also: https://github.com/apollostack/apollo-client/issues/732) - throw new Error('Internal Error: may not update existing query string in store'); + throw new Error( + 'Internal Error: may not update existing query string in store', + ); } let isSetVariables = false; @@ -100,17 +98,23 @@ export class QueryStore { // This is because the implementation of `fetchMore` *always* sets // `fetchPolicy` to `network-only` so we would never have a client result. 
if (typeof query.fetchMoreForQueryId === 'string') { - this.store[query.fetchMoreForQueryId].networkStatus = NetworkStatus.fetchMore; + this.store[query.fetchMoreForQueryId].networkStatus = + NetworkStatus.fetchMore; } } - public markQueryResult(queryId: string, result: ExecutionResult, fetchMoreForQueryId: string | undefined) { + public markQueryResult( + queryId: string, + result: ExecutionResult, + fetchMoreForQueryId: string | undefined, + ) { if (!this.store[queryId]) { return; } this.store[queryId].networkError = null; - this.store[queryId].graphQLErrors = (result.errors && result.errors.length) ? result.errors : []; + this.store[queryId].graphQLErrors = + result.errors && result.errors.length ? result.errors : []; this.store[queryId].previousVariables = null; this.store[queryId].networkStatus = NetworkStatus.ready; @@ -122,7 +126,11 @@ export class QueryStore { } } - public markQueryError(queryId: string, error: Error, fetchMoreForQueryId: string | undefined) { + public markQueryError( + queryId: string, + error: Error, + fetchMoreForQueryId: string | undefined, + ) { if (!this.store[queryId]) { return; } @@ -145,7 +153,9 @@ export class QueryStore { this.store[queryId].networkError = null; this.store[queryId].previousVariables = null; - this.store[queryId].networkStatus = complete ? NetworkStatus.ready : NetworkStatus.loading; + this.store[queryId].networkStatus = complete + ? NetworkStatus.ready + : NetworkStatus.loading; } public stopQuery(queryId: string) { @@ -154,16 +164,18 @@ export class QueryStore { public reset(observableQueryIds: string[]) { // keep only the queries with query ids that are associated with observables - this.store = Object.keys(this.store).filter((queryId) => { - return (observableQueryIds.indexOf(queryId) > -1); - }).reduce((res, key) => { - // XXX set loading to true so listeners don't trigger unless they want results with partial data - res[key] = { - ...this.store[key], - networkStatus: NetworkStatus.loading, - }; - - return res; - }, {} as {[queryId: string]: QueryStoreValue}); + this.store = Object.keys(this.store) + .filter(queryId => { + return observableQueryIds.indexOf(queryId) > -1; + }) + .reduce((res, key) => { + // XXX set loading to true so listeners don't trigger unless they want results with partial data + res[key] = { + ...this.store[key], + networkStatus: NetworkStatus.loading, + }; + + return res; + }, {} as { [queryId: string]: QueryStoreValue }); } } diff --git a/src/scheduler/scheduler.ts b/src/scheduler/scheduler.ts index 3ac91408ac0..7e9e2ed0221 100644 --- a/src/scheduler/scheduler.ts +++ b/src/scheduler/scheduler.ts @@ -8,14 +8,9 @@ // At the moment, the QueryScheduler implements the one-polling-instance-at-a-time logic and // adds queries to the QueryBatcher queue. -import { - QueryManager, -} from '../core/QueryManager'; +import { QueryManager } from '../core/QueryManager'; -import { - FetchType, - QueryListener, -} from '../core/types'; +import { FetchType, QueryListener } from '../core/types'; import { ObservableQuery } from '../core/ObservableQuery'; @@ -42,11 +37,7 @@ export class QueryScheduler { // Map going from polling interval widths to polling timers. 
private pollingTimers: { [interval: number]: any }; - constructor({ - queryManager, - }: { - queryManager: QueryManager; - }) { + constructor({ queryManager }: { queryManager: QueryManager }) { this.queryManager = queryManager; this.pollingTimers = {}; this.inFlightQueries = {}; @@ -58,16 +49,26 @@ export class QueryScheduler { const queries = this.queryManager.queryStore; // XXX we do this because some legacy tests use a fake queryId. We should rewrite those tests - return queries.get(queryId) && queries.get(queryId).networkStatus !== NetworkStatus.ready; + return ( + queries.get(queryId) && + queries.get(queryId).networkStatus !== NetworkStatus.ready + ); } - public fetchQuery(queryId: string, options: WatchQueryOptions, fetchType: FetchType) { + public fetchQuery( + queryId: string, + options: WatchQueryOptions, + fetchType: FetchType, + ) { return new Promise((resolve, reject) => { - this.queryManager.fetchQuery(queryId, options, fetchType).then((result) => { - resolve(result); - }).catch((error) => { - reject(error); - }); + this.queryManager + .fetchQuery(queryId, options, fetchType) + .then(result => { + resolve(result); + }) + .catch(error => { + reject(error); + }); }); } @@ -77,7 +78,9 @@ export class QueryScheduler { listener?: QueryListener, ): string { if (!options.pollInterval) { - throw new Error('Attempted to start a polling query without a polling interval.'); + throw new Error( + 'Attempted to start a polling query without a polling interval.', + ); } if (this.queryManager.ssrMode) { @@ -107,7 +110,9 @@ export class QueryScheduler { // 1. remove queries that have stopped polling // 2. call fetchQueries for queries that are polling and not in flight. // TODO: refactor this to make it cleaner - this.intervalQueries[interval] = this.intervalQueries[interval].filter((queryId) => { + this.intervalQueries[interval] = this.intervalQueries[ + interval + ].filter(queryId => { // If queryOptions can't be found from registeredQueries, it means that this queryId // is no longer registered and should be removed from the list of queries firing on this // interval. @@ -137,16 +142,24 @@ export class QueryScheduler { // Adds a query on a particular interval to this.intervalQueries and then fires // that query with all the other queries executing on that interval. Note that the query id // and query options must have been added to this.registeredQueries before this function is called. - public addQueryOnInterval(queryId: string, queryOptions: WatchQueryOptions) { + public addQueryOnInterval( + queryId: string, + queryOptions: WatchQueryOptions, + ) { const interval = queryOptions.pollInterval; if (!interval) { - throw new Error(`A poll interval is required to start polling query with id '${queryId}'.`); + throw new Error( + `A poll interval is required to start polling query with id '${queryId}'.`, + ); } // If there are other queries on this interval, this query will just fire with those // and we don't need to create a new timer. - if (this.intervalQueries.hasOwnProperty(interval.toString()) && this.intervalQueries[interval].length > 0) { + if ( + this.intervalQueries.hasOwnProperty(interval.toString()) && + this.intervalQueries[interval].length > 0 + ) { this.intervalQueries[interval].push(queryId); } else { this.intervalQueries[interval] = [queryId]; @@ -158,9 +171,13 @@ export class QueryScheduler { } // Used only for unit testing. 
- public registerPollingQuery(queryOptions: WatchQueryOptions): ObservableQuery { + public registerPollingQuery( + queryOptions: WatchQueryOptions, + ): ObservableQuery { if (!queryOptions.pollInterval) { - throw new Error('Attempted to register a non-polling query with the scheduler.'); + throw new Error( + 'Attempted to register a non-polling query with the scheduler.', + ); } return new ObservableQuery({ scheduler: this, diff --git a/src/store.ts b/src/store.ts index 48a0687262e..9adcd514fc3 100644 --- a/src/store.ts +++ b/src/store.ts @@ -7,21 +7,13 @@ import { Action, } from 'redux'; -import { - FragmentMatcher, -} from 'graphql-anywhere'; +import { FragmentMatcher } from 'graphql-anywhere'; -import { - data, -} from './data/store'; +import { data } from './data/store'; -import { - NormalizedCache, -} from './data/storeUtils'; +import { NormalizedCache } from './data/storeUtils'; -import { - QueryStore, -} from './queries/store'; +import { QueryStore } from './queries/store'; import { // mutations, @@ -42,13 +34,9 @@ import { isSubscriptionResultAction, } from './actions'; -import { - IdGetter, -} from './core/types'; +import { IdGetter } from './core/types'; -import { - CustomResolverMap, -} from './data/readFromStore'; +import { CustomResolverMap } from './data/readFromStore'; import { assign } from './util/assign'; @@ -87,7 +75,10 @@ const crashReporter = (store: any) => (next: any) => (action: any) => { } }; -const createReducerError = (error: Error, action: ApolloAction): ReducerError => { +const createReducerError = ( + error: Error, + action: ApolloAction, +): ReducerError => { const reducerError: ReducerError = { error }; if (isQueryResultAction(action)) { @@ -102,9 +93,14 @@ const createReducerError = (error: Error, action: ApolloAction): ReducerError => }; // Reducer -export type ApolloReducer = (store: NormalizedCache, action: ApolloAction) => NormalizedCache; - -export function createApolloReducer(config: ApolloReducerConfig): (state: Store, action: ApolloAction | Action) => Store { +export type ApolloReducer = ( + store: NormalizedCache, + action: ApolloAction, +) => NormalizedCache; + +export function createApolloReducer( + config: ApolloReducerConfig, +): (state: Store, action: ApolloAction | Action) => Store { return function apolloReducer(state = {} as Store, action: ApolloAction) { try { const newState: Store = { @@ -129,9 +125,11 @@ export function createApolloReducer(config: ApolloReducerConfig): (state: Store, config, ); - if (state.data === newState.data && - state.optimistic === newState.optimistic && - state.reducerError === newState.reducerError) { + if ( + state.data === newState.data && + state.optimistic === newState.optimistic && + state.reducerError === newState.reducerError + ) { return state; } @@ -145,19 +143,21 @@ export function createApolloReducer(config: ApolloReducerConfig): (state: Store, }; } -export function createApolloStore({ - reduxRootKey = 'apollo', - initialState, - config = {}, - reportCrashes = true, - logger, -}: { - reduxRootKey?: string, - initialState?: any, - config?: ApolloReducerConfig, - reportCrashes?: boolean, - logger?: Middleware, -} = {}): ApolloStore { +export function createApolloStore( + { + reduxRootKey = 'apollo', + initialState, + config = {}, + reportCrashes = true, + logger, + }: { + reduxRootKey?: string; + initialState?: any; + config?: ApolloReducerConfig; + reportCrashes?: boolean; + logger?: Middleware; + } = {}, +): ApolloStore { const enhancers: any[] = []; const middlewares: Middleware[] = []; @@ -187,12 
+187,22 @@ export function createApolloStore({ // Note: The below checks are what make it OK for QueryManager to start from 0 when generating // new query IDs. If we let people rehydrate query state for some reason, we would need to make // sure newly generated IDs don't overlap with old queries. - if ( initialState && initialState[reduxRootKey] && initialState[reduxRootKey]['queries']) { + if ( + initialState && + initialState[reduxRootKey] && + initialState[reduxRootKey]['queries'] + ) { throw new Error('Apollo initial state may not contain queries, only data'); } - if ( initialState && initialState[reduxRootKey] && initialState[reduxRootKey]['mutations']) { - throw new Error('Apollo initial state may not contain mutations, only data'); + if ( + initialState && + initialState[reduxRootKey] && + initialState[reduxRootKey]['mutations'] + ) { + throw new Error( + 'Apollo initial state may not contain mutations, only data', + ); } return createStore( @@ -206,5 +216,5 @@ export type ApolloReducerConfig = { dataIdFromObject?: IdGetter; customResolvers?: CustomResolverMap; fragmentMatcher?: FragmentMatcher; - addTypename?: boolean, + addTypename?: boolean; }; diff --git a/src/transport/Deduplicator.ts b/src/transport/Deduplicator.ts index 95261c53a90..c10ea21e705 100644 --- a/src/transport/Deduplicator.ts +++ b/src/transport/Deduplicator.ts @@ -1,15 +1,9 @@ -import { - NetworkInterface, - Request, -} from '../transport/networkInterface'; +import { NetworkInterface, Request } from '../transport/networkInterface'; -import { - print, -} from 'graphql/language/printer'; +import { print } from 'graphql/language/printer'; export class Deduplicator { - - private inFlightRequestPromises: { [key: string]: Promise}; + private inFlightRequestPromises: { [key: string]: Promise }; private networkInterface: NetworkInterface; constructor(networkInterface: NetworkInterface) { @@ -18,7 +12,6 @@ export class Deduplicator { } public query(request: Request, deduplicate = true) { - // sometimes we might not want to deduplicate a request, for example when we want to force fetch it. if (!deduplicate) { return this.networkInterface.query(request); @@ -29,19 +22,21 @@ export class Deduplicator { this.inFlightRequestPromises[key] = this.networkInterface.query(request); } return this.inFlightRequestPromises[key] - .then( res => { - delete this.inFlightRequestPromises[key]; - return res; - }) - .catch( err => { + .then(res => { + delete this.inFlightRequestPromises[key]; + return res; + }) + .catch(err => { delete this.inFlightRequestPromises[key]; throw err; - }); + }); } private getKey(request: Request) { // XXX we're assuming here that variables will be serialized in the same order. 
// that might not always be true - return `${print(request.query)}|${JSON.stringify(request.variables)}|${request.operationName}`; + return `${print(request.query)}|${JSON.stringify( + request.variables, + )}|${request.operationName}`; } } diff --git a/src/transport/afterware.ts b/src/transport/afterware.ts index 9f327e1975b..6032a9fc4d9 100644 --- a/src/transport/afterware.ts +++ b/src/transport/afterware.ts @@ -7,7 +7,11 @@ export interface AfterwareResponse { } export interface AfterwareInterface { - applyAfterware(this: HTTPFetchNetworkInterface, response: AfterwareResponse, next: Function): any; + applyAfterware( + this: HTTPFetchNetworkInterface, + response: AfterwareResponse, + next: Function, + ): any; } export interface BatchAfterwareResponse { @@ -16,5 +20,9 @@ export interface BatchAfterwareResponse { } export interface BatchAfterwareInterface { - applyBatchAfterware(this: HTTPBatchedNetworkInterface, response: BatchAfterwareResponse, next: Function): any; + applyBatchAfterware( + this: HTTPBatchedNetworkInterface, + response: BatchAfterwareResponse, + next: Function, + ): any; } diff --git a/src/transport/batchedNetworkInterface.ts b/src/transport/batchedNetworkInterface.ts index bc38c6abbec..3727cf2ca79 100644 --- a/src/transport/batchedNetworkInterface.ts +++ b/src/transport/batchedNetworkInterface.ts @@ -1,6 +1,4 @@ -import { - ExecutionResult, -} from 'graphql'; +import { ExecutionResult } from 'graphql'; import 'whatwg-fetch'; @@ -12,17 +10,11 @@ import { printRequest, } from './networkInterface'; -import { - BatchAfterwareInterface, -} from './afterware'; +import { BatchAfterwareInterface } from './afterware'; -import { - BatchMiddlewareInterface, -} from './middleware'; +import { BatchMiddlewareInterface } from './middleware'; -import { - QueryBatcher, -} from './batching'; +import { QueryBatcher } from './batching'; import { assign } from '../util/assign'; @@ -41,7 +33,6 @@ export interface BatchResponseAndOptions { // for GraphQL server implementations that support batching. If such a server is not available, you // should see `addQueryMerging` instead. 
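// ---------------------------------------------------------------------------
// A minimal usage sketch for the batched interface defined below; not taken
// from this patch. The endpoint URI and interval are example values, and
// `batchInterval` is the time in milliseconds the batcher waits while
// collecting queries into a single HTTP request.
import ApolloClient from '../ApolloClient';
import { createBatchingNetworkInterface } from './batchedNetworkInterface';

const batchingInterface = createBatchingNetworkInterface({
  uri: 'https://example.com/graphql', // hypothetical endpoint
  batchInterval: 10,
});

// The batching interface is passed to the client like any other network interface.
const client = new ApolloClient({ networkInterface: batchingInterface });
// ---------------------------------------------------------------------------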
export class HTTPBatchedNetworkInterface extends BaseNetworkInterface { - public _middlewares: BatchMiddlewareInterface[]; public _afterwares: BatchAfterwareInterface[]; private batcher: QueryBatcher; @@ -52,10 +43,10 @@ export class HTTPBatchedNetworkInterface extends BaseNetworkInterface { batchMax = 0, fetchOpts, }: { - uri: string, - batchInterval?: number, - batchMax?: number, - fetchOpts: RequestInit, + uri: string; + batchInterval?: number; + batchMax?: number; + fetchOpts: RequestInit; }) { super(uri, fetchOpts); @@ -83,65 +74,82 @@ export class HTTPBatchedNetworkInterface extends BaseNetworkInterface { public batchQuery(requests: Request[]): Promise { const options = { ...this._opts }; - const middlewarePromise: Promise = - this.applyBatchMiddlewares({ - requests, - options, - }); + const middlewarePromise: Promise< + BatchRequestAndOptions + > = this.applyBatchMiddlewares({ + requests, + options, + }); return new Promise((resolve, reject) => { - middlewarePromise.then((batchRequestAndOptions: BatchRequestAndOptions) => { - return this.batchedFetchFromRemoteEndpoint(batchRequestAndOptions) - .then(result => { - const httpResponse = result as Response; - - if (!httpResponse.ok) { - return this.applyBatchAfterwares({ responses: [httpResponse], options: batchRequestAndOptions.options }) - .then(() => { - const httpError = new Error(`Network request failed with status ${httpResponse.status} - "${httpResponse.statusText}"`); - (httpError as any).response = httpResponse; - - throw httpError; - }); - } - - // XXX can we be stricter with the type here? - return result.json() as any; - }) - .then(responses => { - if (typeof responses.map !== 'function') { - throw new Error('BatchingNetworkInterface: server response is not an array'); - } - - type ResponseAndOptions = { - response: Response; - options: RequestInit; - }; - - this.applyBatchAfterwares({ - responses, - options: batchRequestAndOptions.options, - }).then((responseAndOptions) => { - // In a batch response, the response is actually an Array of responses, refine it. - resolve(responseAndOptions.responses as any); - }).catch((error: Error) => { - reject(error); + middlewarePromise + .then((batchRequestAndOptions: BatchRequestAndOptions) => { + return this.batchedFetchFromRemoteEndpoint(batchRequestAndOptions) + .then(result => { + const httpResponse = result as Response; + + if (!httpResponse.ok) { + return this.applyBatchAfterwares({ + responses: [httpResponse], + options: batchRequestAndOptions.options, + }).then(() => { + const httpError = new Error( + `Network request failed with status ${httpResponse.status} - "${httpResponse.statusText}"`, + ); + (httpError as any).response = httpResponse; + + throw httpError; + }); + } + + // XXX can we be stricter with the type here? + return result.json() as any; + }) + .then(responses => { + if (typeof responses.map !== 'function') { + throw new Error( + 'BatchingNetworkInterface: server response is not an array', + ); + } + + type ResponseAndOptions = { + response: Response; + options: RequestInit; + }; + + this.applyBatchAfterwares({ + responses, + options: batchRequestAndOptions.options, + }) + .then(responseAndOptions => { + // In a batch response, the response is actually an Array of responses, refine it. 
+ resolve(responseAndOptions.responses as any); + }) + .catch((error: Error) => { + reject(error); + }); }); - }); - }).catch((error) => { - reject(error); - }); + }) + .catch(error => { + reject(error); + }); }); } - public applyBatchMiddlewares({requests, options}: BatchRequestAndOptions): Promise { + public applyBatchMiddlewares({ + requests, + options, + }: BatchRequestAndOptions): Promise { return new Promise((resolve, reject) => { const queue = (funcs: BatchMiddlewareInterface[], scope: any) => { const next = () => { if (funcs.length > 0) { const f = funcs.shift(); if (f) { - f.applyBatchMiddleware.apply(scope, [{ requests, options }, next]); + f.applyBatchMiddleware.apply(scope, [ + { requests, options }, + next, + ]); } } else { resolve({ @@ -157,9 +165,12 @@ export class HTTPBatchedNetworkInterface extends BaseNetworkInterface { }); } - public applyBatchAfterwares({responses, options}: BatchResponseAndOptions): Promise { + public applyBatchAfterwares({ + responses, + options, + }: BatchResponseAndOptions): Promise { return new Promise((resolve, reject) => { - const responseObject = {responses, options}; + const responseObject = { responses, options }; const queue = (funcs: BatchAfterwareInterface[], scope: any) => { const next = () => { if (funcs.length > 0) { @@ -180,11 +191,13 @@ export class HTTPBatchedNetworkInterface extends BaseNetworkInterface { } public use(middlewares: BatchMiddlewareInterface[]): HTTPNetworkInterface { - middlewares.map((middleware) => { + middlewares.map(middleware => { if (typeof middleware.applyBatchMiddleware === 'function') { this._middlewares.push(middleware); } else { - throw new Error('Batch middleware must implement the applyBatchMiddleware function'); + throw new Error( + 'Batch middleware must implement the applyBatchMiddleware function', + ); } }); @@ -196,7 +209,9 @@ export class HTTPBatchedNetworkInterface extends BaseNetworkInterface { if (typeof afterware.applyBatchAfterware === 'function') { this._afterwares.push(afterware); } else { - throw new Error('Batch afterware must implement the applyBatchAfterware function'); + throw new Error( + 'Batch afterware must implement the applyBatchAfterware function', + ); } }); @@ -212,7 +227,7 @@ export class HTTPBatchedNetworkInterface extends BaseNetworkInterface { assign(options, batchRequestAndOptions.options); // Serialize the requests to strings of JSON - const printedRequests = batchRequestAndOptions.requests.map((request) => { + const printedRequests = batchRequestAndOptions.requests.map(request => { return printRequest(request); }); @@ -224,7 +239,7 @@ export class HTTPBatchedNetworkInterface extends BaseNetworkInterface { headers: { Accept: '*/*', 'Content-Type': 'application/json', - ...(options.headers as { [headerName: string]: string }), + ...options.headers as { [headerName: string]: string }, }, }); } @@ -237,9 +252,13 @@ export interface BatchingNetworkInterfaceOptions { opts?: RequestInit; } -export function createBatchingNetworkInterface(options: BatchingNetworkInterfaceOptions): HTTPNetworkInterface { - if (! 
options) { - throw new Error('You must pass an options argument to createNetworkInterface.'); +export function createBatchingNetworkInterface( + options: BatchingNetworkInterfaceOptions, +): HTTPNetworkInterface { + if (!options) { + throw new Error( + 'You must pass an options argument to createNetworkInterface.', + ); } return new HTTPBatchedNetworkInterface({ uri: options.uri, diff --git a/src/transport/batching.ts b/src/transport/batching.ts index 5e62edbd133..65097147d2e 100644 --- a/src/transport/batching.ts +++ b/src/transport/batching.ts @@ -1,10 +1,6 @@ -import { - Request, -} from './networkInterface'; +import { Request } from './networkInterface'; -import { - ExecutionResult, -} from 'graphql'; +import { ExecutionResult } from 'graphql'; export interface QueryFetchRequest { request: Request; @@ -28,16 +24,18 @@ export class QueryBatcher { private batchMax: number; //This function is called to the queries in the queue to the server. - private batchFetchFunction: (request: Request[]) => Promise; + private batchFetchFunction: ( + request: Request[], + ) => Promise; constructor({ batchInterval, batchMax = 0, batchFetchFunction, }: { - batchInterval: number, - batchMax?: number, - batchFetchFunction: (request: Request[]) => Promise, + batchInterval: number; + batchMax?: number; + batchFetchFunction: (request: Request[]) => Promise; }) { this.queuedRequests = []; this.batchInterval = batchInterval; @@ -62,7 +60,7 @@ export class QueryBatcher { // When amount of requests reaches `batchMax`, trigger the queue consumption without waiting on the `batchInterval`. if (this.queuedRequests.length === this.batchMax) { - this.consumeQueue(); + this.consumeQueue(); } return fetchRequest.promise; @@ -72,7 +70,7 @@ export class QueryBatcher { // Returns a list of promises (one for each query). 
public consumeQueue(): (Promise | undefined)[] | undefined { const requests: Request[] = this.queuedRequests.map( - (queuedRequest) => queuedRequest.request, + queuedRequest => queuedRequest.request, ); const promises: (Promise | undefined)[] = []; @@ -88,15 +86,17 @@ export class QueryBatcher { const batchedPromise = this.batchFetchFunction(requests); - batchedPromise.then((results) => { - results.forEach((result, index) => { - resolvers[index](result); - }); - }).catch((error) => { - rejecters.forEach((rejecter, index) => { - rejecters[index](error); + batchedPromise + .then(results => { + results.forEach((result, index) => { + resolvers[index](result); + }); + }) + .catch(error => { + rejecters.forEach((rejecter, index) => { + rejecters[index](error); + }); }); - }); return promises; } diff --git a/src/transport/middleware.ts b/src/transport/middleware.ts index 27dfcb845bc..f4c079da799 100644 --- a/src/transport/middleware.ts +++ b/src/transport/middleware.ts @@ -1,14 +1,17 @@ import { Request, HTTPFetchNetworkInterface } from './networkInterface'; import { HTTPBatchedNetworkInterface } from './batchedNetworkInterface'; - export interface MiddlewareRequest { request: Request; options: RequestInit; } export interface MiddlewareInterface { - applyMiddleware(this: HTTPFetchNetworkInterface, request: MiddlewareRequest, next: Function): void; + applyMiddleware( + this: HTTPFetchNetworkInterface, + request: MiddlewareRequest, + next: Function, + ): void; } export interface BatchMiddlewareRequest { @@ -17,5 +20,9 @@ export interface BatchMiddlewareRequest { } export interface BatchMiddlewareInterface { - applyBatchMiddleware(this: HTTPBatchedNetworkInterface, request: BatchMiddlewareRequest, next: Function): void; + applyBatchMiddleware( + this: HTTPBatchedNetworkInterface, + request: BatchMiddlewareRequest, + next: Function, + ): void; } diff --git a/src/transport/networkInterface.ts b/src/transport/networkInterface.ts index 78ff3376bb5..7c7819b667c 100644 --- a/src/transport/networkInterface.ts +++ b/src/transport/networkInterface.ts @@ -1,25 +1,14 @@ import 'whatwg-fetch'; -import { - ExecutionResult, - DocumentNode, -} from 'graphql'; +import { ExecutionResult, DocumentNode } from 'graphql'; import { print } from 'graphql/language/printer'; -import { - MiddlewareInterface, - BatchMiddlewareInterface, -} from './middleware'; +import { MiddlewareInterface, BatchMiddlewareInterface } from './middleware'; -import { - AfterwareInterface, - BatchAfterwareInterface, -} from './afterware'; +import { AfterwareInterface, BatchAfterwareInterface } from './afterware'; -import { - removeConnectionDirectiveFromDocument, -} from '../queries/queryTransform'; +import { removeConnectionDirectiveFromDocument } from '../queries/queryTransform'; import { Observable } from '../util/Observable'; @@ -67,7 +56,10 @@ export interface BatchedNetworkInterface extends NetworkInterface { // XXX why does this have to extend network interface? does it even have a 'query' function? 
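// ---------------------------------------------------------------------------
// A minimal sketch of a MiddlewareInterface implementation wired up through
// `use()`, matching the types reformatted above; not taken from this patch.
// The header name and token value are placeholders for the example.
import { createNetworkInterface } from './networkInterface';
import { MiddlewareRequest } from './middleware';

const exampleInterface = createNetworkInterface('/graphql');

exampleInterface.use([
  {
    applyMiddleware(req: MiddlewareRequest, next: Function) {
      // Ensure the fetch options carry a headers object before mutating it.
      if (!req.options.headers) {
        req.options.headers = {};
      }
      (req.options.headers as { [name: string]: string })['authorization'] =
        'Bearer <token>'; // placeholder token
      next();
    },
  },
]);
// ---------------------------------------------------------------------------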
 export interface SubscriptionNetworkInterface extends NetworkInterface {
-  subscribe(request: Request, handler: (error: any, result: any) => void): number;
+  subscribe(
+    request: Request,
+    handler: (error: any, result: any) => void,
+  ): number;
   unsubscribe(id: Number): void;
 }
 
@@ -76,8 +68,12 @@ export interface HTTPNetworkInterface extends NetworkInterface {
   _opts: RequestInit;
   _middlewares: MiddlewareInterface[] | BatchMiddlewareInterface[];
   _afterwares: AfterwareInterface[] | BatchAfterwareInterface[];
-  use(middlewares: MiddlewareInterface[] | BatchMiddlewareInterface[]): HTTPNetworkInterface;
-  useAfter(afterwares: AfterwareInterface[] | BatchAfterwareInterface[]): HTTPNetworkInterface;
+  use(
+    middlewares: MiddlewareInterface[] | BatchMiddlewareInterface[],
+  ): HTTPNetworkInterface;
+  useAfter(
+    afterwares: AfterwareInterface[] | BatchAfterwareInterface[],
+  ): HTTPNetworkInterface;
 }
 
 export interface RequestAndOptions {
@@ -132,7 +128,9 @@ export class HTTPFetchNetworkInterface extends BaseNetworkInterface {
   public _middlewares: MiddlewareInterface[];
   public _afterwares: AfterwareInterface[];
 
-  public applyMiddlewares(requestAndOptions: RequestAndOptions): Promise<RequestAndOptions> {
+  public applyMiddlewares(
+    requestAndOptions: RequestAndOptions,
+  ): Promise<RequestAndOptions> {
     return new Promise((resolve, reject) => {
       const { request, options } = requestAndOptions;
       const queue = (funcs: MiddlewareInterface[], scope: any) => {
@@ -156,10 +154,13 @@ export class HTTPFetchNetworkInterface extends BaseNetworkInterface {
     });
   }
 
-  public applyAfterwares({response, options}: ResponseAndOptions): Promise<ResponseAndOptions> {
+  public applyAfterwares({
+    response,
+    options,
+  }: ResponseAndOptions): Promise<ResponseAndOptions> {
     return new Promise((resolve, reject) => {
       // Declare responseObject so that afterware can mutate it.
- const responseObject = {response, options}; + const responseObject = { response, options }; const queue = (funcs: AfterwareInterface[], scope: any) => { const next = () => { if (funcs.length > 0) { @@ -191,7 +192,7 @@ export class HTTPFetchNetworkInterface extends BaseNetworkInterface { headers: { Accept: '*/*', 'Content-Type': 'application/json', - ...(options.headers as { [headerName: string]: string }), + ...options.headers as { [headerName: string]: string }, }, }); } @@ -202,22 +203,30 @@ export class HTTPFetchNetworkInterface extends BaseNetworkInterface { return this.applyMiddlewares({ request, options, - }).then((rao) => { - if (rao.request.query) { - rao.request.query = removeConnectionDirectiveFromDocument(rao.request.query); - } + }) + .then(rao => { + if (rao.request.query) { + rao.request.query = removeConnectionDirectiveFromDocument( + rao.request.query, + ); + } - return rao; - }).then( (rao) => this.fetchFromRemoteEndpoint.call(this, rao)) - .then(response => this.applyAfterwares({ - response: response as Response, - options, - })) + return rao; + }) + .then(rao => this.fetchFromRemoteEndpoint.call(this, rao)) + .then(response => + this.applyAfterwares({ + response: response as Response, + options, + }), + ) .then(({ response }) => { const httpResponse = response as Response; - return httpResponse.json().catch((error) => { - const httpError = new Error(`Network request failed with status ${response.status} - "${response.statusText}"`); + return httpResponse.json().catch(error => { + const httpError = new Error( + `Network request failed with status ${response.status} - "${response.statusText}"`, + ); (httpError as any).response = httpResponse; (httpError as any).parseError = error; @@ -225,7 +234,10 @@ export class HTTPFetchNetworkInterface extends BaseNetworkInterface { }); }) .then((payload: ExecutionResult) => { - if (!payload.hasOwnProperty('data') && !payload.hasOwnProperty('errors')) { + if ( + !payload.hasOwnProperty('data') && + !payload.hasOwnProperty('errors') + ) { throw new Error( `Server response was missing for query '${request.debugName}'.`, ); @@ -236,11 +248,13 @@ export class HTTPFetchNetworkInterface extends BaseNetworkInterface { } public use(middlewares: MiddlewareInterface[]): HTTPNetworkInterface { - middlewares.map((middleware) => { + middlewares.map(middleware => { if (typeof middleware.applyMiddleware === 'function') { this._middlewares.push(middleware); } else { - throw new Error('Middleware must implement the applyMiddleware function'); + throw new Error( + 'Middleware must implement the applyMiddleware function', + ); } }); @@ -269,8 +283,10 @@ export function createNetworkInterface( uriOrInterfaceOpts: string | NetworkInterfaceOptions, secondArgOpts: NetworkInterfaceOptions = {}, ): HTTPNetworkInterface { - if (! 
uriOrInterfaceOpts) {
-    throw new Error('You must pass an options argument to createNetworkInterface.');
+  if (!uriOrInterfaceOpts) {
+    throw new Error(
+      'You must pass an options argument to createNetworkInterface.',
+    );
   }
 
   let uri: string | undefined;
diff --git a/src/util/Observable.ts b/src/util/Observable.ts
index 06f75489e9c..c6366a7b666 100644
--- a/src/util/Observable.ts
+++ b/src/util/Observable.ts
@@ -4,9 +4,13 @@ import $$observable from 'symbol-observable';
 
 export type CleanupFunction = () => void;
 
-export type SubscriberFunction<T> = (observer: Observer<T>) => (Subscription | CleanupFunction);
+export type SubscriberFunction<T> = (
+  observer: Observer<T>,
+) => Subscription | CleanupFunction;
 
-function isSubscription(subscription: Function | Subscription): subscription is Subscription {
+function isSubscription(
+  subscription: Function | Subscription,
+): subscription is Subscription {
   return (<Subscription>subscription).unsubscribe !== undefined;
 }
 
diff --git a/src/util/assign.ts b/src/util/assign.ts
index 80523da16b0..3a224329980 100644
--- a/src/util/assign.ts
+++ b/src/util/assign.ts
@@ -4,23 +4,28 @@
  *
  * @see https://github.com/apollostack/apollo-client/pull/1009
  */
-export function assign<A, B> (a: A, b: B): A & B;
-export function assign<A, B, C> (a: A, b: B, c: C): A & B & C;
-export function assign<A, B, C, D> (a: A, b: B, c: C, d: D): A & B & C & D;
-export function assign<A, B, C, D, E> (a: A, b: B, c: C, d: D, e: E): A & B & C & D & E;
-export function assign (target: any, ...sources: Array<any>): any;
-export function assign (
+export function assign<A, B>(a: A, b: B): A & B;
+export function assign<A, B, C>(a: A, b: B, c: C): A & B & C;
+export function assign<A, B, C, D>(a: A, b: B, c: C, d: D): A & B & C & D;
+export function assign<A, B, C, D, E>(
+  a: A,
+  b: B,
+  c: C,
+  d: D,
+  e: E,
+): A & B & C & D & E;
+export function assign(target: any, ...sources: Array<any>): any;
+export function assign(
   target: { [key: string]: any },
-  ...sources: Array<{ [key: string]: any }>,
+  ...sources: Array<{ [key: string]: any }>
 ): { [key: string]: any } {
   sources.forEach(source => {
-
-    if (typeof(source) === 'undefined' || source === null) {
-      return;
-    }
-    Object.keys(source).forEach(key => {
-      target[key] = source[key];
-    });
+    if (typeof source === 'undefined' || source === null) {
+      return;
+    }
+    Object.keys(source).forEach(key => {
+      target[key] = source[key];
+    });
   });
   return target;
 }
diff --git a/src/util/cloneDeep.ts b/src/util/cloneDeep.ts
index bfe056e0633..e8ccd735b46 100644
--- a/src/util/cloneDeep.ts
+++ b/src/util/cloneDeep.ts
@@ -1,7 +1,7 @@
 /**
  * Deeply clones a value to create a new instance.
  */
-export function cloneDeep<T> (value: T): T {
+export function cloneDeep<T>(value: T): T {
   // If the value is an array, create a new array where every item has been cloned.
   if (Array.isArray(value)) {
     return value.map(item => cloneDeep(item)) as any;
diff --git a/src/util/errorHandling.ts b/src/util/errorHandling.ts
index ebfa0ddf8fc..f2201012ae6 100644
--- a/src/util/errorHandling.ts
+++ b/src/util/errorHandling.ts
@@ -1,4 +1,4 @@
-export function tryFunctionOrLogError (f: Function) {
+export function tryFunctionOrLogError(f: Function) {
   try {
     return f();
   } catch (e) {
diff --git a/src/util/isEqual.ts b/src/util/isEqual.ts
index 8223788a526..bd2bc33fe91 100644
--- a/src/util/isEqual.ts
+++ b/src/util/isEqual.ts
@@ -1,14 +1,19 @@
 /**
  * Performs a deep equality check on two JavaScript values.
  */
-export function isEqual (a: any, b: any): boolean {
+export function isEqual(a: any, b: any): boolean {
   // If the two values are strictly equal, we are good.
if (a === b) { return true; } // If a and b are both objects, we will compare their properties. This will compare arrays as // well. - if (a != null && typeof a === 'object' && b != null && typeof b === 'object') { + if ( + a != null && + typeof a === 'object' && + b != null && + typeof b === 'object' + ) { // Compare all of the keys in `a`. If one of the keys has a different value, or that key does // not exist in `b` return false immediately. for (const key in a) { diff --git a/src/util/maybeDeepFreeze.ts b/src/util/maybeDeepFreeze.ts index c670e168dcf..8c4d84eceab 100644 --- a/src/util/maybeDeepFreeze.ts +++ b/src/util/maybeDeepFreeze.ts @@ -1,17 +1,16 @@ -import { - isDevelopment, - isTest, -} from './environment'; +import { isDevelopment, isTest } from './environment'; // taken straight from https://github.com/substack/deep-freeze to avoid import hassles with rollup -function deepFreeze (o: any) { +function deepFreeze(o: any) { Object.freeze(o); - Object.getOwnPropertyNames(o).forEach(function (prop) { - if (o.hasOwnProperty(prop) - && o[prop] !== null - && (typeof o[prop] === 'object' || typeof o[prop] === 'function') - && !Object.isFrozen(o[prop])) { + Object.getOwnPropertyNames(o).forEach(function(prop) { + if ( + o.hasOwnProperty(prop) && + o[prop] !== null && + (typeof o[prop] === 'object' || typeof o[prop] === 'function') && + !Object.isFrozen(o[prop]) + ) { deepFreeze(o[prop]); } }); diff --git a/test/ApolloClient.ts b/test/ApolloClient.ts index 922b5884241..b12ec4efec5 100644 --- a/test/ApolloClient.ts +++ b/test/ApolloClient.ts @@ -12,7 +12,7 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, c: 3, @@ -22,9 +22,39 @@ describe('ApolloClient', () => { }, }); - assert.deepEqual<{}>(client.readQuery({ query: gql`{ a }` }), { a: 1 }); - assert.deepEqual<{}>(client.readQuery({ query: gql`{ b c }` }), { b: 2, c: 3 }); - assert.deepEqual<{}>(client.readQuery({ query: gql`{ a b c }` }), { a: 1, b: 2, c: 3 }); + assert.deepEqual<{}>( + client.readQuery({ + query: gql` + { + a + } + `, + }), + { a: 1 }, + ); + assert.deepEqual<{}>( + client.readQuery({ + query: gql` + { + b + c + } + `, + }), + { b: 2, c: 3 }, + ); + assert.deepEqual<{}>( + client.readQuery({ + query: gql` + { + a + b + c + } + `, + }), + { a: 1, b: 2, c: 3 }, + ); }); it('will read some deeply nested data from the store', () => { @@ -32,7 +62,7 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, c: 3, @@ -42,7 +72,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'foo': { + foo: { __typename: 'Foo', e: 4, f: 5, @@ -53,7 +83,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 7, j: 8, @@ -65,16 +95,69 @@ describe('ApolloClient', () => { }); assert.deepEqual<{}>( - client.readQuery({ query: gql`{ a d { e } }` }), + client.readQuery({ + query: gql` + { + a + d { + e + } + } + `, + }), { a: 1, d: { e: 4, __typename: 'Foo' } }, ); assert.deepEqual<{}>( - client.readQuery({ query: gql`{ a d { e h { i } } }` }), - { a: 1, d: { __typename: 'Foo', e: 4, h: { i: 7, __typename: 'Bar' } } }, + client.readQuery({ + query: gql` + { + a + d { + e + h { + i + } + } + } + `, + }), + { + a: 1, + d: { __typename: 'Foo', e: 4, h: { i: 7, __typename: 'Bar' } }, + }, ); assert.deepEqual<{}>( - client.readQuery({ query: gql`{ a b c d { e f g h { i j k } } }` }), - { a: 1, b: 2, c: 3, d: { __typename: 'Foo', e: 4, f: 5, g: 6, h: { __typename: 'Bar', 
i: 7, j: 8, k: 9 } } }, + client.readQuery({ + query: gql` + { + a + b + c + d { + e + f + g + h { + i + j + k + } + } + } + `, + }), + { + a: 1, + b: 2, + c: 3, + d: { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { __typename: 'Bar', i: 7, j: 8, k: 9 }, + }, + }, ); }); @@ -83,7 +166,7 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { 'field({"literal":true,"value":42})': 1, 'field({"literal":false,"value":42})': 2, }, @@ -92,16 +175,21 @@ describe('ApolloClient', () => { }, }); - assert.deepEqual<{}>(client.readQuery({ - query: gql`query ($literal: Boolean, $value: Int) { - a: field(literal: true, value: 42) - b: field(literal: $literal, value: $value) - }`, - variables: { - literal: false, - value: 42, - }, - }), { a: 1, b: 2 }); + assert.deepEqual<{}>( + client.readQuery({ + query: gql` + query($literal: Boolean, $value: Int) { + a: field(literal: true, value: 42) + b: field(literal: $literal, value: $value) + } + `, + variables: { + literal: false, + value: 42, + }, + }), + { a: 1, b: 2 }, + ); }); }); @@ -110,7 +198,7 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { 'field({"literal":true,"value":-1})': 1, 'field({"literal":false,"value":42})': 2, }, @@ -119,24 +207,34 @@ describe('ApolloClient', () => { }, }); - assert.deepEqual<{}>(client.readQuery({ - query: gql`query ($literal: Boolean, $value: Int = -1) { - a: field(literal: $literal, value: $value) - }`, - variables: { - literal: false, - value: 42, - }, - }), { a: 2 }); - - assert.deepEqual<{}>(client.readQuery({ - query: gql`query ($literal: Boolean, $value: Int = -1) { - a: field(literal: $literal, value: $value) - }`, - variables: { - literal: true, - }, - }), { a: 1 }); + assert.deepEqual<{}>( + client.readQuery({ + query: gql` + query($literal: Boolean, $value: Int = -1) { + a: field(literal: $literal, value: $value) + } + `, + variables: { + literal: false, + value: 42, + }, + }), + { a: 2 }, + ); + + assert.deepEqual<{}>( + client.readQuery({ + query: gql` + query($literal: Boolean, $value: Int = -1) { + a: field(literal: $literal, value: $value) + } + `, + variables: { + literal: true, + }, + }), + { a: 1 }, + ); }); describe('readFragment', () => { @@ -144,10 +242,26 @@ describe('ApolloClient', () => { const client = new ApolloClient(); assert.throws(() => { - client.readFragment({ id: 'x', fragment: gql`query { a b c }` }); + client.readFragment({ + id: 'x', + fragment: gql` + query { + a + b + c + } + `, + }); }, 'Found a query operation. No operations are allowed when using a fragment as a query. Only fragments are allowed.'); assert.throws(() => { - client.readFragment({ id: 'x', fragment: gql`schema { query: Query }` }); + client.readFragment({ + id: 'x', + fragment: gql` + schema { + query: Query + } + `, + }); }, 'Found 0 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); }); @@ -155,10 +269,36 @@ describe('ApolloClient', () => { const client = new ApolloClient(); assert.throws(() => { - client.readFragment({ id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b }` }); + client.readFragment({ + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + `, + }); }, 'Found 2 fragments. 
`fragmentName` must be provided when there is not exactly 1 fragment.'); assert.throws(() => { - client.readFragment({ id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b } fragment c on C { c }` }); + client.readFragment({ + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + + fragment c on C { + c + } + `, + }); }, 'Found 3 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); }); @@ -167,7 +307,7 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { __typename: 'Foo', a: 1, b: 2, @@ -178,7 +318,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'foo': { + foo: { __typename: 'Foo', e: 4, f: 5, @@ -189,7 +329,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 7, j: 8, @@ -201,33 +341,119 @@ describe('ApolloClient', () => { }); assert.deepEqual<{} | null>( - client.readFragment({ id: 'foo', fragment: gql`fragment fragmentFoo on Foo { e h { i } }` }), + client.readFragment({ + id: 'foo', + fragment: gql` + fragment fragmentFoo on Foo { + e + h { + i + } + } + `, + }), { __typename: 'Foo', e: 4, h: { __typename: 'Bar', i: 7 } }, ); assert.deepEqual<{} | null>( - client.readFragment({ id: 'foo', fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } }` }), - { __typename: 'Foo', e: 4, f: 5, g: 6, h: { __typename: 'Bar', i: 7, j: 8, k: 9 } }, + client.readFragment({ + id: 'foo', + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + `, + }), + { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { __typename: 'Bar', i: 7, j: 8, k: 9 }, + }, ); assert.deepEqual<{} | null>( - client.readFragment({ id: 'bar', fragment: gql`fragment fragmentBar on Bar { i }` }), + client.readFragment({ + id: 'bar', + fragment: gql` + fragment fragmentBar on Bar { + i + } + `, + }), { __typename: 'Bar', i: 7 }, ); assert.deepEqual<{} | null>( - client.readFragment({ id: 'bar', fragment: gql`fragment fragmentBar on Bar { i j k }` }), + client.readFragment({ + id: 'bar', + fragment: gql` + fragment fragmentBar on Bar { + i + j + k + } + `, + }), { __typename: 'Bar', i: 7, j: 8, k: 9 }, ); assert.deepEqual<{} | null>( client.readFragment({ id: 'foo', - fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } } fragment fragmentBar on Bar { i j k }`, + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + + fragment fragmentBar on Bar { + i + j + k + } + `, fragmentName: 'fragmentFoo', }), - { __typename: 'Foo', e: 4, f: 5, g: 6, h: { __typename: 'Bar', i: 7, j: 8, k: 9 } }, + { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { __typename: 'Bar', i: 7, j: 8, k: 9 }, + }, ); assert.deepEqual<{} | null>( client.readFragment({ id: 'bar', - fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } } fragment fragmentBar on Bar { i j k }`, + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + + fragment fragmentBar on Bar { + i + j + k + } + `, fragmentName: 'fragmentBar', }), { __typename: 'Bar', i: 7, j: 8, k: 9 }, @@ -239,7 +465,7 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'foo': { + foo: { __typename: 'Foo', 'field({"literal":true,"value":42})': 1, 'field({"literal":false,"value":42})': 2, @@ -249,19 +475,22 @@ describe('ApolloClient', () => { }, }); - assert.deepEqual<{} | null>(client.readFragment({ - id: 'foo', - fragment: gql` - fragment foo on Foo { - a: field(literal: true, 
value: 42) - b: field(literal: $literal, value: $value) - } - `, - variables: { - literal: false, - value: 42, - }, - }), { __typename: 'Foo', a: 1, b: 2 }); + assert.deepEqual<{} | null>( + client.readFragment({ + id: 'foo', + fragment: gql` + fragment foo on Foo { + a: field(literal: true, value: 42) + b: field(literal: $literal, value: $value) + } + `, + variables: { + literal: false, + value: 42, + }, + }), + { __typename: 'Foo', a: 1, b: 2 }, + ); }); it('will return null when an id that can’t be found is provided', () => { @@ -270,7 +499,7 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'bar': { __typename: 'Foo', a: 1, b: 2, c: 3 }, + bar: { __typename: 'Foo', a: 1, b: 2, c: 3 }, }, }, }, @@ -279,16 +508,51 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'foo': { __typename: 'Foo', a: 1, b: 2, c: 3 }, + foo: { __typename: 'Foo', a: 1, b: 2, c: 3 }, }, }, }, }); - assert.equal(client1.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), null); - assert.equal(client2.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), null); - assert.deepEqual<{} | null>(client3.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), - { __typename: 'Foo', a: 1, b: 2, c: 3 }); + assert.equal( + client1.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + null, + ); + assert.equal( + client2.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + null, + ); + assert.deepEqual<{} | null>( + client3.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + { __typename: 'Foo', a: 1, b: 2, c: 3 }, + ); }); }); @@ -296,28 +560,52 @@ describe('ApolloClient', () => { it('will write some data to the store', () => { const client = new ApolloClient(); - client.writeQuery({ data: { a: 1 }, query: gql`{ a }` }); + client.writeQuery({ + data: { a: 1 }, + query: gql` + { + a + } + `, + }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, }, }); - client.writeQuery({ data: { b: 2, c: 3 }, query: gql`{ b c }` }); + client.writeQuery({ + data: { b: 2, c: 3 }, + query: gql` + { + b + c + } + `, + }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, c: 3, }, }); - client.writeQuery({ data: { a: 4, b: 5, c: 6 }, query: gql`{ a b c }` }); + client.writeQuery({ + data: { a: 4, b: 5, c: 6 }, + query: gql` + { + a + b + c + } + `, + }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 4, b: 5, c: 6, @@ -330,11 +618,18 @@ describe('ApolloClient', () => { client.writeQuery({ data: { a: 1, d: { __typename: 'D', e: 4 } }, - query: gql`{ a d { e } }`, + query: gql` + { + a + d { + e + } + } + `, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, d: { type: 'id', @@ -350,11 +645,20 @@ describe('ApolloClient', () => { client.writeQuery({ data: { a: 1, d: { __typename: 'D', h: { __typename: 'H', i: 7 } } }, - query: gql`{ a d { h { i } } }`, + query: gql` + { + a + d { + h { + i + } + } + } + `, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, d: { type: 'id', @@ -378,16 +682,44 @@ describe('ApolloClient', () => { }); client.writeQuery({ - data: { a: 1, b: 2, c: 3, d: { - __typename: 'D', e: 4, f: 5, g: 6, h: { 
- __typename: 'H', i: 7, j: 8, k: 9, + data: { + a: 1, + b: 2, + c: 3, + d: { + __typename: 'D', + e: 4, + f: 5, + g: 6, + h: { + __typename: 'H', + i: 7, + j: 8, + k: 9, + }, }, - } }, - query: gql`{ a b c d { e f g h { i j k } } }`, + }, + query: gql` + { + a + b + c + d { + e + f + g + h { + i + j + k + } + } + } + `, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, c: 3, @@ -426,7 +758,7 @@ describe('ApolloClient', () => { b: 2, }, query: gql` - query ($literal: Boolean, $value: Int) { + query($literal: Boolean, $value: Int) { a: field(literal: true, value: 42) b: field(literal: $literal, value: $value) } @@ -438,7 +770,7 @@ describe('ApolloClient', () => { }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { 'field({"literal":true,"value":42})': 1, 'field({"literal":false,"value":42})': 2, }, @@ -453,7 +785,7 @@ describe('ApolloClient', () => { a: 2, }, query: gql` - query ($literal: Boolean, $value: Int = -1) { + query($literal: Boolean, $value: Int = -1) { a: field(literal: $literal, value: $value) } `, @@ -468,7 +800,7 @@ describe('ApolloClient', () => { a: 1, }, query: gql` - query ($literal: Boolean, $value: Int = -1) { + query($literal: Boolean, $value: Int = -1) { a: field(literal: $literal, value: $value) } `, @@ -478,7 +810,7 @@ describe('ApolloClient', () => { }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { 'field({"literal":true,"value":42})': 2, 'field({"literal":false,"value":-1})': 1, }, @@ -500,7 +832,13 @@ describe('ApolloClient', () => { ], }, query: gql` - query { todos { id name description } } + query { + todos { + id + name + description + } + } `, }); }, /Missing field description/); @@ -512,10 +850,28 @@ describe('ApolloClient', () => { const client = new ApolloClient(); assert.throws(() => { - client.writeFragment({ data: {}, id: 'x', fragment: gql`query { a b c }` }); + client.writeFragment({ + data: {}, + id: 'x', + fragment: gql` + query { + a + b + c + } + `, + }); }, 'Found a query operation. No operations are allowed when using a fragment as a query. Only fragments are allowed.'); assert.throws(() => { - client.writeFragment({ data: {}, id: 'x', fragment: gql`schema { query: Query }` }); + client.writeFragment({ + data: {}, + id: 'x', + fragment: gql` + schema { + query: Query + } + `, + }); }, 'Found 0 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); }); @@ -523,10 +879,38 @@ describe('ApolloClient', () => { const client = new ApolloClient(); assert.throws(() => { - client.writeFragment({ data: {}, id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b }` }); + client.writeFragment({ + data: {}, + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + `, + }); }, 'Found 2 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); assert.throws(() => { - client.writeFragment({ data: {}, id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b } fragment c on C { c }` }); + client.writeFragment({ + data: {}, + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + + fragment c on C { + c + } + `, + }); }, 'Found 3 fragments. 
`fragmentName` must be provided when there is not exactly 1 fragment.'); }); @@ -536,13 +920,24 @@ describe('ApolloClient', () => { }); client.writeFragment({ - data: { __typename: 'Foo', e: 4, h: { __typename: 'Bar', id: 'bar', i: 7 } }, + data: { + __typename: 'Foo', + e: 4, + h: { __typename: 'Bar', id: 'bar', i: 7 }, + }, id: 'foo', - fragment: gql`fragment fragmentFoo on Foo { e h { i } }`, + fragment: gql` + fragment fragmentFoo on Foo { + e + h { + i + } + } + `, }); assert.deepEqual(client.store.getState().apollo.data, { - 'foo': { + foo: { __typename: 'Foo', e: 4, h: { @@ -551,20 +946,34 @@ describe('ApolloClient', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 7, }, }); client.writeFragment({ - data: { __typename: 'Foo', f: 5, g: 6, h: { __typename: 'Bar', id: 'bar', j: 8, k: 9 } }, + data: { + __typename: 'Foo', + f: 5, + g: 6, + h: { __typename: 'Bar', id: 'bar', j: 8, k: 9 }, + }, id: 'foo', - fragment: gql`fragment fragmentFoo on Foo { f g h { j k } }`, + fragment: gql` + fragment fragmentFoo on Foo { + f + g + h { + j + k + } + } + `, }); assert.deepEqual(client.store.getState().apollo.data, { - 'foo': { + foo: { __typename: 'Foo', e: 4, f: 5, @@ -575,7 +984,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 7, j: 8, @@ -586,11 +995,15 @@ describe('ApolloClient', () => { client.writeFragment({ data: { __typename: 'Bar', i: 10 }, id: 'bar', - fragment: gql`fragment fragmentBar on Bar { i }`, + fragment: gql` + fragment fragmentBar on Bar { + i + } + `, }); assert.deepEqual(client.store.getState().apollo.data, { - 'foo': { + foo: { __typename: 'Foo', e: 4, f: 5, @@ -601,7 +1014,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 10, j: 8, @@ -612,11 +1025,16 @@ describe('ApolloClient', () => { client.writeFragment({ data: { __typename: 'Bar', j: 11, k: 12 }, id: 'bar', - fragment: gql`fragment fragmentBar on Bar { j k }`, + fragment: gql` + fragment fragmentBar on Bar { + j + k + } + `, }); assert.deepEqual(client.store.getState().apollo.data, { - 'foo': { + foo: { __typename: 'Foo', e: 4, f: 5, @@ -627,7 +1045,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 10, j: 11, @@ -636,14 +1054,37 @@ describe('ApolloClient', () => { }); client.writeFragment({ - data: { __typename: 'Foo', e: 4, f: 5, g: 6, h: { __typename: 'Bar', id: 'bar', i: 7, j: 8, k: 9 } }, + data: { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { __typename: 'Bar', id: 'bar', i: 7, j: 8, k: 9 }, + }, id: 'foo', - fragment: gql`fragment fooFragment on Foo { e f g h { i j k } } fragment barFragment on Bar { i j k }`, + fragment: gql` + fragment fooFragment on Foo { + e + f + g + h { + i + j + k + } + } + + fragment barFragment on Bar { + i + j + k + } + `, fragmentName: 'fooFragment', }); assert.deepEqual(client.store.getState().apollo.data, { - 'foo': { + foo: { __typename: 'Foo', e: 4, f: 5, @@ -654,7 +1095,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 7, j: 8, @@ -665,12 +1106,29 @@ describe('ApolloClient', () => { client.writeFragment({ data: { __typename: 'Bar', i: 10, j: 11, k: 12 }, id: 'bar', - fragment: gql`fragment fooFragment on Foo { e f g h { i j k } } fragment barFragment on Bar { i j k }`, + fragment: gql` + fragment fooFragment on Foo { + e + f + g + h { + i + j + k + } + } + + fragment barFragment on Bar { + i + j + k + } + `, fragmentName: 
'barFragment', }); assert.deepEqual(client.store.getState().apollo.data, { - 'foo': { + foo: { __typename: 'Foo', e: 4, f: 5, @@ -681,7 +1139,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 10, j: 11, @@ -713,7 +1171,7 @@ describe('ApolloClient', () => { }); assert.deepEqual(client.store.getState().apollo.data, { - 'foo': { + foo: { __typename: 'Foo', 'field({"literal":true,"value":42})': 1, 'field({"literal":false,"value":42})': 2, @@ -725,10 +1183,15 @@ describe('ApolloClient', () => { const client = new ApolloClient(); return withWarning(() => { - client.writeFragment({ + client.writeFragment({ data: { __typename: 'Bar', i: 10 }, id: 'bar', - fragment: gql`fragment fragmentBar on Bar { i e }`, + fragment: gql` + fragment fragmentBar on Bar { + i + e + } + `, }); }, /Missing field e/); }); @@ -740,7 +1203,7 @@ describe('ApolloClient', () => { initialState: { apollo: { data: { - 'foo': { + foo: { __typename: 'Foo', a: 1, b: 2, @@ -763,45 +1226,139 @@ describe('ApolloClient', () => { }); assert.deepEqual<{} | null>( - client.readFragment({ id: 'foo', fragment: gql`fragment x on Foo { a b c bar { d e f } }` }), - { __typename: 'Foo', a: 1, b: 2, c: 3, bar: { d: 4, e: 5, f: 6, __typename: 'Bar' } }, + client.readFragment({ + id: 'foo', + fragment: gql` + fragment x on Foo { + a + b + c + bar { + d + e + f + } + } + `, + }), + { + __typename: 'Foo', + a: 1, + b: 2, + c: 3, + bar: { d: 4, e: 5, f: 6, __typename: 'Bar' }, + }, ); client.writeFragment({ id: 'foo', - fragment: gql`fragment x on Foo { a }`, + fragment: gql` + fragment x on Foo { + a + } + `, data: { __typename: 'Foo', a: 7 }, }); assert.deepEqual<{} | null>( - client.readFragment({ id: 'foo', fragment: gql`fragment x on Foo { a b c bar { d e f } }` }), - { __typename: 'Foo', a: 7, b: 2, c: 3, bar: { __typename: 'Bar', d: 4, e: 5, f: 6 } }, + client.readFragment({ + id: 'foo', + fragment: gql` + fragment x on Foo { + a + b + c + bar { + d + e + f + } + } + `, + }), + { + __typename: 'Foo', + a: 7, + b: 2, + c: 3, + bar: { __typename: 'Bar', d: 4, e: 5, f: 6 }, + }, ); client.writeFragment({ id: 'foo', - fragment: gql`fragment x on Foo { bar { d } }`, + fragment: gql` + fragment x on Foo { + bar { + d + } + } + `, data: { __typename: 'Foo', bar: { __typename: 'Bar', d: 8 } }, }); assert.deepEqual<{} | null>( - client.readFragment({ id: 'foo', fragment: gql`fragment x on Foo { a b c bar { d e f } }` }), - { __typename: 'Foo', a: 7, b: 2, c: 3, bar: { __typename: 'Bar', d: 8, e: 5, f: 6 } }, + client.readFragment({ + id: 'foo', + fragment: gql` + fragment x on Foo { + a + b + c + bar { + d + e + f + } + } + `, + }), + { + __typename: 'Foo', + a: 7, + b: 2, + c: 3, + bar: { __typename: 'Bar', d: 8, e: 5, f: 6 }, + }, ); client.writeFragment({ id: '$foo.bar', - fragment: gql`fragment y on Bar { e }`, + fragment: gql` + fragment y on Bar { + e + } + `, data: { __typename: 'Bar', e: 9 }, }); assert.deepEqual<{} | null>( - client.readFragment({ id: 'foo', fragment: gql`fragment x on Foo { a b c bar { d e f } }` }), - { __typename: 'Foo', a: 7, b: 2, c: 3, bar: { __typename: 'Bar', d: 8, e: 9, f: 6 } }, + client.readFragment({ + id: 'foo', + fragment: gql` + fragment x on Foo { + a + b + c + bar { + d + e + f + } + } + `, + }), + { + __typename: 'Foo', + a: 7, + b: 2, + c: 3, + bar: { __typename: 'Bar', d: 8, e: 9, f: 6 }, + }, ); assert.deepEqual(client.store.getState().apollo.data, { - 'foo': { + foo: { __typename: 'Foo', a: 7, b: 2, @@ -828,17 +1385,65 @@ describe('ApolloClient', () 
=> { }); client.writeQuery({ - query: gql`{ a b foo { c d bar { key e f } } }`, - data: { a: 1, b: 2, foo: { __typename: 'foo', c: 3, d: 4, bar: { key: 'foobar', __typename: 'bar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + key + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { + __typename: 'foo', + c: 3, + d: 4, + bar: { key: 'foobar', __typename: 'bar', e: 5, f: 6 }, + }, + }, }); assert.deepEqual<{} | null>( - client.readQuery({ query: gql`{ a b foo { c d bar { key e f } } }` }), - { a: 1, b: 2, foo: { __typename: 'foo', c: 3, d: 4, bar: { __typename: 'bar', key: 'foobar', e: 5, f: 6 } } }, + client.readQuery({ + query: gql` + { + a + b + foo { + c + d + bar { + key + e + f + } + } + } + `, + }), + { + a: 1, + b: 2, + foo: { + __typename: 'foo', + c: 3, + d: 4, + bar: { __typename: 'bar', key: 'foobar', e: 5, f: 6 }, + }, + }, ); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, foo: { @@ -857,7 +1462,7 @@ describe('ApolloClient', () => { generated: false, }, }, - 'foobar': { + foobar: { key: 'foobar', __typename: 'bar', e: 5, @@ -866,7 +1471,6 @@ describe('ApolloClient', () => { }); }); - it('will not use a default id getter if __typename is not present', () => { const client = new ApolloClient({ initialState: { apollo: { data: {} } }, @@ -874,17 +1478,53 @@ describe('ApolloClient', () => { }); client.writeQuery({ - query: gql`{ a b foo { c d bar { id e f } } }`, - data: { a: 1, b: 2, foo: { c: 3, d: 4, bar: { id: 'foobar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + id + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { c: 3, d: 4, bar: { id: 'foobar', e: 5, f: 6 } }, + }, }); client.writeQuery({ - query: gql`{ g h bar { i j foo { _id k l } } }`, - data: { g: 8, h: 9, bar: { i: 10, j: 11, foo: { _id: 'barfoo', k: 12, l: 13 } } }, + query: gql` + { + g + h + bar { + i + j + foo { + _id + k + l + } + } + } + `, + data: { + g: 8, + h: 9, + bar: { i: 10, j: 11, foo: { _id: 'barfoo', k: 12, l: 13 } }, + }, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, g: 8, @@ -931,24 +1571,67 @@ describe('ApolloClient', () => { }); }); - it('will not use a default id getter if id and _id are not present', () => { const client = new ApolloClient({ initialState: { apollo: { data: {} } }, }); client.writeQuery({ - query: gql`{ a b foo { c d bar { e f } } }`, - data: { a: 1, b: 2, foo: { __typename: 'foo', c: 3, d: 4, bar: { __typename: 'bar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { + __typename: 'foo', + c: 3, + d: 4, + bar: { __typename: 'bar', e: 5, f: 6 }, + }, + }, }); client.writeQuery({ - query: gql`{ g h bar { i j foo { k l } } }`, - data: { g: 8, h: 9, bar: { __typename: 'bar', i: 10, j: 11, foo: { __typename: 'foo', k: 12, l: 13 } } }, + query: gql` + { + g + h + bar { + i + j + foo { + k + l + } + } + } + `, + data: { + g: 8, + h: 9, + bar: { + __typename: 'bar', + i: 10, + j: 11, + foo: { __typename: 'foo', k: 12, l: 13 }, + }, + }, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, g: 8, @@ -997,19 +1680,41 @@ describe('ApolloClient', () => { }); }); - it('will use a default id getter if __typename and id are present', () => { const client = new ApolloClient({ initialState: { apollo: { data: {} } }, }); client.writeQuery({ - query: gql`{ a b foo { c d bar { id e f } } }`, - 
data: { a: 1, b: 2, foo: { __typename: 'foo', c: 3, d: 4, bar: { __typename: 'bar', id: 'foobar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + id + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { + __typename: 'foo', + c: 3, + d: 4, + bar: { __typename: 'bar', id: 'foobar', e: 5, f: 6 }, + }, + }, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, foo: { @@ -1037,19 +1742,41 @@ describe('ApolloClient', () => { }); }); - it('will use a default id getter if __typename and _id are present', () => { const client = new ApolloClient({ initialState: { apollo: { data: {} } }, }); client.writeQuery({ - query: gql`{ a b foo { c d bar { _id e f } } }`, - data: { a: 1, b: 2, foo: { __typename: 'foo', c: 3, d: 4, bar: { __typename: 'bar', _id: 'foobar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + _id + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { + __typename: 'foo', + c: 3, + d: 4, + bar: { __typename: 'bar', _id: 'foobar', e: 5, f: 6 }, + }, + }, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, foo: { @@ -1077,7 +1804,6 @@ describe('ApolloClient', () => { }); }); - it('will not use a default id getter if id is present and __typename is not present', () => { const client = new ApolloClient({ initialState: { apollo: { data: {} } }, @@ -1085,12 +1811,30 @@ describe('ApolloClient', () => { }); client.writeQuery({ - query: gql`{ a b foo { c d bar { id e f } } }`, - data: { a: 1, b: 2, foo: { c: 3, d: 4, bar: { id: 'foobar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + id + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { c: 3, d: 4, bar: { id: 'foobar', e: 5, f: 6 } }, + }, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, foo: { @@ -1116,7 +1860,6 @@ describe('ApolloClient', () => { }); }); - it('will not use a default id getter if _id is present but __typename is not present', () => { const client = new ApolloClient({ initialState: { apollo: { data: {} } }, @@ -1124,12 +1867,30 @@ describe('ApolloClient', () => { }); client.writeQuery({ - query: gql`{ a b foo { c d bar { _id e f } } }`, - data: { a: 1, b: 2, foo: { c: 3, d: 4, bar: { _id: 'foobar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + _id + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { c: 3, d: 4, bar: { _id: 'foobar', e: 5, f: 6 } }, + }, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, foo: { @@ -1155,7 +1916,6 @@ describe('ApolloClient', () => { }); }); - it('will not use a default id getter if either _id or id is present when __typename is not also present', () => { const client = new ApolloClient({ initialState: { apollo: { data: {} } }, @@ -1163,17 +1923,57 @@ describe('ApolloClient', () => { }); client.writeQuery({ - query: gql`{ a b foo { c d bar { id e f } } }`, - data: { a: 1, b: 2, foo: { c: 3, d: 4, bar: { __typename: 'bar', id: 'foobar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + id + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { + c: 3, + d: 4, + bar: { __typename: 'bar', id: 'foobar', e: 5, f: 6 }, + }, + }, }); client.writeQuery({ - query: gql`{ g h bar { i j foo { _id k l } } }`, - data: { g: 8, h: 9, bar: { i: 10, j: 11, foo: { _id: 'barfoo', k: 12, l: 13 } } }, + query: gql` + { + g + h + bar { + i + j + foo { + _id + k + l + 
} + } + } + `, + data: { + g: 8, + h: 9, + bar: { i: 10, j: 11, foo: { _id: 'barfoo', k: 12, l: 13 } }, + }, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, g: 8, @@ -1220,24 +2020,69 @@ describe('ApolloClient', () => { }); }); - it('will use a default id getter if one is not specified and __typename is present along with either _id or id', () => { const client = new ApolloClient({ initialState: { apollo: { data: {} } }, }); client.writeQuery({ - query: gql`{ a b foo { c d bar { id e f } } }`, - data: { a: 1, b: 2, foo: { __typename: 'foo', c: 3, d: 4, bar: { __typename: 'bar', id: 'foobar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + id + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { + __typename: 'foo', + c: 3, + d: 4, + bar: { __typename: 'bar', id: 'foobar', e: 5, f: 6 }, + }, + }, }); client.writeQuery({ - query: gql`{ g h bar { i j foo { _id k l } } }`, - data: { g: 8, h: 9, bar: { __typename: 'bar', i: 10, j: 11, foo: { __typename: 'foo', _id: 'barfoo', k: 12, l: 13 } } }, + query: gql` + { + g + h + bar { + i + j + foo { + _id + k + l + } + } + } + `, + data: { + g: 8, + h: 9, + bar: { + __typename: 'bar', + i: 10, + j: 11, + foo: { __typename: 'foo', _id: 'barfoo', k: 12, l: 13 }, + }, + }, }); assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, g: 8, @@ -1289,4 +2134,3 @@ describe('ApolloClient', () => { }); }); }); - diff --git a/test/ObservableQuery.ts b/test/ObservableQuery.ts index 7d494f31393..5da8adb8815 100644 --- a/test/ObservableQuery.ts +++ b/test/ObservableQuery.ts @@ -3,20 +3,11 @@ const { assert } = chai; import * as sinon from 'sinon'; import gql from 'graphql-tag'; -import { - ExecutionResult, -} from 'graphql'; +import { ExecutionResult } from 'graphql'; -import { - QueryManager, -} from '../src/core/QueryManager'; -import { - createApolloStore, - ApolloStore, -} from '../src/store'; -import ApolloClient, { - ApolloStateSelector, -} from '../src/ApolloClient'; +import { QueryManager } from '../src/core/QueryManager'; +import { createApolloStore, ApolloStore } from '../src/store'; +import ApolloClient, { ApolloStateSelector } from '../src/ApolloClient'; import mockQueryManager from './mocks/mockQueryManager'; import mockWatchQuery from './mocks/mockWatchQuery'; @@ -28,13 +19,9 @@ import { ApolloCurrentResult, } from '../src/core/ObservableQuery'; import { ApolloQueryResult } from '../src/core/types'; -import { - NetworkInterface, -} from '../src/transport/networkInterface'; +import { NetworkInterface } from '../src/transport/networkInterface'; -import { - IntrospectionFragmentMatcher, -} from '../src/data/fragmentMatcher'; +import { IntrospectionFragmentMatcher } from '../src/data/fragmentMatcher'; import wrap from './util/wrap'; import subscribeAndCount from './util/subscribeAndCount'; @@ -89,12 +76,11 @@ describe('ObservableQuery', () => { reduxRootSelector, addTypename = false, }: { - networkInterface?: NetworkInterface, - store?: ApolloStore, - reduxRootSelector?: ApolloStateSelector, - addTypename?: boolean, + networkInterface?: NetworkInterface; + store?: ApolloStore; + reduxRootSelector?: ApolloStateSelector; + addTypename?: boolean; }) => { - return new QueryManager({ networkInterface: networkInterface || mockNetworkInterface(), store: store || createApolloStore(), @@ -108,19 +94,26 @@ describe('ObservableQuery', () => { let timer: any; // We need to use this to jump over promise.then boundaries let defer: 
Function = setImmediate; - beforeEach(() => timer = sinon.useFakeTimers()); + beforeEach(() => (timer = sinon.useFakeTimers())); afterEach(() => timer.restore()); - it('starts polling if goes from 0 -> something', (done) => { - const manager = mockQueryManager({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); + it('starts polling if goes from 0 -> something', done => { + const manager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); - const observable = manager.watchQuery({ query, variables, notifyOnNetworkStatusChange: false }); + const observable = manager.watchQuery({ + query, + variables, + notifyOnNetworkStatusChange: false, + }); subscribeAndCount(done, observable, (handleCount, result) => { if (handleCount === 1) { assert.deepEqual(result.data, dataOne); @@ -137,14 +130,17 @@ describe('ObservableQuery', () => { timer.tick(0); }); - it('stops polling if goes from something -> 0', (done) => { - const manager = mockQueryManager({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); + it('stops polling if goes from something -> 0', done => { + const manager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); const observable = manager.watchQuery({ query, @@ -168,14 +164,17 @@ describe('ObservableQuery', () => { timer.tick(0); }); - it('can change from x>0 to y>0', (done) => { - const manager = mockQueryManager({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); + it('can change from x>0 to y>0', done => { + const manager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); const observable = manager.watchQuery({ query, @@ -199,7 +198,6 @@ describe('ObservableQuery', () => { timer.tick(11); }); }); - } else if (handleCount === 2) { assert.deepEqual(result.data, dataTwo); done(); @@ -211,54 +209,66 @@ describe('ObservableQuery', () => { }); }); - it('does not break refetch', (done) => { + it('does not break refetch', done => { // This query and variables are copied from react-apollo - const queryWithVars = gql`query people($first: Int) { - allPeople(first: $first) { people { name } } - }`; + const queryWithVars = gql` + query people($first: Int) { + allPeople(first: $first) { + people { + name + } + } + } + `; - const data = { allPeople: { people: [ { name: 'Luke Skywalker' } ] } }; + const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; const variables1 = { first: 0 }; - const data2 = { allPeople: { people: [ { name: 'Leia Skywalker' } ] } }; + const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; const variables2 = { first: 1 }; - - const observable: ObservableQuery = mockWatchQuery({ - request: { query: queryWithVars, variables: variables1 }, - result: { data }, - }, { - request: { query: queryWithVars, variables: variables2 }, - result: { data: data2 }, - }); + const observable: ObservableQuery = mockWatchQuery( + { + request: { query: queryWithVars, variables: variables1 }, + result: { data }, + }, + { + request: { query: queryWithVars, variables: 
variables2 }, + result: { data: data2 }, + }, + ); subscribeAndCount(done, observable, (handleCount, result) => { if (handleCount === 1) { assert.deepEqual(result.data, data); observable.refetch(variables2); - } else if (handleCount === 3) { // 3 because there is an intermediate loading state + } else if (handleCount === 3) { + // 3 because there is an intermediate loading state assert.deepEqual(result.data, data2); done(); } }); }); - - it('if query is refetched, and an error is returned, a second refetch without error will trigger the observer callback', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { errors: [error] }, - }, { - request: { query, variables }, - result: { data: dataOne }, - }); + it('if query is refetched, and an error is returned, a second refetch without error will trigger the observer callback', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { errors: [error] }, + }, + { + request: { query, variables }, + result: { data: dataOne }, + }, + ); let handleCount = 0; observable.subscribe({ - next: (result) => { + next: result => { handleCount++; if (handleCount === 1) { assert.deepEqual(result.data, dataOne); @@ -268,7 +278,7 @@ describe('ObservableQuery', () => { done(); } }, - error: (err) => { + error: err => { handleCount++; assert.equal(handleCount, 2); observable.refetch(); @@ -276,15 +286,17 @@ describe('ObservableQuery', () => { }); }); - - it('does a network request if fetchPolicy becomes networkOnly', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); + it('does a network request if fetchPolicy becomes networkOnly', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); subscribeAndCount(done, observable, (handleCount, result) => { if (handleCount === 1) { @@ -297,7 +309,7 @@ describe('ObservableQuery', () => { }); }); - it('does a network request if fetchPolicy is cache-only then store is reset then fetchPolicy becomes not cache-only', (done) => { + it('does a network request if fetchPolicy is cache-only then store is reset then fetchPolicy becomes not cache-only', done => { let queryManager: QueryManager; let observable: ObservableQuery; const testQuery = gql` @@ -306,7 +318,8 @@ describe('ObservableQuery', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -330,7 +343,7 @@ describe('ObservableQuery', () => { assert.equal(timesFired, 1); setTimeout(() => { - observable.setOptions({fetchPolicy: 'cache-only'}); + observable.setOptions({ fetchPolicy: 'cache-only' }); queryManager.resetStore(); }, 0); @@ -339,7 +352,7 @@ describe('ObservableQuery', () => { assert.equal(timesFired, 1); setTimeout(() => { - observable.setOptions({fetchPolicy: 'cache-first'}); + observable.setOptions({ fetchPolicy: 'cache-first' }); }, 0); } else if (handleCount === 3) { assert.deepEqual(result.data, data); @@ -350,7 +363,7 @@ describe('ObservableQuery', () => { }); }); - it('does a network request if fetchPolicy changes from cache-only', (done) => { + it('does a network request 
if fetchPolicy changes from cache-only', done => { let queryManager: QueryManager; let observable: ObservableQuery; const testQuery = gql` @@ -359,7 +372,8 @@ describe('ObservableQuery', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -375,7 +389,11 @@ describe('ObservableQuery', () => { }, }; queryManager = createQueryManager({ networkInterface }); - observable = queryManager.watchQuery({ query: testQuery, fetchPolicy: 'cache-only', notifyOnNetworkStatusChange: false }); + observable = queryManager.watchQuery({ + query: testQuery, + fetchPolicy: 'cache-only', + notifyOnNetworkStatusChange: false, + }); subscribeAndCount(done, observable, (handleCount, result) => { if (handleCount === 2) { @@ -383,7 +401,7 @@ describe('ObservableQuery', () => { assert.equal(timesFired, 0); setTimeout(() => { - observable.setOptions({fetchPolicy: 'cache-first'}); + observable.setOptions({ fetchPolicy: 'cache-first' }); }, 0); } else if (handleCount === 3) { assert.deepEqual(result.data, data); @@ -394,7 +412,7 @@ describe('ObservableQuery', () => { }); }); - it('can set queries to standby and will not fetch when doing so', (done) => { + it('can set queries to standby and will not fetch when doing so', done => { let queryManager: QueryManager; let observable: ObservableQuery; const testQuery = gql` @@ -403,7 +421,8 @@ describe('ObservableQuery', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -431,7 +450,7 @@ describe('ObservableQuery', () => { assert.equal(timesFired, 1); setTimeout(() => { - observable.setOptions({fetchPolicy: 'standby'}); + observable.setOptions({ fetchPolicy: 'standby' }); }, 0); setTimeout(() => { // make sure the query didn't get fired again. @@ -444,7 +463,7 @@ describe('ObservableQuery', () => { }); }); - it('will not fetch when setting a cache-only query to standby', (done) => { + it('will not fetch when setting a cache-only query to standby', done => { let queryManager: QueryManager; let observable: ObservableQuery; const testQuery = gql` @@ -453,7 +472,8 @@ describe('ObservableQuery', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -470,7 +490,7 @@ describe('ObservableQuery', () => { }; queryManager = createQueryManager({ networkInterface }); - queryManager.query({ query: testQuery }).then( () => { + queryManager.query({ query: testQuery }).then(() => { observable = queryManager.watchQuery({ query: testQuery, fetchPolicy: 'cache-first', @@ -482,7 +502,7 @@ describe('ObservableQuery', () => { assert.deepEqual(result.data, data); assert.equal(timesFired, 1); setTimeout(() => { - observable.setOptions({fetchPolicy: 'standby'}); + observable.setOptions({ fetchPolicy: 'standby' }); }, 0); setTimeout(() => { // make sure the query didn't get fired again. 
@@ -495,42 +515,49 @@ describe('ObservableQuery', () => { }); }); }); - it('returns a promise which eventually returns data', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); - + it('returns a promise which eventually returns data', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); subscribeAndCount(done, observable, (handleCount, result) => { if (handleCount !== 1) { return; } - observable.setOptions({ fetchPolicy: 'cache-and-network', fetchResults: true }) - .then((res) => { + observable + .setOptions({ fetchPolicy: 'cache-and-network', fetchResults: true }) + .then(res => { // returns dataOne from cache assert.deepEqual(res.data, dataOne); done(); }); }); }); - it('can bypass looking up results if passed to options', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); + it('can bypass looking up results if passed to options', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); let errored = false; subscribeAndCount(done, observable, (handleCount, result) => { if (handleCount === 1) { - observable.setOptions({ fetchResults: false, fetchPolicy: 'standby' }) - .then((res) => { + observable + .setOptions({ fetchResults: false, fetchPolicy: 'standby' }) + .then(res => { assert.equal(res, null); setTimeout(() => !errored && done(), 5); }); @@ -540,18 +567,20 @@ describe('ObservableQuery', () => { } }); }); - }); describe('setVariables', () => { - it('reruns query if the variables change', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - }); + it('reruns query if the variables change', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + ); subscribeAndCount(done, observable, (handleCount, result) => { if (handleCount === 1) { @@ -568,22 +597,31 @@ describe('ObservableQuery', () => { }); }); - it('returns results that are frozen in development mode', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - }); - const nop = () => { return 1; }; + it('returns results that are frozen in development mode', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + ); + const nop = () => { + return 1; + }; const sub = observable.subscribe({ next: nop }); observable.setVariables(differentVariables).then(result2 => { assert.deepEqual(result2.data, dataTwo); try { (result2.data as any).stuff = 'awful'; - done(new 
Error('results from setVariables should be frozen in development mode')); + done( + new Error( + 'results from setVariables should be frozen in development mode', + ), + ); } catch (e) { done(); } finally { @@ -600,18 +638,21 @@ describe('ObservableQuery', () => { return observable.setVariables(differentVariables); }); - it('sets networkStatus to `setVariables` when fetching', (done) => { - const mockedResponses = [{ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - }]; + it('sets networkStatus to `setVariables` when fetching', done => { + const mockedResponses = [ + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + ]; const queryManager = mockQueryManager(...mockedResponses); const firstRequest = mockedResponses[0].request; - const observable = queryManager.watchQuery({ + const observable = queryManager.watchQuery({ query: firstRequest.query, variables: firstRequest.variables, notifyOnNetworkStatusChange: true, @@ -635,18 +676,21 @@ describe('ObservableQuery', () => { }); }); - it('sets networkStatus to `setVariables` when calling refetch with new variables', (done) => { - const mockedResponses = [{ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - }]; + it('sets networkStatus to `setVariables` when calling refetch with new variables', done => { + const mockedResponses = [ + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + ]; const queryManager = mockQueryManager(...mockedResponses); const firstRequest = mockedResponses[0].request; - const observable = queryManager.watchQuery({ + const observable = queryManager.watchQuery({ query: firstRequest.query, variables: firstRequest.variables, notifyOnNetworkStatusChange: true, @@ -670,14 +714,17 @@ describe('ObservableQuery', () => { }); }); - it('reruns observer callback if the variables change but data does not', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables: differentVariables }, - result: { data: dataOne }, - }); + it('reruns observer callback if the variables change but data does not', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataOne }, + }, + ); subscribeAndCount(done, observable, (handleCount, result) => { if (handleCount === 1) { @@ -693,47 +740,52 @@ describe('ObservableQuery', () => { }); }); - it('does not rerun observer callback if the variables change but new data is in store', (done) => { - const manager = mockQueryManager({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables: differentVariables }, - result: { data: dataOne }, - }); + it('does not rerun observer callback if the variables change but new data is in store', done => { + const manager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataOne }, + }, + ); - manager.query({ query, variables: 
differentVariables }) - .then(() => { - const observable: ObservableQuery = manager.watchQuery({ - query, - variables, - notifyOnNetworkStatusChange: false, - }); + manager.query({ query, variables: differentVariables }).then(() => { + const observable: ObservableQuery = manager.watchQuery({ + query, + variables, + notifyOnNetworkStatusChange: false, + }); - let errored = false; - subscribeAndCount(done, observable, (handleCount, result) => { - if (handleCount === 1) { - assert.deepEqual(result.data, dataOne); - observable.setVariables(differentVariables); - - // Nothing should happen, so we'll wait a moment to check that - setTimeout(() => !errored && done(), 10); - } else if (handleCount === 2) { - errored = true; - throw new Error('Observable callback should not fire twice'); - } - }); + let errored = false; + subscribeAndCount(done, observable, (handleCount, result) => { + if (handleCount === 1) { + assert.deepEqual(result.data, dataOne); + observable.setVariables(differentVariables); + + // Nothing should happen, so we'll wait a moment to check that + setTimeout(() => !errored && done(), 10); + } else if (handleCount === 2) { + errored = true; + throw new Error('Observable callback should not fire twice'); + } }); + }); }); - it('does not rerun query if variables do not change', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); + it('does not rerun query if variables do not change', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); let errored = false; subscribeAndCount(done, observable, (handleCount, result) => { @@ -750,14 +802,17 @@ describe('ObservableQuery', () => { }); }); - it('does not rerun query if set to not refetch', (done) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); + it('does not rerun query if set to not refetch', done => { + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); let errored = false; subscribeAndCount(done, observable, (handleCount, result) => { @@ -774,19 +829,22 @@ describe('ObservableQuery', () => { }); }); - it('handles variables changing while a query is in-flight', (done) => { + it('handles variables changing while a query is in-flight', done => { // The expected behavior is that the original variables are forgotten // and the query stays in loading state until the result for the new variables // has returned. 
- const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - delay: 20, - }, { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - delay: 20, - }); + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + delay: 20, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + delay: 20, + }, + ); setTimeout(() => observable.setVariables(differentVariables), 10); @@ -802,9 +860,7 @@ describe('ObservableQuery', () => { }); describe('currentResult', () => { - - it('returns the same value as observableQuery.next got', (done) => { - + it('returns the same value as observableQuery.next got', done => { const queryWithFragment = gql` fragment MaleInfo on Man { trouserSize @@ -821,12 +877,12 @@ describe('ObservableQuery', () => { name sex ... on Man { - ...MaleInfo - __typename + ...MaleInfo + __typename } ... on Woman { - ...FemaleInfo - __typename + ...FemaleInfo + __typename } __typename } @@ -840,9 +896,27 @@ describe('ObservableQuery', () => { `; const peopleData = [ - { id: 1, name: 'John Smith', sex: 'male', trouserSize: 6, __typename: 'Man' }, - { id: 2, name: 'Sara Smith', sex: 'female', skirtSize: 4, __typename: 'Woman' }, - { id: 3, name: 'Budd Deey', sex: 'male', trouserSize: 10, __typename: 'Man' }, + { + id: 1, + name: 'John Smith', + sex: 'male', + trouserSize: 6, + __typename: 'Man', + }, + { + id: 2, + name: 'Sara Smith', + sex: 'female', + skirtSize: 4, + __typename: 'Woman', + }, + { + id: 3, + name: 'Budd Deey', + sex: 'male', + trouserSize: 10, + __typename: 'Man', + }, ]; const dataOneWithTypename = { @@ -853,36 +927,49 @@ describe('ObservableQuery', () => { people: peopleData.slice(0, 3), }; - - const ni = mockNetworkInterface({ - request: { query: queryWithFragment, variables }, - result: { data: dataOneWithTypename }, - }, { - request: { query: queryWithFragment, variables }, - result: { data: dataTwoWithTypename }, - }); + const ni = mockNetworkInterface( + { + request: { query: queryWithFragment, variables }, + result: { data: dataOneWithTypename }, + }, + { + request: { query: queryWithFragment, variables }, + result: { data: dataTwoWithTypename }, + }, + ); const client = new ApolloClient({ networkInterface: ni, fragmentMatcher: new IntrospectionFragmentMatcher({ introspectionQueryResultData: { __schema: { - types: [{ - kind: 'UNION', - name: 'Creature', - possibleTypes: [{ name: 'Person' }], - }], + types: [ + { + kind: 'UNION', + name: 'Creature', + possibleTypes: [{ name: 'Person' }], + }, + ], }, }, }), }); - const observable = client.watchQuery({ query: queryWithFragment, variables, notifyOnNetworkStatusChange: true }); + const observable = client.watchQuery({ + query: queryWithFragment, + variables, + notifyOnNetworkStatusChange: true, + }); subscribeAndCount(done, observable, (count, result) => { const { data, loading, networkStatus } = observable.currentResult(); try { - assert.deepEqual(result, { data, loading, networkStatus, stale: false }); + assert.deepEqual(result, { + data, + loading, + networkStatus, + stale: false, + }); } catch (e) { done(e); } @@ -899,8 +986,7 @@ describe('ObservableQuery', () => { }); }); - - it('returns the current query status immediately', (done) => { + it('returns the current query status immediately', done => { const observable: ObservableQuery = mockWatchQuery({ request: { query, variables }, result: { data: dataOne }, @@ -923,14 +1009,19 @@ 
describe('ObservableQuery', () => { networkStatus: 1, partial: true, }); - setTimeout(wrap(done, () => { - assert.deepEqual>(observable.currentResult(), { - loading: true, - data: {}, - networkStatus: 1, - partial: true, - }); - }), 0); + setTimeout( + wrap(done, () => { + assert.deepEqual< + ApolloCurrentResult + >(observable.currentResult(), { + loading: true, + data: {}, + networkStatus: 1, + partial: true, + }); + }), + 0, + ); }); it('returns results from the store immediately', () => { @@ -939,25 +1030,24 @@ describe('ObservableQuery', () => { result: { data: dataOne }, }); - return queryManager.query({ query, variables }) - .then((result: any) => { - assert.deepEqual(result, { - data: dataOne, - loading: false, - networkStatus: 7, - stale: false, - }); - const observable = queryManager.watchQuery({ - query, - variables, - }); - assert.deepEqual>(observable.currentResult(), { - data: dataOne, - loading: false, - networkStatus: 7, - partial: false, - }); + return queryManager.query({ query, variables }).then((result: any) => { + assert.deepEqual(result, { + data: dataOne, + loading: false, + networkStatus: 7, + stale: false, + }); + const observable = queryManager.watchQuery({ + query, + variables, }); + assert.deepEqual>(observable.currentResult(), { + data: dataOne, + loading: false, + networkStatus: 7, + partial: false, + }); + }); }); it('returns errors from the store immediately', () => { @@ -971,55 +1061,61 @@ describe('ObservableQuery', () => { variables, }); - return observable.result() - .catch((theError: any) => { - assert.deepEqual(theError.graphQLErrors, [error]); + return observable.result().catch((theError: any) => { + assert.deepEqual(theError.graphQLErrors, [error]); - const currentResult = observable.currentResult(); + const currentResult = observable.currentResult(); - assert.equal(currentResult.loading, false); - assert.deepEqual(currentResult.error!.graphQLErrors, [error]); - }); - }); - - it('returns loading even if full data is available when using network-only fetchPolicy', (done) => { - const queryManager = mockQueryManager({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, + assert.equal(currentResult.loading, false); + assert.deepEqual(currentResult.error!.graphQLErrors, [error]); }); + }); - queryManager.query({ query, variables }) - .then((result: any) => { - const observable = queryManager.watchQuery({ - query, - variables, - fetchPolicy: 'network-only', - }); - assert.deepEqual>(observable.currentResult(), { - data: dataOne, - loading: true, - networkStatus: 1, - partial: false, - }); + it('returns loading even if full data is available when using network-only fetchPolicy', done => { + const queryManager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); - subscribeAndCount(done, observable, (handleCount, subResult) => { - const { data, loading, networkStatus } = observable.currentResult(); - assert.deepEqual(subResult, { data, loading, networkStatus, stale: false }); + queryManager.query({ query, variables }).then((result: any) => { + const observable = queryManager.watchQuery({ + query, + variables, + fetchPolicy: 'network-only', + }); + assert.deepEqual>(observable.currentResult(), { + data: dataOne, + loading: true, + networkStatus: 1, + partial: false, + }); - if (handleCount === 1) { - assert.deepEqual>(subResult, { - data: dataTwo, - loading: false, - 
networkStatus: 7, - stale: false, - }); - done(); - } + subscribeAndCount(done, observable, (handleCount, subResult) => { + const { data, loading, networkStatus } = observable.currentResult(); + assert.deepEqual(subResult, { + data, + loading, + networkStatus, + stale: false, }); + + if (handleCount === 1) { + assert.deepEqual>(subResult, { + data: dataTwo, + loading: false, + networkStatus: 7, + stale: false, + }); + done(); + } }); + }); }); describe('mutations', () => { @@ -1038,21 +1134,24 @@ describe('ObservableQuery', () => { }; const updateQueries = { - query: (previousQueryResult: any, { mutationResult }: any ) => { + query: (previousQueryResult: any, { mutationResult }: any) => { return { people_one: { name: mutationResult.data.name }, }; }, }; - it('returns optimistic mutation results from the store', (done) => { - const queryManager = mockQueryManager({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query: mutation }, - result: { data: mutationData }, - }); + it('returns optimistic mutation results from the store', done => { + const queryManager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query: mutation }, + result: { data: mutationData }, + }, + ); const observable = queryManager.watchQuery({ query, @@ -1061,7 +1160,12 @@ describe('ObservableQuery', () => { subscribeAndCount(done, observable, (count, result) => { const { data, loading, networkStatus } = observable.currentResult(); - assert.deepEqual(result, { data, loading, networkStatus, stale: false }); + assert.deepEqual(result, { + data, + loading, + networkStatus, + stale: false, + }); if (count === 1) { assert.deepEqual>(result, { @@ -1070,7 +1174,11 @@ describe('ObservableQuery', () => { networkStatus: 7, stale: false, }); - queryManager.mutate({ mutation, optimisticResponse, updateQueries }); + queryManager.mutate({ + mutation, + optimisticResponse, + updateQueries, + }); } else if (count === 2) { assert.deepEqual(result.data.people_one, optimisticResponse); } else if (count === 3) { @@ -1080,25 +1188,29 @@ describe('ObservableQuery', () => { }); }); - it('applies query reducers with correct variables', (done) => { - const queryManager = mockQueryManager({ - // First we make the query - request: { query, variables }, - result: { data: dataOne }, - }, { - // Then we make a mutation - request: { query: mutation }, - result: { data: mutationData }, - }, { - // Then we make another query - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - }, { - // Then we make another mutation - request: { query: mutation }, - result: { data: mutationData }, - }); - + it('applies query reducers with correct variables', done => { + const queryManager = mockQueryManager( + { + // First we make the query + request: { query, variables }, + result: { data: dataOne }, + }, + { + // Then we make a mutation + request: { query: mutation }, + result: { data: mutationData }, + }, + { + // Then we make another query + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + { + // Then we make another mutation + request: { query: mutation }, + result: { data: mutationData }, + }, + ); let lastReducerVars: Array = []; let lastReducerData: Array = []; @@ -1145,18 +1257,20 @@ describe('ObservableQuery', () => { describe('stopPolling', () => { let timer: any; let defer: Function = setImmediate; - beforeEach(() => timer = sinon.useFakeTimers()); + beforeEach(() => (timer = sinon.useFakeTimers())); 
afterEach(() => timer.restore()); - it('does not restart polling after stopping and resubscribing', (done) => { - const observable = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }, { - request: { query, variables }, - result: { data: dataTwo }, - }); - + it('does not restart polling after stopping and resubscribing', done => { + const observable = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + ); observable.startPolling(100); observable.stopPolling(); diff --git a/test/QueryManager.ts b/test/QueryManager.ts index cca0141a19e..c1eb8e30b32 100644 --- a/test/QueryManager.ts +++ b/test/QueryManager.ts @@ -1,6 +1,4 @@ -import { - QueryManager, -} from '../src/core/QueryManager'; +import { QueryManager } from '../src/core/QueryManager'; import mockQueryManager from './mocks/mockQueryManager'; @@ -10,29 +8,17 @@ import { ObservableQuery } from '../src/core/ObservableQuery'; import { WatchQueryOptions } from '../src/core/watchQueryOptions'; -import { - createApolloStore, - ApolloStore, -} from '../src/store'; +import { createApolloStore, ApolloStore } from '../src/store'; import gql from 'graphql-tag'; -import { - assert, -} from 'chai'; +import { assert } from 'chai'; -import { - DocumentNode, - ExecutionResult, -} from 'graphql'; +import { DocumentNode, ExecutionResult } from 'graphql'; -import ApolloClient, { - ApolloStateSelector, -} from '../src/ApolloClient'; +import ApolloClient, { ApolloStateSelector } from '../src/ApolloClient'; -import { - ApolloQueryResult, -} from '../src/core/types'; +import { ApolloQueryResult } from '../src/core/types'; import { createStore, combineReducers, applyMiddleware } from 'redux'; @@ -44,17 +30,11 @@ import mockNetworkInterface, { ParsedRequest, } from './mocks/mockNetworkInterface'; -import { - NetworkInterface, -} from '../src/transport/networkInterface'; +import { NetworkInterface } from '../src/transport/networkInterface'; -import { - ApolloError, -} from '../src/errors/ApolloError'; +import { ApolloError } from '../src/errors/ApolloError'; -import { - Observer, -} from '../src/util/Observable'; +import { Observer } from '../src/util/Observable'; import { NetworkStatus } from '../src/queries/networkStatus'; @@ -65,7 +45,6 @@ import observableToPromise, { } from './util/observableToPromise'; describe('QueryManager', () => { - // Standard "get id from object" method. 
const dataIdFromObject = (object: any) => { if (object.__typename && object.id) { @@ -85,12 +64,11 @@ describe('QueryManager', () => { reduxRootSelector, addTypename = false, }: { - networkInterface?: NetworkInterface, - store?: ApolloStore, - reduxRootSelector?: ApolloStateSelector, - addTypename?: boolean, + networkInterface?: NetworkInterface; + store?: ApolloStore; + reduxRootSelector?: ApolloStateSelector; + addTypename?: boolean; }) => { - return new QueryManager({ networkInterface: networkInterface || mockNetworkInterface(), store: store || createApolloStore(), @@ -111,14 +89,14 @@ describe('QueryManager', () => { delay, observer, }: { - done: MochaDone, - query: DocumentNode, - variables?: Object, - queryOptions?: Object, - error?: Error, - result?: ExecutionResult, - delay?: number, - observer: Observer>, + done: MochaDone; + query: DocumentNode; + variables?: Object; + queryOptions?: Object; + error?: Error; + result?: ExecutionResult; + delay?: number; + observer: Observer>; }) => { const queryManager = mockQueryManager({ request: { query, variables }, @@ -126,7 +104,10 @@ describe('QueryManager', () => { error, delay, }); - const finalOptions = assign({ query, variables }, queryOptions) as WatchQueryOptions; + const finalOptions = assign( + { query, variables }, + queryOptions, + ) as WatchQueryOptions; return queryManager.watchQuery(finalOptions).subscribe({ next: wrap(done, observer.next!), error: observer.error, @@ -141,10 +122,10 @@ describe('QueryManager', () => { data, variables = {}, }: { - done: MochaDone, - query: DocumentNode, - data: Object, - variables?: Object, + done: MochaDone; + query: DocumentNode; + data: Object; + variables?: Object; }) => { assertWithObserver({ done, @@ -166,10 +147,10 @@ describe('QueryManager', () => { variables = {}, store, }: { - mutation: DocumentNode, - data: Object, - variables?: Object, - store?: ApolloStore, + mutation: DocumentNode; + data: Object; + variables?: Object; + store?: ApolloStore; }) => { if (!store) { store = createApolloStore(); @@ -179,19 +160,25 @@ describe('QueryManager', () => { result: { data }, }); const queryManager = createQueryManager({ networkInterface, store }); - return new Promise<{ result: ExecutionResult, queryManager: QueryManager }>((resolve, reject) => { - queryManager.mutate({ mutation, variables }).then((result) => { - resolve({ result, queryManager }); - }).catch((error) => { - reject(error); - }); + return new Promise<{ + result: ExecutionResult; + queryManager: QueryManager; + }>((resolve, reject) => { + queryManager + .mutate({ mutation, variables }) + .then(result => { + resolve({ result, queryManager }); + }) + .catch(error => { + reject(error); + }); }); }; const assertMutationRoundtrip = (opts: { - mutation: DocumentNode, - data: Object, - variables?: Object, + mutation: DocumentNode; + data: Object; + variables?: Object; }) => { return mockMutation(opts).then(({ result }) => { assert.deepEqual(result.data, opts.data); @@ -206,10 +193,10 @@ describe('QueryManager', () => { secondResult, thirdResult, }: { - request: ParsedRequest, - firstResult: ExecutionResult, - secondResult: ExecutionResult, - thirdResult?: ExecutionResult, + request: ParsedRequest; + firstResult: ExecutionResult; + secondResult: ExecutionResult; + thirdResult?: ExecutionResult; }) => { const args = [ { @@ -229,16 +216,17 @@ describe('QueryManager', () => { return mockQueryManager(...args); }; - it('properly roundtrips through a Redux store', (done) => { + it('properly roundtrips through a Redux store', done => { 
assertRoundtrip({ query: gql` - query people { - allPeople(first: 1) { - people { - name + query people { + allPeople(first: 1) { + people { + name + } } } - }`, + `, data: { allPeople: { people: [ @@ -252,20 +240,20 @@ describe('QueryManager', () => { }); }); - it('runs multiple root queries', (done) => { + it('runs multiple root queries', done => { assertRoundtrip({ query: gql` - query people { - allPeople(first: 1) { - people { + query people { + allPeople(first: 1) { + people { + name + } + } + person(id: "1") { name } } - person(id: "1") { - name - } - } - `, + `, data: { allPeople: { people: [ @@ -282,16 +270,17 @@ describe('QueryManager', () => { }); }); - it('properly roundtrips through a Redux store with variables', (done) => { + it('properly roundtrips through a Redux store with variables', done => { assertRoundtrip({ query: gql` - query people($firstArg: Int) { - allPeople(first: $firstArg) { - people { - name + query people($firstArg: Int) { + allPeople(first: $firstArg) { + people { + name + } } } - }`, + `, variables: { firstArg: 1, @@ -310,17 +299,18 @@ describe('QueryManager', () => { }); }); - it('handles GraphQL errors', (done) => { + it('handles GraphQL errors', done => { assertWithObserver({ done, query: gql` - query people { - allPeople(first: 1) { - people { - name - } + query people { + allPeople(first: 1) { + people { + name } - }`, + } + } + `, variables: {}, result: { errors: [ @@ -332,7 +322,9 @@ describe('QueryManager', () => { }, observer: { next(result) { - done(new Error('Returned a result when it was supposed to error out')); + done( + new Error('Returned a result when it was supposed to error out'), + ); }, error(apolloError) { @@ -343,17 +335,18 @@ describe('QueryManager', () => { }); }); - it('handles GraphQL errors with data returned', (done) => { + it('handles GraphQL errors with data returned', done => { assertWithObserver({ done, query: gql` - query people { - allPeople(first: 1) { - people { - name + query people { + allPeople(first: 1) { + people { + name + } } } - }`, + `, result: { data: { allPeople: { @@ -380,20 +373,20 @@ describe('QueryManager', () => { }, }, }); - }); - it('empty error array (handle non-spec-compliant server) #156', (done) => { + it('empty error array (handle non-spec-compliant server) #156', done => { assertWithObserver({ done, query: gql` - query people { - allPeople(first: 1) { - people { - name + query people { + allPeople(first: 1) { + people { + name + } } } - }`, + `, result: { data: { allPeople: { @@ -416,17 +409,18 @@ describe('QueryManager', () => { // Easy to get into this state if you write an incorrect `formatError` // function with graphql-server or express-graphql - it('error array with nulls (handle non-spec-compliant server) #1185', (done) => { + it('error array with nulls (handle non-spec-compliant server) #1185', done => { assertWithObserver({ done, query: gql` - query people { - allPeople(first: 1) { - people { - name + query people { + allPeople(first: 1) { + people { + name + } } } - }`, + `, result: { errors: [null as any], }, @@ -436,31 +430,34 @@ describe('QueryManager', () => { }, error(error) { assert.deepEqual((error as any).graphQLErrors, [null]); - assert.equal(error.message, 'GraphQL error: Error message not found.'); + assert.equal( + error.message, + 'GraphQL error: Error message not found.', + ); done(); }, }, }); }); - - it('handles network errors', (done) => { + it('handles network errors', done => { assertWithObserver({ done, query: gql` - query people { - allPeople(first: 1) { - people { - 
name + query people { + allPeople(first: 1) { + people { + name + } } } - }`, + `, error: new Error('Network error'), observer: { - next: (result) => { + next: result => { done(new Error('Should not deliver result')); }, - error: (error) => { + error: error => { const apolloError = error as ApolloError; assert(apolloError.networkError); assert.include(apolloError.networkError!.message, 'Network error'); @@ -470,7 +467,7 @@ describe('QueryManager', () => { }); }); - it('uses console.error to log unhandled errors', (done) => { + it('uses console.error to log unhandled errors', done => { const oldError = console.error; let printed: any; console.error = (...args: any[]) => { @@ -480,16 +477,17 @@ describe('QueryManager', () => { assertWithObserver({ done, query: gql` - query people { - allPeople(first: 1) { - people { - name + query people { + allPeople(first: 1) { + people { + name + } } } - }`, + `, error: new Error('Network error'), observer: { - next: (result) => { + next: result => { done(new Error('Should not deliver result')); }, }, @@ -502,23 +500,24 @@ describe('QueryManager', () => { }, 10); }); - it('handles an unsubscribe action that happens before data returns', (done) => { + it('handles an unsubscribe action that happens before data returns', done => { const subscription = assertWithObserver({ done, query: gql` - query people { - allPeople(first: 1) { - people { - name + query people { + allPeople(first: 1) { + people { + name + } } } - }`, + `, delay: 1000, observer: { - next: (result) => { + next: result => { done(new Error('Should not deliver result')); }, - error: (error) => { + error: error => { done(new Error('Should not deliver result')); }, }, @@ -528,7 +527,7 @@ describe('QueryManager', () => { done(); }); - it('supports interoperability with other Observable implementations like RxJS', (done) => { + it('supports interoperability with other Observable implementations like RxJS', done => { const expResult = { data: { allPeople: { @@ -547,36 +546,38 @@ describe('QueryManager', () => { query people { allPeople(first: 1) { people { - name + name + } } } - }`, + `, }, result: expResult, }); const observable = Rx.Observable.from(handle as any); - - observable - .map(result => (assign({ fromRx: true }, result))) - .subscribe({ - next: wrap(done, (newResult) => { - const expectedResult = assign({ fromRx: true, loading: false, networkStatus: 7, stale: false }, expResult); + observable.map(result => assign({ fromRx: true }, result)).subscribe({ + next: wrap(done, newResult => { + const expectedResult = assign( + { fromRx: true, loading: false, networkStatus: 7, stale: false }, + expResult, + ); assert.deepEqual(newResult, expectedResult); done(); }), }); }); - it('allows you to subscribe twice to one query', (done) => { + it('allows you to subscribe twice to one query', done => { const request = { query: gql` query fetchLuke($id: String) { people_one(id: $id) { name } - }`, + } + `, variables: { id: '1', }, @@ -599,67 +600,74 @@ describe('QueryManager', () => { }, }; - const queryManager = mockQueryManager({ - request, - result: { data: data1 }, - }, { - request, - result: { data: data2 }, + const queryManager = mockQueryManager( + { + request, + result: { data: data1 }, + }, + { + request, + result: { data: data2 }, - // Wait for both to subscribe - delay: 100, - }, { - request, - result: { data: data3 }, - }); + // Wait for both to subscribe + delay: 100, + }, + { + request, + result: { data: data3 }, + }, + ); let subOneCount = 0; // pre populate data to avoid contention - 
queryManager.query(request) - .then(() => { - const handle = queryManager.watchQuery(request); + queryManager.query(request).then(() => { + const handle = queryManager.watchQuery(request); - const subOne = handle.subscribe({ - next(result) { - subOneCount++; + const subOne = handle.subscribe({ + next(result) { + subOneCount++; - if (subOneCount === 1) { - assert.deepEqual(result.data, data1); - } else if (subOneCount === 2) { - assert.deepEqual(result.data, data2); - } - }, - }); + if (subOneCount === 1) { + assert.deepEqual(result.data, data1); + } else if (subOneCount === 2) { + assert.deepEqual(result.data, data2); + } + }, + }); - let subTwoCount = 0; - handle.subscribe({ - next(result) { - subTwoCount++; - if (subTwoCount === 1) { - assert.deepEqual(result.data, data1); - handle.refetch(); - } else if (subTwoCount === 2) { - assert.deepEqual(result.data, data2); - setTimeout(() => { - try { - assert.equal(subOneCount, 2); - - subOne.unsubscribe(); - handle.refetch(); - } catch (e) { done(e); } - }, 0); - } else if (subTwoCount === 3) { - setTimeout(() => { - try { - assert.equal(subOneCount, 2); - done(); - } catch (e) { done(e); } - }, 0); - } - }, - }); + let subTwoCount = 0; + handle.subscribe({ + next(result) { + subTwoCount++; + if (subTwoCount === 1) { + assert.deepEqual(result.data, data1); + handle.refetch(); + } else if (subTwoCount === 2) { + assert.deepEqual(result.data, data2); + setTimeout(() => { + try { + assert.equal(subOneCount, 2); + + subOne.unsubscribe(); + handle.refetch(); + } catch (e) { + done(e); + } + }, 0); + } else if (subTwoCount === 3) { + setTimeout(() => { + try { + assert.equal(subOneCount, 2); + done(); + } catch (e) { + done(e); + } + }, 0); + } + }, }); + }); }); it('allows you to refetch queries', () => { @@ -669,7 +677,8 @@ describe('QueryManager', () => { people_one(id: $id) { name } - }`, + } + `, variables: { id: '1', }, @@ -694,12 +703,13 @@ describe('QueryManager', () => { }); const observable = queryManager.watchQuery(request); - return observableToPromise({ observable }, - (result) => { + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); observable.refetch(); }, - (result) => assert.deepEqual(result.data, data2), + result => assert.deepEqual(result.data, data2), ); }); @@ -708,8 +718,15 @@ describe('QueryManager', () => { query: gql` { a - b { c } - d { e f { g } } + b { + c + } + d { + e + f { + g + } + } } `, notifyOnNetworkStatusChange: false, @@ -777,8 +794,7 @@ describe('QueryManager', () => { done(error); } }, - error: error => - done(error), + error: error => done(error), }); }); @@ -787,8 +803,15 @@ describe('QueryManager', () => { query: gql` { a - b { c } - d { e f { g } } + b { + c + } + d { + e + f { + g + } + } } `, notifyOnNetworkStatusChange: false, @@ -817,8 +840,7 @@ describe('QueryManager', () => { done(error); } }, - error: error => - done(error), + error: error => done(error), }); }); @@ -829,7 +851,8 @@ describe('QueryManager', () => { people_one(id: $id) { name } - }`, + } + `, variables: { id: '1', }, @@ -854,13 +877,14 @@ describe('QueryManager', () => { }); const observable = queryManager.watchQuery(request); - return observableToPromise({ observable }, - (result) => { + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); observable.refetch(); }, - (result) => assert.equal(result.networkStatus, NetworkStatus.refetch), - (result) => { + result => assert.equal(result.networkStatus, NetworkStatus.refetch), + result => { 
assert.equal(result.networkStatus, NetworkStatus.ready); assert.deepEqual(result.data, data2); }, @@ -870,11 +894,12 @@ describe('QueryManager', () => { it('allows you to refetch queries with promises', () => { const request = { query: gql` - { - people_one(id: 1) { - name + { + people_one(id: 1) { + name + } } - }`, + `, }; const data1 = { people_one: { @@ -897,19 +922,20 @@ describe('QueryManager', () => { const handle = queryManager.watchQuery(request); handle.subscribe({}); - return handle.refetch().then( - (result) => assert.deepEqual(result.data, data2), - ); + return handle + .refetch() + .then(result => assert.deepEqual(result.data, data2)); }); it('returns frozen results from refetch', () => { const request = { query: gql` - { - people_one(id: 1) { - name + { + people_one(id: 1) { + name + } } - }`, + `, }; const data1 = { people_one: { @@ -932,9 +958,9 @@ describe('QueryManager', () => { const handle = queryManager.watchQuery(request); handle.subscribe({}); - return handle.refetch().then( result => { + return handle.refetch().then(result => { assert.deepEqual(result.data, data2); - assert.throws( () => (result.data as any).stuff = 'awful'); + assert.throws(() => ((result.data as any).stuff = 'awful')); }); }); @@ -998,29 +1024,33 @@ describe('QueryManager', () => { }, ); - const observable = queryManager.watchQuery({ query, notifyOnNetworkStatusChange: false }); - return observableToPromise({ observable }, - (result) => { + const observable = queryManager.watchQuery({ + query, + notifyOnNetworkStatusChange: false, + }); + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); observable.refetch(); }, - (result) => { + result => { assert.deepEqual(result.data, data2); observable.refetch(variables1); }, - (result) => { + result => { assert.isTrue(result.loading); assert.deepEqual(result.data, data2); }, - (result) => { + result => { assert.deepEqual(result.data, data3); observable.refetch(variables2); }, - (result) => { + result => { assert.isTrue(result.loading); assert.deepEqual(result.data, data3); }, - (result) => { + result => { assert.deepEqual(result.data, data4); }, ); @@ -1058,14 +1088,18 @@ describe('QueryManager', () => { }, ); - const observable = queryManager.watchQuery({ query, notifyOnNetworkStatusChange: false }); + const observable = queryManager.watchQuery({ + query, + notifyOnNetworkStatusChange: false, + }); const originalOptions = assign({}, observable.options); - return observableToPromise({ observable }, - (result) => { + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); observable.refetch(); }, - (result) => { + result => { assert.deepEqual(result.data, data2); const updatedOptions = assign({}, observable.options); delete originalOptions.variables; @@ -1123,13 +1157,14 @@ describe('QueryManager', () => { notifyOnNetworkStatusChange: false, }); - return observableToPromise({ observable }, - (result) => { + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); observable.refetch(); }, - (result) => assert.deepEqual(result.data, data2), - (result) => { + result => assert.deepEqual(result.data, data2), + result => { assert.deepEqual(result.data, data3); observable.stopPolling(); assert(result); @@ -1137,7 +1172,7 @@ describe('QueryManager', () => { ); }); - it('sets networkStatus to `poll` if a polling query is in flight', (done) => { + it('sets networkStatus to `poll` if a polling query is in flight', done => { const query = 
gql` { people_one(id: 1) { @@ -1201,9 +1236,15 @@ describe('QueryManager', () => { }); }); - it('can handle null values in arrays (#1551)', (done) => { - const query = gql`{ list { value } }`; - const data = { list: [ null, { value: 1 } ] }; + it('can handle null values in arrays (#1551)', done => { + const query = gql` + { + list { + value + } + } + `; + const data = { list: [null, { value: 1 }] }; const queryManager = mockQueryManager({ request: { query }, result: { data }, @@ -1211,7 +1252,7 @@ describe('QueryManager', () => { const observable = queryManager.watchQuery({ query }); observable.subscribe({ - next: (result) => { + next: result => { assert.deepEqual(result.data, data); assert.deepEqual(observable.currentResult().data, data); done(); @@ -1220,21 +1261,24 @@ describe('QueryManager', () => { }); it('deepFreezes results in development mode', () => { - const query = gql`{ stuff }`; + const query = gql` + { + stuff + } + `; const data = { stuff: 'wonderful' }; const queryManager = mockQueryManager({ request: { query }, result: { data }, }); - return queryManager.query({ query }) - .then(result => { + return queryManager.query({ query }).then(result => { assert.deepEqual(result.data, data); - assert.throws( () => (result.data as any).stuff = 'awful' ); + assert.throws(() => ((result.data as any).stuff = 'awful')); }); }); - it('should error if we pass fetchPolicy = cache-first or cache-only on a polling query', (done) => { + it('should error if we pass fetchPolicy = cache-first or cache-only on a polling query', done => { assert.throw(() => { assertWithObserver({ done, @@ -1249,7 +1293,8 @@ describe('QueryManager', () => { firstName lastName } - }`, + } + `, queryOptions: { pollInterval: 200, fetchPolicy: 'cache-only' }, }); }); @@ -1267,7 +1312,8 @@ describe('QueryManager', () => { firstName lastName } - }`, + } + `, queryOptions: { pollInterval: 200, fetchPolicy: 'cache-first' }, }); }); @@ -1300,27 +1346,27 @@ describe('QueryManager', () => { }, }; - const queryManager = mockQueryManager( - { - request: { query: primeQuery }, - result: { data: data1 }, - }, - ); + const queryManager = mockQueryManager({ + request: { query: primeQuery }, + result: { data: data1 }, + }); // First, prime the cache - return queryManager.query({ - query: primeQuery, - }).then(() => { - const handle = queryManager.watchQuery({ - query: complexQuery, - fetchPolicy: 'cache-only', - }); + return queryManager + .query({ + query: primeQuery, + }) + .then(() => { + const handle = queryManager.watchQuery({ + query: complexQuery, + fetchPolicy: 'cache-only', + }); - return handle.result().then((result) => { - assert.equal(result.data['luke'].name, 'Luke Skywalker'); - assert.notProperty(result.data, 'vader'); + return handle.result().then(result => { + assert.equal(result.data['luke'].name, 'Luke Skywalker'); + assert.notProperty(result.data, 'vader'); + }); }); - }); }); it('runs a mutation', () => { @@ -1328,7 +1374,8 @@ describe('QueryManager', () => { mutation: gql` mutation makeListPrivate { makeListPrivate(id: "5") - }`, + } + `, data: { makeListPrivate: true }, }); }); @@ -1338,13 +1385,14 @@ describe('QueryManager', () => { mutation: gql` mutation makeListPrivate($listId: ID!) 
{ makeListPrivate(id: $listId) - }`, + } + `, variables: { listId: '1' }, data: { makeListPrivate: true }, }); }); - const getIdField = ({id}: {id: string}) => id; + const getIdField = ({ id }: { id: string }) => id; it('runs a mutation with object parameters and puts the result in the store', () => { const data = { @@ -1356,11 +1404,12 @@ describe('QueryManager', () => { return mockMutation({ mutation: gql` mutation makeListPrivate { - makeListPrivate(input: {id: "5"}) { - id, - isPrivate, + makeListPrivate(input: { id: "5" }) { + id + isPrivate } - }`, + } + `, data, store: createApolloStore({ config: { dataIdFromObject: getIdField }, @@ -1369,10 +1418,10 @@ describe('QueryManager', () => { assert.deepEqual(result.data, data); // Make sure we updated the store with the new data - assert.deepEqual( - queryManager.store.getState()['apollo'].data['5'], - { id: '5', isPrivate: true }, - ); + assert.deepEqual(queryManager.store.getState()['apollo'].data['5'], { + id: '5', + isPrivate: true, + }); }); }); @@ -1388,10 +1437,11 @@ describe('QueryManager', () => { mutation: gql` mutation makeListPrivate { makeListPrivate(id: "5") { - id, - isPrivate, + id + isPrivate } - }`, + } + `, data, store: createApolloStore({ config: { dataIdFromObject: getIdField }, @@ -1400,19 +1450,19 @@ describe('QueryManager', () => { assert.deepEqual(result.data, data); // Make sure we updated the store with the new data - assert.deepEqual( - queryManager.store.getState()['apollo'].data['5'], - { id: '5', isPrivate: true }, - ); + assert.deepEqual(queryManager.store.getState()['apollo'].data['5'], { + id: '5', + isPrivate: true, + }); }); }); it('runs a mutation and puts the result in the store with root key', () => { - const mutation = gql` + const mutation = gql` mutation makeListPrivate { makeListPrivate(id: "5") { - id, - isPrivate, + id + isPrivate } } `; @@ -1431,24 +1481,27 @@ describe('QueryManager', () => { config: { dataIdFromObject: getIdField }, }); const queryManager = createQueryManager({ - networkInterface: mockNetworkInterface( - { - request: { query: mutation }, - result: { data }, - }, - ), + networkInterface: mockNetworkInterface({ + request: { query: mutation }, + result: { data }, + }), store, reduxRootSelector, }); - return queryManager.mutate({ - mutation, - }).then((result) => { - assert.deepEqual(result.data, data); + return queryManager + .mutate({ + mutation, + }) + .then(result => { + assert.deepEqual(result.data, data); - // Make sure we updated the store with the new data - assert.deepEqual(reduxRootSelector(store.getState()).data['5'], { id: '5', isPrivate: true }); - }); + // Make sure we updated the store with the new data + assert.deepEqual(reduxRootSelector(store.getState()).data['5'], { + id: '5', + isPrivate: true, + }); + }); }); it('does not broadcast queries when non-apollo actions are dispatched', () => { @@ -1476,7 +1529,7 @@ describe('QueryManager', () => { }, }; - function testReducer (state = false, action: any): boolean { + function testReducer(state = false, action: any): boolean { if (action.type === 'TOGGLE') { return true; } @@ -1504,12 +1557,13 @@ describe('QueryManager', () => { store: store, }).watchQuery({ query, variables, notifyOnNetworkStatusChange: false }); - return observableToPromise({ observable }, - (result) => { + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); observable.refetch(); }, - (result) => { + result => { assert.deepEqual(result.data, data2); store.dispatch({ type: 'TOGGLE', @@ -1543,7 +1597,7 
@@ describe('QueryManager', () => { }, }; - function testReducer (state = false, action: any): boolean { + function testReducer(state = false, action: any): boolean { if (action.type === 'TOGGLE') { return true; } @@ -1571,14 +1625,19 @@ describe('QueryManager', () => { store: store, }); - const observable = qm.watchQuery({ query, variables, notifyOnNetworkStatusChange: false }); + const observable = qm.watchQuery({ + query, + variables, + notifyOnNetworkStatusChange: false, + }); - return observableToPromise({ observable }, - (result) => { + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); observable.refetch(); }, - (result) => { + result => { assert.deepEqual(result.data, data2); // here's the actual test. Everything else is just setup. @@ -1589,8 +1648,16 @@ describe('QueryManager', () => { store.dispatch({ type: 'TOGGLE', }); - assert.equal((store.getState() as any).test, true, 'test state should have been updated'); - assert.equal(called, false, 'broadcastNewStore should not have been called'); + assert.equal( + (store.getState() as any).test, + true, + 'test state should have been updated', + ); + assert.equal( + called, + false, + 'broadcastNewStore should not have been called', + ); }, ); }); @@ -1640,11 +1707,11 @@ describe('QueryManager', () => { const observable2 = queryManager.watchQuery({ query: query2 }); return Promise.all([ - observableToPromise({ observable: observable1 }, - (result) => assert.deepEqual(result.data, data1), + observableToPromise({ observable: observable1 }, result => + assert.deepEqual(result.data, data1), ), - observableToPromise({ observable: observable2 }, - (result) => assert.deepEqual(result.data, data2), + observableToPromise({ observable: observable2 }, result => + assert.deepEqual(result.data, data2), ), ]); }); @@ -1695,23 +1762,24 @@ describe('QueryManager', () => { ); const observable = queryManager.watchQuery({ query: query1 }); - return observableToPromise({ observable }, - (result) => { + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); queryManager.query({ query: query2 }); }, // 3 because the query init action for the second query causes a callback - (result) => assert.deepEqual(result.data, { - people_one: { - name: 'Luke Skywalker has a new name', - age: 50, - }, - }), + result => + assert.deepEqual(result.data, { + people_one: { + name: 'Luke Skywalker has a new name', + age: 50, + }, + }), ); }); describe('polling queries', () => { - it('allows you to poll queries', () => { const query = gql` query fetchLuke($id: String) { @@ -1754,14 +1822,14 @@ describe('QueryManager', () => { notifyOnNetworkStatusChange: false, }); - return observableToPromise({ observable }, - (result) => assert.deepEqual(result.data, data1), - (result) => assert.deepEqual(result.data, data2), + return observableToPromise( + { observable }, + result => assert.deepEqual(result.data, data1), + result => assert.deepEqual(result.data, data2), ); - }); - it('does not poll during SSR', (done) => { + it('does not poll during SSR', done => { const query = gql` query fetchLuke($id: String) { people_one(id: $id) { @@ -1787,18 +1855,20 @@ describe('QueryManager', () => { }; const queryManager = new QueryManager({ - networkInterface: mockNetworkInterface({ - request: { query, variables }, - result: { data: data1 }, - }, - { - request: { query, variables }, - result: { data: data2 }, - }, - { - request: { query, variables }, - result: { data: data2 }, - }), + networkInterface: 
mockNetworkInterface( + { + request: { query, variables }, + result: { data: data1 }, + }, + { + request: { query, variables }, + result: { data: data2 }, + }, + { + request: { query, variables }, + result: { data: data2 }, + }, + ), store: createApolloStore(), reduxRootSelector: defaultReduxRootSelector, addTypename: false, @@ -1836,20 +1906,22 @@ describe('QueryManager', () => { }); }); - it('should let you handle multiple polled queries and unsubscribe from one of them', (done) => { + it('should let you handle multiple polled queries and unsubscribe from one of them', done => { const query1 = gql` query { author { firstName lastName } - }`; + } + `; const query2 = gql` query { person { name } - }`; + } + `; const data11 = { author: { firstName: 'John', @@ -1895,10 +1967,10 @@ describe('QueryManager', () => { }, { request: { query: query1 }, - result: { data: data13}, + result: { data: data13 }, }, { - request: {query: query1 }, + request: { query: query1 }, result: { data: data14 }, }, { @@ -1914,28 +1986,32 @@ describe('QueryManager', () => { let handleCount = 0; let setMilestone = false; - const subscription1 = queryManager.watchQuery({ - query: query1, - pollInterval: 150, - }).subscribe({ - next(result) { - handle1Count++; - handleCount++; - if (handle1Count > 1 && !setMilestone) { - subscription1.unsubscribe(); - setMilestone = true; - } - }, - }); + const subscription1 = queryManager + .watchQuery({ + query: query1, + pollInterval: 150, + }) + .subscribe({ + next(result) { + handle1Count++; + handleCount++; + if (handle1Count > 1 && !setMilestone) { + subscription1.unsubscribe(); + setMilestone = true; + } + }, + }); - const subscription2 = queryManager.watchQuery({ - query: query2, - pollInterval: 2000, - }).subscribe({ - next(result) { - handleCount++; - }, - }); + const subscription2 = queryManager + .watchQuery({ + query: query2, + pollInterval: 2000, + }) + .subscribe({ + next(result) { + handleCount++; + }, + }); setTimeout(() => { assert.equal(handleCount, 3); @@ -1988,12 +2064,13 @@ describe('QueryManager', () => { notifyOnNetworkStatusChange: false, }); - const { promise, subscription } = observableToPromiseAndSubscription({ + const { promise, subscription } = observableToPromiseAndSubscription( + { observable, wait: 60, }, - (result) => assert.deepEqual(result.data, data1), - (result) => { + result => assert.deepEqual(result.data, data1), + result => { assert.deepEqual(result.data, data2); // we unsubscribe here manually, rather than waiting for the timeout. 
@@ -2051,17 +2128,18 @@ describe('QueryManager', () => { notifyOnNetworkStatusChange: false, }); - const { promise, subscription } = observableToPromiseAndSubscription({ + const { promise, subscription } = observableToPromiseAndSubscription( + { observable, wait: 60, errorCallbacks: [ - (error) => { + error => { assert.include(error.message, 'Network error'); subscription.unsubscribe(); }, ], }, - (result) => assert.deepEqual(result.data, data1), + result => assert.deepEqual(result.data, data1), ); return promise; @@ -2103,12 +2181,17 @@ describe('QueryManager', () => { }, ); - const observable = queryManager.watchQuery({ query, variables, notifyOnNetworkStatusChange: false }); + const observable = queryManager.watchQuery({ + query, + variables, + notifyOnNetworkStatusChange: false, + }); observable.startPolling(50); - return observableToPromise({ observable }, - (result) => assert.deepEqual(result.data, data1), - (result) => assert.deepEqual(result.data, data2), + return observableToPromise( + { observable }, + result => assert.deepEqual(result.data, data1), + result => assert.deepEqual(result.data, data2), ); }); @@ -2153,12 +2236,10 @@ describe('QueryManager', () => { pollInterval: 50, }); - return observableToPromise({ observable, wait: 60}, - (result) => { - assert.deepEqual(result.data, data1); - observable.stopPolling(); - }, - ); + return observableToPromise({ observable, wait: 60 }, result => { + assert.deepEqual(result.data, data1); + observable.stopPolling(); + }); }); it('stopped polling queries still get updates', () => { @@ -2204,13 +2285,15 @@ describe('QueryManager', () => { let timeout: Function; return Promise.race([ - observableToPromise({ observable }, - (result) => { + observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data1); - queryManager.query({ query, variables, fetchPolicy: 'network-only' }) + queryManager + .query({ query, variables, fetchPolicy: 'network-only' }) .then(() => timeout(new Error('Should have two results by now'))); }, - (result) => assert.deepEqual(result.data, data2), + result => assert.deepEqual(result.data, data2), ), // Ensure that the observable has recieved 2 results *before* // the rejection triggered above @@ -2226,33 +2309,34 @@ describe('QueryManager', () => { assert.throws(() => { queryManager.query({ // Bamboozle TypeScript into letting us do this - query: 'string' as any as DocumentNode, + query: ('string' as any) as DocumentNode, }); }, /wrap the query string in a "gql" tag/); assert.throws(() => { queryManager.mutate({ // Bamboozle TypeScript into letting us do this - mutation: 'string' as any as DocumentNode, + mutation: ('string' as any) as DocumentNode, }); }, /wrap the query string in a "gql" tag/); assert.throws(() => { queryManager.watchQuery({ // Bamboozle TypeScript into letting us do this - query: 'string' as any as DocumentNode, + query: ('string' as any) as DocumentNode, }); }, /wrap the query string in a "gql" tag/); }); - it('should transform queries correctly when given a QueryTransformer', (done) => { + it('should transform queries correctly when given a QueryTransformer', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const transformedQuery = gql` query { author { @@ -2260,18 +2344,19 @@ describe('QueryManager', () => { lastName __typename } - }`; + } + `; const unmodifiedQueryResult = { - 'author': { - 'firstName': 'John', - 'lastName': 'Smith', + author: { + firstName: 'John', + lastName: 'Smith', }, }; const transformedQueryResult = { - 'author': { 
- 'firstName': 'John', - 'lastName': 'Smith', - '__typename': 'Author', + author: { + firstName: 'John', + lastName: 'Smith', + __typename: 'Author', }, }; @@ -2280,29 +2365,32 @@ describe('QueryManager', () => { createQueryManager({ networkInterface: mockNetworkInterface( { - request: {query}, - result: {data: unmodifiedQueryResult}, + request: { query }, + result: { data: unmodifiedQueryResult }, }, { - request: {query: transformedQuery}, - result: {data: transformedQueryResult}, + request: { query: transformedQuery }, + result: { data: transformedQueryResult }, }, ), addTypename: true, - }).query({query: query}).then((result) => { - assert.deepEqual(result.data, transformedQueryResult); - done(); - }); + }) + .query({ query: query }) + .then(result => { + assert.deepEqual(result.data, transformedQueryResult); + done(); + }); }); - it('should transform mutations correctly', (done) => { + it('should transform mutations correctly', done => { const mutation = gql` mutation { createAuthor(firstName: "John", lastName: "Smith") { firstName lastName } - }`; + } + `; const transformedMutation = gql` mutation { createAuthor(firstName: "John", lastName: "Smith") { @@ -2310,36 +2398,40 @@ describe('QueryManager', () => { lastName __typename } - }`; + } + `; const unmodifiedMutationResult = { - 'createAuthor': { - 'firstName': 'It works!', - 'lastName': 'It works!', + createAuthor: { + firstName: 'It works!', + lastName: 'It works!', }, }; const transformedMutationResult = { - 'createAuthor': { - 'firstName': 'It works!', - 'lastName': 'It works!', - '__typename': 'Author', + createAuthor: { + firstName: 'It works!', + lastName: 'It works!', + __typename: 'Author', }, }; createQueryManager({ networkInterface: mockNetworkInterface( { - request: {query: mutation}, - result: {data: unmodifiedMutationResult}, + request: { query: mutation }, + result: { data: unmodifiedMutationResult }, }, { - request: {query: transformedMutation}, - result: {data: transformedMutationResult}, - }), + request: { query: transformedMutation }, + result: { data: transformedMutationResult }, + }, + ), addTypename: true, - }).mutate({mutation: mutation}).then((result) => { - assert.deepEqual(result.data, transformedMutationResult); - done(); - }); + }) + .mutate({ mutation: mutation }) + .then(result => { + assert.deepEqual(result.data, transformedMutationResult); + done(); + }); }); describe('store resets', () => { @@ -2350,7 +2442,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { @@ -2372,7 +2465,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data2 = { author2: { @@ -2391,20 +2485,20 @@ describe('QueryManager', () => { const queryManager = createQueryManager({ networkInterface: mockNetworkInterface( { - request: {query}, - result: {data}, + request: { query }, + result: { data }, }, { - request: {query: query2}, - result: {data: data2}, + request: { query: query2 }, + result: { data: data2 }, }, { - request: {query}, - result: {data: dataChanged}, + request: { query }, + result: { data: dataChanged }, }, { - request: {query: query2}, - result: {data: data2Changed}, + request: { query: query2 }, + result: { data: data2Changed }, }, ), }); @@ -2413,11 +2507,11 @@ describe('QueryManager', () => { const observable2 = queryManager.watchQuery({ query: query2 }); return Promise.all([ - observableToPromise({ observable }, - result => assert.deepEqual(result.data, data), + observableToPromise({ observable }, result => + assert.deepEqual(result.data, 
data), ), - observableToPromise({ observable: observable2 }, - result => assert.deepEqual(result.data, data2), + observableToPromise({ observable: observable2 }, result => + assert.deepEqual(result.data, data2), ), ]).then(() => { observable.subscribe({ next: () => null }); @@ -2457,7 +2551,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -2481,14 +2576,14 @@ describe('QueryManager', () => { const observable = queryManager.watchQuery({ query }); // wait just to make sure the observable doesn't fire again - return observableToPromise({ observable, wait: 0 }, - (result) => assert.deepEqual(result.data, data), + return observableToPromise({ observable, wait: 0 }, result => + assert.deepEqual(result.data, data), ).then(() => { assert.equal(timesFired, 2); }); }); - it('should not refetch toredown queries', (done) => { + it('should not refetch toredown queries', done => { let queryManager: QueryManager; let observable: ObservableQuery; const query = gql` @@ -2497,7 +2592,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -2515,9 +2611,8 @@ describe('QueryManager', () => { queryManager = createQueryManager({ networkInterface }); observable = queryManager.watchQuery({ query }); - - observableToPromise({ observable, wait: 0 }, - (result) => assert.deepEqual(result.data, data), + observableToPromise({ observable, wait: 0 }, result => + assert.deepEqual(result.data, data), ).then(() => { assert.equal(timesFired, 1); @@ -2541,7 +2636,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -2562,25 +2658,28 @@ describe('QueryManager', () => { }, }; queryManager = createQueryManager({ networkInterface }); - const observable = queryManager.watchQuery({ query, notifyOnNetworkStatusChange: false }); + const observable = queryManager.watchQuery({ + query, + notifyOnNetworkStatusChange: false, + }); // wait to make sure store reset happened - return observableToPromise({ observable, wait: 20 }, - result => assert.deepEqual(result.data, data), + return observableToPromise({ observable, wait: 20 }, result => + assert.deepEqual(result.data, data), ).then(() => { assert.equal(timesFired, 2); }); }); - - it('should throw an error on an inflight fetch query if the store is reset', (done) => { + it('should throw an error on an inflight fetch query if the store is reset', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -2592,26 +2691,30 @@ describe('QueryManager', () => { result: { data }, delay: 10000, //i.e. 
forever }); - queryManager.fetchQuery('made up id', { query }).then((result) => { - done(new Error('Returned a result.')); - }).catch((error) => { - assert.include(error.message, 'Store reset'); - done(); - }); + queryManager + .fetchQuery('made up id', { query }) + .then(result => { + done(new Error('Returned a result.')); + }) + .catch(error => { + assert.include(error.message, 'Store reset'); + done(); + }); queryManager.resetStore(); }); - it('should call refetch on a mocked Observable if the store is reset', (done) => { + it('should call refetch on a mocked Observable if the store is reset', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const queryManager = mockQueryManager(); - const mockObservableQuery: ObservableQuery = { + const mockObservableQuery: ObservableQuery = ({ refetch(variables: any): Promise { done(); return null as never; @@ -2620,34 +2723,35 @@ describe('QueryManager', () => { query: query, }, scheduler: queryManager.scheduler, - } as any as ObservableQuery; + } as any) as ObservableQuery; const queryId = 'super-fake-id'; queryManager.addObservableQuery(queryId, mockObservableQuery); queryManager.resetStore(); }); - it('should not call refetch on a cache-only Observable if the store is reset', (done) => { + it('should not call refetch on a cache-only Observable if the store is reset', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const queryManager = createQueryManager({}); const options = assign({}) as WatchQueryOptions; options.fetchPolicy = 'cache-only'; options.query = query; let refetchCount = 0; - const mockObservableQuery: ObservableQuery = { + const mockObservableQuery: ObservableQuery = ({ refetch(variables: any): Promise { - refetchCount ++; + refetchCount++; return null as never; }, options, queryManager: queryManager, - } as any as ObservableQuery; + } as any) as ObservableQuery; const queryId = 'super-fake-id'; queryManager.addObservableQuery(queryId, mockObservableQuery); @@ -2656,30 +2760,30 @@ describe('QueryManager', () => { assert.equal(refetchCount, 0); done(); }, 50); - }); - it('should not call refetch on a standby Observable if the store is reset', (done) => { + it('should not call refetch on a standby Observable if the store is reset', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const queryManager = createQueryManager({}); const options = assign({}) as WatchQueryOptions; options.fetchPolicy = 'standby'; options.query = query; let refetchCount = 0; - const mockObservableQuery: ObservableQuery = { + const mockObservableQuery: ObservableQuery = ({ refetch(variables: any): Promise { - refetchCount ++; + refetchCount++; return null as never; }, options, queryManager: queryManager, - } as any as ObservableQuery; + } as any) as ObservableQuery; const queryId = 'super-fake-id'; queryManager.addObservableQuery(queryId, mockObservableQuery); @@ -2690,7 +2794,7 @@ describe('QueryManager', () => { }, 50); }); - it('should throw an error on an inflight query() if the store is reset', (done) => { + it('should throw an error on an inflight query() if the store is reset', done => { let queryManager: QueryManager; const query = gql` query { @@ -2698,7 +2802,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { @@ -2715,39 +2820,47 @@ describe('QueryManager', () => { }; queryManager = createQueryManager({ networkInterface }); - queryManager.query({ query }).then((result) => { - done(new Error('query() gave 
results on a store reset')); - }).catch((error) => { - done(); - }); + queryManager + .query({ query }) + .then(result => { + done(new Error('query() gave results on a store reset')); + }) + .catch(error => { + done(); + }); }); }); - it('should reject a query promise given a network error', (done) => { + it('should reject a query promise given a network error', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const networkError = new Error('Network error'); mockQueryManager({ request: { query }, error: networkError, - }).query({ query }).then((result) => { - done(new Error('Returned result on an errored fetchQuery')); - }).catch((error) => { - const apolloError = error as ApolloError; - - assert(apolloError.message); - assert.equal(apolloError.networkError, networkError); - assert.deepEqual(apolloError.graphQLErrors, []); - done(); - }).catch(done); + }) + .query({ query }) + .then(result => { + done(new Error('Returned result on an errored fetchQuery')); + }) + .catch(error => { + const apolloError = error as ApolloError; + + assert(apolloError.message); + assert.equal(apolloError.networkError, networkError); + assert.deepEqual(apolloError.graphQLErrors, []); + done(); + }) + .catch(done); }); - it('should error when we attempt to give an id beginning with $', (done) => { + it('should error when we attempt to give an id beginning with $', done => { const query = gql` query { author { @@ -2756,7 +2869,8 @@ describe('QueryManager', () => { id __typename } - }`; + } + `; const data = { author: { firstName: 'John', @@ -2765,19 +2879,27 @@ describe('QueryManager', () => { __typename: 'Author', }, }; - const reducerConfig = { dataIdFromObject: (x: any) => '$' + dataIdFromObject(x) }; - const store = createApolloStore({ config: reducerConfig, reportCrashes: false }); + const reducerConfig = { + dataIdFromObject: (x: any) => '$' + dataIdFromObject(x), + }; + const store = createApolloStore({ + config: reducerConfig, + reportCrashes: false, + }); createQueryManager({ networkInterface: mockNetworkInterface({ request: { query }, result: { data }, }), store, - }).query({ query }).then((result) => { - done(new Error('Returned a result when it should not have.')); - }).catch((error) => { - done(); - }); + }) + .query({ query }) + .then(result => { + done(new Error('Returned a result when it should not have.')); + }) + .catch(error => { + done(); + }); }); it('should reject a query promise given a GraphQL error', () => { @@ -2787,32 +2909,37 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const graphQLErrors = [new Error('GraphQL error')]; return mockQueryManager({ request: { query }, result: { errors: graphQLErrors }, - }).query({ query }).then( - (result) => { - throw new Error('Returned result on an errored fetchQuery'); - }, - // don't use .catch() for this or it will catch the above error - (error) => { - const apolloError = error as ApolloError; - assert(apolloError.message); - assert.equal(apolloError.graphQLErrors, graphQLErrors); - assert(!apolloError.networkError); - }); + }) + .query({ query }) + .then( + result => { + throw new Error('Returned result on an errored fetchQuery'); + }, + // don't use .catch() for this or it will catch the above error + error => { + const apolloError = error as ApolloError; + assert(apolloError.message); + assert.equal(apolloError.graphQLErrors, graphQLErrors); + assert(!apolloError.networkError); + }, + ); }); - it('should not empty the store when a non-polling query fails due to a network error', (done) 
=> { + it('should not empty the store when a non-polling query fails due to a network error', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'Dhaivat', @@ -2829,19 +2956,28 @@ describe('QueryManager', () => { error: new Error('Network error ocurred'), }, ); - queryManager.query({ query }).then((result) => { - assert.deepEqual(result.data, data); + queryManager + .query({ query }) + .then(result => { + assert.deepEqual(result.data, data); - queryManager.query({ query, fetchPolicy: 'network-only' }).then(() => { - done(new Error('Returned a result when it was not supposed to.')); - }).catch((error) => { - // make that the error thrown doesn't empty the state - assert.deepEqual(queryManager.store.getState().apollo.data['$ROOT_QUERY.author'], data['author']); - done(); + queryManager + .query({ query, fetchPolicy: 'network-only' }) + .then(() => { + done(new Error('Returned a result when it was not supposed to.')); + }) + .catch(error => { + // make that the error thrown doesn't empty the state + assert.deepEqual( + queryManager.store.getState().apollo.data['$ROOT_QUERY.author'], + data['author'], + ); + done(); + }); + }) + .catch(error => { + done(new Error('Threw an error on the first query.')); }); - }).catch((error) => { - done(new Error('Threw an error on the first query.')); - }); }); it('should be able to unsubscribe from a polling query subscription', () => { @@ -2851,7 +2987,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -2864,7 +3001,8 @@ describe('QueryManager', () => { result: { data }, }).watchQuery({ query, pollInterval: 20 }); - const { promise, subscription } = observableToPromiseAndSubscription({ + const { promise, subscription } = observableToPromiseAndSubscription( + { observable, wait: 60, }, @@ -2883,7 +3021,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -2900,9 +3039,14 @@ describe('QueryManager', () => { error: new Error('Network error occurred.'), }, ); - const observable = queryManager.watchQuery({ query, pollInterval: 20, notifyOnNetworkStatusChange: false }); + const observable = queryManager.watchQuery({ + query, + pollInterval: 20, + notifyOnNetworkStatusChange: false, + }); - return observableToPromise({ + return observableToPromise( + { observable, errorCallbacks: [ () => { @@ -2913,7 +3057,7 @@ describe('QueryManager', () => { }, ], }, - (result) => { + result => { assert.deepEqual(result.data, data); assert.deepEqual( queryManager.store.getState().apollo.data['$ROOT_QUERY.author'], @@ -2930,7 +3074,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { @@ -2943,7 +3088,6 @@ describe('QueryManager', () => { request: { query }, result: { data }, }, - { request: { query }, result: { data }, @@ -2954,12 +3098,10 @@ describe('QueryManager', () => { return Promise.all([ // we wait for a little bit to ensure the result of the second query // don't trigger another subscription event - observableToPromise({ observable, wait: 100 }, - (result) => { - assert.deepEqual(result.data, data); - }, - ), - queryManager.query({ query }).then((result) => { + observableToPromise({ observable, wait: 100 }, result => { + assert.deepEqual(result.data, data); + }), + queryManager.query({ query }).then(result => { assert.deepEqual(result.data, data); }), ]); @@ -2972,7 +3114,8 @@ describe('QueryManager', () => { firstName 
lastName } - }`; + } + `; const data = { author: { @@ -2980,26 +3123,22 @@ describe('QueryManager', () => { lastName: 'Smith', }, }; - const queryManager = mockQueryManager( - { - request: { query }, - result: { data }, - }, - ); + const queryManager = mockQueryManager({ + request: { query }, + result: { data }, + }); const observable = queryManager.watchQuery({ query, metadata: { foo: 'bar' }, }); - return observableToPromise({ observable }, - (result) => { - assert.deepEqual(result.data, data); - assert.deepEqual( - queryManager.queryStore.get(observable.queryId).metadata, - { foo: 'bar' }, - ); - }, - ); + return observableToPromise({ observable }, result => { + assert.deepEqual(result.data, data); + assert.deepEqual( + queryManager.queryStore.get(observable.queryId).metadata, + { foo: 'bar' }, + ); + }); }); it('should return stale data when we orphan a real-id node in the store with a real-id node', () => { @@ -3025,7 +3164,8 @@ describe('QueryManager', () => { id __typename } - }`; + } + `; const data1 = { author: { name: { @@ -3047,7 +3187,10 @@ describe('QueryManager', () => { }, }; const reducerConfig = { dataIdFromObject }; - const store = createApolloStore({ config: reducerConfig, reportCrashes: false }); + const store = createApolloStore({ + config: reducerConfig, + reportCrashes: false, + }); const queryManager = createQueryManager({ networkInterface: mockNetworkInterface( { @@ -3072,7 +3215,7 @@ describe('QueryManager', () => { observable: observable1, wait: 60, }, - (result) => { + result => { assert.deepEqual(result, { data: data1, loading: false, @@ -3080,7 +3223,7 @@ describe('QueryManager', () => { stale: false, }); }, - (result) => { + result => { assert.deepEqual(result, { data: data1, loading: false, @@ -3094,7 +3237,7 @@ describe('QueryManager', () => { observable: observable2, wait: 60, }, - (result) => { + result => { assert.deepEqual(result, { data: data2, loading: false, @@ -3115,7 +3258,8 @@ describe('QueryManager', () => { __typename id } - }`; + } + `; const dataWithId = { author: { firstName: 'John', @@ -3129,14 +3273,18 @@ describe('QueryManager', () => { author { address } - }`; + } + `; const dataWithoutId = { author: { address: 'fake address', }, }; const reducerConfig = { dataIdFromObject }; - const store = createApolloStore({ config: reducerConfig, reportCrashes: false }); + const store = createApolloStore({ + config: reducerConfig, + reportCrashes: false, + }); const queryManager = createQueryManager({ networkInterface: mockNetworkInterface( { @@ -3151,25 +3299,28 @@ describe('QueryManager', () => { store, }); - const observableWithId = queryManager.watchQuery({ query: queryWithId }); - const observableWithoutId = queryManager.watchQuery({ query: queryWithoutId }); + const observableWithId = queryManager.watchQuery({ + query: queryWithId, + }); + const observableWithoutId = queryManager.watchQuery({ + query: queryWithoutId, + }); // I'm not sure the waiting 60 here really is required, but the test used to do it return Promise.all([ - observableToPromise({ observable: observableWithId, wait: 60 }, - (result) => assert.deepEqual(result.data, dataWithId), + observableToPromise({ observable: observableWithId, wait: 60 }, result => + assert.deepEqual(result.data, dataWithId), ), observableToPromise({ - observable: observableWithoutId, - errorCallbacks: [ - (error) => assert.include(error.message, 'Store error'), - // The error gets triggered a second time when we unsubscribe the - // the first promise, as there is no duplicate prevention for errors - 
(error) => assert.include(error.message, 'Store error'), - ], - wait: 60, - }, - ), + observable: observableWithoutId, + errorCallbacks: [ + error => assert.include(error.message, 'Store error'), + // The error gets triggered a second time when we unsubscribe the + // the first promise, as there is no duplicate prevention for errors + error => assert.include(error.message, 'Store error'), + ], + wait: 60, + }), ]); }); @@ -3184,7 +3335,8 @@ describe('QueryManager', () => { age __typename } - }`; + } + `; const queryWithId = gql` query { author { @@ -3194,7 +3346,8 @@ describe('QueryManager', () => { id __typename } - }`; + } + `; const dataWithoutId = { author: { name: { @@ -3226,7 +3379,7 @@ describe('QueryManager', () => { }; const store = createApolloStore({ config: { dataIdFromObject } }); const queryManager = createQueryManager({ - networkInterface: mockNetworkInterface( + networkInterface: mockNetworkInterface( { request: { query: queryWithoutId }, result: { data: dataWithoutId }, @@ -3239,17 +3392,22 @@ describe('QueryManager', () => { store, }); - const observableWithId = queryManager.watchQuery({ query: queryWithId }); - const observableWithoutId = queryManager.watchQuery({ query: queryWithoutId }); + const observableWithId = queryManager.watchQuery({ + query: queryWithId, + }); + const observableWithoutId = queryManager.watchQuery({ + query: queryWithoutId, + }); // I'm not sure the waiting 60 here really is required, but the test used to do it return Promise.all([ - observableToPromise({ observable: observableWithoutId, wait: 120 }, - (result) => assert.deepEqual(result.data, dataWithoutId), - (result) => assert.deepEqual(result.data, mergedDataWithoutId), + observableToPromise( + { observable: observableWithoutId, wait: 120 }, + result => assert.deepEqual(result.data, dataWithoutId), + result => assert.deepEqual(result.data, mergedDataWithoutId), ), - observableToPromise({ observable: observableWithId, wait: 120 }, - (result) => assert.deepEqual(result.data, dataWithId), + observableToPromise({ observable: observableWithId, wait: 120 }, result => + assert.deepEqual(result.data, dataWithId), ), ]); }); @@ -3259,20 +3417,23 @@ describe('QueryManager', () => { const query = gql` query { fortuneCookie - }`; + } + `; const data = { fortuneCookie: 'Buy it', }; - return mockQueryManager({ + return mockQueryManager({ request: { query }, result: { data }, - }).query({ query }).then((result) => { - assert(!result.loading); - assert.deepEqual(result.data, data); - }); + }) + .query({ query }) + .then(result => { + assert(!result.loading); + assert.deepEqual(result.data, data); + }); }); - it('should be passed to the observer as false if we are returning all the data', (done) => { + it('should be passed to the observer as false if we are returning all the data', done => { assertWithObserver({ done, query: gql` @@ -3281,7 +3442,8 @@ describe('QueryManager', () => { firstName lastName } - }`, + } + `, result: { data: { author: { @@ -3306,7 +3468,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data1 = { author: { firstName: 'John', @@ -3331,30 +3494,32 @@ describe('QueryManager', () => { ); let count = 0; - queryManager.watchQuery({ - query: testQuery, - notifyOnNetworkStatusChange: false, - }).subscribe({ - next: result => { - switch (count++) { - case 0: - assert.isFalse(result.loading); - assert.deepEqual(result.data, data1); - setTimeout(() => { - queryManager.resetStore(); - }, 0); - break; - case 1: - assert.isFalse(result.loading); - 
assert.deepEqual(result.data, data2); - done(); - break; - default: - done(new Error('`next` was called to many times.')); - } - }, - error: error => done(error), - }); + queryManager + .watchQuery({ + query: testQuery, + notifyOnNetworkStatusChange: false, + }) + .subscribe({ + next: result => { + switch (count++) { + case 0: + assert.isFalse(result.loading); + assert.deepEqual(result.data, data1); + setTimeout(() => { + queryManager.resetStore(); + }, 0); + break; + case 1: + assert.isFalse(result.loading); + assert.deepEqual(result.data, data2); + done(); + break; + default: + done(new Error('`next` was called to many times.')); + } + }, + error: error => done(error), + }); }); }); @@ -3363,7 +3528,7 @@ describe('QueryManager', () => { let warned: any; let timesWarned = 0; - beforeEach((done) => { + beforeEach(done => { // clear warnings warned = null; timesWarned = 0; @@ -3382,7 +3547,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const mutationData = { changeAuthorName: { firstName: 'Jack', @@ -3395,7 +3561,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -3422,13 +3589,17 @@ describe('QueryManager', () => { result: { data: mutationData }, }, ); - const observable = queryManager.watchQuery({ query, notifyOnNetworkStatusChange: false }); - return observableToPromise({ observable }, - (result) => { + const observable = queryManager.watchQuery({ + query, + notifyOnNetworkStatusChange: false, + }); + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data); queryManager.mutate({ mutation, refetchQueries: ['getAuthors'] }); }, - (result) => assert.deepEqual(result.data, secondReqData), + result => assert.deepEqual(result.data, secondReqData), ); }); @@ -3439,7 +3610,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const mutationData = { changeAuthorName: { firstName: 'Jack', @@ -3452,7 +3624,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -3479,15 +3652,25 @@ describe('QueryManager', () => { result: { data: mutationData }, }, ); - const observable = queryManager.watchQuery({ query, notifyOnNetworkStatusChange: false }); - return observableToPromise({ observable }, - (result) => { + const observable = queryManager.watchQuery({ + query, + notifyOnNetworkStatusChange: false, + }); + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data); - queryManager.mutate({ mutation, refetchQueries: ['fakeQuery', 'getAuthors'] }); + queryManager.mutate({ + mutation, + refetchQueries: ['fakeQuery', 'getAuthors'], + }); }, - (result) => { + result => { assert.deepEqual(result.data, secondReqData); - assert.include(warned[0], 'Warning: unknown query with name fakeQuery'); + assert.include( + warned[0], + 'Warning: unknown query with name fakeQuery', + ); assert.equal(timesWarned, 1); }, ); @@ -3500,7 +3683,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const mutationData = { changeAuthorName: { firstName: 'Jack', @@ -3513,7 +3697,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -3542,15 +3727,17 @@ describe('QueryManager', () => { ); const observable = queryManager.watchQuery({ query }); - return observableToPromise({ observable }, - (result) => { - assert.deepEqual(result.data, data); - }, - ).then(() => { - // The subscription has been 
stopped already - return queryManager.mutate({ mutation, refetchQueries: ['getAuthors'] }); + return observableToPromise({ observable }, result => { + assert.deepEqual(result.data, data); }) - .then(() => assert.equal(timesWarned, 0)); + .then(() => { + // The subscription has been stopped already + return queryManager.mutate({ + mutation, + refetchQueries: ['getAuthors'], + }); + }) + .then(() => assert.equal(timesWarned, 0)); }); it('also works with a query document and variables', () => { @@ -3560,7 +3747,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const mutationData = { changeAuthorName: { firstName: 'Jack', @@ -3573,7 +3761,8 @@ describe('QueryManager', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -3601,30 +3790,32 @@ describe('QueryManager', () => { }, ); const observable = queryManager.watchQuery({ query }); - return observableToPromise({ observable }, - (result) => { + return observableToPromise( + { observable }, + result => { assert.deepEqual(result.data, data); queryManager.mutate({ mutation, refetchQueries: [{ query }] }); }, - (result) => assert.deepEqual(result.data, secondReqData), + result => assert.deepEqual(result.data, secondReqData), ); }); - afterEach((done) => { + afterEach(done => { // restore standard method console.warn = oldWarn; done(); }); }); - it('exposes errors on a refetch as a rejection', (done) => { + it('exposes errors on a refetch as a rejection', done => { const request = { query: gql` - { - people_one(id: 1) { - name + { + people_one(id: 1) { + name + } } - }`, + `, }; const firstResult = { data: { @@ -3647,22 +3838,25 @@ describe('QueryManager', () => { const handle = queryManager.watchQuery(request); handle.subscribe({ - error: () => { /* nothing */ }, + error: () => { + /* nothing */ + }, }); - handle.refetch() - .then(() => { - done(new Error('Error on refetch should reject promise')); - }) - .catch((error) => { - assert.deepEqual(error.graphQLErrors, [ - { - name: 'PeopleError', - message: 'This is not the person you are looking for.', - }, - ]); - done(); - }); + handle + .refetch() + .then(() => { + done(new Error('Error on refetch should reject promise')); + }) + .catch(error => { + assert.deepEqual(error.graphQLErrors, [ + { + name: 'PeopleError', + message: 'This is not the person you are looking for.', + }, + ]); + done(); + }); // We have an unhandled error warning from the `subscribe` above, which has no `error` cb }); diff --git a/test/assign.ts b/test/assign.ts index 5022c22a78c..22d057aeb29 100644 --- a/test/assign.ts +++ b/test/assign.ts @@ -4,14 +4,34 @@ import { assert } from 'chai'; describe('assign', () => { it('will merge many objects together', () => { assert.deepEqual(assign({ a: 1 }, { b: 2 }), { a: 1, b: 2 }); - assert.deepEqual(assign({ a: 1 }, { b: 2 }, { c: 3 }), { a: 1, b: 2, c: 3 }); - assert.deepEqual(assign({ a: 1 }, { b: 2 }, { c: 3 }, { d: 4 }), { a: 1, b: 2, c: 3, d: 4 }); + assert.deepEqual(assign({ a: 1 }, { b: 2 }, { c: 3 }), { + a: 1, + b: 2, + c: 3, + }); + assert.deepEqual(assign({ a: 1 }, { b: 2 }, { c: 3 }, { d: 4 }), { + a: 1, + b: 2, + c: 3, + d: 4, + }); }); it('will merge many objects together shallowly', () => { assert.deepEqual(assign({ x: { a: 1 } }, { x: { b: 2 } }), { x: { b: 2 } }); - assert.deepEqual(assign({ x: { a: 1 } }, { x: { b: 2 } }, { x: { c: 3 } }), { x: { c: 3 } }); - assert.deepEqual(assign({ x: { a: 1 } }, { x: { b: 2 } }, { x: { c: 3 } }, { x: { d: 4 } }), { x: { d: 4 } }); + assert.deepEqual( + assign({ x: { 
a: 1 } }, { x: { b: 2 } }, { x: { c: 3 } }), + { x: { c: 3 } }, + ); + assert.deepEqual( + assign( + { x: { a: 1 } }, + { x: { b: 2 } }, + { x: { c: 3 } }, + { x: { d: 4 } }, + ), + { x: { d: 4 } }, + ); }); it('will mutate and return the source objects', () => { diff --git a/test/batchedNetworkInterface.ts b/test/batchedNetworkInterface.ts index 2ffb4b957d3..dea79bd662d 100644 --- a/test/batchedNetworkInterface.ts +++ b/test/batchedNetworkInterface.ts @@ -6,15 +6,9 @@ import * as sinon from 'sinon'; import { HTTPBatchedNetworkInterface } from '../src/transport/batchedNetworkInterface'; -import { - createMockFetch, - createMockedIResponse, -} from './mocks/mockFetch'; +import { createMockFetch, createMockedIResponse } from './mocks/mockFetch'; -import { - Request, - printRequest, -} from '../src/transport/networkInterface'; +import { Request, printRequest } from '../src/transport/networkInterface'; import { BatchMiddlewareInterface } from '../src/transport/middleware'; import { BatchAfterwareInterface } from '../src/transport/afterware'; @@ -38,13 +32,13 @@ describe('HTTPBatchedNetworkInterface', () => { opts = {}, }: { requestResultPairs: { - request: Request, - result: ExecutionResult, + request: Request; + result: ExecutionResult; }[]; fetchFunc?: any; middlewares?: BatchMiddlewareInterface[]; afterwares?: BatchAfterwareInterface[]; - opts?: RequestInit, + opts?: RequestInit; }) => { const url = 'http://fake.com/graphql'; const batchedNetworkInterface = new HTTPBatchedNetworkInterface({ @@ -63,21 +57,27 @@ describe('HTTPBatchedNetworkInterface', () => { resultList.push(result); }); - fetch = fetchFunc || createMockFetch({ - url, - opts: merge({ - body: JSON.stringify(printedRequests), - headers: { - Accept: '*/*', - 'Content-Type': 'application/json', - }, - method: 'POST', - }, opts), - result: createMockedIResponse(resultList), - }); + fetch = + fetchFunc || + createMockFetch({ + url, + opts: merge( + { + body: JSON.stringify(printedRequests), + headers: { + Accept: '*/*', + 'Content-Type': 'application/json', + }, + method: 'POST', + }, + opts, + ), + result: createMockedIResponse(resultList), + }); - return batchedNetworkInterface.batchQuery(requestResultPairs.map(({ request }) => request)) - .then((results) => { + return batchedNetworkInterface + .batchQuery(requestResultPairs.map(({ request }) => request)) + .then(results => { assert.deepEqual(results, resultList); }); }; @@ -89,7 +89,8 @@ describe('HTTPBatchedNetworkInterface', () => { firstName lastName } - }`; + } + `; const authorResult = { data: { @@ -105,7 +106,8 @@ describe('HTTPBatchedNetworkInterface', () => { person { name } - }`; + } + `; const personResult = { data: { person: { @@ -128,7 +130,6 @@ describe('HTTPBatchedNetworkInterface', () => { assert(batchedNetworkInterface.batchQuery); }); - it('should have a default value of 10ms for batchInterval', () => { const url = 'http://notreal.com/graphql'; const opts = {}; @@ -142,10 +143,12 @@ describe('HTTPBatchedNetworkInterface', () => { it('should correctly return the result for a single request', () => { return assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], }); }); @@ -178,15 +181,16 @@ describe('HTTPBatchedNetworkInterface', () => { result: personResult, }, ], - middlewares: [{ - applyBatchMiddleware(req, next) { - middlewareCallCounter(); + middlewares: [ + { + applyBatchMiddleware(req, next) { + 
middlewareCallCounter(); - next(); + next(); + }, }, - }], - }) - .then(() => { + ], + }).then(() => { assert.equal(middlewareCallCounter.callCount, 1); }); }); @@ -205,58 +209,80 @@ describe('HTTPBatchedNetworkInterface', () => { result: personResult, }, ], - afterwares: [{ - applyBatchAfterware({ responses }, next) { - afterwareCallCounter(); + afterwares: [ + { + applyBatchAfterware({ responses }, next) { + afterwareCallCounter(); - next(); + next(); + }, }, - }], - }) - .then(() => { + ], + }).then(() => { assert.equal(afterwareCallCounter.callCount, 1); }); }); describe('errors', () => { - it('should return errors thrown by fetch', (done) => { + it('should return errors thrown by fetch', done => { const err = new Error('Error of some kind thrown by fetch.'); - const fetchFunc = () => { throw err; }; + const fetchFunc = () => { + throw err; + }; assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], fetchFunc, - }).then(() => { - done(new Error('Assertion passed when it should not have.')); - }).catch((error) => { - assert(error); - assert.deepEqual(error, err); - done(); - }); + }) + .then(() => { + done(new Error('Assertion passed when it should not have.')); + }) + .catch(error => { + assert(error); + assert.deepEqual(error, err); + done(); + }); }); - it('should throw an error with the response when a non-200 response is received', (done) => { - const fakeForbiddenResponse = createMockedIResponse([], { status: 401, statusText: 'Unauthorized'}); + it('should throw an error with the response when a non-200 response is received', done => { + const fakeForbiddenResponse = createMockedIResponse([], { + status: 401, + statusText: 'Unauthorized', + }); const fetchFunc = () => Promise.resolve(fakeForbiddenResponse); assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], fetchFunc, - }).then(() => { - done(new Error('An error should have been thrown')); - }).catch(err => { - assert.strictEqual(err.response, fakeForbiddenResponse, 'Incorrect response provided'); - assert.equal(err.message, 'Network request failed with status 401 - "Unauthorized"', 'Incorrect message generated'); - done(); - }); + }) + .then(() => { + done(new Error('An error should have been thrown')); + }) + .catch(err => { + assert.strictEqual( + err.response, + fakeForbiddenResponse, + 'Incorrect response provided', + ); + assert.equal( + err.message, + 'Network request failed with status 401 - "Unauthorized"', + 'Incorrect message generated', + ); + done(); + }); }); - it('should return errors thrown by middleware', (done) => { + it('should return errors thrown by middleware', done => { const err = new Error('Error of some kind thrown by middleware.'); const errorMiddleware: BatchMiddlewareInterface = { applyBatchMiddleware() { @@ -264,20 +290,24 @@ describe('HTTPBatchedNetworkInterface', () => { }, }; assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], - middlewares: [ errorMiddleware ], - }).then(() => { - done(new Error('Returned a result when it should not have.')); - }).catch((error) => { - assert.deepEqual(error, err); - done(); - }); + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], + middlewares: [errorMiddleware], + 
}) + .then(() => { + done(new Error('Returned a result when it should not have.')); + }) + .catch(error => { + assert.deepEqual(error, err); + done(); + }); }); - it('should return errors thrown by afterware', (done) => { + it('should return errors thrown by afterware', done => { const err = new Error('Error of some kind thrown by afterware.'); const errorAfterware: BatchAfterwareInterface = { applyBatchAfterware() { @@ -285,17 +315,21 @@ describe('HTTPBatchedNetworkInterface', () => { }, }; assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], - afterwares: [ errorAfterware ], - }).then(() => { - done(new Error('Returned a result when it should not have.')); - }).catch((error) => { - assert.deepEqual(error, err); - done(); - }); + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], + afterwares: [errorAfterware], + }) + .then(() => { + done(new Error('Returned a result when it should not have.')); + }) + .catch(error => { + assert.deepEqual(error, err); + done(); + }); }); }); @@ -312,12 +346,14 @@ describe('HTTPBatchedNetworkInterface', () => { }; const options = { headers: customHeaders }; return assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], opts: options, - middlewares: [ changeMiddleware ], + middlewares: [changeMiddleware], }); }); @@ -328,10 +364,12 @@ describe('HTTPBatchedNetworkInterface', () => { }; const options = { method: 'GET', headers: customHeaders }; return assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], opts: options, }); }); @@ -357,36 +395,55 @@ describe('HTTPBatchedNetworkInterface', () => { it('executes afterware when valid responses given back', done => { assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], afterwares: testAfterwares, - }).then(() => { - assert.equal(afterwareStub.callCount, testAfterwares.length, 'Afterwares provided were not invoked'); - done(); - }).catch(err => { - done(err); - }); + }) + .then(() => { + assert.equal( + afterwareStub.callCount, + testAfterwares.length, + 'Afterwares provided were not invoked', + ); + done(); + }) + .catch(err => { + done(err); + }); }); it('executes afterware when an invalid response is given back', done => { - const fakeForbiddenResponse = createMockedIResponse([], { status: 401, statusText: 'Unauthorized'}); + const fakeForbiddenResponse = createMockedIResponse([], { + status: 401, + statusText: 'Unauthorized', + }); const fetchFunc = () => Promise.resolve(fakeForbiddenResponse); assertRoundtrip({ - requestResultPairs: [{ - request: { query: authorQuery }, - result: authorResult, - }], + requestResultPairs: [ + { + request: { query: authorQuery }, + result: authorResult, + }, + ], fetchFunc, afterwares: testAfterwares, - }).then(() => { - done(new Error('The networkInterface did not reject as expected')); - }).catch(err => { - assert.equal(afterwareStub.callCount, testAfterwares.length, 'Afterwares provided were not invoked'); - done(); - }); + }) + .then(() => { + done(new Error('The networkInterface did not reject as expected')); + }) + .catch(err => { + 
assert.equal( + afterwareStub.callCount, + testAfterwares.length, + 'Afterwares provided were not invoked', + ); + done(); + }); }); }); }); diff --git a/test/batching.ts b/test/batching.ts index 1d70b57c9de..6cf5a811d66 100644 --- a/test/batching.ts +++ b/test/batching.ts @@ -1,11 +1,7 @@ -import { QueryBatcher, - QueryFetchRequest, - } from '../src/transport/batching'; +import { QueryBatcher, QueryFetchRequest } from '../src/transport/batching'; import { assert } from 'chai'; import { Request } from '../src/transport/networkInterface'; -import { - mockBatchedNetworkInterface, -} from './mocks/mockNetworkInterface'; +import { mockBatchedNetworkInterface } from './mocks/mockNetworkInterface'; import gql from 'graphql-tag'; import { ExecutionResult } from 'graphql'; @@ -45,7 +41,8 @@ describe('QueryBatcher', () => { firstName lastName } - }`; + } + `; const request: QueryFetchRequest = { request: { query }, @@ -65,11 +62,12 @@ describe('QueryBatcher', () => { firstName lastName } - }`; + } + `; const data = { - 'author' : { - 'firstName': 'John', - 'lastName': 'Smith', + author: { + firstName: 'John', + lastName: 'Smith', }, }; const myNetworkInterface = mockBatchedNetworkInterface( @@ -84,42 +82,48 @@ describe('QueryBatcher', () => { ); const batcher = new QueryBatcher({ batchInterval: 10, - batchFetchFunction: myNetworkInterface.batchQuery.bind(myNetworkInterface), + batchFetchFunction: myNetworkInterface.batchQuery.bind( + myNetworkInterface, + ), }); const request: Request = { query, }; - it('should be able to consume from a queue containing a single query', (done) => { + it('should be able to consume from a queue containing a single query', done => { const myBatcher = new QueryBatcher({ batchInterval: 10, - batchFetchFunction: myNetworkInterface.batchQuery.bind(myNetworkInterface), + batchFetchFunction: myNetworkInterface.batchQuery.bind( + myNetworkInterface, + ), }); myBatcher.enqueueRequest(request); - const promises: (Promise | undefined)[] = myBatcher.consumeQueue()!; + const promises: ( + | Promise + | undefined)[] = myBatcher.consumeQueue()!; assert.equal(promises.length, 1); - promises[0]!.then((resultObj) => { + promises[0]!.then(resultObj => { assert.equal(myBatcher.queuedRequests.length, 0); - assert.deepEqual(resultObj, { data } ); + assert.deepEqual(resultObj, { data }); done(); }); }); - it('should be able to consume from a queue containing multiple queries', (done) => { + it('should be able to consume from a queue containing multiple queries', done => { const request2: Request = { query, }; const NI = mockBatchedNetworkInterface( - { - request: { query }, - result: {data }, - }, - { - request: { query }, - result: { data }, - }, - ); + { + request: { query }, + result: { data }, + }, + { + request: { query }, + result: { data }, + }, + ); const myBatcher = new QueryBatcher({ batchInterval: 10, @@ -127,39 +131,39 @@ describe('QueryBatcher', () => { }); myBatcher.enqueueRequest(request); myBatcher.enqueueRequest(request2); - const promises: (Promise | undefined)[] = myBatcher.consumeQueue()!; + const promises: ( + | Promise + | undefined)[] = myBatcher.consumeQueue()!; assert.equal(batcher.queuedRequests.length, 0); assert.equal(promises.length, 2); - promises[0]!.then((resultObj1) => { + promises[0]!.then(resultObj1 => { assert.deepEqual(resultObj1, { data }); - promises[1]!.then((resultObj2) => { + promises[1]!.then(resultObj2 => { assert.deepEqual(resultObj2, { data }); done(); }); }); }); - it('should return a promise when we enqueue a request and resolve it with a 
result', (done) => { - const NI = mockBatchedNetworkInterface( - { - request: { query }, - result: { data }, - }, - ); + it('should return a promise when we enqueue a request and resolve it with a result', done => { + const NI = mockBatchedNetworkInterface({ + request: { query }, + result: { data }, + }); const myBatcher = new QueryBatcher({ batchInterval: 10, batchFetchFunction: NI.batchQuery.bind(NI), }); const promise = myBatcher.enqueueRequest(request); myBatcher.consumeQueue(); - promise.then((result) => { + promise.then(result => { assert.deepEqual(result, { data }); done(); }); }); }); - it('should work when single query', (done) => { + it('should work when single query', done => { const batcher = new QueryBatcher({ batchInterval: 10, batchFetchFunction: networkInterface.batchQuery.bind(networkInterface), @@ -170,7 +174,8 @@ describe('QueryBatcher', () => { firstName lastName } - }`; + } + `; const request: Request = { query }; batcher.enqueueRequest(request); @@ -182,7 +187,7 @@ describe('QueryBatcher', () => { }, 20); }); - it('should correctly batch multiple queries', (done) => { + it('should correctly batch multiple queries', done => { const batcher = new QueryBatcher({ batchInterval: 10, batchFetchFunction: networkInterface.batchQuery.bind(networkInterface), @@ -193,7 +198,8 @@ describe('QueryBatcher', () => { firstName lastName } - }`; + } + `; const request: Request = { query }; batcher.enqueueRequest(request); @@ -213,27 +219,28 @@ describe('QueryBatcher', () => { }, 20); }); - it('should reject the promise if there is a network error', (done) => { + it('should reject the promise if there is a network error', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const request: Request = { query: query, }; const error = new Error('Network error'); - const myNetworkInterface = mockBatchedNetworkInterface( - { - request: { query }, - error, - }, - ); + const myNetworkInterface = mockBatchedNetworkInterface({ + request: { query }, + error, + }); const batcher = new QueryBatcher({ batchInterval: 10, - batchFetchFunction: myNetworkInterface.batchQuery.bind(myNetworkInterface), + batchFetchFunction: myNetworkInterface.batchQuery.bind( + myNetworkInterface, + ), }); const promise = batcher.enqueueRequest(request); batcher.consumeQueue(); diff --git a/test/client.ts b/test/client.ts index 5839975356b..e1ae8d9189e 100644 --- a/test/client.ts +++ b/test/client.ts @@ -3,13 +3,9 @@ const { assert } = chai; import * as sinon from 'sinon'; import * as fetchMock from 'fetch-mock'; -import ApolloClient, { - printAST, -} from '../src'; +import ApolloClient, { printAST } from '../src'; -import { - disableFragmentWarnings as graphqlTagDisableFragmentWarnings, -} from 'graphql-tag'; +import { disableFragmentWarnings as graphqlTagDisableFragmentWarnings } from 'graphql-tag'; import { GraphQLError, @@ -18,19 +14,13 @@ import { FragmentDefinitionNode, } from 'graphql'; -import { - rootReducer as todosReducer, -} from './fixtures/redux-todomvc'; +import { rootReducer as todosReducer } from './fixtures/redux-todomvc'; -import { - Store, -} from '../src/store'; +import { Store } from '../src/store'; import gql from 'graphql-tag'; -import { - print, -} from 'graphql/language/printer'; +import { print } from 'graphql/language/printer'; import { NetworkStatus } from '../src/queries/networkStatus'; @@ -41,9 +31,7 @@ import { applyMiddleware, } from 'redux'; -import { - QueryManager, -} from '../src/core/QueryManager'; +import { QueryManager } from '../src/core/QueryManager'; 
import { FragmentMatcherInterface, @@ -59,25 +47,16 @@ import { NetworkInterface, } from '../src/transport/networkInterface'; -import { - createBatchingNetworkInterface, -} from '../src/transport/batchedNetworkInterface'; +import { createBatchingNetworkInterface } from '../src/transport/batchedNetworkInterface'; import mockNetworkInterface from './mocks/mockNetworkInterface'; import { mockObservableNetworkInterface } from './mocks/mockNetworkInterface'; -import { - getFragmentDefinitions, -} from '../src/queries/getFromAST'; +import { getFragmentDefinitions } from '../src/queries/getFromAST'; -import { - createMockFetch, - createMockedIResponse, -} from './mocks/mockFetch'; +import { createMockFetch, createMockedIResponse } from './mocks/mockFetch'; -import { - WatchQueryOptions, -} from '../src/core/watchQueryOptions'; +import { WatchQueryOptions } from '../src/core/watchQueryOptions'; import subscribeAndCount from './util/subscribeAndCount'; @@ -126,14 +105,16 @@ describe('client', () => { assert.isDefined(client.store.getState().apollo); }); - it('can allow passing in a network interface', () => { const networkInterface = createNetworkInterface({ uri: 'swapi' }); const client = new ApolloClient({ networkInterface, }); - assert.equal((client.networkInterface as HTTPNetworkInterface)._uri, networkInterface._uri); + assert.equal( + (client.networkInterface as HTTPNetworkInterface)._uri, + networkInterface._uri, + ); }); it('can allow passing in a store', () => { @@ -142,7 +123,7 @@ describe('client', () => { const store: ReduxStore = createStore( combineReducers({ todos: todosReducer, - apollo: client.reducer()as any, + apollo: client.reducer() as any, }), applyMiddleware(client.middleware()), ); @@ -166,10 +147,9 @@ describe('client', () => { assert.equal( error.message, 'Existing store does not use apolloReducer. Please make sure the store ' + - 'is properly configured and "reduxRootSelector" is correctly specified.', + 'is properly configured and "reduxRootSelector" is correctly specified.', ); } - }); it('has a top level key by default', () => { @@ -177,16 +157,13 @@ describe('client', () => { client.initStore(); - assert.deepEqual( - client.store.getState(), - { - apollo: { - data: {}, - optimistic: [], - reducerError: null, - }, + assert.deepEqual(client.store.getState(), { + apollo: { + data: {}, + optimistic: [], + reducerError: null, }, - ); + }); }); it('should allow passing in a selector function for apollo state', () => { @@ -197,13 +174,15 @@ describe('client', () => { // shouldn't throw createStore( - combineReducers({ + combineReducers( + { testApollo: client.reducer(), - } as any), - // here "client.setStore(store)" will be called internally, - // this method throws if "reduxRootSelector" or "reduxRootKey" - // are not configured properly - applyMiddleware(client.middleware()), + } as any, + ), + // here "client.setStore(store)" will be called internally, + // this method throws if "reduxRootSelector" or "reduxRootKey" + // are not configured properly + applyMiddleware(client.middleware()), ); }); @@ -218,8 +197,8 @@ describe('client', () => { assert.fail(); } catch (error) { assert.equal( - error.message, - 'Cannot initialize the store because "reduxRootSelector" is provided. ' + + error.message, + 'Cannot initialize the store because "reduxRootSelector" is provided. ' + 'reduxRootSelector should only be used when the store is created outside of the client. ' + 'This may lead to unexpected results when querying the store internally. 
' + `Please remove that option from ApolloClient constructor.`, @@ -231,7 +210,13 @@ describe('client', () => { const client = new ApolloClient(); assert.throws(() => { - client.query(gql`{ a }` as any); + client.query( + gql` + { + a + } + ` as any, + ); }, 'query option is required. You must specify your GraphQL document in the query option.'); assert.throws(() => { client.query({ query: '{ a }' } as any); @@ -242,7 +227,15 @@ describe('client', () => { const client = new ApolloClient(); assert.throws(() => { - client.mutate({ query: gql`{ a }` } as any); + client.mutate( + { + query: gql` + { + a + } + `, + } as any, + ); }, 'mutation option is required. You must specify your GraphQL document in the mutation option.'); }); @@ -274,7 +267,7 @@ describe('client', () => { return clientRoundtrip(query, { data }); }); - it('should allow a single query with an observable enabled network interface', (done) => { + it('should allow a single query with an observable enabled network interface', done => { const query = gql` query people { allPeople(first: 1) { @@ -311,7 +304,7 @@ describe('client', () => { addTypename: false, }); - const basic = client.query({ query, variables }).then((actualResult) => { + const basic = client.query({ query, variables }).then(actualResult => { assert.deepEqual(actualResult.data, data); done(); }); @@ -319,7 +312,9 @@ describe('client', () => { it('should allow for a single query with complex default variables to take place', () => { const query = gql` - query stuff($test: Input = {key1: ["value", "value2"], key2: {key3: 4}}) { + query stuff( + $test: Input = { key1: ["value", "value2"], key2: { key3: 4 } } + ) { allStuff(test: $test) { people { name @@ -341,7 +336,9 @@ describe('client', () => { }, }; - const variables = {test: { key1: ['value', 'value2'], key2: { key3: 4 } } }; + const variables = { + test: { key1: ['value', 'value2'], key2: { key3: 4 } }, + }; const networkInterface = mockNetworkInterface({ request: { query, variables }, @@ -353,11 +350,11 @@ describe('client', () => { addTypename: false, }); - const basic = client.query({ query, variables }).then((actualResult) => { + const basic = client.query({ query, variables }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); - const withDefault = client.query({ query }).then((actualResult) => { + const withDefault = client.query({ query }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); @@ -401,30 +398,35 @@ describe('client', () => { }, }; - const networkInterface = mockNetworkInterface({ - request: { query, variables }, - result: { data: result }, - }, { - request: { query, variables: override }, - result: { data: overriddenResult }, - }); + const networkInterface = mockNetworkInterface( + { + request: { query, variables }, + result: { data: result }, + }, + { + request: { query, variables: override }, + result: { data: overriddenResult }, + }, + ); const client = new ApolloClient({ networkInterface, addTypename: false, }); - const basic = client.query({ query, variables }).then((actualResult) => { + const basic = client.query({ query, variables }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); - const withDefault = client.query({ query }).then((actualResult) => { + const withDefault = client.query({ query }).then(actualResult => { return assert.deepEqual(actualResult.data, result); }); - const withOverride = client.query({ query, variables: override }).then((actualResult) => { - return assert.deepEqual(actualResult.data, 
overriddenResult); - }); + const withOverride = client + .query({ query, variables: override }) + .then(actualResult => { + return assert.deepEqual(actualResult.data, overriddenResult); + }); return Promise.all([basic, withDefault, withOverride]); }); @@ -462,13 +464,17 @@ describe('client', () => { const ifm = new IntrospectionFragmentMatcher({ introspectionQueryResultData: { __schema: { - types: [{ - kind: 'UNION', - name: 'Query', - possibleTypes: [{ - name: 'Record', - }], - }], + types: [ + { + kind: 'UNION', + name: 'Query', + possibleTypes: [ + { + name: 'Record', + }, + ], + }, + ], }, }, }); @@ -515,14 +521,12 @@ describe('client', () => { applyMiddleware(client.middleware()), ); - return client.query({ query }) - .then((result) => { - assert.deepEqual(result.data, data); - }); + return client.query({ query }).then(result => { + assert.deepEqual(result.data, data); + }); }); it('store can be rehydrated from the server', () => { - const query = gql` query people { allPeople(first: 1) { @@ -555,11 +559,13 @@ describe('client', () => { name: 'Luke Skywalker', }, 'ROOT_QUERY.allPeople({"first":1})': { - people: [ { - type: 'id', - generated: true, - id: 'ROOT_QUERY.allPeople({"first":"1"}).people.0', - } ], + people: [ + { + type: 'id', + generated: true, + id: 'ROOT_QUERY.allPeople({"first":"1"}).people.0', + }, + ], }, ROOT_QUERY: { 'allPeople({"first":1})': { @@ -573,9 +579,11 @@ describe('client', () => { }, }; - const finalState = { apollo: assign({}, initialState.apollo, { - reducerError: null, - }) }; + const finalState = { + apollo: assign({}, initialState.apollo, { + reducerError: null, + }), + }; const client = new ApolloClient({ networkInterface, @@ -583,11 +591,10 @@ describe('client', () => { addTypename: false, }); - return client.query({ query }) - .then((result) => { - assert.deepEqual(result.data, data); - assert.deepEqual(finalState, client.store.getState()); - }); + return client.query({ query }).then(result => { + assert.deepEqual(result.data, data); + assert.deepEqual(finalState, client.store.getState()); + }); }); it('allows for a single query with existing store and custom key', () => { @@ -627,19 +634,17 @@ describe('client', () => { createStore( combineReducers({ todos: todosReducer, - test: client.reducer()as any, + test: client.reducer() as any, }), applyMiddleware(client.middleware()), ); - return client.query({ query }) - .then((result: any) => { - assert.deepEqual(result.data, data); - }); + return client.query({ query }).then((result: any) => { + assert.deepEqual(result.data, data); + }); }); it('should return errors correctly for a single query', () => { - const query = gql` query people { allPeople(first: 1) { @@ -667,13 +672,12 @@ describe('client', () => { addTypename: false, }); - return client.query({ query }) - .catch((error: ApolloError) => { - assert.deepEqual(error.graphQLErrors, errors); - }); + return client.query({ query }).catch((error: ApolloError) => { + assert.deepEqual(error.graphQLErrors, errors); + }); }); - it('should return GraphQL errors correctly for a single query with an observable enabled network interface', (done) => { + it('should return GraphQL errors correctly for a single query with an observable enabled network interface', done => { const query = gql` query people { allPeople(first: 1) { @@ -701,14 +705,13 @@ describe('client', () => { addTypename: false, }); - client.query({ query }) - .catch((error: ApolloError) => { - assert.deepEqual(error.graphQLErrors, errors); - done(); - }); + client.query({ query }).catch((error: 
ApolloError) => { + assert.deepEqual(error.graphQLErrors, errors); + done(); + }); }); - it('should pass a network error correctly on a query with observable network interface', (done) => { + it('should pass a network error correctly on a query with observable network interface', done => { const query = gql` query people { allPeople(first: 1) { @@ -739,15 +742,14 @@ describe('client', () => { addTypename: false, }); - client.query({ query }) - .catch((error: ApolloError) => { - assert(error.networkError); - assert.deepEqual(error.networkError!.message, networkError.message); - done(); - }); + client.query({ query }).catch((error: ApolloError) => { + assert(error.networkError); + assert.deepEqual(error.networkError!.message, networkError.message); + done(); + }); }); - it('should surface errors in observer.next as uncaught', (done) => { + it('should surface errors in observer.next as uncaught', done => { const expectedError = new Error('this error should not reach the store'); const listeners = process.listeners('uncaughtException'); const oldHandler = listeners[listeners.length - 1]; @@ -772,7 +774,7 @@ describe('client', () => { } `; - const data = { + const data = { allPeople: { people: [ { @@ -801,7 +803,7 @@ describe('client', () => { }); }); - it('should surfaces errors in observer.error as uncaught', (done) => { + it('should surfaces errors in observer.error as uncaught', done => { const expectedError = new Error('this error should not reach the store'); const listeners = process.listeners('uncaughtException'); const oldHandler = listeners[listeners.length - 1]; @@ -829,7 +831,7 @@ describe('client', () => { const networkInterface = mockNetworkInterface({ request: { query }, - result: { }, + result: {}, }); const client = new ApolloClient({ @@ -848,8 +850,7 @@ describe('client', () => { }); }); - it('should allow for subscribing to a request', (done) => { - + it('should allow for subscribing to a request', done => { const query = gql` query people { allPeople(first: 1) { @@ -860,7 +861,7 @@ describe('client', () => { } `; - const data = { + const data = { allPeople: { people: [ { @@ -897,7 +898,8 @@ describe('client', () => { firstName lastName } - }`; + } + `; const transformedQuery = gql` query { author { @@ -905,38 +907,40 @@ describe('client', () => { lastName __typename } - }`; + } + `; const result = { - 'author': { - 'firstName': 'John', - 'lastName': 'Smith', + author: { + firstName: 'John', + lastName: 'Smith', }, }; const transformedResult = { - 'author': { - 'firstName': 'John', - 'lastName': 'Smith', - '__typename': 'Author', + author: { + firstName: 'John', + lastName: 'Smith', + __typename: 'Author', }, }; const networkInterface = mockNetworkInterface( - { - request: { query }, - result: { data: result }, - }, - { - request: { query: transformedQuery }, - result: { data: transformedResult }, - }); + { + request: { query }, + result: { data: result }, + }, + { + request: { query: transformedQuery }, + result: { data: transformedResult }, + }, + ); const client = new ApolloClient({ networkInterface, addTypename: true, }); - return client.query({ query }).then((actualResult) => { + return client.query({ query }).then(actualResult => { assert.deepEqual(actualResult.data, transformedResult); }); }); @@ -948,7 +952,8 @@ describe('client', () => { firstName lastName } - }`; + } + `; const transformedQuery = gql` query { author { @@ -956,38 +961,42 @@ describe('client', () => { lastName __typename } - }`; + } + `; const result = { - 'author': { - 'firstName': 'John', - 
'lastName': 'Smith', + author: { + firstName: 'John', + lastName: 'Smith', }, }; const transformedResult = { - 'author': { - 'firstName': 'John', - 'lastName': 'Smith', - '__typename': 'Author', + author: { + firstName: 'John', + lastName: 'Smith', + __typename: 'Author', }, }; const networkInterface = mockNetworkInterface( - { - request: { query }, - result: { data: result }, - }, - { - request: { query: transformedQuery }, - result: { data: transformedResult }, - }); + { + request: { query }, + result: { data: result }, + }, + { + request: { query: transformedQuery }, + result: { data: transformedResult }, + }, + ); const client = new ApolloClient({ networkInterface, addTypename: true, }); - return client.query({ fetchPolicy: 'network-only', query }).then((actualResult) => { - assert.deepEqual(actualResult.data, transformedResult); - }); + return client + .query({ fetchPolicy: 'network-only', query }) + .then(actualResult => { + assert.deepEqual(actualResult.data, transformedResult); + }); }); it('should handle named fragments on mutations', () => { @@ -1000,30 +1009,31 @@ describe('client', () => { } } } + fragment authorDetails on Author { firstName lastName - }`; + } + `; const result = { - 'starAuthor': { - 'author': { + starAuthor: { + author: { __typename: 'Author', - 'firstName': 'John', - 'lastName': 'Smith', + firstName: 'John', + lastName: 'Smith', }, }, }; - const networkInterface = mockNetworkInterface( - { - request: { query: mutation }, - result: { data: result }, - }); + const networkInterface = mockNetworkInterface({ + request: { query: mutation }, + result: { data: result }, + }); const client = new ApolloClient({ networkInterface, addTypename: false, }); - return client.mutate({ mutation }).then((actualResult) => { + return client.mutate({ mutation }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); }); @@ -1034,17 +1044,19 @@ describe('client', () => { firstName lastName } + query { author { __typename ...authorDetails } - }`; + } + `; const result = { - 'author': { + author: { __typename: 'Author', - 'firstName': 'John', - 'lastName': 'Smith', + firstName: 'John', + lastName: 'Smith', }, }; @@ -1058,9 +1070,11 @@ describe('client', () => { addTypename: false, }); - return client.query({ fetchPolicy: 'network-only', query }).then((actualResult) => { - assert.deepEqual(actualResult.data, result); - }); + return client + .query({ fetchPolicy: 'network-only', query }) + .then(actualResult => { + assert.deepEqual(actualResult.data, result); + }); }); it('should be able to handle named fragments with multiple fragments', () => { @@ -1072,24 +1086,26 @@ describe('client', () => { ...moreDetails } } + fragment authorDetails on Author { firstName lastName } + fragment moreDetails on Author { address - }`; + } + `; const result = { - 'author' : { + author: { __typename: 'Author', - 'firstName': 'John', - 'lastName': 'Smith', - 'address': '1337 10th St.', + firstName: 'John', + lastName: 'Smith', + address: '1337 10th St.', }, }; - const networkInterface = mockNetworkInterface( - { + const networkInterface = mockNetworkInterface({ request: { query }, result: { data: result }, }); @@ -1098,7 +1114,7 @@ describe('client', () => { addTypename: false, }); - return client.query({ query }).then((actualResult) => { + return client.query({ query }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); }); @@ -1111,20 +1127,21 @@ describe('client', () => { ...authorDetails } } + fragment authorDetails on Author { firstName lastName - }`; + } + `; 
const result = { - 'author' : { + author: { __typename: 'Author', - 'firstName': 'John', - 'lastName': 'Smith', + firstName: 'John', + lastName: 'Smith', }, }; - const networkInterface = mockNetworkInterface( - { + const networkInterface = mockNetworkInterface({ request: { query }, result: { data: result }, }); @@ -1133,7 +1150,7 @@ describe('client', () => { addTypename: false, }); - return client.query({ query }).then((actualResult) => { + return client.query({ query }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); }); @@ -1154,36 +1171,35 @@ describe('client', () => { color __typename } - }`; + } + `; const result = { - 'items': [ + items: [ { - '__typename': 'ColorItem', - 'id': '27tlpoPeXm6odAxj3paGQP', - 'color': 'red', + __typename: 'ColorItem', + id: '27tlpoPeXm6odAxj3paGQP', + color: 'red', }, { - '__typename': 'MonochromeItem', - 'id': '1t3iFLsHBm4c4RjOMdMgOO', + __typename: 'MonochromeItem', + id: '1t3iFLsHBm4c4RjOMdMgOO', }, ], }; - const fancyFragmentMatcher = ( idValue: any, // TODO types, please. typeCondition: string, context: any, ): boolean => { - const obj = context.store[idValue.id]; - if (! obj) { + if (!obj) { return false; } - const implementingTypesMap: {[key: string]: string[]} = { - 'Item': ['ColorItem', 'MonochromeItem'], + const implementingTypesMap: { [key: string]: string[] } = { + Item: ['ColorItem', 'MonochromeItem'], }; if (obj.__typename === typeCondition) { @@ -1198,9 +1214,7 @@ describe('client', () => { return false; }; - - const networkInterface = mockNetworkInterface( - { + const networkInterface = mockNetworkInterface({ request: { query }, result: { data: result }, }); @@ -1231,39 +1245,44 @@ describe('client', () => { __typename } __typename - }`; + } + `; const result = { - 'items': [ + items: [ { - '__typename': 'ColorItem', - 'id': '27tlpoPeXm6odAxj3paGQP', - 'color': 'red', + __typename: 'ColorItem', + id: '27tlpoPeXm6odAxj3paGQP', + color: 'red', }, { - '__typename': 'MonochromeItem', - 'id': '1t3iFLsHBm4c4RjOMdMgOO', + __typename: 'MonochromeItem', + id: '1t3iFLsHBm4c4RjOMdMgOO', }, ], }; - const networkInterface = mockNetworkInterface( - { - request: { query }, - result: { data: result }, - }); + const networkInterface = mockNetworkInterface({ + request: { query }, + result: { data: result }, + }); const ifm = new IntrospectionFragmentMatcher({ introspectionQueryResultData: { __schema: { - types: [{ - kind: 'UNION', - name: 'Item', - possibleTypes: [{ - name: 'ColorItem', - }, { - name: 'MonochromeItem', - }], - }], + types: [ + { + kind: 'UNION', + name: 'Item', + possibleTypes: [ + { + name: 'ColorItem', + }, + { + name: 'MonochromeItem', + }, + ], + }, + ], }, }, }); @@ -1273,12 +1292,12 @@ describe('client', () => { fragmentMatcher: ifm, }); - return client.query({ query }).then((actualResult) => { + return client.query({ query }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); }); - it('should call updateQueries, update and reducer after mutation on query with inlined fragments on an Interface type', (done) => { + it('should call updateQueries, update and reducer after mutation on query with inlined fragments on an Interface type', done => { const query = gql` query items { items { @@ -1294,17 +1313,18 @@ describe('client', () => { __typename } __typename - }`; + } + `; const result = { - 'items': [ + items: [ { - '__typename': 'ColorItem', - 'id': '27tlpoPeXm6odAxj3paGQP', - 'color': 'red', + __typename: 'ColorItem', + id: '27tlpoPeXm6odAxj3paGQP', + color: 'red', }, { - '__typename': 
'MonochromeItem', - 'id': '1t3iFLsHBm4c4RjOMdMgOO', + __typename: 'MonochromeItem', + id: '1t3iFLsHBm4c4RjOMdMgOO', }, ], }; @@ -1312,16 +1332,18 @@ describe('client', () => { const mutation = gql` mutation myMutationName { fortuneCookie - }`; + } + `; const mutationResult = { - 'fortuneCookie': 'The waiter spit in your food', + fortuneCookie: 'The waiter spit in your food', }; const networkInterface = mockNetworkInterface( { request: { query }, result: { data: result }, - }, { + }, + { request: { query: mutation }, result: { data: mutationResult }, }, @@ -1330,15 +1352,20 @@ describe('client', () => { const ifm = new IntrospectionFragmentMatcher({ introspectionQueryResultData: { __schema: { - types: [{ - kind: 'UNION', - name: 'Item', - possibleTypes: [{ - name: 'ColorItem', - }, { - name: 'MonochromeItem', - }], - }], + types: [ + { + kind: 'UNION', + name: 'Item', + possibleTypes: [ + { + name: 'ColorItem', + }, + { + name: 'MonochromeItem', + }, + ], + }, + ], }, }, }); @@ -1360,7 +1387,7 @@ describe('client', () => { return prev; }; const updateQueries = { - 'items': queryUpdater, + items: queryUpdater, }; const updateSpy = sinon.spy(); @@ -1369,7 +1396,8 @@ describe('client', () => { const sub = obs.subscribe({ next() { - client.mutate({ mutation, updateQueries, update: updateSpy }) + client + .mutate({ mutation, updateQueries, update: updateSpy }) .then(() => { assert.isTrue(reducerSpy.called); assert.isTrue(queryUpdaterSpy.called); @@ -1377,7 +1405,9 @@ describe('client', () => { sub.unsubscribe(); done(); }) - .catch((err) => { done(err); }); + .catch(err => { + done(err); + }); }, error(err) { done(err); @@ -1389,9 +1419,10 @@ describe('client', () => { const query = gql` query myQueryName { fortuneCookie - }`; + } + `; const data = { - 'fortuneCookie': 'The waiter spit in your food', + fortuneCookie: 'The waiter spit in your food', }; const networkInterface: NetworkInterface = { query(request: Request): Promise { @@ -1404,7 +1435,7 @@ describe('client', () => { addTypename: false, }); - return client.query({ query }).then((actualResult) => { + return client.query({ query }).then(actualResult => { assert.deepEqual(actualResult.data, data); }); }); @@ -1413,9 +1444,10 @@ describe('client', () => { const mutation = gql` mutation myMutationName { fortuneCookie - }`; + } + `; const data = { - 'fortuneCookie': 'The waiter spit in your food', + fortuneCookie: 'The waiter spit in your food', }; const networkInterface: NetworkInterface = { query(request: Request): Promise { @@ -1428,7 +1460,7 @@ describe('client', () => { addTypename: false, }); - return client.mutate({ mutation }).then((actualResult) => { + return client.mutate({ mutation }).then(actualResult => { assert.deepEqual(actualResult.data, data); }); }); @@ -1439,7 +1471,8 @@ describe('client', () => { author { name } - }`; + } + `; const data = { author: { name: 'Jonas', @@ -1453,15 +1486,17 @@ describe('client', () => { // we have two responses for identical queries, but only the first should be requested. // the second one should never make it through to the network interface. 
- const networkInterface = mockNetworkInterface({ - request: { query: queryDoc }, - result: { data }, - delay: 10, - }, - { - request: { query: queryDoc }, - result: { data: data2 }, - }); + const networkInterface = mockNetworkInterface( + { + request: { query: queryDoc }, + result: { data }, + delay: 10, + }, + { + request: { query: queryDoc }, + result: { data: data2 }, + }, + ); const client = new ApolloClient({ networkInterface, addTypename: false, @@ -1484,7 +1519,8 @@ describe('client', () => { author { name } - }`; + } + `; const data = { author: { name: 'Jonas', @@ -1498,15 +1534,17 @@ describe('client', () => { // we have two responses for identical queries, but only the first should be requested. // the second one should never make it through to the network interface. - const networkInterface = mockNetworkInterface({ - request: { query: queryDoc }, - result: { data }, - delay: 10, - }, - { - request: { query: queryDoc }, - result: { data: data2 }, - }); + const networkInterface = mockNetworkInterface( + { + request: { query: queryDoc }, + result: { data }, + delay: 10, + }, + { + request: { query: queryDoc }, + result: { data: data2 }, + }, + ); const client = new ApolloClient({ networkInterface, addTypename: false, @@ -1562,7 +1600,7 @@ describe('client', () => { const orig = client.store.dispatch; let actionEmitted = false; - client.store.dispatch = (action) => { + client.store.dispatch = action => { if (action.type === 'APOLLO_QUERY_INIT') { actionEmitted = true; } @@ -1570,7 +1608,7 @@ describe('client', () => { orig(action); }; - const queryPromise = client.query({ query }).then((result) => { + const queryPromise = client.query({ query }).then(result => { assert.deepEqual(result.data, data); }); @@ -1590,62 +1628,44 @@ describe('client', () => { it('errors when returnPartialData is used on query', () => { const client = new ApolloClient(); - assert.throws( - () => { - client.query({ query, returnPartialData: true } as WatchQueryOptions ); - }, - /returnPartialData/, - ); + assert.throws(() => { + client.query({ query, returnPartialData: true } as WatchQueryOptions); + }, /returnPartialData/); }); it('errors when noFetch is used on query', () => { const client = new ApolloClient(); - assert.throws( - () => { - client.query({ query, noFetch: true } as WatchQueryOptions ); - }, - /noFetch/, - ); + assert.throws(() => { + client.query({ query, noFetch: true } as WatchQueryOptions); + }, /noFetch/); }); it('errors when forceFetch is used on query', () => { const client = new ApolloClient(); - assert.throws( - () => { - client.query({ query, forceFetch: true } as WatchQueryOptions ); - }, - /forceFetch/, - ); + assert.throws(() => { + client.query({ query, forceFetch: true } as WatchQueryOptions); + }, /forceFetch/); }); it('errors when returnPartialData is used on watchQuery', () => { const client = new ApolloClient(); - assert.throws( - () => { - client.query({ query, returnPartialData: true } as WatchQueryOptions ); - }, - /returnPartialData/, - ); + assert.throws(() => { + client.query({ query, returnPartialData: true } as WatchQueryOptions); + }, /returnPartialData/); }); it('errors when noFetch is used on watchQuery', () => { const client = new ApolloClient(); - assert.throws( - () => { - client.query({ query, noFetch: true } as WatchQueryOptions ); - }, - /noFetch/, - ); + assert.throws(() => { + client.query({ query, noFetch: true } as WatchQueryOptions); + }, /noFetch/); }); it('errors when forceFetch is used on watchQuery', () => { const client = new ApolloClient(); - 
assert.throws( - () => { - client.query({ query, forceFetch: true } as WatchQueryOptions ); - }, - /forceFetch/, - ); + assert.throws(() => { + client.query({ query, forceFetch: true } as WatchQueryOptions); + }, /forceFetch/); }); }); @@ -1684,16 +1704,13 @@ describe('client', () => { addTypename: false, }); - return client.query({ query }) - .then((result) => { - assert.deepEqual(result.data, data); - assert.deepEqual(client.store.getState()['apollo'].data['1'], - { - id: '1', - name: 'Luke Skywalker', - }, - ); + return client.query({ query }).then(result => { + assert.deepEqual(result.data, data); + assert.deepEqual(client.store.getState()['apollo'].data['1'], { + id: '1', + name: 'Luke Skywalker', }); + }); }); it('for existing store', () => { @@ -1710,22 +1727,18 @@ describe('client', () => { const store = createStore( combineReducers({ - apollo: client.reducer()as any, + apollo: client.reducer() as any, }), applyMiddleware(client.middleware()), ); - - return client.query({ query }) - .then((result) => { - assert.deepEqual(result.data, data); - assert.deepEqual((store.getState() as any)['apollo'].data['1'], - { - id: '1', - name: 'Luke Skywalker', - }, - ); + return client.query({ query }).then(result => { + assert.deepEqual(result.data, data); + assert.deepEqual((store.getState() as any)['apollo'].data['1'], { + id: '1', + name: 'Luke Skywalker', }); + }); }); }); @@ -1752,14 +1765,12 @@ describe('client', () => { // Test that cache-and-network can only be used on watchQuery, not query. it('errors when being used on query', () => { const client = new ApolloClient(); - assert.throws( - () => { - client.query({ query, fetchPolicy: 'cache-and-network' }); - }, - ); + assert.throws(() => { + client.query({ query, fetchPolicy: 'cache-and-network' }); + }); }); - it('fetches from cache first, then network', (done) => { + it('fetches from cache first, then network', done => { const networkInterface = mockNetworkInterface({ request: { query }, result: { data: networkFetch }, @@ -1774,7 +1785,10 @@ describe('client', () => { data: initialData, }); - const obs = client.watchQuery({ query, fetchPolicy: 'cache-and-network'}); + const obs = client.watchQuery({ + query, + fetchPolicy: 'cache-and-network', + }); subscribeAndCount(done, obs, (handleCount, result) => { if (handleCount === 1) { @@ -1786,7 +1800,7 @@ describe('client', () => { }); }); - it('does not fail if cache entry is not present', (done) => { + it('does not fail if cache entry is not present', done => { const networkInterface = mockNetworkInterface({ request: { query }, result: { data: networkFetch }, @@ -1796,7 +1810,10 @@ describe('client', () => { addTypename: false, }); - const obs = client.watchQuery({ query, fetchPolicy: 'cache-and-network'}); + const obs = client.watchQuery({ + query, + fetchPolicy: 'cache-and-network', + }); subscribeAndCount(done, obs, (handleCount, result) => { if (handleCount === 1) { @@ -1810,23 +1827,26 @@ describe('client', () => { }); }); - it('fails if network request fails', (done) => { + it('fails if network request fails', done => { const networkInterface = mockNetworkInterface(); // no queries = no replies. 
const client = new ApolloClient({ networkInterface, addTypename: false, }); - const obs = client.watchQuery({ query, fetchPolicy: 'cache-and-network'}); + const obs = client.watchQuery({ + query, + fetchPolicy: 'cache-and-network', + }); let count = 0; obs.subscribe({ - next: (result) => { + next: result => { assert.equal(result.data, undefined); assert(result.loading); count++; - }, - error: (e) => { + }, + error: e => { assert.match(e.message, /No more mocked responses/); assert.equal(count, 1); // make sure next was called. done(); @@ -1842,13 +1862,25 @@ describe('client', () => { it('cannot be started with watchQuery or query', () => { const client = new ApolloClient(); assert.throws( - () => client.watchQuery({ query: gql`{ abc }`, fetchPolicy: 'standby'}), + () => + client.watchQuery({ + query: gql` + { + abc + } + `, + fetchPolicy: 'standby', + }), 'client.watchQuery cannot be called with fetchPolicy set to "standby"', ); }); - it('are not watching the store or notifying on updates', (done) => { - const query = gql`{ test }`; + it('are not watching the store or notifying on updates', done => { + const query = gql` + { + test + } + `; const data = { test: 'ok' }; const data2 = { test: 'not ok' }; @@ -1865,11 +1897,11 @@ describe('client', () => { subscribeAndCount(done, obs, (handleCount, result) => { if (handleCount === 1) { assert.deepEqual(result.data, data); - obs.setOptions({ fetchPolicy: 'standby' }).then( () => { + obs.setOptions({ fetchPolicy: 'standby' }).then(() => { client.writeQuery({ query, data: data2 }); // this write should be completely ignored by the standby query }); - setTimeout( () => { + setTimeout(() => { if (!handleCalled) { done(); } @@ -1882,8 +1914,12 @@ describe('client', () => { }); }); - it('return the current result when coming out of standby', (done) => { - const query = gql`{ test }`; + it('return the current result when coming out of standby', done => { + const query = gql` + { + test + } + `; const data = { test: 'ok' }; const data2 = { test: 'not ok' }; @@ -1900,10 +1936,10 @@ describe('client', () => { subscribeAndCount(done, obs, (handleCount, result) => { if (handleCount === 1) { assert.deepEqual(result.data, data); - obs.setOptions({ fetchPolicy: 'standby' }).then( () => { + obs.setOptions({ fetchPolicy: 'standby' }).then(() => { client.writeQuery({ query, data: data2 }); // this write should be completely ignored by the standby query - setTimeout( () => { + setTimeout(() => { obs.setOptions({ fetchPolicy: 'cache-first' }); }, 10); }); @@ -1937,17 +1973,19 @@ describe('client', () => { }, }; - let networkInterface: any; let clock: any; beforeEach(() => { - networkInterface = mockNetworkInterface({ - request: { query }, - result: { data: firstFetch }, - }, { - request: { query }, - result: { data: secondFetch }, - }); + networkInterface = mockNetworkInterface( + { + request: { query }, + result: { data: firstFetch }, + }, + { + request: { query }, + result: { data: secondFetch }, + }, + ); }); afterEach(() => { @@ -1963,12 +2001,15 @@ describe('client', () => { }); // Run a query first to initialize the store - return client.query({ query }) - // then query for real - .then(() => client.query({ query, fetchPolicy: 'network-only' })) - .then((result) => { - assert.deepEqual<{}>(result.data, { myNumber: { n: 2 } }); - }); + return ( + client + .query({ query }) + // then query for real + .then(() => client.query({ query, fetchPolicy: 'network-only' })) + .then(result => { + assert.deepEqual<{}>(result.data, { myNumber: { n: 2 } }); + }) + ); }); 
it('can be disabled with ssrMode', () => { @@ -1981,15 +2022,21 @@ describe('client', () => { const options: WatchQueryOptions = { query, fetchPolicy: 'network-only' }; // Run a query first to initialize the store - return client.query({ query }) - // then query for real - .then(() => client.query(options)) - .then((result) => { - assert.deepEqual<{}>(result.data, { myNumber: { n: 1 } }); - - // Test that options weren't mutated, issue #339 - assert.deepEqual(options, { query, fetchPolicy: 'network-only' }); - }); + return ( + client + .query({ query }) + // then query for real + .then(() => client.query(options)) + .then(result => { + assert.deepEqual<{}>(result.data, { myNumber: { n: 1 } }); + + // Test that options weren't mutated, issue #339 + assert.deepEqual(options, { + query, + fetchPolicy: 'network-only', + }); + }) + ); }); it('can temporarily be disabled with ssrForceFetchDelay', () => { @@ -2002,21 +2049,22 @@ describe('client', () => { }); // Run a query first to initialize the store - const outerPromise = client.query({ query }) + const outerPromise = client + .query({ query }) // then query for real .then(() => { const promise = client.query({ query, fetchPolicy: 'network-only' }); clock.tick(0); return promise; }) - .then((result) => { + .then(result => { assert.deepEqual<{}>(result.data, { myNumber: { n: 1 } }); clock.tick(100); const promise = client.query({ query, fetchPolicy: 'network-only' }); clock.tick(0); return promise; }) - .then((result) => { + .then(result => { assert.deepEqual<{}>(result.data, { myNumber: { n: 2 } }); }); clock.tick(0); @@ -2028,19 +2076,21 @@ describe('client', () => { const query = gql` query { fortuneCookie - }`; + } + `; assert.equal(printAST(query), print(query)); }); - it('should pass a network error correctly on a mutation', (done) => { + it('should pass a network error correctly on a mutation', done => { const mutation = gql` mutation { person { firstName lastName } - }`; + } + `; const data = { person: { firstName: 'John', @@ -2057,16 +2107,19 @@ describe('client', () => { addTypename: false, }); - client.mutate({ mutation }).then((result) => { - done(new Error('Returned a result when it should not have.')); - }).catch((error: ApolloError) => { - assert(error.networkError); - assert.equal(error.networkError!.message, networkError.message); - done(); - }); + client + .mutate({ mutation }) + .then(result => { + done(new Error('Returned a result when it should not have.')); + }) + .catch((error: ApolloError) => { + assert(error.networkError); + assert.equal(error.networkError!.message, networkError.message); + done(); + }); }); - it('should pass a GraphQL error correctly on a mutation', (done) => { + it('should pass a GraphQL error correctly on a mutation', done => { const mutation = gql` mutation { newPerson { @@ -2075,14 +2128,15 @@ describe('client', () => { lastName } } - }`; + } + `; const data = { person: { firstName: 'John', lastName: 'Smith', }, }; - const errors = [ new Error('Some kind of GraphQL error.') ]; + const errors = [new Error('Some kind of GraphQL error.')]; const client = new ApolloClient({ networkInterface: mockNetworkInterface({ request: { query: mutation }, @@ -2090,17 +2144,20 @@ describe('client', () => { }), addTypename: false, }); - client.mutate({ mutation }).then((result) => { - done(new Error('Returned a result when it should not have.')); - }).catch((error: ApolloError) => { - assert(error.graphQLErrors); - assert.equal(error.graphQLErrors.length, 1); - assert.equal(error.graphQLErrors[0].message, 
errors[0].message); - done(); - }); + client + .mutate({ mutation }) + .then(result => { + done(new Error('Returned a result when it should not have.')); + }) + .catch((error: ApolloError) => { + assert(error.graphQLErrors); + assert.equal(error.graphQLErrors.length, 1); + assert.equal(error.graphQLErrors[0].message, errors[0].message); + done(); + }); }); - it('should rollback optimistic after mutation got a GraphQL error', (done) => { + it('should rollback optimistic after mutation got a GraphQL error', done => { const mutation = gql` mutation { newPerson { @@ -2109,7 +2166,8 @@ describe('client', () => { lastName } } - }`; + } + `; const data = { newPerson: { person: { @@ -2118,7 +2176,7 @@ describe('client', () => { }, }, }; - const errors = [ new Error('Some kind of GraphQL error.') ]; + const errors = [new Error('Some kind of GraphQL error.')]; const client = new ApolloClient({ networkInterface: mockNetworkInterface({ request: { query: mutation }, @@ -2138,15 +2196,17 @@ describe('client', () => { }, }); assert.equal(client.store.getState().apollo.optimistic.length, 1); - mutatePromise.then((result) => { - done(new Error('Returned a result when it should not have.')); - }).catch((error: ApolloError) => { - assert.equal(client.store.getState().apollo.optimistic.length, 0); - done(); - }); + mutatePromise + .then(result => { + done(new Error('Returned a result when it should not have.')); + }) + .catch((error: ApolloError) => { + assert.equal(client.store.getState().apollo.optimistic.length, 0); + done(); + }); }); - it('has a resetStore method which calls QueryManager', (done) => { + it('has a resetStore method which calls QueryManager', done => { const client = new ApolloClient(); client.queryManager = { resetStore: () => { @@ -2156,14 +2216,15 @@ describe('client', () => { client.resetStore(); }); - it('should allow us to create a network interface with transport-level batching', (done) => { + it('should allow us to create a network interface with transport-level batching', done => { const firstQuery = gql` query { author { firstName lastName } - }`; + } + `; const firstResult = { data: { author: { @@ -2178,7 +2239,8 @@ describe('client', () => { person { name } - }`; + } + `; const secondResult = { data: { person: { @@ -2215,23 +2277,29 @@ describe('client', () => { Promise.all([ networkInterface.query({ query: firstQuery }), networkInterface.query({ query: secondQuery }), - ]).then((results) => { - assert.deepEqual<[ExecutionResult]>(results, [firstResult, secondResult]); - fetch = oldFetch; - done(); - }).catch( e => { - console.error(e); - }); + ]) + .then(results => { + assert.deepEqual<[ExecutionResult]>(results, [ + firstResult, + secondResult, + ]); + fetch = oldFetch; + done(); + }) + .catch(e => { + console.error(e); + }); }); - it('should throw an error if response to batch request is not an array', (done) => { + it('should throw an error if response to batch request is not an array', done => { const firstQuery = gql` query { author { firstName lastName } - }`; + } + `; const firstResult = { data: { author: { @@ -2246,7 +2314,8 @@ describe('client', () => { person { name } - }`; + } + `; const url = 'http://not-a-real-url.com'; const oldFetch = fetch; fetch = createMockFetch({ @@ -2276,23 +2345,29 @@ describe('client', () => { Promise.all([ networkInterface.query({ query: firstQuery }), networkInterface.query({ query: secondQuery }), - ]).then((results) => { - assert.equal(true, false, 'expected response to throw an error'); - }).catch( e => { - assert.equal(e.message, 
'BatchingNetworkInterface: server response is not an array'); - fetch = oldFetch; - done(); - }); + ]) + .then(results => { + assert.equal(true, false, 'expected response to throw an error'); + }) + .catch(e => { + assert.equal( + e.message, + 'BatchingNetworkInterface: server response is not an array', + ); + fetch = oldFetch; + done(); + }); }); - it('should not do transport-level batching when the interval is exceeded', (done) => { + it('should not do transport-level batching when the interval is exceeded', done => { const firstQuery = gql` query { author { firstName lastName } - }`; + } + `; const firstResult = { data: { author: { @@ -2307,7 +2382,8 @@ describe('client', () => { person { name } - }`; + } + `; const secondResult = { data: { person: { @@ -2317,37 +2393,40 @@ describe('client', () => { }; const url = 'http://not-a-real-url.com'; const oldFetch = fetch; - fetch = createMockFetch({ - url, - opts: { - body: JSON.stringify([ - { - query: print(firstQuery), + fetch = createMockFetch( + { + url, + opts: { + body: JSON.stringify([ + { + query: print(firstQuery), + }, + ]), + headers: { + Accept: '*/*', + 'Content-Type': 'application/json', }, - ]), - headers: { - Accept: '*/*', - 'Content-Type': 'application/json', + method: 'POST', }, - method: 'POST', + result: createMockedIResponse([firstResult]), }, - result: createMockedIResponse([firstResult]), - }, { - url, - opts: { - body: JSON.stringify([ - { - query: print(secondQuery), + { + url, + opts: { + body: JSON.stringify([ + { + query: print(secondQuery), + }, + ]), + headers: { + Accept: '*/*', + 'Content-Type': 'application/json', }, - ]), - headers: { - Accept: '*/*', - 'Content-Type': 'application/json', + method: 'POST', }, - method: 'POST', + result: createMockedIResponse([secondResult]), }, - result: createMockedIResponse([secondResult]), - }); + ); const networkInterface = createBatchingNetworkInterface({ uri: 'http://not-a-real-url.com', batchInterval: 5, @@ -2355,24 +2434,34 @@ describe('client', () => { }); Promise.all([ networkInterface.query({ query: firstQuery }), - new Promise( (resolve, reject) => - setTimeout(() => resolve(networkInterface.query({ query: secondQuery })), 10)), - ]).then((results) => { - assert.deepEqual<[ExecutionResult]>(results, [firstResult, secondResult]); - fetch = oldFetch; - done(); - }).catch( e => { - console.error(e); - }); + new Promise((resolve, reject) => + setTimeout( + () => resolve(networkInterface.query({ query: secondQuery })), + 10, + ), + ), + ]) + .then(results => { + assert.deepEqual<[ExecutionResult]>(results, [ + firstResult, + secondResult, + ]); + fetch = oldFetch; + done(); + }) + .catch(e => { + console.error(e); + }); }); - it('should limit the amount of queries in a batch according to the batchMax value', (done) => { + it('should limit the amount of queries in a batch according to the batchMax value', done => { const authorQuery = gql` query { author { firstName } - }`; + } + `; const authorResult = { data: { author: { @@ -2385,7 +2474,8 @@ describe('client', () => { person { name } - }`; + } + `; const personResult = { data: { person: { @@ -2404,48 +2494,50 @@ describe('client', () => { }); const oldFetch = fetch; - fetch = createMockFetch({ - url, - opts: { - body: JSON.stringify([ - { query: print(authorQuery) }, - { query: print(personQuery) }, - ]), - headers: { - Accept: '*/*', - 'Content-Type': 'application/json', + fetch = createMockFetch( + { + url, + opts: { + body: JSON.stringify([ + { query: print(authorQuery) }, + { query: print(personQuery) }, + ]), 
+ headers: { + Accept: '*/*', + 'Content-Type': 'application/json', + }, + method: 'POST', }, - method: 'POST', + result: createMockedIResponse([authorResult, personResult]), }, - result: createMockedIResponse([authorResult, personResult]), - }, { - url, - opts: { - body: JSON.stringify([ - { query: print(authorQuery) }, - { query: print(personQuery) }, - ]), - headers: { - Accept: '*/*', - 'Content-Type': 'application/json', + { + url, + opts: { + body: JSON.stringify([ + { query: print(authorQuery) }, + { query: print(personQuery) }, + ]), + headers: { + Accept: '*/*', + 'Content-Type': 'application/json', + }, + method: 'POST', }, - method: 'POST', + result: createMockedIResponse([authorResult, personResult]), }, - result: createMockedIResponse([authorResult, personResult]), - }, { - url, - opts: { - body: JSON.stringify([ - { query: print(authorQuery) }, - ]), - headers: { - Accept: '*/*', - 'Content-Type': 'application/json', + { + url, + opts: { + body: JSON.stringify([{ query: print(authorQuery) }]), + headers: { + Accept: '*/*', + 'Content-Type': 'application/json', + }, + method: 'POST', }, - method: 'POST', + result: createMockedIResponse([authorResult]), }, - result: createMockedIResponse([authorResult]), - }); + ); Promise.all([ networkInterface.query({ query: authorQuery }), @@ -2453,35 +2545,39 @@ describe('client', () => { networkInterface.query({ query: authorQuery }), networkInterface.query({ query: personQuery }), networkInterface.query({ query: authorQuery }), - ]).then((results) => { - assert.deepEqual<[ - ExecutionResult, - ExecutionResult, - ExecutionResult, - ExecutionResult, - ExecutionResult - ]>(results, [ - authorResult, - personResult, - authorResult, - personResult, - authorResult, - ]); - fetch = oldFetch; - done(); - }).catch( e => { - console.error(e); - }); - + ]) + .then(results => { + assert.deepEqual< + [ + ExecutionResult, + ExecutionResult, + ExecutionResult, + ExecutionResult, + ExecutionResult + ] + >(results, [ + authorResult, + personResult, + authorResult, + personResult, + authorResult, + ]); + fetch = oldFetch; + done(); + }) + .catch(e => { + console.error(e); + }); }); - it('should not limit the amount of queries in a batch when batchMax is not set', (done) => { + it('should not limit the amount of queries in a batch when batchMax is not set', done => { const authorQuery = gql` query { author { firstName } - }`; + } + `; const authorResult = { data: { author: { @@ -2494,7 +2590,8 @@ describe('client', () => { person { name } - }`; + } + `; const personResult = { data: { person: { @@ -2543,25 +2640,29 @@ describe('client', () => { networkInterface.query({ query: authorQuery }), networkInterface.query({ query: personQuery }), networkInterface.query({ query: authorQuery }), - ]).then((results) => { - assert.deepEqual<[ - ExecutionResult, - ExecutionResult, - ExecutionResult, - ExecutionResult, - ExecutionResult - ]>(results, [ - authorResult, - personResult, - authorResult, - personResult, - authorResult, - ]); - fetch = oldFetch; - done(); - }).catch( e => { - console.error(e); - }); + ]) + .then(results => { + assert.deepEqual< + [ + ExecutionResult, + ExecutionResult, + ExecutionResult, + ExecutionResult, + ExecutionResult + ] + >(results, [ + authorResult, + personResult, + authorResult, + personResult, + authorResult, + ]); + fetch = oldFetch; + done(); + }) + .catch(e => { + console.error(e); + }); }); it('should enable dev tools logging', () => { @@ -2601,11 +2702,10 @@ describe('client', () => { log.push(entry); }); - return 
client.query({ query }) - .then(() => { - assert.equal(log.length, 2); - assert.equal(log[1].state.queries['0'].loading, false); - }); + return client.query({ query }).then(() => { + assert.equal(log.length, 2); + assert.equal(log[1].state.queries['0'].loading, false); + }); }); it('with passed in store', () => { @@ -2632,15 +2732,13 @@ describe('client', () => { log.push(entry); }); - return client.query({ query }) - .then(() => { - assert.equal(log.length, 2); - }); + return client.query({ query }).then(() => { + assert.equal(log.length, 2); + }); }); }); - it('should propagate errors from network interface to observers', (done) => { - + it('should propagate errors from network interface to observers', done => { const networkInterface = { query: () => Promise.reject(new Error('Uh oh!')), }; @@ -2650,7 +2748,15 @@ describe('client', () => { addTypename: false, }); - const handle = client.watchQuery({ query: gql`query { a b c }` }); + const handle = client.watchQuery({ + query: gql` + query { + a + b + c + } + `, + }); handle.subscribe({ error(error) { @@ -2669,10 +2775,12 @@ describe('client', () => { } } `; - const errors: GraphQLError[] = [{ - name: 'test', - message: 'Cannot query field "foo" on type "Post".', - }]; + const errors: GraphQLError[] = [ + { + name: 'test', + message: 'Cannot query field "foo" on type "Post".', + }, + ]; const networkInterface = mockNetworkInterface({ request: { query }, result: { errors }, @@ -2682,7 +2790,10 @@ describe('client', () => { }); return client.query({ query }).catch(err => { - assert.equal(err.message, 'GraphQL error: Cannot query field "foo" on type "Post".'); + assert.equal( + err.message, + 'GraphQL error: Cannot query field "foo" on type "Post".', + ); }); }); @@ -2717,7 +2828,10 @@ describe('client', () => { networkInterface, }); - return withWarning(() => client.query({ query }), /Missing field description/); + return withWarning( + () => client.query({ query }), + /Missing field description/, + ); }); it('runs a query with the connection directive and writes it to the store key defined in the directive', () => { @@ -2726,7 +2840,8 @@ describe('client', () => { books(skip: 0, limit: 2) @connection(key: "abc") { name } - }`; + } + `; const transformedQuery = gql` { @@ -2734,13 +2849,14 @@ describe('client', () => { name __typename } - }`; + } + `; const result = { - 'books': [ + books: [ { - 'name': 'abcd', - '__typename': 'Book', + name: 'abcd', + __typename: 'Book', }, ], }; @@ -2754,7 +2870,7 @@ describe('client', () => { networkInterface, }); - return client.query({ query }).then((actualResult) => { + return client.query({ query }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); }); @@ -2765,7 +2881,8 @@ describe('client', () => { books(skip: 0, limit: 2) @connection { name } - }`; + } + `; const transformedQuery = gql` { @@ -2773,13 +2890,14 @@ describe('client', () => { name __typename } - }`; + } + `; const result = { - 'books': [ + books: [ { - 'name': 'abcd', - '__typename': 'Book', + name: 'abcd', + __typename: 'Book', }, ], }; @@ -2793,108 +2911,115 @@ describe('client', () => { networkInterface, }); - return client.query({ query }).then((actualResult) => { + return client.query({ query }).then(actualResult => { assert.deepEqual(actualResult.data, result); }); }); }); it('should run a query with the connection directive and write the result to the store key defined in the directive', () => { - const query = gql` - { - books(skip: 0, limit: 2) @connection(key: "abc") { - name - } - }`; + const query = 
gql` + { + books(skip: 0, limit: 2) @connection(key: "abc") { + name + } + } + `; - const transformedQuery = gql` - { - books(skip: 0, limit: 2) @connection(key: "abc") { - name - __typename - } - }`; + const transformedQuery = gql` + { + books(skip: 0, limit: 2) @connection(key: "abc") { + name + __typename + } + } + `; - const result = { - 'books': [ - { - 'name': 'abcd', - '__typename': 'Book', - }, - ], - }; + const result = { + books: [ + { + name: 'abcd', + __typename: 'Book', + }, + ], + }; - const networkInterface = mockNetworkInterface({ - request: { query: transformedQuery }, - result: { data: result }, - }); + const networkInterface = mockNetworkInterface({ + request: { query: transformedQuery }, + result: { data: result }, + }); - const client = new ApolloClient({ - networkInterface, - }); + const client = new ApolloClient({ + networkInterface, + }); - return client.query({ query }).then((actualResult) => { - assert.deepEqual(actualResult.data, result); - assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY.abc.0': { name: 'abcd', __typename: 'Book' }, - 'ROOT_QUERY': { - abc: [ - { - 'generated': true, - 'id': 'ROOT_QUERY.abc.0', - 'type': 'id', - }, - ], - }, - }); + return client.query({ query }).then(actualResult => { + assert.deepEqual(actualResult.data, result); + assert.deepEqual(client.store.getState().apollo.data, { + 'ROOT_QUERY.abc.0': { name: 'abcd', __typename: 'Book' }, + ROOT_QUERY: { + abc: [ + { + generated: true, + id: 'ROOT_QUERY.abc.0', + type: 'id', + }, + ], + }, }); }); +}); - it('should run a query with the connection directive and filter arguments and write the result to the correct store key', () => { - const query = gql` - query books($order: string) { - books(skip: 0, limit: 2, order: $order) @connection(key: "abc", filter: ["order"]) { - name - __typename - } - }`; +it('should run a query with the connection directive and filter arguments and write the result to the correct store key', () => { + const query = gql` + query books($order: string) { + books(skip: 0, limit: 2, order: $order) + @connection(key: "abc", filter: ["order"]) { + name + __typename + } + } + `; - const result = { - 'books': [ - { - 'name': 'abcd', - '__typename': 'Book', - }, - ], - }; + const result = { + books: [ + { + name: 'abcd', + __typename: 'Book', + }, + ], + }; - const variables = {order: 'popularity'}; + const variables = { order: 'popularity' }; - const networkInterface = mockNetworkInterface({ - request: { query: query, variables }, - result: { data: result }, - }); + const networkInterface = mockNetworkInterface({ + request: { query: query, variables }, + result: { data: result }, + }); - const client = new ApolloClient({ - networkInterface, - }); + const client = new ApolloClient({ + networkInterface, + }); - return client.query({ query, variables }).then((actualResult) => { - assert.deepEqual(actualResult.data, result); - assert.deepEqual(client.store.getState().apollo.data, { - 'ROOT_QUERY.abc({"order":"popularity"}).0': { name: 'abcd', __typename: 'Book' }, - 'ROOT_QUERY': { - 'abc({"order":"popularity"})': [ - { - 'generated': true, - 'id': 'ROOT_QUERY.abc({"order":"popularity"}).0', - 'type': 'id', - }, - ], - }, - }); + return client.query({ query, variables }).then(actualResult => { + assert.deepEqual(actualResult.data, result); + assert.deepEqual(client.store.getState().apollo.data, { + 'ROOT_QUERY.abc({"order":"popularity"}).0': { + name: 'abcd', + __typename: 'Book', + }, + ROOT_QUERY: { + 'abc({"order":"popularity"})': [ + { + 
generated: true, + id: 'ROOT_QUERY.abc({"order":"popularity"}).0', + type: 'id', + }, + ], + }, }); }); +}); function clientRoundtrip( query: DocumentNode, @@ -2912,7 +3037,7 @@ function clientRoundtrip( fragmentMatcher, }); - return client.query({ query, variables }).then((result) => { + return client.query({ query, variables }).then(result => { assert.deepEqual(result.data, data.data); }); } diff --git a/test/customResolvers.ts b/test/customResolvers.ts index a40024369aa..ec73b10e115 100644 --- a/test/customResolvers.ts +++ b/test/customResolvers.ts @@ -5,9 +5,7 @@ import ApolloClient, { toIdValue } from '../src'; import { NetworkStatus } from '../src/queries/networkStatus'; -import { - ApolloQueryResult, -} from '../src/core/types'; +import { ApolloQueryResult } from '../src/core/types'; describe('custom resolvers', () => { it(`works for cache redirection`, () => { @@ -15,7 +13,14 @@ describe('custom resolvers', () => { return obj.id; }; - const listQuery = gql`{ people { id name } }`; + const listQuery = gql` + { + people { + id + name + } + } + `; const listData = { people: [ @@ -27,9 +32,24 @@ describe('custom resolvers', () => { ], }; - const netListQuery = gql`{ people { id name __typename } }`; + const netListQuery = gql` + { + people { + id + name + __typename + } + } + `; - const itemQuery = gql`{ person(id: 4) { id name } }`; + const itemQuery = gql` + { + person(id: 4) { + id + name + } + } + `; // We don't expect the item query to go to the server at all const networkInterface = mockNetworkInterface({ @@ -47,21 +67,24 @@ describe('custom resolvers', () => { dataIdFromObject, }); - return client.query({ query: listQuery }).then(() => { - return client.query({ query: itemQuery }); - }).then((itemResult) => { - assert.deepEqual>(itemResult, { - loading: false, - networkStatus: NetworkStatus.ready, - stale: false, - data: { - person: { - __typename: 'Person', - id: '4', - name: 'Luke Skywalker', + return client + .query({ query: listQuery }) + .then(() => { + return client.query({ query: itemQuery }); + }) + .then(itemResult => { + assert.deepEqual>(itemResult, { + loading: false, + networkStatus: NetworkStatus.ready, + stale: false, + data: { + person: { + __typename: 'Person', + id: '4', + name: 'Luke Skywalker', + }, }, - }, + }); }); - }); }); }); diff --git a/test/deduplicator.ts b/test/deduplicator.ts index 6f74236c994..092551d420f 100644 --- a/test/deduplicator.ts +++ b/test/deduplicator.ts @@ -10,10 +10,11 @@ import { NetworkStatus } from '../src/queries/networkStatus'; describe('query deduplication', () => { it(`does not affect different queries`, () => { - - const document: DocumentNode = gql`query test1($x: String){ - test(x: $x) - }`; + const document: DocumentNode = gql` + query test1($x: String) { + test(x: $x) + } + `; const variables1 = { x: 'Hello World' }; const variables2 = { x: 'Goodbye World' }; @@ -30,26 +31,28 @@ describe('query deduplication', () => { }; let called = 0; - const deduper = new Deduplicator({ - query: () => { - called += 1; - return new Promise((resolve, reject) => { - setTimeout(resolve, 5); - }); - }, - } as any ); + const deduper = new Deduplicator( + { + query: () => { + called += 1; + return new Promise((resolve, reject) => { + setTimeout(resolve, 5); + }); + }, + } as any, + ); deduper.query(request1); deduper.query(request2); assert.equal(called, 2); - }); it(`will not deduplicate requests following an errored query`, () => { - - const document: DocumentNode = gql`query test1($x: String){ - test(x: $x) - }`; + const document: 
DocumentNode = gql` + query test1($x: String) { + test(x: $x) + } + `; const variables = { x: 'Hello World' }; const request: Request = { @@ -59,38 +62,41 @@ describe('query deduplication', () => { }; let called = 0; - const deduper = new Deduplicator({ - query: () => { - called += 1; - switch (called) { - case 1: - return new Promise((resolve, reject) => { - setTimeout(reject); - }); - case 2: - return new Promise((resolve, reject) => { - setTimeout(resolve); - }); - default: - return assert(false, 'Should not have been called more than twice'); - } - - }, - } as any ); - - return deduper.query(request) - .catch( () => { + const deduper = new Deduplicator( + { + query: () => { + called += 1; + switch (called) { + case 1: + return new Promise((resolve, reject) => { + setTimeout(reject); + }); + case 2: + return new Promise((resolve, reject) => { + setTimeout(resolve); + }); + default: + return assert( + false, + 'Should not have been called more than twice', + ); + } + }, + } as any, + ); + + return deduper.query(request).catch(() => { deduper.query(request); return assert.equal(called, 2); }); - }); it(`deduplicates identical queries`, () => { - - const document: DocumentNode = gql`query test1($x: String){ - test(x: $x) - }`; + const document: DocumentNode = gql` + query test1($x: String) { + test(x: $x) + } + `; const variables1 = { x: 'Hello World' }; const variables2 = { x: 'Hello World' }; @@ -107,26 +113,28 @@ describe('query deduplication', () => { }; let called = 0; - const deduper = new Deduplicator({ - query: () => { - called += 1; - return new Promise((resolve, reject) => { - setTimeout(resolve, 5); - }); - }, - } as any ); + const deduper = new Deduplicator( + { + query: () => { + called += 1; + return new Promise((resolve, reject) => { + setTimeout(resolve, 5); + }); + }, + } as any, + ); deduper.query(request1); deduper.query(request2); assert.equal(called, 1); - }); it(`can bypass deduplication if desired`, () => { - - const document: DocumentNode = gql`query test1($x: String){ - test(x: $x) - }`; + const document: DocumentNode = gql` + query test1($x: String) { + test(x: $x) + } + `; const variables1 = { x: 'Hello World' }; const variables2 = { x: 'Hello World' }; @@ -143,18 +151,19 @@ describe('query deduplication', () => { }; let called = 0; - const deduper = new Deduplicator({ - query: () => { - called += 1; - return new Promise((resolve, reject) => { - setTimeout(resolve, 5); - }); - }, - } as any ); + const deduper = new Deduplicator( + { + query: () => { + called += 1; + return new Promise((resolve, reject) => { + setTimeout(resolve, 5); + }); + }, + } as any, + ); deduper.query(request1, false); deduper.query(request2, false); assert.equal(called, 2); - }); }); diff --git a/test/diffAgainstStore.ts b/test/diffAgainstStore.ts index 296724a2d01..11d16e6590c 100644 --- a/test/diffAgainstStore.ts +++ b/test/diffAgainstStore.ts @@ -1,20 +1,13 @@ import { assert } from 'chai'; -import { - diffQueryAgainstStore, - ID_KEY, -} from '../src/data/readFromStore'; +import { diffQueryAgainstStore, ID_KEY } from '../src/data/readFromStore'; import { writeQueryToStore } from '../src/data/writeToStore'; import gql from 'graphql-tag'; -import { - withError, -} from './util/wrap'; +import { withError } from './util/wrap'; -import { - HeuristicFragmentMatcher, -} from '../src/data/fragmentMatcher'; +import { HeuristicFragmentMatcher } from '../src/data/fragmentMatcher'; const fragmentMatcherFunction = new HeuristicFragmentMatcher().match; describe('diffing queries against the store', () 
=> { @@ -38,18 +31,20 @@ describe('diffing queries against the store', () => { query, }); - assert.notOk(diffQueryAgainstStore({ - store, - query, - }).isMissing); + assert.notOk( + diffQueryAgainstStore({ + store, + query, + }).isMissing, + ); }); it('caches root queries both under the ID of the node and the query name', () => { const firstQuery = gql` { people_one(id: "1") { - __typename, - id, + __typename + id name } } @@ -63,7 +58,7 @@ describe('diffing queries against the store', () => { }, }; - const getIdField = ({id}: {id: string}) => id; + const getIdField = ({ id }: { id: string }) => id; const store = writeQueryToStore({ result, @@ -74,8 +69,8 @@ describe('diffing queries against the store', () => { const secondQuery = gql` { people_one(id: "1") { - __typename, - id, + __typename + id name } } @@ -96,7 +91,8 @@ describe('diffing queries against the store', () => { person { powers } - }`; + } + `; const firstResult = { person: { powers: 'the force', @@ -109,7 +105,8 @@ describe('diffing queries against the store', () => { const unionQuery = gql` query { ...notARealFragment - }`; + } + `; return assert.throws(() => { diffQueryAgainstStore({ store, @@ -127,7 +124,8 @@ describe('diffing queries against the store', () => { firstName lastName } - }`; + } + `; const firstResult = { person: { __typename: 'Author', @@ -147,12 +145,12 @@ describe('diffing queries against the store', () => { firstName lastName } - ... on Jedi { powers } } - }`; + } + `; const { isMissing } = diffQueryAgainstStore({ store, query: unionQuery, @@ -172,7 +170,8 @@ describe('diffing queries against the store', () => { firstName lastName } - }`; + } + `; const firstResult = { person: { __typename: 'Author', @@ -192,12 +191,15 @@ describe('diffing queries against the store', () => { ...jediInfo } } + fragment authorInfo on Author { firstName } + fragment jediInfo on Jedi { powers - }`; + } + `; const { isMissing } = diffQueryAgainstStore({ store, @@ -215,7 +217,8 @@ describe('diffing queries against the store', () => { firstName lastName } - }`; + } + `; const firstResult = { person: { __typename: 'Author', @@ -235,13 +238,16 @@ describe('diffing queries against the store', () => { ...jediInfo } } + fragment authorInfo on Author { firstName address } + fragment jediInfo on Jedi { jedi - }`; + } + `; assert.throw(() => { diffQueryAgainstStore({ store, @@ -303,10 +309,12 @@ describe('diffing queries against the store', () => { ...personInfo } } + fragment personInfo on Person { name age - }`; + } + `; const simpleDiff = diffQueryAgainstStore({ store, @@ -353,17 +361,25 @@ describe('diffing queries against the store', () => { it('will add a private id property', () => { const query = gql` query { - a { id b } - c { d e { id f } g { h } } + a { + id + b + } + c { + d + e { + id + f + } + g { + h + } + } } `; const queryResult = { - a: [ - { id: 'a:1', b: 1.1 }, - { id: 'a:2', b: 1.2 }, - { id: 'a:3', b: 1.3 }, - ], + a: [{ id: 'a:1', b: 1.1 }, { id: 'a:2', b: 1.2 }, { id: 'a:3', b: 1.3 }], c: { d: 2, e: [ @@ -406,8 +422,15 @@ describe('diffing queries against the store', () => { it('will return the previous result if there are no changes', () => { const query = gql` query { - a { b } - c { d e { f } } + a { + b + } + c { + d + e { + f + } + } } `; @@ -439,8 +462,15 @@ describe('diffing queries against the store', () => { it('will return parts of the previous result that changed', () => { const query = gql` query { - a { b } - c { d e { f } } + a { + b + } + c { + d + e { + f + } + } } `; @@ -475,14 +505,24 @@ 
describe('diffing queries against the store', () => { it('will return the previous result if there are no changes in child arrays', () => { const query = gql` query { - a { b } - c { d e { f } } + a { + b + } + c { + d + e { + f + } + } } `; const queryResult = { a: [{ b: 1.1 }, { b: 1.2 }, { b: 1.3 }], - c: { d: 2, e: [{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }, { f: 3.4 }, { f: 3.5 }] }, + c: { + d: 2, + e: [{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }, { f: 3.4 }, { f: 3.5 }], + }, }; const store = writeQueryToStore({ @@ -492,7 +532,10 @@ describe('diffing queries against the store', () => { const previousResult = { a: [{ b: 1.1 }, { b: 1.2 }, { b: 1.3 }], - c: { d: 2, e: [{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }, { f: 3.4 }, { f: 3.5 }] }, + c: { + d: 2, + e: [{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }, { f: 3.4 }, { f: 3.5 }], + }, }; const { result } = diffQueryAgainstStore({ @@ -508,7 +551,9 @@ describe('diffing queries against the store', () => { it('will not add zombie items when previousResult starts with the same items', () => { const query = gql` query { - a { b } + a { + b + } } `; @@ -539,14 +584,24 @@ describe('diffing queries against the store', () => { it('will return the previous result if there are no changes in nested child arrays', () => { const query = gql` query { - a { b } - c { d e { f } } + a { + b + } + c { + d + e { + f + } + } } `; const queryResult = { a: [[[[[{ b: 1.1 }, { b: 1.2 }, { b: 1.3 }]]]]], - c: { d: 2, e: [[{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }], [{ f: 3.4 }, { f: 3.5 }]] }, + c: { + d: 2, + e: [[{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }], [{ f: 3.4 }, { f: 3.5 }]], + }, }; const store = writeQueryToStore({ @@ -556,7 +611,10 @@ describe('diffing queries against the store', () => { const previousResult = { a: [[[[[{ b: 1.1 }, { b: 1.2 }, { b: 1.3 }]]]]], - c: { d: 2, e: [[{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }], [{ f: 3.4 }, { f: 3.5 }]] }, + c: { + d: 2, + e: [[{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }], [{ f: 3.4 }, { f: 3.5 }]], + }, }; const { result } = diffQueryAgainstStore({ @@ -572,14 +630,24 @@ describe('diffing queries against the store', () => { it('will return parts of the previous result if there are changes in child arrays', () => { const query = gql` query { - a { b } - c { d e { f } } + a { + b + } + c { + d + e { + f + } + } } `; const queryResult = { a: [{ b: 1.1 }, { b: 1.2 }, { b: 1.3 }], - c: { d: 2, e: [{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }, { f: 3.4 }, { f: 3.5 }] }, + c: { + d: 2, + e: [{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }, { f: 3.4 }, { f: 3.5 }], + }, }; const store = writeQueryToStore({ @@ -589,7 +657,10 @@ describe('diffing queries against the store', () => { const previousResult = { a: [{ b: 1.1 }, { b: -1.2 }, { b: 1.3 }], - c: { d: 20, e: [{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }, { f: 3.4 }, { f: 3.5 }] }, + c: { + d: 20, + e: [{ f: 3.1 }, { f: 3.2 }, { f: 3.3 }, { f: 3.4 }, { f: 3.5 }], + }, }; const { result } = diffQueryAgainstStore({ @@ -616,8 +687,20 @@ describe('diffing queries against the store', () => { it('will return the same items in a different order with `dataIdFromObject`', () => { const query = gql` query { - a { id b } - c { d e { id f } g { h } } + a { + id + b + } + c { + d + e { + id + f + } + g { + h + } + } } `; @@ -690,8 +773,14 @@ describe('diffing queries against the store', () => { it('will return the same JSON scalar field object', () => { const query = gql` { - a { b c } - d { e f } + a { + b + c + } + d { + e + f + } } `; diff --git a/test/directives.ts b/test/directives.ts index 245e8aed863..a9b0d00cee2 100644 --- a/test/directives.ts +++ 
b/test/directives.ts @@ -1,13 +1,9 @@ import * as chai from 'chai'; const { assert } = chai; -import { - shouldInclude, -} from '../src/queries/directives'; +import { shouldInclude } from '../src/queries/directives'; -import { - getQueryDefinition, -} from '../src/queries/getFromAST'; +import { getQueryDefinition } from '../src/queries/getFromAST'; import gql from 'graphql-tag'; @@ -18,7 +14,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @skip(if: true) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert(!shouldInclude(field, {})); }); @@ -27,7 +24,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @include(if: true) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert(shouldInclude(field, {})); }); @@ -36,7 +34,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @include(if: false) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert(!shouldInclude(field, {})); }); @@ -45,7 +44,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @skip(if: false) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert(shouldInclude(field, {})); }); @@ -54,7 +54,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @skip(if: true) @include(if: true) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert(!shouldInclude(field, {})); }); @@ -63,7 +64,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @skip(if: true) @include(if: false) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert(!shouldInclude(field, {})); }); @@ -71,8 +73,9 @@ describe('query directives', () => { it('should include a field if skip: false and include: true', () => { const query = gql` query { - fortuneCookie @skip(if:false) @include(if: true) - }`; + fortuneCookie @skip(if: false) @include(if: true) + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert(shouldInclude(field, {})); }); @@ -81,7 +84,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @skip(if: false) @include(if: false) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert(!shouldInclude(field, {})); }); @@ -90,7 +94,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @skip(if: false) @include(if: false) - }`; + } + `; const queryClone = cloneDeep(query); const field = getQueryDefinition(query).selectionSet.selections[0]; shouldInclude(field, {}); @@ -101,7 +106,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @dosomething(if: true) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert.doesNotThrow(() => { @@ -113,7 +119,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @skip(nothing: true) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert.throws(() => { @@ -125,7 +132,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @include(nothing: true) - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert.throws(() => { @@ -137,7 +145,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @include(if: $neverDefined) - 
}`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert.throws(() => { shouldInclude(field, {}); @@ -148,7 +157,8 @@ describe('query directives', () => { const query = gql` query($shouldSkip: Boolean) { fortuneCookie @skip(if: $shouldSkip) - }`; + } + `; const variables = { shouldSkip: true, }; @@ -160,7 +170,8 @@ describe('query directives', () => { const query = gql` query($shouldSkip: Boolean) { fortuneCookie @include(if: $shouldInclude) - }`; + } + `; const variables = { shouldInclude: false, }; @@ -172,7 +183,8 @@ describe('query directives', () => { const query = gql` query { fortuneCookie @include(if: "string") - }`; + } + `; const field = getQueryDefinition(query).selectionSet.selections[0]; assert.throws(() => { shouldInclude(field, {}); diff --git a/test/errors.ts b/test/errors.ts index acfb1b2c901..10552d82710 100644 --- a/test/errors.ts +++ b/test/errors.ts @@ -32,7 +32,7 @@ describe('ApolloError', () => { }); it('should add a graphql error to the message', () => { - const graphQLErrors = [ new Error('this is an error message') ]; + const graphQLErrors = [new Error('this is an error message')]; const apolloError = new ApolloError({ graphQLErrors, }); @@ -42,9 +42,7 @@ describe('ApolloError', () => { }); it('should add multiple graphql errors to the message', () => { - const graphQLErrors = [ new Error('this is new'), - new Error('this is old'), - ]; + const graphQLErrors = [new Error('this is new'), new Error('this is old')]; const apolloError = new ApolloError({ graphQLErrors, }); @@ -57,7 +55,7 @@ describe('ApolloError', () => { }); it('should add both network and graphql errors to the message', () => { - const graphQLErrors = [ new Error('graphql error message') ]; + const graphQLErrors = [new Error('graphql error message')]; const networkError = new Error('network error message'); const apolloError = new ApolloError({ graphQLErrors, @@ -72,7 +70,7 @@ describe('ApolloError', () => { }); it('should contain a stack trace', () => { - const graphQLErrors = [ new Error('graphql error message') ]; + const graphQLErrors = [new Error('graphql error message')]; const networkError = new Error('network error message'); const apolloError = new ApolloError({ graphQLErrors, diff --git a/test/fetchMore.ts b/test/fetchMore.ts index a5511f6092c..41ae57786de 100644 --- a/test/fetchMore.ts +++ b/test/fetchMore.ts @@ -52,7 +52,7 @@ describe('updateQuery on a simple query', () => { }, }); - return new Promise((resolve) => setTimeout(resolve, 5)) + return new Promise(resolve => setTimeout(resolve, 5)) .then(() => obsHandle) .then((watchedQuery: ObservableQuery) => { assert.equal(latestResult.data.entry.value, 1); @@ -119,19 +119,19 @@ describe('updateQuery on a query with required and optional variables', () => { }, }); - return new Promise((resolve) => setTimeout(resolve, 5)) - .then(() => obsHandle) - .then((watchedQuery: ObservableQuery) => { - assert.equal(latestResult.data.entry.value, 1); - watchedQuery.updateQuery((prevResult: any) => { - const res = cloneDeep(prevResult); - res.entry.value = 2; - return res; - }); - - assert.equal(latestResult.data.entry.value, 2); - }) - .then(() => sub.unsubscribe()); + return new Promise(resolve => setTimeout(resolve, 5)) + .then(() => obsHandle) + .then((watchedQuery: ObservableQuery) => { + assert.equal(latestResult.data.entry.value, 1); + watchedQuery.updateQuery((prevResult: any) => { + const res = cloneDeep(prevResult); + res.entry.value = 2; + return res; + }); + + assert.equal(latestResult.data.entry.value, 2); + 
}) + .then(() => sub.unsubscribe()); }); }); @@ -184,11 +184,20 @@ describe('fetchMore on an observable query', () => { }, }; for (let i = 1; i <= 10; i++) { - result.data.entry.comments.push({ text: `comment ${i}`, __typename: 'Comment' }); + result.data.entry.comments.push({ + text: `comment ${i}`, + __typename: 'Comment', + }); } for (let i = 11; i <= 20; i++) { - resultMore.data.entry.comments.push({ text: `comment ${i}`, __typename: 'Comment' }); - result2.data.comments.push({ text: `new comment ${i}`, __typename: 'Comment' }); + resultMore.data.entry.comments.push({ + text: `comment ${i}`, + __typename: 'Comment', + }); + result2.data.comments.push({ + text: `new comment ${i}`, + __typename: 'Comment', + }); } let latestResult: any = null; @@ -198,13 +207,16 @@ describe('fetchMore on an observable query', () => { let sub: any; function setup(...mockedResponses: any[]) { - networkInterface = mockNetworkInterface({ - request: { - query, - variables, + networkInterface = mockNetworkInterface( + { + request: { + query, + variables, + }, + result, }, - result, - }, ...mockedResponses); + ...mockedResponses, + ); client = new ApolloClient({ networkInterface, @@ -238,25 +250,30 @@ describe('fetchMore on an observable query', () => { variables: variablesMore, }, result: resultMore, - }).then((watchedQuery) => { - return watchedQuery.fetchMore({ - variables: { start: 10 }, // rely on the fact that the original variables had limit: 10 - updateQuery: (prev, options) => { - const state = cloneDeep(prev) as any; - state.entry.comments = [...state.entry.comments, ...(options.fetchMoreResult as any).entry.comments]; - return state; - }, + }) + .then(watchedQuery => { + return watchedQuery.fetchMore({ + variables: { start: 10 }, // rely on the fact that the original variables had limit: 10 + updateQuery: (prev, options) => { + const state = cloneDeep(prev) as any; + state.entry.comments = [ + ...state.entry.comments, + ...(options.fetchMoreResult as any).entry.comments, + ]; + return state; + }, + }); + }) + .then(data => { + assert.lengthOf(data.data.entry.comments, 10); // this is the server result + assert.isFalse(data.loading); + const comments = latestResult.data.entry.comments; + assert.lengthOf(comments, 20); + for (let i = 1; i <= 20; i++) { + assert.equal(comments[i - 1].text, `comment ${i}`); + } + unsetup(); }); - }).then(data => { - assert.lengthOf(data.data.entry.comments, 10); // this is the server result - assert.isFalse(data.loading); - const comments = latestResult.data.entry.comments; - assert.lengthOf(comments, 20); - for (let i = 1; i <= 20; i++) { - assert.equal(comments[i - 1].text, `comment ${i}`); - } - unsetup(); - }); }); it('fetching more with a different query', () => { @@ -267,33 +284,42 @@ describe('fetchMore on an observable query', () => { variables: variables2, }, result: result2, - }).then((watchedQuery) => { - return watchedQuery.fetchMore({ - query: query2, - variables: variables2, - updateQuery: (prev, options) => { - const state = cloneDeep(prev) as any; - state.entry.comments = [...state.entry.comments, ...(options.fetchMoreResult as any).comments]; - return state; - }, + }) + .then(watchedQuery => { + return watchedQuery.fetchMore({ + query: query2, + variables: variables2, + updateQuery: (prev, options) => { + const state = cloneDeep(prev) as any; + state.entry.comments = [ + ...state.entry.comments, + ...(options.fetchMoreResult as any).comments, + ]; + return state; + }, + }); + }) + .then(() => { + const comments = latestResult.data.entry.comments; + 
assert.lengthOf(comments, 20); + for (let i = 1; i <= 10; i++) { + assert.equal(comments[i - 1].text, `comment ${i}`); + } + for (let i = 11; i <= 20; i++) { + assert.equal(comments[i - 1].text, `new comment ${i}`); + } + unsetup(); }); - }).then(() => { - const comments = latestResult.data.entry.comments; - assert.lengthOf(comments, 20); - for (let i = 1; i <= 10; i++) { - assert.equal(comments[i - 1].text, `comment ${i}`); - } - for (let i = 11; i <= 20; i++) { - assert.equal(comments[i - 1].text, `new comment ${i}`); - } - unsetup(); - }); }); it('will set the network status to `fetchMore`', done => { networkInterface = mockNetworkInterface( { request: { query, variables }, result, delay: 5 }, - { request: { query, variables: variablesMore }, result: resultMore, delay: 5 }, + { + request: { query, variables: variablesMore }, + result: resultMore, + delay: 5, + }, ); client = new ApolloClient({ @@ -318,7 +344,10 @@ describe('fetchMore on an observable query', () => { variables: { start: 10 }, updateQuery: (prev, options) => { const state = cloneDeep(prev) as any; - state.entry.comments = [...state.entry.comments, ...(options.fetchMoreResult as any).entry.comments]; + state.entry.comments = [ + ...state.entry.comments, + ...(options.fetchMoreResult as any).entry.comments, + ]; return state; }, }); @@ -348,7 +377,11 @@ describe('fetchMore on an observable query', () => { it('will get an error from `fetchMore` if thrown', done => { networkInterface = mockNetworkInterface( { request: { query, variables }, result, delay: 5 }, - { request: { query, variables: variablesMore }, error: new Error('Uh, oh!'), delay: 5 }, + { + request: { query, variables: variablesMore }, + error: new Error('Uh, oh!'), + delay: 5, + }, ); client = new ApolloClient({ @@ -373,7 +406,10 @@ describe('fetchMore on an observable query', () => { variables: { start: 10 }, updateQuery: (prev, options) => { const state = cloneDeep(prev) as any; - state.entry.comments = [...state.entry.comments, ...(options.fetchMoreResult as any).entry.comments]; + state.entry.comments = [ + ...state.entry.comments, + ...(options.fetchMoreResult as any).entry.comments, + ]; return state; }, }); @@ -400,7 +436,8 @@ describe('fetchMore on an observable query', () => { done(error); } }, - complete: () => done(new Error('`complete` called when it wasn’t supposed to be.')), + complete: () => + done(new Error('`complete` called when it wasn’t supposed to be.')), }); }); }); diff --git a/test/fixtures/redux-todomvc/index.ts b/test/fixtures/redux-todomvc/index.ts index 6ddbdef4f63..f5e19dc00f8 100644 --- a/test/fixtures/redux-todomvc/index.ts +++ b/test/fixtures/redux-todomvc/index.ts @@ -1,6 +1,4 @@ -import { - rootReducer, -} from './reducers'; +import { rootReducer } from './reducers'; import { addTodo, @@ -9,7 +7,7 @@ import { completeTodo, completeAll, clearCompleted, -} from './actions' +} from './actions'; import * as types from './types'; @@ -22,4 +20,4 @@ export { completeAll, clearCompleted, types, -} +}; diff --git a/test/fixtures/redux-todomvc/reducers.ts b/test/fixtures/redux-todomvc/reducers.ts index 7d56a6a0375..3331e8789f7 100644 --- a/test/fixtures/redux-todomvc/reducers.ts +++ b/test/fixtures/redux-todomvc/reducers.ts @@ -7,7 +7,7 @@ import { EDIT_TODO, COMPLETE_TODO, COMPLETE_ALL, - CLEAR_COMPLETED + CLEAR_COMPLETED, } from './types'; const initialState = [ @@ -25,48 +25,48 @@ function todos(state = initialState, action: any): any { { id: state.reduce((maxId, todo) => Math.max(todo.id, maxId), -1) + 1, completed: false, - text: 
action.text + text: action.text, }, - ...state - ] + ...state, + ]; case DELETE_TODO: - return state.filter(todo => - todo.id !== action.id - ) + return state.filter(todo => todo.id !== action.id); case EDIT_TODO: - return state.map(todo => - todo.id === action.id ? - assign({}, todo, { text: action.text }) : - todo - ) + return state.map( + todo => + todo.id === action.id + ? assign({}, todo, { text: action.text }) + : todo, + ); case COMPLETE_TODO: - return state.map(todo => - todo.id === action.id ? - assign({}, todo, { completed: !todo.completed }) : - todo - ) + return state.map( + todo => + todo.id === action.id + ? assign({}, todo, { completed: !todo.completed }) + : todo, + ); case COMPLETE_ALL: - const areAllMarked = state.every(todo => todo.completed) - return state.map(todo => assign({}, todo, { - completed: !areAllMarked - })) + const areAllMarked = state.every(todo => todo.completed); + return state.map(todo => + assign({}, todo, { + completed: !areAllMarked, + }), + ); case CLEAR_COMPLETED: - return state.filter(todo => todo.completed === false) + return state.filter(todo => todo.completed === false); default: - return state + return state; } } const rootReducer = combineReducers({ - todos + todos, }) as any; // XXX see why this type fails -export { - rootReducer -} +export { rootReducer }; diff --git a/test/fixtures/redux-todomvc/types.ts b/test/fixtures/redux-todomvc/types.ts index a6b38b75daa..b2579897223 100644 --- a/test/fixtures/redux-todomvc/types.ts +++ b/test/fixtures/redux-todomvc/types.ts @@ -7,6 +7,6 @@ export const COMPLETE_ALL = 'COMPLETE_ALL'; export const CLEAR_COMPLETED = 'CLEAR_COMPLETED'; // todo types -export const SHOW_ALL = 'show_all' -export const SHOW_COMPLETED = 'show_completed' -export const SHOW_ACTIVE = 'show_active' +export const SHOW_ALL = 'show_all'; +export const SHOW_COMPLETED = 'show_completed'; +export const SHOW_ACTIVE = 'show_active'; diff --git a/test/fragmentMatcher.ts b/test/fragmentMatcher.ts index d1bb23e4ab5..e067de227dc 100644 --- a/test/fragmentMatcher.ts +++ b/test/fragmentMatcher.ts @@ -7,43 +7,49 @@ import { IntrospectionFragmentMatcher } from '../src/data/fragmentMatcher'; import mockQueryManager from './mocks/mockQueryManager'; describe('IntrospectionFragmentMatcher', () => { - - const introspectionQuery = gql`{ - __schema { - types { - kind - name - possibleTypes { + const introspectionQuery = gql` + { + __schema { + types { + kind name + possibleTypes { + name + } } } } - }`; + `; it('will throw an error if match is called if it is not ready', () => { const ifm = new IntrospectionFragmentMatcher(); - assert.throws( () => (ifm.match as any)(), /called before/ ); + assert.throws(() => (ifm.match as any)(), /called before/); }); it('can be seeded with an introspection query result', () => { const ifm = new IntrospectionFragmentMatcher({ introspectionQueryResultData: { __schema: { - types: [{ - kind: 'UNION', - name: 'Item', - possibleTypes: [{ - name: 'ItemA', - }, { - name: 'ItemB', - }], - }], + types: [ + { + kind: 'UNION', + name: 'Item', + possibleTypes: [ + { + name: 'ItemA', + }, + { + name: 'ItemB', + }, + ], + }, + ], }, }, }); const store = { - 'a': { + a: { __typename: 'ItemB', }, }; @@ -61,7 +67,10 @@ describe('IntrospectionFragmentMatcher', () => { customResolvers: {}, }; - assert.equal(ifm.match(idValue as any, 'Item', readStoreContext), true ); - assert.equal(ifm.match(idValue as any, 'NotAnItem', readStoreContext), false ); + assert.equal(ifm.match(idValue as any, 'Item', readStoreContext), true); + 
assert.equal( + ifm.match(idValue as any, 'NotAnItem', readStoreContext), + false, + ); }); }); diff --git a/test/getFromAST.ts b/test/getFromAST.ts index 3e5e4dbbe2a..ba8594d4e5b 100644 --- a/test/getFromAST.ts +++ b/test/getFromAST.ts @@ -10,10 +10,7 @@ import { getFragmentQueryDocument, } from '../src/queries/getFromAST'; -import { - FragmentDefinitionNode, - OperationDefinitionNode, -} from 'graphql'; +import { FragmentDefinitionNode, OperationDefinitionNode } from 'graphql'; import { print } from 'graphql/language/printer'; import gql from 'graphql-tag'; @@ -28,11 +25,13 @@ describe('AST utility functions', () => { lastName } } + query { author { address } - }`; + } + `; assert.throws(() => { checkDocument(multipleQueries); }); @@ -43,10 +42,12 @@ describe('AST utility functions', () => { ...authorDetails } } + fragment authorDetails on Author { firstName lastName - }`; + } + `; assert.doesNotThrow(() => { checkDocument(namedFragment); }); @@ -59,16 +60,21 @@ describe('AST utility functions', () => { ...authorDetails } } + fragment authorDetails on Author { firstName lastName - }`; + } + `; const expectedDoc = gql` fragment authorDetails on Author { firstName lastName - }`; - const expectedResult: FragmentDefinitionNode[] = [expectedDoc.definitions[0] as FragmentDefinitionNode]; + } + `; + const expectedResult: FragmentDefinitionNode[] = [ + expectedDoc.definitions[0] as FragmentDefinitionNode, + ]; const actualResult = getFragmentDefinitions(singleFragmentDefinition); assert.equal(actualResult.length, expectedResult.length); assert.equal(print(actualResult[0]), print(expectedResult[0])); @@ -82,21 +88,26 @@ describe('AST utility functions', () => { ...moreAuthorDetails } } + fragment authorDetails on Author { firstName lastName } + fragment moreAuthorDetails on Author { address - }`; + } + `; const expectedDoc = gql` fragment authorDetails on Author { firstName lastName } + fragment moreAuthorDetails on Author { address - }`; + } + `; const expectedResult: FragmentDefinitionNode[] = [ expectedDoc.definitions[0] as FragmentDefinitionNode, expectedDoc.definitions[1] as FragmentDefinitionNode, @@ -111,23 +122,28 @@ describe('AST utility functions', () => { firstName lastName } + fragment moreAuthorDetails on Author { address } + query { author { ...authorDetails ...moreAuthorDetails } - }`; + } + `; const expectedDoc = gql` query { author { ...authorDetails ...moreAuthorDetails } - }`; - const expectedResult: OperationDefinitionNode = expectedDoc.definitions[0] as OperationDefinitionNode; + } + `; + const expectedResult: OperationDefinitionNode = expectedDoc + .definitions[0] as OperationDefinitionNode; const actualResult = getQueryDefinition(queryWithFragments); assert.equal(print(actualResult), print(expectedResult)); @@ -144,7 +160,8 @@ describe('AST utility functions', () => { createAuthor(firstName: "John", lastName: "Smith") { ...authorDetails } - }`; + } + `; assert.throws(() => { getQueryDefinition(mutationWithFragments); }); @@ -157,17 +174,21 @@ describe('AST utility functions', () => { ...authorDetails } } + fragment authorDetails on Author { firstName lastName - }`; + } + `; const expectedDoc = gql` mutation { createAuthor(firstName: "John", lastName: "Smith") { ...authorDetails } - }`; - const expectedResult: OperationDefinitionNode = expectedDoc.definitions[0] as OperationDefinitionNode; + } + `; + const expectedResult: OperationDefinitionNode = expectedDoc + .definitions[0] as OperationDefinitionNode; const actualResult = getMutationDefinition(mutationWithFragments); 
assert.equal(print(actualResult), print(expectedResult)); }); @@ -178,13 +199,15 @@ describe('AST utility functions', () => { firstName lastName } + fragment moreAuthorDetails on Author { address - }`); + } + `); const fragmentMap = createFragmentMap(fragments); const expectedTable: FragmentMap = { - 'authorDetails': fragments[0], - 'moreAuthorDetails': fragments[1], + authorDetails: fragments[0], + moreAuthorDetails: fragments[1], }; assert.deepEqual(fragmentMap, expectedTable); }); @@ -197,7 +220,8 @@ describe('AST utility functions', () => { const query = gql` query nameOfQuery { fortuneCookie - }`; + } + `; const operationName = getOperationName(query); assert.equal(operationName, 'nameOfQuery'); }); @@ -206,7 +230,8 @@ describe('AST utility functions', () => { const query = gql` mutation nameOfMutation { fortuneCookie - }`; + } + `; const operationName = getOperationName(query); assert.equal(operationName, 'nameOfMutation'); }); @@ -215,7 +240,8 @@ describe('AST utility functions', () => { const query = gql` { fortuneCookie - }`; + } + `; const operationName = getOperationName(query); assert.equal(operationName, null); }); @@ -235,7 +261,8 @@ describe('AST utility functions', () => { input AuthorSearchInputType { firstName: String - }`; + } + `; assert.throws(() => { getQueryDefinition(queryWithTypeDefination); }, 'Schema type definitions not allowed in queries. Found: "InputObjectTypeDefinition"'); @@ -244,41 +271,109 @@ describe('AST utility functions', () => { describe('getFragmentQueryDocument', () => { it('will throw an error if there is an operation', () => { assert.throws( - () => getFragmentQueryDocument(gql`{ a b c }`), + () => + getFragmentQueryDocument( + gql` + { + a + b + c + } + `, + ), 'Found a query operation. No operations are allowed when using a fragment as a query. Only fragments are allowed.', ); assert.throws( - () => getFragmentQueryDocument(gql`query { a b c }`), + () => + getFragmentQueryDocument( + gql` + query { + a + b + c + } + `, + ), 'Found a query operation. No operations are allowed when using a fragment as a query. Only fragments are allowed.', ); assert.throws( - () => getFragmentQueryDocument(gql`query Named { a b c }`), - 'Found a query operation named \'Named\'. No operations are allowed when using a fragment as a query. Only fragments are allowed.', + () => + getFragmentQueryDocument( + gql` + query Named { + a + b + c + } + `, + ), + "Found a query operation named 'Named'. No operations are allowed when using a fragment as a query. Only fragments are allowed.", ); assert.throws( - () => getFragmentQueryDocument(gql`mutation Named { a b c }`), - 'Found a mutation operation named \'Named\'. No operations are allowed when using a fragment as a query. ' + - 'Only fragments are allowed.', + () => + getFragmentQueryDocument( + gql` + mutation Named { + a + b + c + } + `, + ), + "Found a mutation operation named 'Named'. No operations are allowed when using a fragment as a query. " + + 'Only fragments are allowed.', ); assert.throws( - () => getFragmentQueryDocument(gql`subscription Named { a b c }`), - 'Found a subscription operation named \'Named\'. No operations are allowed when using a fragment as a query. ' + - 'Only fragments are allowed.', + () => + getFragmentQueryDocument( + gql` + subscription Named { + a + b + c + } + `, + ), + "Found a subscription operation named 'Named'. No operations are allowed when using a fragment as a query. 
" + + 'Only fragments are allowed.', ); }); it('will throw an error if there is not exactly one fragment but no `fragmentName`', () => { assert.throws(() => { getFragmentQueryDocument(gql` - fragment foo on Foo { a b c } - fragment bar on Bar { d e f } + fragment foo on Foo { + a + b + c + } + + fragment bar on Bar { + d + e + f + } `); }, 'Found 2 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); assert.throws(() => { getFragmentQueryDocument(gql` - fragment foo on Foo { a b c } - fragment bar on Bar { d e f } - fragment baz on Baz { g h i } + fragment foo on Foo { + a + b + c + } + + fragment bar on Bar { + d + e + f + } + + fragment baz on Baz { + g + h + i + } `); }, 'Found 3 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); assert.throws(() => { @@ -290,54 +385,193 @@ describe('AST utility functions', () => { it('will create a query document where the single fragment is spread in the root query', () => { assert.deepEqual( - print(getFragmentQueryDocument(gql` - fragment foo on Foo { a b c } - `)), + print( + getFragmentQueryDocument(gql` + fragment foo on Foo { + a + b + c + } + `), + ), print(gql` - { ...foo } - fragment foo on Foo { a b c } + { + ...foo + } + + fragment foo on Foo { + a + b + c + } `), ); }); it('will create a query document where the named fragment is spread in the root query', () => { assert.deepEqual( - print(getFragmentQueryDocument(gql` - fragment foo on Foo { a b c } - fragment bar on Bar { d e f ...foo } - fragment baz on Baz { g h i ...foo ...bar } - `, 'foo')), + print( + getFragmentQueryDocument( + gql` + fragment foo on Foo { + a + b + c + } + + fragment bar on Bar { + d + e + f + ...foo + } + + fragment baz on Baz { + g + h + i + ...foo + ...bar + } + `, + 'foo', + ), + ), print(gql` - { ...foo } - fragment foo on Foo { a b c } - fragment bar on Bar { d e f ...foo } - fragment baz on Baz { g h i ...foo ...bar } + { + ...foo + } + + fragment foo on Foo { + a + b + c + } + + fragment bar on Bar { + d + e + f + ...foo + } + + fragment baz on Baz { + g + h + i + ...foo + ...bar + } `), ); assert.deepEqual( - print(getFragmentQueryDocument(gql` - fragment foo on Foo { a b c } - fragment bar on Bar { d e f ...foo } - fragment baz on Baz { g h i ...foo ...bar } - `, 'bar')), + print( + getFragmentQueryDocument( + gql` + fragment foo on Foo { + a + b + c + } + + fragment bar on Bar { + d + e + f + ...foo + } + + fragment baz on Baz { + g + h + i + ...foo + ...bar + } + `, + 'bar', + ), + ), print(gql` - { ...bar } - fragment foo on Foo { a b c } - fragment bar on Bar { d e f ...foo } - fragment baz on Baz { g h i ...foo ...bar } + { + ...bar + } + + fragment foo on Foo { + a + b + c + } + + fragment bar on Bar { + d + e + f + ...foo + } + + fragment baz on Baz { + g + h + i + ...foo + ...bar + } `), ); assert.deepEqual( - print(getFragmentQueryDocument(gql` - fragment foo on Foo { a b c } - fragment bar on Bar { d e f ...foo } - fragment baz on Baz { g h i ...foo ...bar } - `, 'baz')), + print( + getFragmentQueryDocument( + gql` + fragment foo on Foo { + a + b + c + } + + fragment bar on Bar { + d + e + f + ...foo + } + + fragment baz on Baz { + g + h + i + ...foo + ...bar + } + `, + 'baz', + ), + ), print(gql` - { ...baz } - fragment foo on Foo { a b c } - fragment bar on Bar { d e f ...foo } - fragment baz on Baz { g h i ...foo ...bar } + { + ...baz + } + + fragment foo on Foo { + a + b + c + } + + fragment bar on Bar { + d + e + f + ...foo + } + + fragment baz on Baz { + g + h + i + ...foo + 
...bar + } `), ); }); @@ -370,7 +604,9 @@ describe('AST utility functions', () => { `; const complexMutation = gql` - mutation complexStuff($test: Input = {key1: ["value", "value2"], key2: {key3: 4}}) { + mutation complexStuff( + $test: Input = { key1: ["value", "value2"], key2: { key3: 4 } } + ) { complexStuff(test: $test) { people { name @@ -379,8 +615,13 @@ describe('AST utility functions', () => { } `; - assert.deepEqual(getDefaultValues(getQueryDefinition(basicQuery)), {first: 1}); - assert.deepEqual(getDefaultValues(getMutationDefinition(complexMutation)), {test: {key1: ['value', 'value2'], key2: {key3: 4}}}); + assert.deepEqual(getDefaultValues(getQueryDefinition(basicQuery)), { + first: 1, + }); + assert.deepEqual( + getDefaultValues(getMutationDefinition(complexMutation)), + { test: { key1: ['value', 'value2'], key2: { key3: 4 } } }, + ); }); }); }); diff --git a/test/graphqlSubscriptions.ts b/test/graphqlSubscriptions.ts index abfaf863a48..0ddc9514435 100644 --- a/test/graphqlSubscriptions.ts +++ b/test/graphqlSubscriptions.ts @@ -3,9 +3,7 @@ import { MockedSubscription, } from './mocks/mockNetworkInterface'; -import { - assert, -} from 'chai'; +import { assert } from 'chai'; import { cloneDeep } from 'lodash'; @@ -15,25 +13,23 @@ import ApolloClient from '../src'; import gql from 'graphql-tag'; -import { - QueryManager, -} from '../src/core/QueryManager'; +import { QueryManager } from '../src/core/QueryManager'; -import { - createApolloStore, -} from '../src/store'; +import { createApolloStore } from '../src/store'; describe('GraphQL Subscriptions', () => { - const results = ['Dahivat Pandya', 'Vyacheslav Kim', 'Changping Chen', 'Amanda Liu'].map( - name => ({ result: { data: { user: { name: name } }}, delay: 10 }), - ); + const results = [ + 'Dahivat Pandya', + 'Vyacheslav Kim', + 'Changping Chen', + 'Amanda Liu', + ].map(name => ({ result: { data: { user: { name: name } } }, delay: 10 })); let sub1: MockedSubscription; let options: any; let defaultOptions: any; let defaultSub1: MockedSubscription; beforeEach(() => { - sub1 = { request: { query: gql` @@ -60,11 +56,10 @@ describe('GraphQL Subscriptions', () => { } `, variables: { - name: 'Changping Chen', - }, + name: 'Changping Chen', + }, }; - defaultSub1 = { request: { query: gql` @@ -93,8 +88,7 @@ describe('GraphQL Subscriptions', () => { }; }); - - it('should start a subscription on network interface and unsubscribe', (done) => { + it('should start a subscription on network interface and unsubscribe', done => { const network = mockSubscriptionNetworkInterface([defaultSub1]); // This test calls directly through Apollo Client const client = new ApolloClient({ @@ -120,7 +114,7 @@ describe('GraphQL Subscriptions', () => { assert.equal(Object.keys(network.mockedSubscriptionsById).length, 1); }); - it('should subscribe with default values', (done) => { + it('should subscribe with default values', done => { const network = mockSubscriptionNetworkInterface([sub1]); // This test calls directly through Apollo Client const client = new ApolloClient({ @@ -146,7 +140,7 @@ describe('GraphQL Subscriptions', () => { assert.equal(Object.keys(network.mockedSubscriptionsById).length, 1); }); - it('should multiplex subscriptions', (done) => { + it('should multiplex subscriptions', done => { const network = mockSubscriptionNetworkInterface([sub1]); const queryManager = new QueryManager({ networkInterface: network, @@ -184,7 +178,7 @@ describe('GraphQL Subscriptions', () => { network.fireResult(id); }); - it('should receive multiple results for 
a subscription', (done) => { + it('should receive multiple results for a subscription', done => { const network = mockSubscriptionNetworkInterface([sub1]); let numResults = 0; const queryManager = new QueryManager({ @@ -211,7 +205,7 @@ describe('GraphQL Subscriptions', () => { } }); - it('should fire redux action and call result reducers', (done) => { + it('should fire redux action and call result reducers', done => { const query = gql` query miniQuery { number @@ -239,20 +233,22 @@ describe('GraphQL Subscriptions', () => { addTypename: false, }); - const observableQuery = queryManager.watchQuery({ - query, - reducer: (previousResult, action) => { - counter++; - if (isSubscriptionResultAction(action)) { - const newResult = cloneDeep(previousResult) as any; - newResult.number++; - return newResult; - } - return previousResult; - }, - }).subscribe({ - next: () => null, - }); + const observableQuery = queryManager + .watchQuery({ + query, + reducer: (previousResult, action) => { + counter++; + if (isSubscriptionResultAction(action)) { + const newResult = cloneDeep(previousResult) as any; + newResult.number++; + return newResult; + } + return previousResult; + }, + }) + .subscribe({ + next: () => null, + }); const sub = queryManager.startGraphQLSubscription(options).subscribe({ next(result) { @@ -262,7 +258,12 @@ describe('GraphQL Subscriptions', () => { // once for itself, four times for the subscription results. observableQuery.unsubscribe(); assert.equal(counter, 5); - assert.equal(queryManager.store.getState()['apollo']['data']['ROOT_QUERY']['number'], 4); + assert.equal( + queryManager.store.getState()['apollo']['data']['ROOT_QUERY'][ + 'number' + ], + 4, + ); done(); } }, diff --git a/test/mockNetworkInterface.ts b/test/mockNetworkInterface.ts index 80e8124b1eb..aaeea9e8786 100644 --- a/test/mockNetworkInterface.ts +++ b/test/mockNetworkInterface.ts @@ -1,6 +1,4 @@ -import { - assert, -} from 'chai'; +import { assert } from 'chai'; import { mockSubscriptionNetworkInterface, @@ -12,38 +10,36 @@ import { omit } from 'lodash'; import gql from 'graphql-tag'; describe('MockSubscriptionNetworkInterface', () => { - const result1 = { result: { - data: {user: {name: 'Dhaivat Pandya'}}, + data: { user: { name: 'Dhaivat Pandya' } }, }, delay: 50, }; const result2 = { result: { - data: {user: {name: 'Vyacheslav Kim'}}, + data: { user: { name: 'Vyacheslav Kim' } }, }, delay: 50, }; const result3 = { result: { - data: {user: {name: 'Changping Chen'}}, + data: { user: { name: 'Changping Chen' } }, }, delay: 50, }; const result4 = { result: { - data: {user: {name: 'Amanda Liu'}}, + data: { user: { name: 'Amanda Liu' } }, }, delay: 50, }; let sub1: any; beforeEach(() => { - sub1 = { request: { query: gql` @@ -105,7 +101,6 @@ describe('MockSubscriptionNetworkInterface', () => { }); }); - it('throws an error when firing a subscription id that does not exist', () => { const noResultSub = omit(sub1, 'results') as MockedSubscription; @@ -131,7 +126,7 @@ describe('MockSubscriptionNetworkInterface', () => { networkInterface.fireResult(4); }); }); - it('correctly subscribes', (done) => { + it('correctly subscribes', done => { const networkInterface = mockSubscriptionNetworkInterface([sub1]); const id = networkInterface.subscribe( { @@ -156,7 +151,7 @@ describe('MockSubscriptionNetworkInterface', () => { assert.deepEqual(networkInterface.mockedSubscriptionsById[0], sub1); }); - it('correctly fires results', (done) => { + it('correctly fires results', done => { const networkInterface = 
mockSubscriptionNetworkInterface([sub1]); networkInterface.subscribe( { @@ -179,7 +174,7 @@ describe('MockSubscriptionNetworkInterface', () => { networkInterface.fireResult(0); }); - it('correctly fires multiple results', (done) => { + it('correctly fires multiple results', done => { let allResults: any[] = []; const networkInterface = mockSubscriptionNetworkInterface([sub1]); networkInterface.subscribe( @@ -200,18 +195,18 @@ describe('MockSubscriptionNetworkInterface', () => { }, ); - for (let i = 0; i < 4; i++) { - networkInterface.fireResult(0); - } - setTimeout(() => { - assert.deepEqual( - allResults, - [result1.result.data, result2.result.data, result3.result.data, result4.result.data], - ); - done(); - }, 50); - - + for (let i = 0; i < 4; i++) { + networkInterface.fireResult(0); + } + setTimeout(() => { + assert.deepEqual(allResults, [ + result1.result.data, + result2.result.data, + result3.result.data, + result4.result.data, + ]); + done(); + }, 50); }); it('correctly unsubscribes', () => { diff --git a/test/mocks/mockFetch.ts b/test/mocks/mockFetch.ts index 299bdef391e..3b13f427b7a 100644 --- a/test/mocks/mockFetch.ts +++ b/test/mocks/mockFetch.ts @@ -17,9 +17,12 @@ export interface MockedFetchResponse { delay?: number; } -export function createMockedIResponse(result: Object, options?: any): MockedIResponse { - const status = options && options.status || 200; - const statusText = options && options.statusText || undefined; +export function createMockedIResponse( + result: Object, + options?: any, +): MockedIResponse { + const status = (options && options.status) || 200; + const statusText = (options && options.statusText) || undefined; return { ok: status === 200, @@ -37,7 +40,7 @@ export class MockFetch { constructor(...mockedResponses: MockedFetchResponse[]) { this.mockedResponsesByKey = {}; - mockedResponses.forEach((mockedResponse) => { + mockedResponses.forEach(mockedResponse => { this.addMockedResponse(mockedResponse); }); } @@ -58,7 +61,9 @@ export class MockFetch { const key = this.fetchParamsToKey(url, opts); const responses = this.mockedResponsesByKey[key]; if (!responses || responses.length === 0) { - throw new Error(`No more mocked fetch responses for the params ${url} and ${opts}`); + throw new Error( + `No more mocked fetch responses for the params ${url} and ${opts}`, + ); } const { result, delay } = responses.shift()!; @@ -91,13 +96,16 @@ export class MockFetch { function sortByKey(obj: any): Object { return Object.keys(obj).sort().reduce( - (ret: any, key: string): Object => ( - Object.assign({ - [key]: Object.prototype.toString.call(obj[key]).slice(8, -1) === 'Object' - ? sortByKey(obj[key]) - : obj[key], - }, ret) - ), + (ret: any, key: string): Object => + Object.assign( + { + [key]: + Object.prototype.toString.call(obj[key]).slice(8, -1) === 'Object' + ? 
sortByKey(obj[key]) + : obj[key], + }, + ret, + ), {}, ); } diff --git a/test/mocks/mockNetworkInterface.ts b/test/mocks/mockNetworkInterface.ts index ab7e2f28e70..85d35f934be 100644 --- a/test/mocks/mockNetworkInterface.ts +++ b/test/mocks/mockNetworkInterface.ts @@ -6,42 +6,38 @@ import { SubscriptionNetworkInterface, } from '../../src/transport/networkInterface'; -import { - ExecutionResult, - DocumentNode, -} from 'graphql'; +import { ExecutionResult, DocumentNode } from 'graphql'; -import { - print, -} from 'graphql/language/printer'; +import { print } from 'graphql/language/printer'; -import { - Observable, - Observer, -} from '../../src/util/Observable'; +import { Observable, Observer } from '../../src/util/Observable'; // Pass in multiple mocked responses, so that you can test flows that end up // making multiple queries to the server export default function mockNetworkInterface( - ...mockedResponses: MockedResponse[], + ...mockedResponses: MockedResponse[] ): NetworkInterface { return new MockNetworkInterface(mockedResponses); } export function mockObservableNetworkInterface( - ...mockedResponses: MockedResponse[], + ...mockedResponses: MockedResponse[] ): ObservableNetworkInterface { return new MockObservableNetworkInterface(mockedResponses); } export function mockSubscriptionNetworkInterface( - mockedSubscriptions: MockedSubscription[], ...mockedResponses: MockedResponse[], + mockedSubscriptions: MockedSubscription[], + ...mockedResponses: MockedResponse[] ): MockSubscriptionNetworkInterface { - return new MockSubscriptionNetworkInterface(mockedSubscriptions, mockedResponses); + return new MockSubscriptionNetworkInterface( + mockedSubscriptions, + mockedResponses, + ); } export function mockBatchedNetworkInterface( - ...mockedResponses: MockedResponse[], + ...mockedResponses: MockedResponse[] ): BatchedNetworkInterface { return new MockBatchedNetworkInterface(mockedResponses); } @@ -75,7 +71,7 @@ export class MockNetworkInterface implements NetworkInterface { private mockedResponsesByKey: { [key: string]: MockedResponse[] } = {}; constructor(mockedResponses: MockedResponse[]) { - mockedResponses.forEach((mockedResponse) => { + mockedResponses.forEach(mockedResponse => { this.addMockedResponse(mockedResponse); }); } @@ -101,13 +97,19 @@ export class MockNetworkInterface implements NetworkInterface { const key = requestToKey(parsedRequest); const responses = this.mockedResponsesByKey[key]; if (!responses || responses.length === 0) { - throw new Error(`No more mocked responses for the query: ${print(request.query)}, variables: ${JSON.stringify(request.variables)}`); + throw new Error( + `No more mocked responses for the query: ${print( + request.query, + )}, variables: ${JSON.stringify(request.variables)}`, + ); } const { result, error, delay } = responses.shift()!; if (!result && !error) { - throw new Error(`Mocked response should contain either result or error: ${key}`); + throw new Error( + `Mocked response should contain either result or error: ${key}`, + ); } setTimeout(() => { @@ -121,7 +123,8 @@ export class MockNetworkInterface implements NetworkInterface { } } -export class MockObservableNetworkInterface implements ObservableNetworkInterface { +export class MockObservableNetworkInterface + implements ObservableNetworkInterface { private mockNetworkInterface: MockNetworkInterface; constructor(mockedResponses: MockedResponse[]) { @@ -133,39 +136,51 @@ export class MockObservableNetworkInterface implements ObservableNetworkInterfac } public request(request: Request) { - 
return new Observable(({ next: onNext, error: onError, complete: onComplete }: Observer) => { - const result = this.mockNetworkInterface.query(request); - - result.then((data) => { - if (onNext) { - onNext(data); - } - if (onComplete) { - onComplete(); - } - }) - .catch((error) => { - if (onError) { - onError(error); - } - }); - - return () => void 0; - }); + return new Observable< + ExecutionResult + >( + ({ + next: onNext, + error: onError, + complete: onComplete, + }: Observer) => { + const result = this.mockNetworkInterface.query(request); + + result + .then(data => { + if (onNext) { + onNext(data); + } + if (onComplete) { + onComplete(); + } + }) + .catch(error => { + if (onError) { + onError(error); + } + }); + + return () => void 0; + }, + ); } } - -export class MockSubscriptionNetworkInterface extends MockNetworkInterface implements SubscriptionNetworkInterface { - public mockedSubscriptionsByKey: { [key: string ]: MockedSubscription[] } = {}; - public mockedSubscriptionsById: { [id: number]: MockedSubscription} = {}; - public handlersById: {[id: number]: (error: any, result: any) => void} = {}; +export class MockSubscriptionNetworkInterface extends MockNetworkInterface + implements SubscriptionNetworkInterface { + public mockedSubscriptionsByKey: { [key: string]: MockedSubscription[] } = {}; + public mockedSubscriptionsById: { [id: number]: MockedSubscription } = {}; + public handlersById: { [id: number]: (error: any, result: any) => void } = {}; public subId: number; - constructor(mockedSubscriptions: MockedSubscription[], mockedResponses: MockedResponse[]) { + constructor( + mockedSubscriptions: MockedSubscription[], + mockedResponses: MockedResponse[], + ) { super(mockedResponses); this.subId = 0; - mockedSubscriptions.forEach((sub) => { + mockedSubscriptions.forEach(sub => { this.addMockedSubscription(sub); }); } @@ -189,12 +204,15 @@ export class MockSubscriptionNetworkInterface extends MockNetworkInterface imple mockedSubs.push(mockedSubscription); } - public subscribe(request: Request, handler: (error: any, result: any) => void): number { - const parsedRequest: ParsedRequest = { - query: request.query, - variables: request.variables, - debugName: request.debugName, - }; + public subscribe( + request: Request, + handler: (error: any, result: any) => void, + ): number { + const parsedRequest: ParsedRequest = { + query: request.query, + variables: request.variables, + debugName: request.debugName, + }; const key = requestToKey(parsedRequest); if (this.mockedSubscriptionsByKey.hasOwnProperty(key)) { const subscription = this.mockedSubscriptionsByKey[key].shift()!; @@ -203,9 +221,10 @@ export class MockSubscriptionNetworkInterface extends MockNetworkInterface imple this.mockedSubscriptionsById[id] = subscription; return id; } else { - throw new Error('Network interface does not have subscription associated with this request.'); + throw new Error( + 'Network interface does not have subscription associated with this request.', + ); } - } public fireResult(id: number) { @@ -213,15 +232,24 @@ export class MockSubscriptionNetworkInterface extends MockNetworkInterface imple if (this.mockedSubscriptionsById.hasOwnProperty(id.toString())) { const subscription = this.mockedSubscriptionsById[id]; if (subscription.results!.length === 0) { - throw new Error(`No more mocked subscription responses for the query: ` + - `${print(subscription.request.query)}, variables: ${JSON.stringify(subscription.request.variables)}`); + throw new Error( + `No more mocked subscription responses for the query: 
` + + `${print(subscription.request.query)}, variables: ${JSON.stringify( + subscription.request.variables, + )}`, + ); } const response = subscription.results!.shift()!; setTimeout(() => { - handler(response.error, response.result ? response.result.data : undefined); + handler( + response.error, + response.result ? response.result.data : undefined, + ); }, response.delay ? response.delay : 0); } else { - throw new Error('Network interface does not have subscription associated with this id.'); + throw new Error( + 'Network interface does not have subscription associated with this id.', + ); } } @@ -230,12 +258,11 @@ export class MockSubscriptionNetworkInterface extends MockNetworkInterface imple } } -export class MockBatchedNetworkInterface -extends MockNetworkInterface implements BatchedNetworkInterface { - +export class MockBatchedNetworkInterface extends MockNetworkInterface + implements BatchedNetworkInterface { public batchQuery(requests: Request[]): Promise { const resultPromises: Promise[] = []; - requests.forEach((request) => { + requests.forEach(request => { resultPromises.push(this.query(request)); }); @@ -243,7 +270,6 @@ extends MockNetworkInterface implements BatchedNetworkInterface { } } - function requestToKey(request: ParsedRequest): string { const queryString = request.query && print(request.query); diff --git a/test/mocks/mockQueryManager.ts b/test/mocks/mockQueryManager.ts index a1f7fbd39c7..cab6e1590bf 100644 --- a/test/mocks/mockQueryManager.ts +++ b/test/mocks/mockQueryManager.ts @@ -1,14 +1,8 @@ -import { - QueryManager, -} from '../../src/core/QueryManager'; +import { QueryManager } from '../../src/core/QueryManager'; -import mockNetworkInterface, { - MockedResponse, -} from './mockNetworkInterface'; +import mockNetworkInterface, { MockedResponse } from './mockNetworkInterface'; -import { - createApolloStore, -} from '../../src/store'; +import { createApolloStore } from '../../src/store'; const defaultReduxRootSelector = (state: any) => state.apollo; diff --git a/test/mutationResults.ts b/test/mutationResults.ts index 01c89c214a0..fb7eb47f300 100644 --- a/test/mutationResults.ts +++ b/test/mutationResults.ts @@ -15,7 +15,6 @@ import gql from 'graphql-tag'; import { withWarning } from './util/wrap'; describe('mutation results', () => { - const query = gql` query todoList { todoList(id: 5) { @@ -72,7 +71,7 @@ describe('mutation results', () => { `; const queryWithVars = gql` - query todoList ($id: Int){ + query todoList($id: Int) { __typename todoList(id: $id) { __typename @@ -211,17 +210,20 @@ describe('mutation results', () => { let networkInterface: any; type CustomMutationBehavior = { - type: 'CUSTOM_MUTATION_RESULT', - dataId: string, - field: string, - value: any, + type: 'CUSTOM_MUTATION_RESULT'; + dataId: string; + field: string; + value: any; }; function setupObsHandle(...mockedResponses: any[]) { - networkInterface = mockNetworkInterface({ - request: { query: queryWithTypename }, - result, - }, ...mockedResponses); + networkInterface = mockNetworkInterface( + { + request: { query: queryWithTypename }, + result, + }, + ...mockedResponses, + ); client = new ApolloClient({ networkInterface, @@ -241,11 +243,14 @@ describe('mutation results', () => { } function setupDelayObsHandle(delay: number, ...mockedResponses: any[]) { - networkInterface = mockNetworkInterface({ - request: { query: queryWithTypename }, - result, - delay, - }, ...mockedResponses); + networkInterface = mockNetworkInterface( + { + request: { query: queryWithTypename }, + result, + delay, + }, + 
...mockedResponses, + ); client = new ApolloClient({ networkInterface, @@ -270,10 +275,11 @@ describe('mutation results', () => { } it('correctly primes cache for tests', () => { - return setup() - .then(() => client.query({ + return setup().then(() => + client.query({ query, - })); + }), + ); }); it('correctly integrates field changes by default', () => { @@ -303,18 +309,18 @@ describe('mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - return client.mutate({ mutation }); - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - assert.isTrue(newResult.data.todoList.todos[0].completed); - }); + .then(() => { + return client.mutate({ mutation }); + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + assert.isTrue(newResult.data.todoList.todos[0].completed); + }); }); - it('should warn when the result fields don\'t match the query fields', () => { + it("should warn when the result fields don't match the query fields", () => { let handle: any; let subscriptionHandle: Subscription; let counter = 0; @@ -332,12 +338,14 @@ describe('mutation results', () => { const queryTodosResult = { data: { - todos: [{ - id: '1', - name: 'Todo 1', - description: 'Description 1', - __typename: 'todos', - }], + todos: [ + { + id: '1', + name: 'Todo 1', + description: 'Description 1', + __typename: 'todos', + }, + ], }, }; @@ -363,16 +371,19 @@ describe('mutation results', () => { }; return withWarning(() => { - return setup({ - request: { query: queryTodos }, - result: queryTodosResult, - }, { - request: { query: mutationTodo }, - result: mutationTodoResult, - }) - .then(() => { + return setup( + { + request: { query: queryTodos }, + result: queryTodosResult, + }, + { + request: { query: mutationTodo }, + result: mutationTodoResult, + }, + ) + .then(() => { // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { + return new Promise((resolve, reject) => { handle = client.watchQuery({ query: queryTodos }); subscriptionHandle = handle.subscribe({ next(res: any) { @@ -390,16 +401,14 @@ describe('mutation results', () => { const newTodo = (mutationResult as any).data.createTodo; const newResults = { - todos: [ - ...(prev as any).todos, - newTodo, - ], + todos: [...(prev as any).todos, newTodo], }; return newResults; }, }, }); - }).then(() => subscriptionHandle.unsubscribe()); + }) + .then(() => subscriptionHandle.unsubscribe()); }, /Missing field description/); }); @@ -432,7 +441,7 @@ describe('mutation results', () => { const query2 = gql` query newTodos { __typename - newTodos(since: 1){ + newTodos(since: 1) { __typename id text @@ -446,7 +455,7 @@ describe('mutation results', () => { __typename: 'Query', newTodos: [ { - __typename: 'Todo', + __typename: 'Todo', id: '3030', text: 'Recently added', completed: false, @@ -462,41 +471,48 @@ describe('mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - observableQuery = client.watchQuery({ - query, - reducer: (previousResult, action) => { - counter++; - if (isMutationResultAction(action)) { - const newResult = cloneDeep(previousResult) as any; - newResult.todoList.todos.unshift(action.result.data!['createTodo']); - return newResult; - } - return previousResult; - }, - }).subscribe({ - next: () => null, // TODO: we should actually check the new result - }); - return client.mutate({ - mutation, - }); - }) - .then(() => { - // TODO: improve 
this test. Now it just works because this query is the same as the watchQuery with the reducer. - return client.query({ query }); - }) - .then((newResult: any) => { - observableQuery.unsubscribe(); + .then(() => { + observableQuery = client + .watchQuery({ + query, + reducer: (previousResult, action) => { + counter++; + if (isMutationResultAction(action)) { + const newResult = cloneDeep(previousResult) as any; + newResult.todoList.todos.unshift( + action.result.data!['createTodo'], + ); + return newResult; + } + return previousResult; + }, + }) + .subscribe({ + next: () => null, // TODO: we should actually check the new result + }); + return client.mutate({ + mutation, + }); + }) + .then(() => { + // TODO: improve this test. Now it just works because this query is the same as the watchQuery with the reducer. + return client.query({ query }); + }) + .then((newResult: any) => { + observableQuery.unsubscribe(); - // The reducer should have been called once - assert.equal(counter, 1); + // The reducer should have been called once + assert.equal(counter, 1); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); it('passes variables', () => { @@ -504,66 +520,76 @@ describe('mutation results', () => { let observableQuery: ObservableQuery; let subscription: any; - return setup({ - request: { query: queryWithVars, variables: { id: 5 } }, - result: result5, - }, { - request: { query: mutation}, - result: mutationResult, - delay: 5, - }, { - request: { query: queryWithVars, variables: { id: 6 } }, - result: result6, - }, { - request: { query: mutation}, - result: mutationResult, - }) - .then(() => { - observableQuery = client.watchQuery({ - query: queryWithVars, - variables: { id: 5 }, - reducer: (previousResult, action, variables: any) => { - counter++; - if (isMutationResultAction(action) && variables['id'] === 5) { - const newResult = cloneDeep(previousResult) as any; - newResult.todoList.todos.unshift(action.result.data!['createTodo']); - return newResult; - } - return previousResult; - }, - }); + return setup( + { + request: { query: queryWithVars, variables: { id: 5 } }, + result: result5, + }, + { + request: { query: mutation }, + result: mutationResult, + delay: 5, + }, + { + request: { query: queryWithVars, variables: { id: 6 } }, + result: result6, + }, + { + request: { query: mutation }, + result: mutationResult, + }, + ) + .then(() => { + observableQuery = client.watchQuery({ + query: queryWithVars, + variables: { id: 5 }, + reducer: (previousResult, action, variables: any) => { + counter++; + if (isMutationResultAction(action) && variables['id'] === 5) { + const newResult = cloneDeep(previousResult) as any; + newResult.todoList.todos.unshift( + action.result.data!['createTodo'], + ); + return newResult; + } + return previousResult; + }, + }); - subscription = observableQuery.subscribe({ - next: () => null, // TODO: we should actually check the new result - }); - return client.mutate({ - mutation, - }); - }) - .then(() => { - return observableQuery.setOptions({ variables: { id: 6 } }); - }) - .then((res) => 
{ - return client.mutate({ - mutation, - }); - }) - .then(() => { - // going back to check the result of the original query - return observableQuery.setOptions({ variables: { id: 5 } }); - }) - .then((newResult: any) => { - subscription.unsubscribe(); + subscription = observableQuery.subscribe({ + next: () => null, // TODO: we should actually check the new result + }); + return client.mutate({ + mutation, + }); + }) + .then(() => { + return observableQuery.setOptions({ variables: { id: 6 } }); + }) + .then(res => { + return client.mutate({ + mutation, + }); + }) + .then(() => { + // going back to check the result of the original query + return observableQuery.setOptions({ variables: { id: 5 } }); + }) + .then((newResult: any) => { + subscription.unsubscribe(); - // The reducer should have been called twice - assert.equal(counter, 4); + // The reducer should have been called twice + assert.equal(counter, 4); - // But there should be one more todo item than before, because variables only matched once - assert.equal(newResult.data.todoList.todos.length, 4); + // But there should be one more todo item than before, because variables only matched once + assert.equal(newResult.data.todoList.todos.length, 4); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); it('can filter based on operationName', () => { @@ -574,105 +600,129 @@ describe('mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - observableQuery = client.watchQuery({ - query, - reducer: (previousResult, action) => { - if (isMutationResultAction(action) && action.operationName === 'createTodo') { - counter++; - const newResult = cloneDeep(previousResult) as any; - newResult.todoList.todos.unshift(action.result.data!['createTodo']); - return newResult; - } - return previousResult; - }, - }).subscribe({ - next: () => null, // TODO: we should actually check the new result - }); + .then(() => { + observableQuery = client + .watchQuery({ + query, + reducer: (previousResult, action) => { + if ( + isMutationResultAction(action) && + action.operationName === 'createTodo' + ) { + counter++; + const newResult = cloneDeep(previousResult) as any; + newResult.todoList.todos.unshift( + action.result.data!['createTodo'], + ); + return newResult; + } + return previousResult; + }, + }) + .subscribe({ + next: () => null, // TODO: we should actually check the new result + }); - // this query subscribes to the same data, but the reducer should never run - // because the operationName doesn't match. So the number of - observableQuery2 = client.watchQuery({ - query, - reducer: (previousResult, action) => { - if (isMutationResultAction(action) && action.operationName === 'wrongName') { - counter++; // shouldn't be called, so counter shouldn't increase. - const newResult = cloneDeep(previousResult) as any; - newResult.todoList.todos.unshift(action.result.data!['createTodo']); - return newResult; - } - return previousResult; - }, - }).subscribe({ - next: () => null, // TODO: we should actually check the new result - }); - return client.mutate({ - mutation, - }); - }) - .then(() => { - // TODO: improve this test. Now it just works because this query is the same as the watchQuery with the reducer. 
- return client.query({ query }); - }) - .then((newResult: any) => { - observableQuery.unsubscribe(); + // this query subscribes to the same data, but the reducer should never run + // because the operationName doesn't match. So the number of + observableQuery2 = client + .watchQuery({ + query, + reducer: (previousResult, action) => { + if ( + isMutationResultAction(action) && + action.operationName === 'wrongName' + ) { + counter++; // shouldn't be called, so counter shouldn't increase. + const newResult = cloneDeep(previousResult) as any; + newResult.todoList.todos.unshift( + action.result.data!['createTodo'], + ); + return newResult; + } + return previousResult; + }, + }) + .subscribe({ + next: () => null, // TODO: we should actually check the new result + }); + return client.mutate({ + mutation, + }); + }) + .then(() => { + // TODO: improve this test. Now it just works because this query is the same as the watchQuery with the reducer. + return client.query({ query }); + }) + .then((newResult: any) => { + observableQuery.unsubscribe(); - // The reducer should have been called once - assert.equal(counter, 1); + // The reducer should have been called once + assert.equal(counter, 1); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); it('is called on query result as well', () => { let counter = 0; let observableQuery: any; - return setup({ - request: { query: mutation }, - result: mutationResult, - }, { - request: { query: query2 }, - result: result2, - }) - .then(() => { - observableQuery = client.watchQuery({ - query, - reducer: (previousResult, action) => { - counter++; - if (isQueryResultAction(action)) { - const newResult = cloneDeep(previousResult) as any; - newResult.todoList.todos.unshift(action.result.data!['newTodos'][0]); - return newResult; - } - return previousResult; - }, - }).subscribe({ - next: () => null, // TODO: we should actually check the new result - }); - }) - .then(() => { - return client.query({ query: query2 }); - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - observableQuery.unsubscribe(); + return setup( + { + request: { query: mutation }, + result: mutationResult, + }, + { + request: { query: query2 }, + result: result2, + }, + ) + .then(() => { + observableQuery = client + .watchQuery({ + query, + reducer: (previousResult, action) => { + counter++; + if (isQueryResultAction(action)) { + const newResult = cloneDeep(previousResult) as any; + newResult.todoList.todos.unshift( + action.result.data!['newTodos'][0], + ); + return newResult; + } + return previousResult; + }, + }) + .subscribe({ + next: () => null, // TODO: we should actually check the new result + }); + }) + .then(() => { + return client.query({ query: query2 }); + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + observableQuery.unsubscribe(); - // The reducer should have been called once - assert.equal(counter, 1); + // The reducer should have been called once + assert.equal(counter, 1); - // There should be one more todo item than 
before - assert.equal(newResult.data.todoList.todos.length, 4); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'Recently added'); - }); + // Since we used `prepend` it should be at the front + assert.equal(newResult.data.todoList.todos[0].text, 'Recently added'); + }); }); /* @@ -804,11 +854,14 @@ describe('mutation results', () => { // XXX we don't check here that the resolver still runs, we just check that no errors are thrown. // The resolver doesn't actually run. function setupReducerObsHandle(...mockedResponses: any[]) { - networkInterface = mockNetworkInterface({ - request: { query: queryWithTypename }, - result, - delay: 30, - }, ...mockedResponses); + networkInterface = mockNetworkInterface( + { + request: { query: queryWithTypename }, + result, + delay: 30, + }, + ...mockedResponses, + ); client = new ApolloClient({ networkInterface, @@ -840,11 +893,13 @@ describe('mutation results', () => { const subs = obsHandle.subscribe({ next: () => null, }); - return client.mutate({ - mutation, - }).then(res => { - subs.unsubscribe(); - }); + return client + .mutate({ + mutation, + }) + .then(res => { + subs.unsubscribe(); + }); }); describe('error handling', () => { @@ -852,7 +907,7 @@ describe('mutation results', () => { let warned: any; let timesWarned = 0; - beforeEach((done) => { + beforeEach(done => { // clear warnings warned = null; timesWarned = 0; @@ -892,7 +947,7 @@ describe('mutation results', () => { }); }); - afterEach((done) => { + afterEach(done => { // restore standard method console.warn = oldWarn; done(); @@ -900,7 +955,6 @@ describe('mutation results', () => { }); }); - describe('updateQueries', () => { const mutation = gql` mutation createTodo { @@ -933,57 +987,65 @@ describe('mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }); - }) - .then(() => { - return client.mutate({ - mutation, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - assert.equal(mResult.data.createTodo.id, '99'); - assert.equal(mResult.data.createTodo.text, 'This one was created with a mutation.'); + }) + .then(() => { + return client.mutate({ + mutation, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + assert.equal(mResult.data.createTodo.id, '99'); + assert.equal( + mResult.data.createTodo.text, + 'This one was created with a mutation.', + ); - const state = cloneDeep(prev) as any; - state.todoList.todos.unshift(mResult.data.createTodo); - return state; + const state = cloneDeep(prev) as any; + state.todoList.todos.unshift(mResult.data.createTodo); + return state; + }, }, - }, - }); - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); + }); + }) + .then(() => { + return client.query({ query }); + }) + 
.then((newResult: any) => { + subscriptionHandle.unsubscribe(); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); it('does not fail if optional query variables are not supplied', () => { let subscriptionHandle: Subscription; const mutationWithVars = gql` - mutation createTodo($requiredVar: String!, $optionalVar: String) { - createTodo(requiredVar: $requiredVar, optionalVar:$optionalVar) { - id - text - completed - __typename - } - __typename + mutation createTodo($requiredVar: String!, $optionalVar: String) { + createTodo(requiredVar: $requiredVar, optionalVar: $optionalVar) { + id + text + completed + __typename } + __typename + } `; // the test will pass if optionalVar is uncommented @@ -998,49 +1060,55 @@ describe('mutation results', () => { }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise((resolve, reject) => { - const handle = client.watchQuery({ - query, - variables, - }); - subscriptionHandle = handle.subscribe({ - next(res) { - resolve(res); - }, + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ + query, + variables, + }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }); - }) - .then(() => { - return client.mutate({ - mutation: mutationWithVars, - variables, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - assert.equal(mResult.data.createTodo.id, '99'); - assert.equal(mResult.data.createTodo.text, 'This one was created with a mutation.'); + }) + .then(() => { + return client.mutate({ + mutation: mutationWithVars, + variables, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + assert.equal(mResult.data.createTodo.id, '99'); + assert.equal( + mResult.data.createTodo.text, + 'This one was created with a mutation.', + ); - const state = cloneDeep(prev) as any; - state.todoList.todos.unshift(mResult.data.createTodo); - return state; + const state = cloneDeep(prev) as any; + state.todoList.todos.unshift(mResult.data.createTodo); + return state; + }, }, - }, - }); - }) - .then(() => { - return client.query({query}); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); + }); + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); 
it('does not fail if the query did not complete correctly', () => { @@ -1059,7 +1127,10 @@ describe('mutation results', () => { todoList: (prev, options) => { const mResult = options.mutationResult as any; assert.equal(mResult.data.createTodo.id, '99'); - assert.equal(mResult.data.createTodo.text, 'This one was created with a mutation.'); + assert.equal( + mResult.data.createTodo.text, + 'This one was created with a mutation.', + ); const state = cloneDeep(prev) as any; state.todoList.todos.unshift(mResult.data.createTodo); @@ -1070,13 +1141,10 @@ describe('mutation results', () => { }); it('does not fail if the query did not finish loading', () => { - const obsHandle = setupDelayObsHandle( - 15, - { - request: { query: mutation }, - result: mutationResult, - }, - ); + const obsHandle = setupDelayObsHandle(15, { + request: { query: mutation }, + result: mutationResult, + }); const subs = obsHandle.subscribe({ next: () => null, }); @@ -1086,7 +1154,10 @@ describe('mutation results', () => { todoList: (prev, options) => { const mResult = options.mutationResult as any; assert.equal(mResult.data.createTodo.id, '99'); - assert.equal(mResult.data.createTodo.text, 'This one was created with a mutation.'); + assert.equal( + mResult.data.createTodo.text, + 'This one was created with a mutation.', + ); const state = cloneDeep(prev) as any; state.todoList.todos.unshift(mResult.data.createTodo); @@ -1096,49 +1167,56 @@ describe('mutation results', () => { }); }); - it('does not make next queries fail if a mutation fails', (done) => { - const obsHandle = setupObsHandle({ - request: { query: mutation }, - result: {errors: [new Error('mock error')]}, - }, { - request: { query: queryWithTypename }, - result, - }); + it('does not make next queries fail if a mutation fails', done => { + const obsHandle = setupObsHandle( + { + request: { query: mutation }, + result: { errors: [new Error('mock error')] }, + }, + { + request: { query: queryWithTypename }, + result, + }, + ); obsHandle.subscribe({ next(obj) { - client.mutate({ - mutation, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - const state = cloneDeep(prev) as any; - // It's unfortunate that this function is called at all, but we are removing - // the updateQueries API soon so it won't matter. - state.todoList.todos.unshift(mResult.data && mResult.data.createTodo); - return state; - }, - }, - }) - .then( - () => done(new Error('Mutation should have failed')), - () => client.mutate({ + client + .mutate({ mutation, updateQueries: { todoList: (prev, options) => { const mResult = options.mutationResult as any; const state = cloneDeep(prev) as any; - state.todoList.todos.unshift(mResult.data.createTodo); + // It's unfortunate that this function is called at all, but we are removing + // the updateQueries API soon so it won't matter. 
+ state.todoList.todos.unshift( + mResult.data && mResult.data.createTodo, + ); return state; }, }, - }), - ) - .then( - () => done(new Error('Mutation should have failed')), - () => obsHandle.refetch(), - ) - .then(() => done(), done); + }) + .then( + () => done(new Error('Mutation should have failed')), + () => + client.mutate({ + mutation, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + const state = cloneDeep(prev) as any; + state.todoList.todos.unshift(mResult.data.createTodo); + return state; + }, + }, + }), + ) + .then( + () => done(new Error('Mutation should have failed')), + () => obsHandle.refetch(), + ) + .then(() => done(), done); }, }); }); @@ -1155,35 +1233,37 @@ describe('mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }); - }) - .then(() => { - return client.mutate({ - mutation, - updateQueries: { - todoList: (prev, options) => { - throw new Error(`Hello... It's me.`); + }) + .then(() => { + return client.mutate({ + mutation, + updateQueries: { + todoList: (prev, options) => { + throw new Error(`Hello... It's me.`); + }, }, - }, + }); + }) + .then(() => { + subscriptionHandle.unsubscribe(); + assert.lengthOf(errors, 1); + assert.equal(errors[0].message, `Hello... It's me.`); + console.error = oldError; }); - }) - .then(() => { - subscriptionHandle.unsubscribe(); - assert.lengthOf(errors, 1); - assert.equal(errors[0].message, `Hello... 
It's me.`); - console.error = oldError; - }); }); }); - it('does not fail if one of the previous queries did not complete correctly', (done) => { + it('does not fail if one of the previous queries did not complete correctly', done => { const variableQuery = gql` query Echo($message: String) { echo(message: $message) @@ -1226,16 +1306,20 @@ describe('mutation results', () => { }, }; - networkInterface = mockNetworkInterface({ - request: { query: variableQuery, variables: variables1 }, - result: result1, - }, { - request: { query: variableQuery, variables: variables2 }, - result: result2, - }, { - request: { query: resetMutation }, - result: resetMutationResult, - }); + networkInterface = mockNetworkInterface( + { + request: { query: variableQuery, variables: variables1 }, + result: result1, + }, + { + request: { query: variableQuery, variables: variables2 }, + result: result2, + }, + { + request: { query: resetMutation }, + result: resetMutationResult, + }, + ); client = new ApolloClient({ networkInterface, @@ -1257,7 +1341,7 @@ describe('mutation results', () => { let yieldCount = 0; watchedQuery.subscribe({ - next: ({data}: any) => { + next: ({ data }: any) => { yieldCount += 1; if (yieldCount === 1) { assert.equal(data.echo, 'b'); @@ -1265,7 +1349,7 @@ describe('mutation results', () => { mutation: resetMutation, updateQueries: { Echo: (prev, options) => { - return {echo: '0'}; + return { echo: '0' }; }, }, }); @@ -1288,7 +1372,7 @@ describe('mutation results', () => { client = new ApolloClient({ addTypename: false, networkInterface: { - query ({ variables }) { + query({ variables }) { switch (count++) { case 0: assert.deepEqual(variables, { a: 1, b: 2 }); @@ -1297,7 +1381,11 @@ describe('mutation results', () => { assert.deepEqual(variables, { a: 1, c: 3 }); return Promise.resolve({ data: { result: 'world' } }); case 2: - assert.deepEqual(variables, { a: undefined, b: 2, c: 3 }); + assert.deepEqual(variables, { + a: undefined, + b: 2, + c: 3, + }); return Promise.resolve({ data: { result: 'goodbye' } }); case 3: assert.deepEqual(variables, {}); @@ -1310,7 +1398,7 @@ describe('mutation results', () => { }); const mutation = gql` - mutation ($a: Int!, $b: Int, $c: Int) { + mutation($a: Int!, $b: Int, $c: Int) { result(a: $a, b: $b, c: $c) } `; @@ -1331,17 +1419,19 @@ describe('mutation results', () => { client.mutate({ mutation, }), - ]).then(() => { - assert.deepEqual(client.queryManager.getApolloState().data, { - ROOT_MUTATION: { - 'result({"a":1,"b":2})': 'hello', - 'result({"a":1,"c":3})': 'world', - 'result({"b":2,"c":3})': 'goodbye', - 'result({})': 'moon', - }, - }); - done(); - }).catch(done); + ]) + .then(() => { + assert.deepEqual(client.queryManager.getApolloState().data, { + ROOT_MUTATION: { + 'result({"a":1,"b":2})': 'hello', + 'result({"a":1,"c":3})': 'world', + 'result({"b":2,"c":3})': 'goodbye', + 'result({})': 'moon', + }, + }); + done(); + }) + .catch(done); }); it('allows mutations with default values', done => { @@ -1350,16 +1440,27 @@ describe('mutation results', () => { client = new ApolloClient({ addTypename: false, networkInterface: { - query ({ variables }) { + query({ variables }) { switch (count++) { case 0: - assert.deepEqual(variables, { a: 1, b: 'water' }); + assert.deepEqual(variables, { + a: 1, + b: 'water', + }); return Promise.resolve({ data: { result: 'hello' } }); case 1: - assert.deepEqual(variables, { a: 2, b: 'cheese', c: 3 }); + assert.deepEqual(variables, { + a: 2, + b: 'cheese', + c: 3, + }); return Promise.resolve({ data: { result: 'world' } }); 
case 2: - assert.deepEqual(variables, { a: 1, b: 'cheese', c: 3 }); + assert.deepEqual(variables, { + a: 1, + b: 'cheese', + c: 3, + }); return Promise.resolve({ data: { result: 'goodbye' } }); default: return Promise.reject(new Error('Too many network calls.')); @@ -1369,7 +1470,7 @@ describe('mutation results', () => { }); const mutation = gql` - mutation ($a: Int = 1, $b: String = "cheese", $c: Int) { + mutation($a: Int = 1, $b: String = "cheese", $c: Int) { result(a: $a, b: $b, c: $c) } `; @@ -1387,16 +1488,18 @@ describe('mutation results', () => { mutation, variables: { c: 3 }, }), - ]).then(() => { - assert.deepEqual(client.queryManager.getApolloState().data, { - ROOT_MUTATION: { - 'result({"a":1,"b":"water"})': 'hello', - 'result({"a":2,"b":"cheese","c":3})': 'world', - 'result({"a":1,"b":"cheese","c":3})': 'goodbye', - }, - }); - done(); - }).catch(done); + ]) + .then(() => { + assert.deepEqual(client.queryManager.getApolloState().data, { + ROOT_MUTATION: { + 'result({"a":1,"b":"water"})': 'hello', + 'result({"a":2,"b":"cheese","c":3})': 'world', + 'result({"a":1,"b":"cheese","c":3})': 'goodbye', + }, + }); + done(); + }) + .catch(done); }); it('will pass null to the network interface when provided', done => { @@ -1405,16 +1508,28 @@ describe('mutation results', () => { client = new ApolloClient({ addTypename: false, networkInterface: { - query ({ variables }) { + query({ variables }) { switch (count++) { case 0: - assert.deepEqual(variables, { a: 1, b: 2, c: null }); + assert.deepEqual(variables, { + a: 1, + b: 2, + c: null, + }); return Promise.resolve({ data: { result: 'hello' } }); case 1: - assert.deepEqual(variables, { a: 1, b: null, c: 3 }); + assert.deepEqual(variables, { + a: 1, + b: null, + c: 3, + }); return Promise.resolve({ data: { result: 'world' } }); case 2: - assert.deepEqual(variables, { a: null, b: null, c: null }); + assert.deepEqual(variables, { + a: null, + b: null, + c: null, + }); return Promise.resolve({ data: { result: 'moon' } }); default: return Promise.reject(new Error('Too many network calls.')); @@ -1424,7 +1539,7 @@ describe('mutation results', () => { }); const mutation = gql` - mutation ($a: Int!, $b: Int, $c: Int) { + mutation($a: Int!, $b: Int, $c: Int) { result(a: $a, b: $b, c: $c) } `; @@ -1442,16 +1557,18 @@ describe('mutation results', () => { mutation, variables: { a: null, b: null, c: null }, }), - ]).then(() => { - assert.deepEqual(client.queryManager.getApolloState().data, { - ROOT_MUTATION: { - 'result({"a":1,"b":2,"c":null})': 'hello', - 'result({"a":1,"b":null,"c":3})': 'world', - 'result({"a":null,"b":null,"c":null})': 'moon', - }, - }); - done(); - }).catch(done); + ]) + .then(() => { + assert.deepEqual(client.queryManager.getApolloState().data, { + ROOT_MUTATION: { + 'result({"a":1,"b":2,"c":null})': 'hello', + 'result({"a":1,"b":null,"c":3})': 'world', + 'result({"a":null,"b":null,"c":null})': 'moon', + }, + }); + done(); + }) + .catch(done); }); describe('store transaction updater', () => { @@ -1486,60 +1603,81 @@ describe('mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); 
+ subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }); - }) - .then(() => { - return client.mutate({ - mutation, - update: (proxy, mResult: any) => { - assert.equal(mResult.data.createTodo.id, '99'); - assert.equal(mResult.data.createTodo.text, 'This one was created with a mutation.'); + }) + .then(() => { + return client.mutate({ + mutation, + update: (proxy, mResult: any) => { + assert.equal(mResult.data.createTodo.id, '99'); + assert.equal( + mResult.data.createTodo.text, + 'This one was created with a mutation.', + ); - const id = 'TodoList5'; - const fragment = gql`fragment todoList on TodoList { todos { id text completed __typename } }`; + const id = 'TodoList5'; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; - const data: any = proxy.readFragment({ id, fragment }); + const data: any = proxy.readFragment({ id, fragment }); - proxy.writeFragment({ - data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, - id, fragment, - }); - }, - }); - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, + }); + }, + }); + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); it('does not fail if optional query variables are not supplied', () => { let subscriptionHandle: Subscription; const mutationWithVars = gql` - mutation createTodo($requiredVar: String!, $optionalVar: String) { - createTodo(requiredVar: $requiredVar, optionalVar:$optionalVar) { - id - text - completed - __typename - } - __typename + mutation createTodo($requiredVar: String!, $optionalVar: String) { + createTodo(requiredVar: $requiredVar, optionalVar: $optionalVar) { + id + text + completed + __typename } + __typename + } `; // the test will pass if optionalVar is uncommented @@ -1554,107 +1692,163 @@ describe('mutation results', () => { }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise((resolve, reject) => { - const handle = client.watchQuery({ - query, - variables, - }); - subscriptionHandle = handle.subscribe({ - next(res) { - resolve(res); - }, - }); - }); - }) - .then(() => { - return client.mutate({ - mutation: mutationWithVars, - variables, - update: (proxy, mResult: any) => { - assert.equal(mResult.data.createTodo.id, '99'); - assert.equal(mResult.data.createTodo.text, 'This one was created with a mutation.'); - - const id = 'TodoList5'; - const fragment = gql`fragment todoList on TodoList { todos { id text completed __typename } }`; - - const data: any = proxy.readFragment({ id, fragment }); - - proxy.writeFragment({ - data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, - id, fragment, + .then(() => { + // we 
have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ + query, + variables, }); - }, - }); - }) - .then(() => { - return client.query({query}); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); - - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); - - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); - }); - - it('does not make next queries fail if a mutation fails', (done) => { - const obsHandle = setupObsHandle({ - request: { query: mutation }, - result: {errors: [new Error('mock error')]}, - }, { - request: { query: queryWithTypename }, - result, - }); - - obsHandle.subscribe({ - next(obj) { - client.mutate({ - mutation, + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); + }); + }) + .then(() => { + return client.mutate({ + mutation: mutationWithVars, + variables, update: (proxy, mResult: any) => { assert.equal(mResult.data.createTodo.id, '99'); - assert.equal(mResult.data.createTodo.text, 'This one was created with a mutation.'); + assert.equal( + mResult.data.createTodo.text, + 'This one was created with a mutation.', + ); const id = 'TodoList5'; - const fragment = gql`fragment todoList on TodoList { todos { id text completed __typename } }`; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; const data: any = proxy.readFragment({ id, fragment }); proxy.writeFragment({ - data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, - id, fragment, + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, }); }, - }) - .then( - () => done(new Error('Mutation should have failed')), - () => client.mutate({ + }); + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); + + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); + + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); + }); + + it('does not make next queries fail if a mutation fails', done => { + const obsHandle = setupObsHandle( + { + request: { query: mutation }, + result: { errors: [new Error('mock error')] }, + }, + { + request: { query: queryWithTypename }, + result, + }, + ); + + obsHandle.subscribe({ + next(obj) { + client + .mutate({ mutation, update: (proxy, mResult: any) => { assert.equal(mResult.data.createTodo.id, '99'); - assert.equal(mResult.data.createTodo.text, 'This one was created with a mutation.'); + assert.equal( + mResult.data.createTodo.text, + 'This one was created with a mutation.', + ); const id = 'TodoList5'; - const fragment = gql`fragment todoList on TodoList { todos { id text completed __typename } }`; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; const data: any = proxy.readFragment({ id, fragment }); proxy.writeFragment({ - data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, - id, fragment, + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, }); }, - }), - ) - .then( - () => done(new Error('Mutation should have failed')), - () 
=> obsHandle.refetch(), - ) - .then(() => done(), done); + }) + .then( + () => done(new Error('Mutation should have failed')), + () => + client.mutate({ + mutation, + update: (proxy, mResult: any) => { + assert.equal(mResult.data.createTodo.id, '99'); + assert.equal( + mResult.data.createTodo.text, + 'This one was created with a mutation.', + ); + + const id = 'TodoList5'; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; + + const data: any = proxy.readFragment({ id, fragment }); + + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, + }); + }, + }), + ) + .then( + () => done(new Error('Mutation should have failed')), + () => obsHandle.refetch(), + ) + .then(() => done(), done); }, }); }); @@ -1671,29 +1865,31 @@ describe('mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); + }) + .then(() => { + return client.mutate({ + mutation, + update: () => { + throw new Error(`Hello... It's me.`); + }, + }); + }) + .then(() => { + subscriptionHandle.unsubscribe(); + assert.lengthOf(errors, 1); + assert.equal(errors[0].message, `Hello... It's me.`); + console.error = oldError; }); - }) - .then(() => { - return client.mutate({ - mutation, - update: () => { - throw new Error(`Hello... It's me.`); - }, - }); - }) - .then(() => { - subscriptionHandle.unsubscribe(); - assert.lengthOf(errors, 1); - assert.equal(errors[0].message, `Hello... 
It's me.`); - console.error = oldError; - }); }); }); }); diff --git a/test/networkInterface.ts b/test/networkInterface.ts index e82936d0ccd..cd445ff5980 100644 --- a/test/networkInterface.ts +++ b/test/networkInterface.ts @@ -11,17 +11,13 @@ const { assert, expect } = chai; import { createNetworkInterface, -// NetworkInterface, -// Request, + // NetworkInterface, + // Request, } from '../src/transport/networkInterface'; -import { - MiddlewareRequest, -} from '../src/transport/middleware'; +import { MiddlewareRequest } from '../src/transport/middleware'; -import { - AfterwareResponse, -} from '../src/transport/afterware'; +import { AfterwareResponse } from '../src/transport/afterware'; import gql from 'graphql-tag'; @@ -131,19 +127,25 @@ describe('network interface', () => { // We won't be too careful about counting calls or closely checking // parameters, but just do the basic stuff to ensure the request looks right fetchMock.post(swapiUrl, (url, opts) => { - const { query, variables } = JSON.parse((opts as RequestInit).body!.toString()); + const { query, variables } = JSON.parse( + (opts as RequestInit).body!.toString(), + ); if (query === print(simpleQueryWithNoVars)) { return simpleResult; } - if (query === print(simpleQueryWithVar) - && isEqual(variables, { personNum: 1 })) { + if ( + query === print(simpleQueryWithVar) && + isEqual(variables, { personNum: 1 }) + ) { return simpleResult; } - if (query === print(complexQueryWithTwoVars) - && isEqual(variables, { personNum: 1, filmNum: 1 })) { + if ( + query === print(complexQueryWithTwoVars) && + isEqual(variables, { personNum: 1, filmNum: 1 }) + ) { return complexResult; } @@ -187,23 +189,29 @@ describe('network interface', () => { it('should allow for storing of custom options', () => { const customOpts: RequestInit = { - headers: { 'Authorizaion': 'working' }, + headers: { Authorizaion: 'working' }, credentials: 'include', }; - const networkInterface = createNetworkInterface({ uri: '/graphql', opts: customOpts }); + const networkInterface = createNetworkInterface({ + uri: '/graphql', + opts: customOpts, + }); assert.deepEqual(networkInterface._opts, assign({}, customOpts)); }); it('should not mutate custom options', () => { const customOpts: RequestInit = { - headers: [ 'Authorizaion', 'working' ], + headers: ['Authorizaion', 'working'], credentials: 'include', }; const originalOpts = assign({}, customOpts); - const networkInterface = createNetworkInterface({ uri: '/graphql', opts: customOpts }); + const networkInterface = createNetworkInterface({ + uri: '/graphql', + opts: customOpts, + }); delete customOpts.headers; @@ -225,7 +233,6 @@ describe('network interface', () => { 'Middleware must implement the applyMiddleware function', ); } - }); it('should take a middleware and assign it', () => { @@ -248,9 +255,7 @@ describe('network interface', () => { }); it('should alter the request variables', () => { - const testWare1 = TestWare([ - { key: 'personNum', val: 1 }, - ]); + const testWare1 = TestWare([{ key: 'personNum', val: 1 }]); const swapi = createNetworkInterface({ uri: swapiUrl }); swapi.use([testWare1]); @@ -268,9 +273,7 @@ describe('network interface', () => { }); it('should alter the options but not overwrite defaults', () => { - const testWare1 = TestWare([], [ - { key: 'planet', val: 'mars' }, - ]); + const testWare1 = TestWare([], [{ key: 'planet', val: 'mars' }]); const swapi = createNetworkInterface({ uri: swapiUrl }); swapi.use([testWare1]); @@ -281,18 +284,22 @@ describe('network interface', () => { debugName: 
'People query', }; - return swapi.query(simpleRequest).then((data) => { + return swapi.query(simpleRequest).then(data => { assert.equal((fetchMock.lastCall()[1] as any).planet, 'mars'); assert.notOk((swapi._opts)['planet']); }); }); it('should alter the request body params', () => { - const testWare1 = TestWare([], [], [ - { key: 'newParam', val: '0123456789' }, - ]); + const testWare1 = TestWare( + [], + [], + [{ key: 'newParam', val: '0123456789' }], + ); - const swapi = createNetworkInterface({ uri: 'http://graphql-swapi.test/' }); + const swapi = createNetworkInterface({ + uri: 'http://graphql-swapi.test/', + }); swapi.use([testWare1]); const simpleRequest = { query: simpleQueryWithVar, @@ -300,11 +307,12 @@ describe('network interface', () => { debugName: 'People query', }; - return swapi.query(simpleRequest).then((data) => { + return swapi.query(simpleRequest).then(data => { return assert.deepEqual( JSON.parse((fetchMock.lastCall()[1] as any).body), { - query: 'query people($personNum: Int!) {\n allPeople(first: $personNum) {\n people {\n name\n }\n }\n}\n', + query: + 'query people($personNum: Int!) {\n allPeople(first: $personNum) {\n people {\n name\n }\n }\n}\n', variables: { personNum: 1 }, debugName: 'People query', newParam: '0123456789', @@ -314,14 +322,12 @@ describe('network interface', () => { }); it('handle multiple middlewares', () => { - const testWare1 = TestWare([ - { key: 'personNum', val: 1 }, - ]); - const testWare2 = TestWare([ - { key: 'filmNum', val: 1 }, - ]); + const testWare1 = TestWare([{ key: 'personNum', val: 1 }]); + const testWare2 = TestWare([{ key: 'filmNum', val: 1 }]); - const swapi = createNetworkInterface({ uri: 'http://graphql-swapi.test/' }); + const swapi = createNetworkInterface({ + uri: 'http://graphql-swapi.test/', + }); swapi.use([testWare1, testWare2]); // this is a stub for the end user client api const simpleRequest = { @@ -337,16 +343,11 @@ describe('network interface', () => { }); it('should chain use() calls', () => { - const testWare1 = TestWare([ - { key: 'personNum', val: 1 }, - ]); - const testWare2 = TestWare([ - { key: 'filmNum', val: 1 }, - ]); + const testWare1 = TestWare([{ key: 'personNum', val: 1 }]); + const testWare2 = TestWare([{ key: 'filmNum', val: 1 }]); const swapi = createNetworkInterface({ uri: swapiUrl }); - swapi.use([testWare1]) - .use([testWare2]); + swapi.use([testWare1]).use([testWare2]); const simpleRequest = { query: complexQueryWithTwoVars, variables: {}, @@ -364,22 +365,22 @@ describe('network interface', () => { const testWare2 = TestAfterWare(); const networkInterface = createNetworkInterface({ uri: swapiUrl }); - networkInterface.use([testWare1]) - .useAfter([testWare2]); + networkInterface.use([testWare1]).useAfter([testWare2]); assert.deepEqual(networkInterface._middlewares, [testWare1]); assert.deepEqual(networkInterface._afterwares, [testWare2]); }); - }); describe('afterware', () => { it('should return errors thrown in afterwares', () => { const networkInterface = createNetworkInterface({ uri: swapiUrl }); - networkInterface.useAfter([{ - applyAfterware() { - throw Error('Afterware error'); + networkInterface.useAfter([ + { + applyAfterware() { + throw Error('Afterware error'); + }, }, - }]); + ]); const simpleRequest = { query: simpleQueryWithNoVars, @@ -407,7 +408,6 @@ describe('network interface', () => { 'Afterware must implement the applyAfterware function', ); } - }); it('should take a afterware and assign it', () => { @@ -434,8 +434,7 @@ describe('network interface', () => { const testWare2 
= TestAfterWare(); const networkInterface = createNetworkInterface({ uri: '/graphql' }); - networkInterface.useAfter([testWare1]) - .useAfter([testWare2]); + networkInterface.useAfter([testWare1]).useAfter([testWare2]); assert.deepEqual(networkInterface._afterwares, [testWare1, testWare2]); }); @@ -445,12 +444,10 @@ describe('network interface', () => { const testWare2 = TestWare(); const networkInterface = createNetworkInterface({ uri: swapiUrl }); - networkInterface.useAfter([testWare1]) - .use([testWare2]); + networkInterface.useAfter([testWare1]).use([testWare2]); assert.deepEqual(networkInterface._middlewares, [testWare2]); assert.deepEqual(networkInterface._afterwares, [testWare1]); }); - }); describe('making a request', () => { @@ -484,22 +481,32 @@ describe('network interface', () => { }); it('should throw an error with the response when request is forbidden', () => { - const unauthorizedInterface = createNetworkInterface({ uri: unauthorizedUrl }); + const unauthorizedInterface = createNetworkInterface({ + uri: unauthorizedUrl, + }); return unauthorizedInterface.query(doomedToFail).catch(err => { assert.isOk(err.response); assert.equal(err.response.status, 403); - assert.equal(err.message, 'Network request failed with status 403 - "Forbidden"'); + assert.equal( + err.message, + 'Network request failed with status 403 - "Forbidden"', + ); }); }); it('should throw an error with the response when service is unavailable', () => { - const unauthorizedInterface = createNetworkInterface({ uri: serviceUnavailableUrl }); + const unauthorizedInterface = createNetworkInterface({ + uri: serviceUnavailableUrl, + }); return unauthorizedInterface.query(doomedToFail).catch(err => { assert.isOk(err.response); assert.equal(err.response.status, 503); - assert.equal(err.message, 'Network request failed with status 503 - "Service Unavailable"'); + assert.equal( + err.message, + 'Network request failed with status 503 - "Service Unavailable"', + ); }); }); }); @@ -529,7 +536,8 @@ describe('network interface', () => { debugName: 'People query', }; - const expected = 'Removing an @connection directive even though it does not have a ' + + const expected = + 'Removing an @connection directive even though it does not have a ' + 'key. 
You may want to use the key parameter to specify a store key.'; return assert.eventually.deepEqual( @@ -545,22 +553,21 @@ describe('network interface', () => { // simulate middleware by altering variables and options function TestWare( - variables: Array<{ key: string, val: any }> = [], - options: Array<{ key: string, val: any }> = [], - bodyParams: Array<{ key: string, val: any }> = [], + variables: Array<{ key: string; val: any }> = [], + options: Array<{ key: string; val: any }> = [], + bodyParams: Array<{ key: string; val: any }> = [], ) { - return { applyMiddleware: (request: MiddlewareRequest, next: Function): void => { - variables.map((variable) => { + variables.map(variable => { (request.request.variables)[variable.key] = variable.val; }); - options.map((variable) => { + options.map(variable => { (request.options)[variable.key] = variable.val; }); - bodyParams.map((param) => { + bodyParams.map(param => { request.request[param.key as string] = param.val; }); @@ -570,13 +577,10 @@ function TestWare( } // simulate afterware by altering variables and options -function TestAfterWare( - options: Array<{ key: string, val: any }> = [], -) { - +function TestAfterWare(options: Array<{ key: string; val: any }> = []) { return { applyAfterware: (response: AfterwareResponse, next: Function): void => { - options.map((variable) => { + options.map(variable => { (response.options)[variable.key] = variable.val; }); diff --git a/test/optimistic.ts b/test/optimistic.ts index c4170532c86..960fcace848 100644 --- a/test/optimistic.ts +++ b/test/optimistic.ts @@ -6,19 +6,15 @@ import ApolloClient from '../src'; import { MutationQueryReducersMap } from '../src/data/mutationResults'; import { NormalizedCache, StoreObject } from '../src/data/storeUtils'; -import { assign, cloneDeep} from 'lodash'; +import { assign, cloneDeep } from 'lodash'; import { Subscription } from '../src/util/Observable'; import gql from 'graphql-tag'; -import { - addTypenameToDocument, -} from '../src/queries/queryTransform'; +import { addTypenameToDocument } from '../src/queries/queryTransform'; -import { - isMutationResultAction, -} from '../src/actions'; +import { isMutationResultAction } from '../src/actions'; describe('optimistic mutation results', () => { const query = gql` @@ -108,17 +104,20 @@ describe('optimistic mutation results', () => { let networkInterface: any; type CustomMutationBehavior = { - type: 'CUSTOM_MUTATION_RESULT', - dataId: string, - field: string, - value: any, + type: 'CUSTOM_MUTATION_RESULT'; + dataId: string; + field: string; + value: any; }; function setup(...mockedResponses: any[]) { - networkInterface = mockNetworkInterface({ - request: { query }, - result, - }, ...mockedResponses); + networkInterface = mockNetworkInterface( + { + request: { query }, + result, + }, + ...mockedResponses, + ); client = new ApolloClient({ networkInterface, @@ -175,10 +174,10 @@ describe('optimistic mutation results', () => { interface IOptimisticResponse { __typename: string; createTodo: { - __typename: string, - id: string, - text: string, - completed: boolean, + __typename: string; + id: string; + text: string; + completed: boolean; }; } @@ -213,153 +212,208 @@ describe('optimistic mutation results', () => { request: { query: mutation }, error: new Error('forbidden (test error)'), }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - updateQueries, - }); + .then(() => { + const promise = client.mutate({ + mutation, + optimisticResponse, + updateQueries, + }); - const dataInStore = 
client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); - return promise; - }) - .catch((err) => { - assert.instanceOf(err, Error); - assert.equal(err.message, 'Network error: forbidden (test error)'); + return promise; + }) + .catch(err => { + assert.instanceOf(err, Error); + assert.equal(err.message, 'Network error: forbidden (test error)'); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 3); - assert.notProperty(dataInStore, 'Todo99'); - }); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 3); + assert.notProperty(dataInStore, 'Todo99'); + }); }); it('handles errors produced by one mutation in a series', () => { let subscriptionHandle: Subscription; - return setup({ - request: { query: mutation }, - error: new Error('forbidden (test error)'), - }, { - request: { query: mutation }, - result: mutationResult2, - }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + return setup( + { + request: { query: mutation }, + error: new Error('forbidden (test error)'), + }, + { + request: { query: mutation }, + result: mutationResult2, + }, + ) + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); + }); + }) + .then(() => { + const promise = client + .mutate({ + mutation, + optimisticResponse, + updateQueries, + }) + .catch(err => { + // it is ok to fail here + assert.instanceOf(err, Error); + assert.equal( + err.message, + 'Network error: forbidden (test error)', + ); + return null; + }); + + const promise2 = client.mutate({ + mutation, + optimisticResponse: optimisticResponse2, + updateQueries, }); - }); - }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - updateQueries, - }).catch((err) => { - // it is ok to fail here - assert.instanceOf(err, Error); - assert.equal(err.message, 'Network error: forbidden (test error)'); - return null; - }); - const promise2 = client.mutate({ - mutation, - optimisticResponse: optimisticResponse2, - updateQueries, + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); + assert.equal( + (dataInStore['Todo66'] as any).text, + 'Optimistically generated 2', + ); + + return Promise.all([promise, promise2]); + }) + .then(() => { + subscriptionHandle.unsubscribe(); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.notProperty(dataInStore, 'Todo99'); + assert.property(dataInStore, 'Todo66'); + (assert).deepInclude( + 
(dataInStore['TodoList5'] as any).todos, + realIdValue('Todo66'), + ); + (assert).notDeepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo99'), + ); }); - - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); - assert.equal((dataInStore['Todo66'] as any).text, 'Optimistically generated 2'); - - return Promise.all([promise, promise2]); - }) - .then(() => { - subscriptionHandle.unsubscribe(); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.notProperty(dataInStore, 'Todo99'); - assert.property(dataInStore, 'Todo66'); - (assert).deepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo66')); - (assert).notDeepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo99')); - }); }); it('can run 2 mutations concurrently and handles all intermediate states well', () => { - function checkBothMutationsAreApplied(expectedText1: any, expectedText2: any) { + function checkBothMutationsAreApplied( + expectedText1: any, + expectedText2: any, + ) { const dataInStore = client.queryManager.getDataWithOptimisticResults(); assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); assert.property(dataInStore, 'Todo99'); assert.property(dataInStore, 'Todo66'); // can be removed once @types/chai adds deepInclude - (assert).deepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo66')); - (assert).deepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo99')); + (assert).deepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo66'), + ); + (assert).deepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo99'), + ); assert.equal((dataInStore['Todo99'] as any).text, expectedText1); assert.equal((dataInStore['Todo66'] as any).text, expectedText2); } let subscriptionHandle: Subscription; - return setup({ - request: { query: mutation }, - result: mutationResult, - }, { - request: { query: mutation }, - result: mutationResult2, - // make sure it always happens later - delay: 100, - }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + return setup( + { + request: { query: mutation }, + result: mutationResult, + }, + { + request: { query: mutation }, + result: mutationResult2, + // make sure it always happens later + delay: 100, + }, + ) + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); + }) + .then(() => { + const promise = client + .mutate({ + mutation, + optimisticResponse, + updateQueries, + }) + .then(res => { + checkBothMutationsAreApplied( + 'This one was created with a mutation.', + 'Optimistically generated 2', + ); + const latestState = client.queryManager.mutationStore; + assert.equal(latestState.get('5').loading, false); + assert.equal(latestState.get('6').loading, true); + + return res; + }); + + const promise2 = client + .mutate({ + mutation, + optimisticResponse: optimisticResponse2, + updateQueries, + }) + .then(res 
=> { + checkBothMutationsAreApplied( + 'This one was created with a mutation.', + 'Second mutation.', + ); + const latestState = client.queryManager.mutationStore; + assert.equal(latestState.get('5').loading, false); + assert.equal(latestState.get('6').loading, false); + + return res; + }); + + const mutationsState = client.queryManager.mutationStore; + assert.equal(mutationsState.get('5').loading, true); + assert.equal(mutationsState.get('6').loading, true); + + checkBothMutationsAreApplied( + 'Optimistically generated', + 'Optimistically generated 2', + ); + + return Promise.all([promise, promise2]); + }) + .then(() => { + subscriptionHandle.unsubscribe(); + checkBothMutationsAreApplied( + 'This one was created with a mutation.', + 'Second mutation.', + ); }); - }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - updateQueries, - }).then((res) => { - checkBothMutationsAreApplied('This one was created with a mutation.', 'Optimistically generated 2'); - const latestState = client.queryManager.mutationStore; - assert.equal(latestState.get('5').loading, false); - assert.equal(latestState.get('6').loading, true); - - return res; - }); - - const promise2 = client.mutate({ - mutation, - optimisticResponse: optimisticResponse2, - updateQueries, - }).then((res) => { - checkBothMutationsAreApplied('This one was created with a mutation.', 'Second mutation.'); - const latestState = client.queryManager.mutationStore; - assert.equal(latestState.get('5').loading, false); - assert.equal(latestState.get('6').loading, false); - - return res; - }); - - const mutationsState = client.queryManager.mutationStore; - assert.equal(mutationsState.get('5').loading, true); - assert.equal(mutationsState.get('6').loading, true); - - checkBothMutationsAreApplied('Optimistically generated', 'Optimistically generated 2'); - - return Promise.all([promise, promise2]); - }) - .then(() => { - subscriptionHandle.unsubscribe(); - checkBothMutationsAreApplied('This one was created with a mutation.', 'Second mutation.'); - }); }); }); @@ -367,13 +421,31 @@ describe('optimistic mutation results', () => { const update = (proxy: any, mResult: any) => { const data: any = proxy.readFragment({ id: 'TodoList5', - fragment: gql`fragment todoList on TodoList { todos { id text completed __typename } }`, + fragment: gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `, }); proxy.writeFragment({ data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, id: 'TodoList5', - fragment: gql`fragment todoList on TodoList { todos { id text completed __typename } }`, + fragment: gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `, }); }; @@ -382,160 +454,215 @@ describe('optimistic mutation results', () => { request: { query: mutation }, error: new Error('forbidden (test error)'), }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - update, - }); + .then(() => { + const promise = client.mutate({ + mutation, + optimisticResponse, + update, + }); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically 
generated', + ); - return promise; - }) - .catch((err) => { - assert.instanceOf(err, Error); - assert.equal(err.message, 'Network error: forbidden (test error)'); + return promise; + }) + .catch(err => { + assert.instanceOf(err, Error); + assert.equal(err.message, 'Network error: forbidden (test error)'); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 3); - assert.notProperty(dataInStore, 'Todo99'); - }); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 3); + assert.notProperty(dataInStore, 'Todo99'); + }); }); it('handles errors produced by one mutation in a series', () => { let subscriptionHandle: Subscription; - return setup({ - request: { query: mutation }, - error: new Error('forbidden (test error)'), - }, { - request: { query: mutation }, - result: mutationResult2, - }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + return setup( + { + request: { query: mutation }, + error: new Error('forbidden (test error)'), + }, + { + request: { query: mutation }, + result: mutationResult2, + }, + ) + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); + }); + }) + .then(() => { + const promise = client + .mutate({ + mutation, + optimisticResponse, + update, + }) + .catch(err => { + // it is ok to fail here + assert.instanceOf(err, Error); + assert.equal( + err.message, + 'Network error: forbidden (test error)', + ); + return null; + }); + + const promise2 = client.mutate({ + mutation, + optimisticResponse: optimisticResponse2, + update, }); - }); - }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - update, - }).catch((err) => { - // it is ok to fail here - assert.instanceOf(err, Error); - assert.equal(err.message, 'Network error: forbidden (test error)'); - return null; - }); - const promise2 = client.mutate({ - mutation, - optimisticResponse: optimisticResponse2, - update, + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); + assert.equal( + (dataInStore['Todo66'] as any).text, + 'Optimistically generated 2', + ); + + return Promise.all([promise, promise2]); + }) + .then(() => { + subscriptionHandle.unsubscribe(); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.notProperty(dataInStore, 'Todo99'); + assert.property(dataInStore, 'Todo66'); + (assert).deepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo66'), + ); + (assert).notDeepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo99'), + ); }); - - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); - assert.equal((dataInStore['Todo66'] as 
any).text, 'Optimistically generated 2'); - - return Promise.all([promise, promise2]); - }) - .then(() => { - subscriptionHandle.unsubscribe(); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.notProperty(dataInStore, 'Todo99'); - assert.property(dataInStore, 'Todo66'); - (assert).deepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo66')); - (assert).notDeepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo99')); - }); }); it('can run 2 mutations concurrently and handles all intermediate states well', () => { - function checkBothMutationsAreApplied(expectedText1: any, expectedText2: any) { + function checkBothMutationsAreApplied( + expectedText1: any, + expectedText2: any, + ) { const dataInStore = client.queryManager.getDataWithOptimisticResults(); assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); assert.property(dataInStore, 'Todo99'); assert.property(dataInStore, 'Todo66'); - (assert).deepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo66')); - (assert).deepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo99')); + (assert).deepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo66'), + ); + (assert).deepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo99'), + ); assert.equal((dataInStore['Todo99'] as any).text, expectedText1); assert.equal((dataInStore['Todo66'] as any).text, expectedText2); } let subscriptionHandle: Subscription; - return setup({ - request: { query: mutation }, - result: mutationResult, - }, { - request: { query: mutation }, - result: mutationResult2, - // make sure it always happens later - delay: 100, - }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + return setup( + { + request: { query: mutation }, + result: mutationResult, + }, + { + request: { query: mutation }, + result: mutationResult2, + // make sure it always happens later + delay: 100, + }, + ) + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); + }) + .then(() => { + const promise = client + .mutate({ + mutation, + optimisticResponse, + update, + }) + .then(res => { + checkBothMutationsAreApplied( + 'This one was created with a mutation.', + 'Optimistically generated 2', + ); + const latestState = client.queryManager.mutationStore; + assert.equal(latestState.get('5').loading, false); + assert.equal(latestState.get('6').loading, true); + + return res; + }); + + const promise2 = client + .mutate({ + mutation, + optimisticResponse: optimisticResponse2, + update, + }) + .then(res => { + checkBothMutationsAreApplied( + 'This one was created with a mutation.', + 'Second mutation.', + ); + const latestState = client.queryManager.mutationStore; + assert.equal(latestState.get('5').loading, false); + assert.equal(latestState.get('6').loading, false); + + return res; + }); + + const mutationsState = client.queryManager.mutationStore; + assert.equal(mutationsState.get('5').loading, true); + assert.equal(mutationsState.get('6').loading, true); + + checkBothMutationsAreApplied( + 
'Optimistically generated', + 'Optimistically generated 2', + ); + + return Promise.all([promise, promise2]); + }) + .then(() => { + subscriptionHandle.unsubscribe(); + checkBothMutationsAreApplied( + 'This one was created with a mutation.', + 'Second mutation.', + ); }); - }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - update, - }).then((res) => { - checkBothMutationsAreApplied('This one was created with a mutation.', 'Optimistically generated 2'); - const latestState = client.queryManager.mutationStore; - assert.equal(latestState.get('5').loading, false); - assert.equal(latestState.get('6').loading, true); - - return res; - }); - - const promise2 = client.mutate({ - mutation, - optimisticResponse: optimisticResponse2, - update, - }).then((res) => { - checkBothMutationsAreApplied('This one was created with a mutation.', 'Second mutation.'); - const latestState = client.queryManager.mutationStore; - assert.equal(latestState.get('5').loading, false); - assert.equal(latestState.get('6').loading, false); - - return res; - }); - - const mutationsState = client.queryManager.mutationStore; - assert.equal(mutationsState.get('5').loading, true); - assert.equal(mutationsState.get('6').loading, true); - - checkBothMutationsAreApplied('Optimistically generated', 'Optimistically generated 2'); - - return Promise.all([promise, promise2]); - }) - .then(() => { - subscriptionHandle.unsubscribe(); - checkBothMutationsAreApplied('This one was created with a mutation.', 'Second mutation.'); - }); }); }); }); describe('passing a function to optimisticResponse', () => { const mutation = gql` - mutation createTodo ($text: String) { - createTodo (text: $text) { + mutation createTodo($text: String) { + createTodo(text: $text) { id text completed @@ -575,52 +702,73 @@ describe('optimistic mutation results', () => { request: { query: mutation, variables }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }); - }) - .then(() => { - const promise = client.mutate({ - mutation, - variables, - optimisticResponse, - update: (proxy, mResult: any) => { - assert.equal(mResult.data.createTodo.id, '99'); + }) + .then(() => { + const promise = client.mutate({ + mutation, + variables, + optimisticResponse, + update: (proxy, mResult: any) => { + assert.equal(mResult.data.createTodo.id, '99'); - const id = 'TodoList5'; - const fragment = gql`fragment todoList on TodoList { todos { id text completed __typename } }`; + const id = 'TodoList5'; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; + + const data: any = proxy.readFragment({ id, fragment }); + + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, + }); + }, + }); - const data: any = proxy.readFragment({ id, fragment }); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.equal( + (dataInStore['Todo99'] as 
any).text, + 'Optimistically generated from variables', + ); - proxy.writeFragment({ - data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, - id, fragment, - }); - }, + return promise; + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); + + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); }); - - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated from variables'); - - return promise; - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); - - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); }); }); @@ -695,209 +843,271 @@ describe('optimistic mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }); - }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - assert.equal(mResult.data.createTodo.id, '99'); - - const state = cloneDeep(prev) as any; - state.todoList.todos.unshift(mResult.data.createTodo); - return state; + }) + .then(() => { + const promise = client.mutate({ + mutation, + optimisticResponse, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + assert.equal(mResult.data.createTodo.id, '99'); + + const state = cloneDeep(prev) as any; + state.todoList.todos.unshift(mResult.data.createTodo); + return state; + }, }, - }, - }); - - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); + }); - return promise; - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + return promise; + }) + .then(() => { 
+ return client.query({ query }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); + + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); it('two array insert like mutations', () => { let subscriptionHandle: Subscription; - return setup({ - request: { query: mutation }, - result: mutationResult, - }, { - request: { query: mutation }, - result: mutationResult2, - delay: 50, - }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + return setup( + { + request: { query: mutation }, + result: mutationResult, + }, + { + request: { query: mutation }, + result: mutationResult2, + delay: 50, + }, + ) + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }); - }) - .then(() => { - const updateQueries = { - todoList: (prev, options) => { - const mResult = options.mutationResult; - - const state = cloneDeep(prev); + }) + .then(() => { + const updateQueries = { + todoList: (prev, options) => { + const mResult = options.mutationResult; - if (mResult.data) { - state.todoList.todos.unshift(mResult.data.createTodo); - } + const state = cloneDeep(prev); - return state; - }, - } as MutationQueryReducersMap; - const promise = client.mutate({ - mutation, - optimisticResponse, - updateQueries, - }).then((res) => { - const currentDataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((currentDataInStore['TodoList5'] as any).todos.length, 5); - assert.equal((currentDataInStore['Todo99'] as any).text, 'This one was created with a mutation.'); - assert.equal((currentDataInStore['Todo66'] as any).text, 'Optimistically generated 2'); - return res; - }); + if (mResult.data) { + state.todoList.todos.unshift(mResult.data.createTodo); + } - const promise2 = client.mutate({ - mutation, - optimisticResponse: optimisticResponse2, - updateQueries, - }); + return state; + }, + } as MutationQueryReducersMap; + const promise = client + .mutate({ + mutation, + optimisticResponse, + updateQueries, + }) + .then(res => { + const currentDataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal( + (currentDataInStore['TodoList5'] as any).todos.length, + 5, + ); + assert.equal( + (currentDataInStore['Todo99'] as any).text, + 'This one was created with a mutation.', + ); + assert.equal( + (currentDataInStore['Todo66'] as any).text, + 'Optimistically generated 2', + ); + return res; + }); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); - assert.equal((dataInStore['Todo66'] as any).text, 'Optimistically generated 2'); + const promise2 = client.mutate({ + mutation, + optimisticResponse: optimisticResponse2, + updateQueries, + }); - return Promise.all([promise, promise2]); - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - 
subscriptionHandle.unsubscribe(); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 5); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); + assert.equal( + (dataInStore['Todo66'] as any).text, + 'Optimistically generated 2', + ); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'Second mutation.'); - assert.equal(newResult.data.todoList.todos[1].text, 'This one was created with a mutation.'); - }); + return Promise.all([promise, promise2]); + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 5); + + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'Second mutation.', + ); + assert.equal( + newResult.data.todoList.todos[1].text, + 'This one was created with a mutation.', + ); + }); }); it('two mutations, one fails', () => { let subscriptionHandle: Subscription; - return setup({ - request: { query: mutation }, - error: new Error('forbidden (test error)'), - delay: 20, - }, { - request: { query: mutation }, - result: mutationResult2, - // XXX this test will uncover a flaw in the design of optimistic responses combined with - // updateQueries or result reducers if you un-comment the line below. The issue is that - // optimistic updates are not commutative but are treated as such. When undoing an - // optimistic update, other optimistic updates should be rolled back and re-applied in the - // same order as before, otherwise the store can end up in an inconsistent state. - // delay: 50, - }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + return setup( + { + request: { query: mutation }, + error: new Error('forbidden (test error)'), + delay: 20, + }, + { + request: { query: mutation }, + result: mutationResult2, + // XXX this test will uncover a flaw in the design of optimistic responses combined with + // updateQueries or result reducers if you un-comment the line below. The issue is that + // optimistic updates are not commutative but are treated as such. When undoing an + // optimistic update, other optimistic updates should be rolled back and re-applied in the + // same order as before, otherwise the store can end up in an inconsistent state. 
+ // delay: 50, + }, + ) + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }); - }) - .then(() => { - const updateQueries = { - todoList: (prev, options) => { - const mResult = options.mutationResult; + }) + .then(() => { + const updateQueries = { + todoList: (prev, options) => { + const mResult = options.mutationResult; - const state = cloneDeep(prev); + const state = cloneDeep(prev); - if (mResult.data) { - state.todoList.todos.unshift(mResult.data.createTodo); - } + if (mResult.data) { + state.todoList.todos.unshift(mResult.data.createTodo); + } - return state; - }, - } as MutationQueryReducersMap; - const promise = client.mutate({ - mutation, - optimisticResponse, - updateQueries, - }).catch((err) => { - // it is ok to fail here - assert.instanceOf(err, Error); - assert.equal(err.message, 'Network error: forbidden (test error)'); - return null; - }); + return state; + }, + } as MutationQueryReducersMap; + const promise = client + .mutate({ + mutation, + optimisticResponse, + updateQueries, + }) + .catch(err => { + // it is ok to fail here + assert.instanceOf(err, Error); + assert.equal( + err.message, + 'Network error: forbidden (test error)', + ); + return null; + }); - const promise2 = client.mutate({ - mutation, - optimisticResponse: optimisticResponse2, - updateQueries, - }); + const promise2 = client.mutate({ + mutation, + optimisticResponse: optimisticResponse2, + updateQueries, + }); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); - assert.equal((dataInStore['Todo66'] as any).text, 'Optimistically generated 2'); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); + assert.equal( + (dataInStore['Todo66'] as any).text, + 'Optimistically generated 2', + ); - return Promise.all([promise, promise2]); - }) - .then(() => { - subscriptionHandle.unsubscribe(); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.notProperty(dataInStore, 'Todo99'); - assert.property(dataInStore, 'Todo66'); - (assert).deepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo66')); - (assert).notDeepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo99')); - }); + return Promise.all([promise, promise2]); + }) + .then(() => { + subscriptionHandle.unsubscribe(); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.notProperty(dataInStore, 'Todo99'); + assert.property(dataInStore, 'Todo66'); + (assert).deepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo66'), + ); + (assert).notDeepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo99'), + ); + }); }); it('will handle dependent updates', done => { - networkInterface = mockNetworkInterface({ - request: { query }, - result, - }, { - request: { query: mutation }, - result: mutationResult, - delay: 10, - }, { - request: { query: mutation }, - 
result: mutationResult2, - delay: 20, - }); + networkInterface = mockNetworkInterface( + { + request: { query }, + result, + }, + { + request: { query: mutation }, + result: mutationResult, + delay: 10, + }, + { + request: { query: mutation }, + result: mutationResult2, + delay: 20, + }, + ); const customOptimisticResponse1 = { __typename: 'Mutation', @@ -955,16 +1165,40 @@ describe('optimistic mutation results', () => { twoMutations(); break; case 1: - assert.deepEqual([customOptimisticResponse1.createTodo, ...defaultTodos], todos); + assert.deepEqual( + [customOptimisticResponse1.createTodo, ...defaultTodos], + todos, + ); break; case 2: - assert.deepEqual([customOptimisticResponse2.createTodo, customOptimisticResponse1.createTodo, ...defaultTodos], todos); + assert.deepEqual( + [ + customOptimisticResponse2.createTodo, + customOptimisticResponse1.createTodo, + ...defaultTodos, + ], + todos, + ); break; case 3: - assert.deepEqual([customOptimisticResponse2.createTodo, mutationResult.data.createTodo, ...defaultTodos], todos); + assert.deepEqual( + [ + customOptimisticResponse2.createTodo, + mutationResult.data.createTodo, + ...defaultTodos, + ], + todos, + ); break; case 4: - assert.deepEqual([mutationResult2.data.createTodo, mutationResult.data.createTodo, ...defaultTodos], todos); + assert.deepEqual( + [ + mutationResult2.data.createTodo, + mutationResult.data.createTodo, + ...defaultTodos, + ], + todos, + ); done(); break; default: @@ -974,19 +1208,21 @@ describe('optimistic mutation results', () => { error: error => done(error), }); - function twoMutations () { - client.mutate({ - mutation, - optimisticResponse: customOptimisticResponse1, - updateQueries, - }) + function twoMutations() { + client + .mutate({ + mutation, + optimisticResponse: customOptimisticResponse1, + updateQueries, + }) .catch(error => done(error)); - client.mutate({ - mutation, - optimisticResponse: customOptimisticResponse2, - updateQueries, - }) + client + .mutate({ + mutation, + optimisticResponse: customOptimisticResponse2, + updateQueries, + }) .catch(error => done(error)); } }); @@ -1053,210 +1289,327 @@ describe('optimistic mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, - }); - }); - }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - update: (proxy, mResult: any) => { - assert.equal(mResult.data.createTodo.id, '99'); - - const id = 'TodoList5'; - const fragment = gql`fragment todoList on TodoList { todos { id text completed __typename } }`; - - const data: any = proxy.readFragment({ id, fragment }); - - proxy.writeFragment({ - data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, - id, fragment, + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, }); - }, - }); + }); + }) + .then(() => { + const promise = client.mutate({ + mutation, + optimisticResponse, + update: (proxy, mResult: any) => { + assert.equal(mResult.data.createTodo.id, '99'); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as 
any).todos.length, 4); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); + const id = 'TodoList5'; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; + + const data: any = proxy.readFragment({ id, fragment }); + + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, + }); + }, + }); - return promise; - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + return promise; + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); + + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); it('two array insert like mutations', () => { let subscriptionHandle: Subscription; - return setup({ - request: { query: mutation }, - result: mutationResult, - }, { - request: { query: mutation }, - result: mutationResult2, - delay: 50, - }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, - }); - }); - }) - .then(() => { - const update = (proxy: any, mResult: any) => { - const data: any = proxy.readFragment({ - id: 'TodoList5', - fragment: gql`fragment todoList on TodoList { todos { id text completed __typename } }`, - }); - - proxy.writeFragment({ - data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, - id: 'TodoList5', - fragment: gql`fragment todoList on TodoList { todos { id text completed __typename } }`, + return setup( + { + request: { query: mutation }, + result: mutationResult, + }, + { + request: { query: mutation }, + result: mutationResult2, + delay: 50, + }, + ) + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); - }; - const promise = client.mutate({ - mutation, - optimisticResponse, - update, - }).then((res) => { - const currentDataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((currentDataInStore['TodoList5'] as any).todos.length, 5); - assert.equal((currentDataInStore['Todo99'] as any).text, 'This one was created with a mutation.'); - assert.equal((currentDataInStore['Todo66'] as any).text, 'Optimistically generated 2'); - return res; - }); + }) + .then(() => { + const update = (proxy: any, mResult: any) => { + const data: any = proxy.readFragment({ + id: 'TodoList5', + fragment: gql` + fragment todoList on TodoList { + todos { + id + 
text + completed + __typename + } + } + `, + }); - const promise2 = client.mutate({ - mutation, - optimisticResponse: optimisticResponse2, - update, - }); + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id: 'TodoList5', + fragment: gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `, + }); + }; + const promise = client + .mutate({ + mutation, + optimisticResponse, + update, + }) + .then(res => { + const currentDataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal( + (currentDataInStore['TodoList5'] as any).todos.length, + 5, + ); + assert.equal( + (currentDataInStore['Todo99'] as any).text, + 'This one was created with a mutation.', + ); + assert.equal( + (currentDataInStore['Todo66'] as any).text, + 'Optimistically generated 2', + ); + return res; + }); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); - assert.equal((dataInStore['Todo66'] as any).text, 'Optimistically generated 2'); + const promise2 = client.mutate({ + mutation, + optimisticResponse: optimisticResponse2, + update, + }); - return Promise.all([promise, promise2]); - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 5); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); + assert.equal( + (dataInStore['Todo66'] as any).text, + 'Optimistically generated 2', + ); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'Second mutation.'); - assert.equal(newResult.data.todoList.todos[1].text, 'This one was created with a mutation.'); - }); + return Promise.all([promise, promise2]); + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 5); + + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'Second mutation.', + ); + assert.equal( + newResult.data.todoList.todos[1].text, + 'This one was created with a mutation.', + ); + }); }); it('two mutations, one fails', () => { let subscriptionHandle: Subscription; - return setup({ - request: { query: mutation }, - error: new Error('forbidden (test error)'), - delay: 20, - }, { - request: { query: mutation }, - result: mutationResult2, - // XXX this test will uncover a flaw in the design of optimistic responses combined with - // updateQueries or result reducers if you un-comment the line below. The issue is that - // optimistic updates are not commutative but are treated as such. When undoing an - // optimistic update, other optimistic updates should be rolled back and re-applied in the - // same order as before, otherwise the store can end up in an inconsistent state. 
- // delay: 50, - }) - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, - }); - }); - }) - .then(() => { - const update = (proxy: any, mResult: any) => { - const data: any = proxy.readFragment({ - id: 'TodoList5', - fragment: gql`fragment todoList on TodoList { todos { id text completed __typename } }`, + return setup( + { + request: { query: mutation }, + error: new Error('forbidden (test error)'), + delay: 20, + }, + { + request: { query: mutation }, + result: mutationResult2, + // XXX this test will uncover a flaw in the design of optimistic responses combined with + // updateQueries or result reducers if you un-comment the line below. The issue is that + // optimistic updates are not commutative but are treated as such. When undoing an + // optimistic update, other optimistic updates should be rolled back and re-applied in the + // same order as before, otherwise the store can end up in an inconsistent state. + // delay: 50, + }, + ) + .then(() => { + // we have to actually subscribe to the query to be able to update it + return new Promise((resolve, reject) => { + const handle = client.watchQuery({ query }); + subscriptionHandle = handle.subscribe({ + next(res) { + resolve(res); + }, + }); }); + }) + .then(() => { + const update = (proxy: any, mResult: any) => { + const data: any = proxy.readFragment({ + id: 'TodoList5', + fragment: gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `, + }); - proxy.writeFragment({ - data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, - id: 'TodoList5', - fragment: gql`fragment todoList on TodoList { todos { id text completed __typename } }`, - }); - }; - const promise = client.mutate({ - mutation, - optimisticResponse, - update, - }).catch((err) => { - // it is ok to fail here - assert.instanceOf(err, Error); - assert.equal(err.message, 'Network error: forbidden (test error)'); - return null; - }); + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id: 'TodoList5', + fragment: gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `, + }); + }; + const promise = client + .mutate({ + mutation, + optimisticResponse, + update, + }) + .catch(err => { + // it is ok to fail here + assert.instanceOf(err, Error); + assert.equal( + err.message, + 'Network error: forbidden (test error)', + ); + return null; + }); - const promise2 = client.mutate({ - mutation, - optimisticResponse: optimisticResponse2, - update, - }); + const promise2 = client.mutate({ + mutation, + optimisticResponse: optimisticResponse2, + update, + }); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); - assert.equal((dataInStore['Todo66'] as any).text, 'Optimistically generated 2'); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 5); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); + assert.equal( + (dataInStore['Todo66'] as any).text, + 'Optimistically generated 2', + ); - return Promise.all([promise, promise2]); - }) - .then(() => { - 
subscriptionHandle.unsubscribe(); - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.notProperty(dataInStore, 'Todo99'); - assert.property(dataInStore, 'Todo66'); - (assert).deepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo66')); - (assert).notDeepInclude((dataInStore['TodoList5'] as any).todos, realIdValue('Todo99')); - }); + return Promise.all([promise, promise2]); + }) + .then(() => { + subscriptionHandle.unsubscribe(); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.notProperty(dataInStore, 'Todo99'); + assert.property(dataInStore, 'Todo66'); + (assert).deepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo66'), + ); + (assert).notDeepInclude( + (dataInStore['TodoList5'] as any).todos, + realIdValue('Todo99'), + ); + }); }); it('will handle dependent updates', done => { - networkInterface = mockNetworkInterface({ - request: { query }, - result, - }, { - request: { query: mutation }, - result: mutationResult, - delay: 10, - }, { - request: { query: mutation }, - result: mutationResult2, - delay: 20, - }); + networkInterface = mockNetworkInterface( + { + request: { query }, + result, + }, + { + request: { query: mutation }, + result: mutationResult, + delay: 10, + }, + { + request: { query: mutation }, + result: mutationResult2, + delay: 20, + }, + ); const customOptimisticResponse1 = { __typename: 'Mutation', @@ -1281,13 +1634,31 @@ describe('optimistic mutation results', () => { const update = (proxy: any, mResult: any) => { const data: any = proxy.readFragment({ id: 'TodoList5', - fragment: gql`fragment todoList on TodoList { todos { id text completed __typename } }`, + fragment: gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `, }); proxy.writeFragment({ data: { ...data, todos: [mResult.data.createTodo, ...data.todos] }, id: 'TodoList5', - fragment: gql`fragment todoList on TodoList { todos { id text completed __typename } }`, + fragment: gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `, }); }; @@ -1313,16 +1684,40 @@ describe('optimistic mutation results', () => { twoMutations(); break; case 1: - assert.deepEqual([customOptimisticResponse1.createTodo, ...defaultTodos], todos); + assert.deepEqual( + [customOptimisticResponse1.createTodo, ...defaultTodos], + todos, + ); break; case 2: - assert.deepEqual([customOptimisticResponse2.createTodo, customOptimisticResponse1.createTodo, ...defaultTodos], todos); + assert.deepEqual( + [ + customOptimisticResponse2.createTodo, + customOptimisticResponse1.createTodo, + ...defaultTodos, + ], + todos, + ); break; case 3: - assert.deepEqual([customOptimisticResponse2.createTodo, mutationResult.data.createTodo, ...defaultTodos], todos); + assert.deepEqual( + [ + customOptimisticResponse2.createTodo, + mutationResult.data.createTodo, + ...defaultTodos, + ], + todos, + ); break; case 4: - assert.deepEqual([mutationResult2.data.createTodo, mutationResult.data.createTodo, ...defaultTodos], todos); + assert.deepEqual( + [ + mutationResult2.data.createTodo, + mutationResult.data.createTodo, + ...defaultTodos, + ], + todos, + ); done(); break; default: @@ -1332,19 +1727,21 @@ describe('optimistic mutation results', () => { error: error => done(error), }); - function twoMutations () { - client.mutate({ - mutation, - 
optimisticResponse: customOptimisticResponse1, - update, - }) + function twoMutations() { + client + .mutate({ + mutation, + optimisticResponse: customOptimisticResponse1, + update, + }) .catch(error => done(error)); - client.mutate({ - mutation, - optimisticResponse: customOptimisticResponse2, - update, - }) + client + .mutate({ + mutation, + optimisticResponse: customOptimisticResponse2, + update, + }) .catch(error => done(error)); } }); @@ -1436,44 +1833,54 @@ describe('optimistic mutation results', () => { request: { query: mutation }, result: mutationResult, }) - .then(() => { - observableQuery = client.watchQuery({ - query, - reducer: (previousResult, action) => { - counter++; - if (isMutationResultAction(action)) { - const newResult = cloneDeep(previousResult) as any; - newResult.todoList.todos.unshift(action.result.data!['createTodo']); - return newResult; - } - return previousResult; - }, - }).subscribe({ - next: () => null, // TODO: we should actually check the new result - }); - }) - .then(() => { - const promise = client.mutate({ - mutation, - optimisticResponse, - }); - - const dataInStore = client.queryManager.getDataWithOptimisticResults(); - assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); - assert.equal((dataInStore['Todo99'] as any).text, 'Optimistically generated'); + .then(() => { + observableQuery = client + .watchQuery({ + query, + reducer: (previousResult, action) => { + counter++; + if (isMutationResultAction(action)) { + const newResult = cloneDeep(previousResult) as any; + newResult.todoList.todos.unshift( + action.result.data!['createTodo'], + ); + return newResult; + } + return previousResult; + }, + }) + .subscribe({ + next: () => null, // TODO: we should actually check the new result + }); + }) + .then(() => { + const promise = client.mutate({ + mutation, + optimisticResponse, + }); - return promise; - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - // There should be one more todo item than before - assert.equal(newResult.data.todoList.todos.length, 4); + const dataInStore = client.queryManager.getDataWithOptimisticResults(); + assert.equal((dataInStore['TodoList5'] as any).todos.length, 4); + assert.equal( + (dataInStore['Todo99'] as any).text, + 'Optimistically generated', + ); - // Since we used `prepend` it should be at the front - assert.equal(newResult.data.todoList.todos[0].text, 'This one was created with a mutation.'); - }); + return promise; + }) + .then(() => { + return client.query({ query }); + }) + .then((newResult: any) => { + // There should be one more todo item than before + assert.equal(newResult.data.todoList.todos.length, 4); + + // Since we used `prepend` it should be at the front + assert.equal( + newResult.data.todoList.todos[0].text, + 'This one was created with a mutation.', + ); + }); }); it('will handle dependent updates', done => { @@ -1521,18 +1928,22 @@ describe('optimistic mutation results', () => { }, }; - networkInterface = mockNetworkInterface({ - request: { query }, - result, - }, { - request: { query: mutation }, - result: customMutationResult1, - delay: 10, - }, { - request: { query: mutation }, - result: customMutationResult2, - delay: 20, - }); + networkInterface = mockNetworkInterface( + { + request: { query }, + result, + }, + { + request: { query: mutation }, + result: customMutationResult1, + delay: 10, + }, + { + request: { query: mutation }, + result: customMutationResult2, + delay: 20, + }, + ); client = new ApolloClient({ networkInterface, @@ -1547,55 
+1958,85 @@ describe('optimistic mutation results', () => { const defaultTodos = result.data.todoList.todos; let count = 0; - client.watchQuery({ - query, - reducer: (previousResult, action) => { - if (isMutationResultAction(action)) { - const newResult = cloneDeep(previousResult) as any; - newResult.todoList.todos.unshift(action.result.data!['createTodo']); - return newResult; - } - return previousResult; - }, - }).subscribe({ - next: (value: any) => { - const todos = value.data.todoList.todos; - switch (count++) { - case 0: - assert.deepEqual(defaultTodos, todos); - twoMutations(); - break; - case 1: - assert.deepEqual([customOptimisticResponse1.createTodo, ...defaultTodos], todos); - break; - case 2: - assert.deepEqual([customOptimisticResponse2.createTodo, customOptimisticResponse1.createTodo, ...defaultTodos], todos); - break; - case 3: - assert.deepEqual([customOptimisticResponse2.createTodo, customMutationResult1.data.createTodo, ...defaultTodos], todos); - break; - case 4: - assert.deepEqual([customMutationResult2.data.createTodo, customMutationResult1.data.createTodo, ...defaultTodos], todos); - done(); - break; - default: - done(new Error('Next should not have been called again.')); - } - }, - error: error => done(error), - }); - - function twoMutations () { - client.mutate({ - mutation, - optimisticResponse: customOptimisticResponse1, + client + .watchQuery({ + query, + reducer: (previousResult, action) => { + if (isMutationResultAction(action)) { + const newResult = cloneDeep(previousResult) as any; + newResult.todoList.todos.unshift( + action.result.data!['createTodo'], + ); + return newResult; + } + return previousResult; + }, }) + .subscribe({ + next: (value: any) => { + const todos = value.data.todoList.todos; + switch (count++) { + case 0: + assert.deepEqual(defaultTodos, todos); + twoMutations(); + break; + case 1: + assert.deepEqual( + [customOptimisticResponse1.createTodo, ...defaultTodos], + todos, + ); + break; + case 2: + assert.deepEqual( + [ + customOptimisticResponse2.createTodo, + customOptimisticResponse1.createTodo, + ...defaultTodos, + ], + todos, + ); + break; + case 3: + assert.deepEqual( + [ + customOptimisticResponse2.createTodo, + customMutationResult1.data.createTodo, + ...defaultTodos, + ], + todos, + ); + break; + case 4: + assert.deepEqual( + [ + customMutationResult2.data.createTodo, + customMutationResult1.data.createTodo, + ...defaultTodos, + ], + todos, + ); + done(); + break; + default: + done(new Error('Next should not have been called again.')); + } + }, + error: error => done(error), + }); + + function twoMutations() { + client + .mutate({ + mutation, + optimisticResponse: customOptimisticResponse1, + }) .catch(error => done(error)); - client.mutate({ - mutation, - optimisticResponse: customOptimisticResponse2, - }) + client + .mutate({ + mutation, + optimisticResponse: customOptimisticResponse2, + }) .catch(error => done(error)); } }); @@ -1659,19 +2100,23 @@ describe('optimistic mutation - githunt comments', () => { let networkInterface: any; function setup(...mockedResponses: any[]) { - networkInterface = mockNetworkInterface({ - request: { - query: addTypenameToDocument(query), - variables, + networkInterface = mockNetworkInterface( + { + request: { + query: addTypenameToDocument(query), + variables, + }, + result, }, - result, - }, { - request: { - query: addTypenameToDocument(queryWithFragment), - variables, + { + request: { + query: addTypenameToDocument(queryWithFragment), + variables, + }, + result, }, - result, - }, 
...mockedResponses); + ...mockedResponses, + ); client = new ApolloClient({ networkInterface, @@ -1693,7 +2138,10 @@ describe('optimistic mutation - githunt comments', () => { const mutation = gql` mutation submitComment($repoFullName: String!, $commentContent: String!) { - submitComment(repoFullName: $repoFullName, commentContent: $commentContent) { + submitComment( + repoFullName: $repoFullName + commentContent: $commentContent + ) { postedBy { login html_url @@ -1704,7 +2152,10 @@ describe('optimistic mutation - githunt comments', () => { const mutationWithFragment = gql` mutation submitComment($repoFullName: String!, $commentContent: String!) { - submitComment(repoFullName: $repoFullName, commentContent: $commentContent) { + submitComment( + repoFullName: $repoFullName + commentContent: $commentContent + ) { ...authorFields } } @@ -1764,28 +2215,32 @@ describe('optimistic mutation - githunt comments', () => { }, result: mutationResult, }) - .then(() => { + .then(() => { // we have to actually subscribe to the query to be able to update it - return new Promise( (resolve, reject) => { + return new Promise((resolve, reject) => { const handle = client.watchQuery({ query, variables }); subscriptionHandle = handle.subscribe({ - next(res) { resolve(res); }, + next(res) { + resolve(res); + }, }); }); }) - .then(() => { - return client.mutate({ - mutation, - optimisticResponse, - variables: mutationVariables, - updateQueries, + .then(() => { + return client.mutate({ + mutation, + optimisticResponse, + variables: mutationVariables, + updateQueries, + }); + }) + .then(() => { + return client.query({ query, variables }); + }) + .then((newResult: any) => { + subscriptionHandle.unsubscribe(); + assert.equal(newResult.data.entry.comments.length, 2); }); - }).then(() => { - return client.query({ query, variables }); - }).then((newResult: any) => { - subscriptionHandle.unsubscribe(); - assert.equal(newResult.data.entry.comments.length, 2); - }); }); }); diff --git a/test/proxy.ts b/test/proxy.ts index 893967bf57a..7c443cc81ad 100644 --- a/test/proxy.ts +++ b/test/proxy.ts @@ -8,16 +8,18 @@ import { toIdValue } from '../src/data/storeUtils'; import { HeuristicFragmentMatcher } from '../src/data/fragmentMatcher'; import { addTypenameToDocument } from '../src/queries/queryTransform'; import { DataWrite } from '../src/actions'; -import {getOperationName} from '../src/queries/getFromAST'; +import { getOperationName } from '../src/queries/getFromAST'; describe('ReduxDataProxy', () => { - function createDataProxy({ - initialState, - config, - }: { - initialState?: any, - config?: ApolloReducerConfig, - } = {}) { + function createDataProxy( + { + initialState, + config, + }: { + initialState?: any; + config?: ApolloReducerConfig; + } = {}, + ) { const store = createApolloStore({ initialState, config, @@ -32,7 +34,7 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, c: 3, @@ -42,9 +44,39 @@ describe('ReduxDataProxy', () => { }, }); - assert.deepEqual<{}>(proxy.readQuery({ query: gql`{ a }` }), { a: 1 }); - assert.deepEqual<{}>(proxy.readQuery({ query: gql`{ b c }` }), { b: 2, c: 3 }); - assert.deepEqual<{}>(proxy.readQuery({ query: gql`{ a b c }` }), { a: 1, b: 2, c: 3 }); + assert.deepEqual<{}>( + proxy.readQuery({ + query: gql` + { + a + } + `, + }), + { a: 1 }, + ); + assert.deepEqual<{}>( + proxy.readQuery({ + query: gql` + { + b + c + } + `, + }), + { b: 2, c: 3 }, + ); + assert.deepEqual<{}>( + proxy.readQuery({ + query: gql` + { + a + b + 
c + } + `, + }), + { a: 1, b: 2, c: 3 }, + ); }); it('will read some deeply nested data from the store', () => { @@ -52,7 +84,7 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, c: 3, @@ -62,7 +94,7 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'foo': { + foo: { e: 4, f: 5, g: 6, @@ -72,7 +104,7 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'bar': { + bar: { i: 7, j: 8, k: 9, @@ -83,15 +115,54 @@ describe('ReduxDataProxy', () => { }); assert.deepEqual<{}>( - proxy.readQuery({ query: gql`{ a d { e } }` }), + proxy.readQuery({ + query: gql` + { + a + d { + e + } + } + `, + }), { a: 1, d: { e: 4 } }, ); assert.deepEqual<{}>( - proxy.readQuery({ query: gql`{ a d { e h { i } } }` }), + proxy.readQuery({ + query: gql` + { + a + d { + e + h { + i + } + } + } + `, + }), { a: 1, d: { e: 4, h: { i: 7 } } }, ); assert.deepEqual<{}>( - proxy.readQuery({ query: gql`{ a b c d { e f g h { i j k } } }` }), + proxy.readQuery({ + query: gql` + { + a + b + c + d { + e + f + g + h { + i + j + k + } + } + } + `, + }), { a: 1, b: 2, c: 3, d: { e: 4, f: 5, g: 6, h: { i: 7, j: 8, k: 9 } } }, ); }); @@ -101,7 +172,7 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { __typename: 'Query', }, foo: { @@ -127,7 +198,9 @@ describe('ReduxDataProxy', () => { query: gql` query { thing(id: "foo") { - a b c + a + b + c } } `, @@ -143,7 +216,7 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { 'field({"literal":true,"value":42})': 1, 'field({"literal":false,"value":42})': 2, }, @@ -152,16 +225,21 @@ describe('ReduxDataProxy', () => { }, }); - assert.deepEqual<{}>(proxy.readQuery({ - query: gql`query ($literal: Boolean, $value: Int) { - a: field(literal: true, value: 42) - b: field(literal: $literal, value: $value) - }`, - variables: { - literal: false, - value: 42, - }, - }), { a: 1, b: 2 }); + assert.deepEqual<{}>( + proxy.readQuery({ + query: gql` + query($literal: Boolean, $value: Int) { + a: field(literal: true, value: 42) + b: field(literal: $literal, value: $value) + } + `, + variables: { + literal: false, + value: 42, + }, + }), + { a: 1, b: 2 }, + ); }); }); @@ -170,10 +248,26 @@ describe('ReduxDataProxy', () => { const proxy = createDataProxy(); assert.throws(() => { - proxy.readFragment({ id: 'x', fragment: gql`query { a b c }` }); + proxy.readFragment({ + id: 'x', + fragment: gql` + query { + a + b + c + } + `, + }); }, 'Found a query operation. No operations are allowed when using a fragment as a query. Only fragments are allowed.'); assert.throws(() => { - proxy.readFragment({ id: 'x', fragment: gql`schema { query: Query }` }); + proxy.readFragment({ + id: 'x', + fragment: gql` + schema { + query: Query + } + `, + }); }, 'Found 0 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); }); @@ -181,10 +275,36 @@ describe('ReduxDataProxy', () => { const proxy = createDataProxy(); assert.throws(() => { - proxy.readFragment({ id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b }` }); + proxy.readFragment({ + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + `, + }); }, 'Found 2 fragments. 
`fragmentName` must be provided when there is not exactly 1 fragment.'); assert.throws(() => { - proxy.readFragment({ id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b } fragment c on C { c }` }); + proxy.readFragment({ + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + + fragment c on C { + c + } + `, + }); }, 'Found 3 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); }); @@ -193,7 +313,7 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { __typename: 'Type1', a: 1, b: 2, @@ -204,7 +324,7 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'foo': { + foo: { __typename: 'Foo', e: 4, f: 5, @@ -215,7 +335,7 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'bar': { + bar: { __typename: 'Bar', i: 7, j: 8, @@ -227,25 +347,82 @@ describe('ReduxDataProxy', () => { }); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'foo', fragment: gql`fragment fragmentFoo on Foo { e h { i } }` }), + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment fragmentFoo on Foo { + e + h { + i + } + } + `, + }), { e: 4, h: { i: 7 } }, ); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'foo', fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } }` }), + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + `, + }), { e: 4, f: 5, g: 6, h: { i: 7, j: 8, k: 9 } }, ); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'bar', fragment: gql`fragment fragmentBar on Bar { i }` }), + proxy.readFragment({ + id: 'bar', + fragment: gql` + fragment fragmentBar on Bar { + i + } + `, + }), { i: 7 }, ); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'bar', fragment: gql`fragment fragmentBar on Bar { i j k }` }), + proxy.readFragment({ + id: 'bar', + fragment: gql` + fragment fragmentBar on Bar { + i + j + k + } + `, + }), { i: 7, j: 8, k: 9 }, ); assert.deepEqual<{} | null>( proxy.readFragment({ id: 'foo', - fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } } fragment fragmentBar on Bar { i j k }`, + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + + fragment fragmentBar on Bar { + i + j + k + } + `, fragmentName: 'fragmentFoo', }), { e: 4, f: 5, g: 6, h: { i: 7, j: 8, k: 9 } }, @@ -253,7 +430,24 @@ describe('ReduxDataProxy', () => { assert.deepEqual<{} | null>( proxy.readFragment({ id: 'bar', - fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } } fragment fragmentBar on Bar { i j k }`, + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + + fragment fragmentBar on Bar { + i + j + k + } + `, fragmentName: 'fragmentBar', }), { i: 7, j: 8, k: 9 }, @@ -265,7 +459,7 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'foo': { + foo: { __typename: 'Foo', 'field({"literal":true,"value":42})': 1, 'field({"literal":false,"value":42})': 2, @@ -275,19 +469,22 @@ describe('ReduxDataProxy', () => { }, }); - assert.deepEqual<{} | null>(proxy.readFragment({ - id: 'foo', - fragment: gql` - fragment foo on Foo { - a: field(literal: true, value: 42) - b: field(literal: $literal, value: $value) - } - `, - variables: { - literal: false, - value: 42, - }, - }), { a: 1, b: 2 }); + assert.deepEqual<{} | null>( + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment foo on Foo { + a: field(literal: true, value: 42) + b: field(literal: $literal, value: 
$value) + } + `, + variables: { + literal: false, + value: 42, + }, + }), + { a: 1, b: 2 }, + ); }); it('will return null when an id that can’t be found is provided', () => { @@ -296,7 +493,7 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'bar': { __typename: 'Bar', a: 1, b: 2, c: 3 }, + bar: { __typename: 'Bar', a: 1, b: 2, c: 3 }, }, }, }, @@ -305,17 +502,51 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'foo': { __typename: 'Foo', a: 1, b: 2, c: 3 }, + foo: { __typename: 'Foo', a: 1, b: 2, c: 3 }, }, }, }, }); - assert.equal(client1.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), null); - assert.equal(client2.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), null); + assert.equal( + client1.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + null, + ); + assert.equal( + client2.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + null, + ); assert.deepEqual<{} | null>( - client3.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), - { a: 1, b: 2, c: 3 }); + client3.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + { a: 1, b: 2, c: 3 }, + ); }); it('will read data using custom resolvers', () => { @@ -323,7 +554,7 @@ describe('ReduxDataProxy', () => { initialState: { apollo: { data: { - 'ROOT_QUERY': { + ROOT_QUERY: { __typename: 'Query', }, foo: { @@ -352,9 +583,15 @@ describe('ReduxDataProxy', () => { const queryResult = proxy.readFragment({ id: 'foo', - fragment: gql`fragment fooFragment on Query { - thing(id: "bar") { a b c } - }`, + fragment: gql` + fragment fooFragment on Query { + thing(id: "bar") { + a + b + c + } + } + `, }); assert.deepEqual<{} | null>(queryResult, { @@ -367,28 +604,52 @@ describe('ReduxDataProxy', () => { it('will write some data to the store', () => { const proxy = createDataProxy(); - proxy.writeQuery({ data: { a: 1 }, query: gql`{ a }` }); + proxy.writeQuery({ + data: { a: 1 }, + query: gql` + { + a + } + `, + }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, }, }); - proxy.writeQuery({ data: { b: 2, c: 3 }, query: gql`{ b c }` }); + proxy.writeQuery({ + data: { b: 2, c: 3 }, + query: gql` + { + b + c + } + `, + }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, c: 3, }, }); - proxy.writeQuery({ data: { a: 4, b: 5, c: 6 }, query: gql`{ a b c }` }); + proxy.writeQuery({ + data: { a: 4, b: 5, c: 6 }, + query: gql` + { + a + b + c + } + `, + }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 4, b: 5, c: 6, @@ -401,11 +662,18 @@ describe('ReduxDataProxy', () => { proxy.writeQuery({ data: { a: 1, d: { e: 4 } }, - query: gql`{ a d { e } }`, + query: gql` + { + a + d { + e + } + } + `, }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, d: { type: 'id', @@ -420,11 +688,20 @@ describe('ReduxDataProxy', () => { proxy.writeQuery({ data: { a: 1, d: { h: { i: 7 } } }, - query: gql`{ a d { h { i } } }`, + query: gql` + { + a + d { + h { + i + } + } + } + `, }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, d: { type: 'id', @@ -446,12 +723,33 @@ describe('ReduxDataProxy', () => { 
}); proxy.writeQuery({ - data: { a: 1, b: 2, c: 3, d: { e: 4, f: 5, g: 6, h: { i: 7, j: 8, k: 9 } } }, - query: gql`{ a b c d { e f g h { i j k } } }`, + data: { + a: 1, + b: 2, + c: 3, + d: { e: 4, f: 5, g: 6, h: { i: 7, j: 8, k: 9 } }, + }, + query: gql` + { + a + b + c + d { + e + f + g + h { + i + j + k + } + } + } + `, }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, c: 3, @@ -488,7 +786,7 @@ describe('ReduxDataProxy', () => { b: 2, }, query: gql` - query ($literal: Boolean, $value: Int) { + query($literal: Boolean, $value: Int) { a: field(literal: true, value: 42) b: field(literal: $literal, value: $value) } @@ -500,7 +798,7 @@ describe('ReduxDataProxy', () => { }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'ROOT_QUERY': { + ROOT_QUERY: { 'field({"literal":true,"value":42})': 1, 'field({"literal":false,"value":42})': 2, }, @@ -513,10 +811,28 @@ describe('ReduxDataProxy', () => { const proxy = createDataProxy(); assert.throws(() => { - proxy.writeFragment({ data: {}, id: 'x', fragment: gql`query { a b c }` }); + proxy.writeFragment({ + data: {}, + id: 'x', + fragment: gql` + query { + a + b + c + } + `, + }); }, 'Found a query operation. No operations are allowed when using a fragment as a query. Only fragments are allowed.'); assert.throws(() => { - proxy.writeFragment({ data: {}, id: 'x', fragment: gql`schema { query: Query }` }); + proxy.writeFragment({ + data: {}, + id: 'x', + fragment: gql` + schema { + query: Query + } + `, + }); }, 'Found 0 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); }); @@ -524,26 +840,61 @@ describe('ReduxDataProxy', () => { const proxy = createDataProxy(); assert.throws(() => { - proxy.writeFragment({ data: {}, id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b }` }); + proxy.writeFragment({ + data: {}, + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + `, + }); }, 'Found 2 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); assert.throws(() => { - proxy.writeFragment({ data: {}, id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b } fragment c on C { c }` }); + proxy.writeFragment({ + data: {}, + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + + fragment c on C { + c + } + `, + }); }, 'Found 3 fragments. 
`fragmentName` must be provided when there is not exactly 1 fragment.'); }); it('will write some deeply nested data into the store at any id', () => { const proxy = createDataProxy({ - config : { dataIdFromObject: (o: any) => o.id }, + config: { dataIdFromObject: (o: any) => o.id }, }); proxy.writeFragment({ data: { __typename: 'Foo', e: 4, h: { id: 'bar', i: 7 } }, id: 'foo', - fragment: gql`fragment fragmentFoo on Foo { e h { i } }`, + fragment: gql` + fragment fragmentFoo on Foo { + e + h { + i + } + } + `, }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'foo': { + foo: { e: 4, h: { type: 'id', @@ -551,18 +902,27 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'bar': { + bar: { i: 7, }, }); proxy.writeFragment({ data: { __typename: 'Foo', f: 5, g: 6, h: { id: 'bar', j: 8, k: 9 } }, id: 'foo', - fragment: gql`fragment fragmentFoo on Foo { f g h { j k } }`, + fragment: gql` + fragment fragmentFoo on Foo { + f + g + h { + j + k + } + } + `, }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'foo': { + foo: { e: 4, f: 5, g: 6, @@ -572,7 +932,7 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'bar': { + bar: { i: 7, j: 8, k: 9, @@ -582,11 +942,15 @@ describe('ReduxDataProxy', () => { proxy.writeFragment({ data: { i: 10, __typename: 'Bar' }, id: 'bar', - fragment: gql`fragment fragmentBar on Bar { i }`, + fragment: gql` + fragment fragmentBar on Bar { + i + } + `, }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'foo': { + foo: { e: 4, f: 5, g: 6, @@ -596,7 +960,7 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'bar': { + bar: { i: 10, j: 8, k: 9, @@ -606,11 +970,16 @@ describe('ReduxDataProxy', () => { proxy.writeFragment({ data: { j: 11, k: 12, __typename: 'Bar' }, id: 'bar', - fragment: gql`fragment fragmentBar on Bar { j k }`, + fragment: gql` + fragment fragmentBar on Bar { + j + k + } + `, }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'foo': { + foo: { e: 4, f: 5, g: 6, @@ -620,7 +989,7 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'bar': { + bar: { i: 10, j: 11, k: 12, @@ -628,14 +997,37 @@ describe('ReduxDataProxy', () => { }); proxy.writeFragment({ - data: { __typename: 'Foo', e: 4, f: 5, g: 6, h: { __typename: 'Bar', id: 'bar', i: 7, j: 8, k: 9 } }, + data: { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { __typename: 'Bar', id: 'bar', i: 7, j: 8, k: 9 }, + }, id: 'foo', - fragment: gql`fragment fooFragment on Foo { e f g h { i j k } } fragment barFragment on Bar { i j k }`, + fragment: gql` + fragment fooFragment on Foo { + e + f + g + h { + i + j + k + } + } + + fragment barFragment on Bar { + i + j + k + } + `, fragmentName: 'fooFragment', }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'foo': { + foo: { e: 4, f: 5, g: 6, @@ -645,7 +1037,7 @@ describe('ReduxDataProxy', () => { generated: false, }, }, - 'bar': { + bar: { i: 7, j: 8, k: 9, @@ -655,12 +1047,29 @@ describe('ReduxDataProxy', () => { proxy.writeFragment({ data: { __typename: 'Bar', i: 10, j: 11, k: 12 }, id: 'bar', - fragment: gql`fragment fooFragment on Foo { e f g h { i j k } } fragment barFragment on Bar { i j k }`, + fragment: gql` + fragment fooFragment on Foo { + e + f + g + h { + i + j + k + } + } + + fragment barFragment on Bar { + i + j + k + } + `, fragmentName: 'barFragment', }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'foo': { + foo: { e: 4, f: 5, g: 6, @@ -670,7 +1079,7 @@ describe('ReduxDataProxy', () => { 
generated: false, }, }, - 'bar': { + bar: { i: 10, j: 11, k: 12, @@ -700,7 +1109,7 @@ describe('ReduxDataProxy', () => { }); assert.deepEqual((proxy as any).store.getState().apollo.data, { - 'foo': { + foo: { 'field({"literal":true,"value":42})': 1, 'field({"literal":false,"value":42})': 2, }, @@ -721,118 +1130,220 @@ describe('TransactionDataProxy', () => { }); it('will read some data from the store', () => { - const proxy = new TransactionDataProxy({ - 'ROOT_QUERY': { - a: 1, - b: 2, - c: 3, + const proxy = new TransactionDataProxy( + { + ROOT_QUERY: { + a: 1, + b: 2, + c: 3, + }, }, - }, {}); + {}, + ); - assert.deepEqual<{} | null>(proxy.readQuery({ query: gql`{ a }` }), { a: 1 }); - assert.deepEqual<{} | null>(proxy.readQuery({ query: gql`{ b c }` }), { b: 2, c: 3 }); - assert.deepEqual<{} | null>(proxy.readQuery({ query: gql`{ a b c }` }), { a: 1, b: 2, c: 3 }); + assert.deepEqual<{} | null>( + proxy.readQuery({ + query: gql` + { + a + } + `, + }), + { a: 1 }, + ); + assert.deepEqual<{} | null>( + proxy.readQuery({ + query: gql` + { + b + c + } + `, + }), + { b: 2, c: 3 }, + ); + assert.deepEqual<{} | null>( + proxy.readQuery({ + query: gql` + { + a + b + c + } + `, + }), + { a: 1, b: 2, c: 3 }, + ); }); it('will read some deeply nested data from the store', () => { - const proxy = new TransactionDataProxy({ - 'ROOT_QUERY': { - a: 1, - b: 2, - c: 3, - d: { - type: 'id', - id: 'foo', - generated: false, + const proxy = new TransactionDataProxy( + { + ROOT_QUERY: { + a: 1, + b: 2, + c: 3, + d: { + type: 'id', + id: 'foo', + generated: false, + }, }, - }, - 'foo': { - __typename: 'Foo', - e: 4, - f: 5, - g: 6, - h: { - type: 'id', - id: 'bar', - generated: false, + foo: { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { + type: 'id', + id: 'bar', + generated: false, + }, + }, + bar: { + __typename: 'Bar', + i: 7, + j: 8, + k: 9, }, }, - 'bar': { - __typename: 'Bar', - i: 7, - j: 8, - k: 9, - }, - }, { addTypename: true }); + { addTypename: true }, + ); assert.deepEqual<{}>( - proxy.readQuery({ query: gql`{ a d { e } }` }), + proxy.readQuery({ + query: gql` + { + a + d { + e + } + } + `, + }), { a: 1, d: { __typename: 'Foo', e: 4 } }, ); assert.deepEqual<{}>( - proxy.readQuery({ query: gql`{ a d { e h { i } } }` }), - { a: 1, d: { __typename: 'Foo', e: 4, h: { __typename: 'Bar', i: 7 } } }, + proxy.readQuery({ + query: gql` + { + a + d { + e + h { + i + } + } + } + `, + }), + { + a: 1, + d: { __typename: 'Foo', e: 4, h: { __typename: 'Bar', i: 7 } }, + }, ); assert.deepEqual<{}>( - proxy.readQuery({ query: gql`{ a b c d { e f g h { i j k } } }` }), - { a: 1, b: 2, c: 3, d: { __typename: 'Foo', e: 4, f: 5, g: 6, h: { __typename: 'Bar', i: 7, j: 8, k: 9 } } }, + proxy.readQuery({ + query: gql` + { + a + b + c + d { + e + f + g + h { + i + j + k + } + } + } + `, + }), + { + a: 1, + b: 2, + c: 3, + d: { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { __typename: 'Bar', i: 7, j: 8, k: 9 }, + }, + }, ); }); it('will read some data from the store with variables', () => { - const proxy = new TransactionDataProxy({ - 'ROOT_QUERY': { - 'field({"literal":true,"value":42})': 1, - 'field({"literal":false,"value":42})': 2, - }, - }, { addTypename: true }); - - assert.deepEqual<{}>(proxy.readQuery({ - query: gql`query ($literal: Boolean, $value: Int) { - a: field(literal: true, value: 42) - b: field(literal: $literal, value: $value) - }`, - variables: { - literal: false, - value: 42, + const proxy = new TransactionDataProxy( + { + ROOT_QUERY: { + 'field({"literal":true,"value":42})': 1, + 
'field({"literal":false,"value":42})': 2, + }, }, - }), { a: 1, b: 2 }); + { addTypename: true }, + ); + + assert.deepEqual<{}>( + proxy.readQuery({ + query: gql` + query($literal: Boolean, $value: Int) { + a: field(literal: true, value: 42) + b: field(literal: $literal, value: $value) + } + `, + variables: { + literal: false, + value: 42, + }, + }), + { a: 1, b: 2 }, + ); }); it('will read data using custom resolvers', () => { - const proxy = new TransactionDataProxy({ - 'ROOT_QUERY': { - __typename: 'Query', + const proxy = new TransactionDataProxy( + { + ROOT_QUERY: { + __typename: 'Query', + }, + foo: { + __typename: 'Foo', + id: 'foo', + a: 1, + b: '2', + c: null, + }, }, - foo: { - __typename: 'Foo', - id: 'foo', - a: 1, - b: '2', - c: null, - }, - }, { - dataIdFromObject: (object: any) => object.id, - customResolvers: { - Query: { - thing: (_, args) => toIdValue(args.id), + { + dataIdFromObject: (object: any) => object.id, + customResolvers: { + Query: { + thing: (_, args) => toIdValue(args.id), + }, }, + addTypename: true, }, - addTypename: true, - }); + ); const queryResult = proxy.readQuery({ query: gql` query { thing(id: "foo") { - a b c + a + b + c } } `, }); assert.deepEqual<{}>(queryResult, { - thing: {__typename: 'Foo', a: 1, b: '2', c: null }, + thing: { __typename: 'Foo', a: 1, b: '2', c: null }, }); }); }); @@ -856,10 +1367,26 @@ describe('TransactionDataProxy', () => { proxy.readFragment({ id: 'x' }); }, 'fragment option is required. Please pass a GraphQL fragment to readFragment.'); assert.throws(() => { - proxy.readFragment({ id: 'x', fragment: gql`query { a b c }` }); + proxy.readFragment({ + id: 'x', + fragment: gql` + query { + a + b + c + } + `, + }); }, 'Found a query operation. No operations are allowed when using a fragment as a query. Only fragments are allowed.'); assert.throws(() => { - proxy.readFragment({ id: 'x', fragment: gql`schema { query: Query }` }); + proxy.readFragment({ + id: 'x', + fragment: gql` + schema { + query: Query + } + `, + }); }, 'Found 0 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); }); @@ -867,72 +1394,187 @@ describe('TransactionDataProxy', () => { const proxy = new TransactionDataProxy({}, {}); assert.throws(() => { - proxy.readFragment({ id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b }` }); + proxy.readFragment({ + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + `, + }); }, 'Found 2 fragments. `fragmentName` must be provided when there is not exactly 1 fragment.'); assert.throws(() => { - proxy.readFragment({ id: 'x', fragment: gql`fragment a on A { a } fragment b on B { b } fragment c on C { c }` }); + proxy.readFragment({ + id: 'x', + fragment: gql` + fragment a on A { + a + } + + fragment b on B { + b + } + + fragment c on C { + c + } + `, + }); }, 'Found 3 fragments. 
`fragmentName` must be provided when there is not exactly 1 fragment.'); }); it('will read some deeply nested data from the store at any id', () => { - const proxy = new TransactionDataProxy({ - 'ROOT_QUERY': { - a: 1, - b: 2, - c: 3, - d: { - type: 'id', - id: 'foo', - generated: false, + const proxy = new TransactionDataProxy( + { + ROOT_QUERY: { + a: 1, + b: 2, + c: 3, + d: { + type: 'id', + id: 'foo', + generated: false, + }, }, - }, - 'foo': { - __typename: 'Foo', - e: 4, - f: 5, - g: 6, - h: { - type: 'id', - id: 'bar', - generated: false, + foo: { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { + type: 'id', + id: 'bar', + generated: false, + }, + }, + bar: { + __typename: 'Bar', + i: 7, + j: 8, + k: 9, }, }, - 'bar': { - __typename: 'Bar', - i: 7, - j: 8, - k: 9, - }, - }, { addTypename: true }); + { addTypename: true }, + ); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'foo', fragment: gql`fragment fragmentFoo on Foo { e h { i } }` }), + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment fragmentFoo on Foo { + e + h { + i + } + } + `, + }), { __typename: 'Foo', e: 4, h: { __typename: 'Bar', i: 7 } }, ); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'foo', fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } }` }), - { __typename: 'Foo', e: 4, f: 5, g: 6, h: { __typename: 'Bar', i: 7, j: 8, k: 9 } }, + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + `, + }), + { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { __typename: 'Bar', i: 7, j: 8, k: 9 }, + }, ); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'bar', fragment: gql`fragment fragmentBar on Bar { i }` }), + proxy.readFragment({ + id: 'bar', + fragment: gql` + fragment fragmentBar on Bar { + i + } + `, + }), { __typename: 'Bar', i: 7 }, ); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'bar', fragment: gql`fragment fragmentBar on Bar { i j k }` }), + proxy.readFragment({ + id: 'bar', + fragment: gql` + fragment fragmentBar on Bar { + i + j + k + } + `, + }), { __typename: 'Bar', i: 7, j: 8, k: 9 }, ); assert.deepEqual<{} | null>( proxy.readFragment({ id: 'foo', - fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } } fragment fragmentBar on Bar { i j k }`, + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + + fragment fragmentBar on Bar { + i + j + k + } + `, fragmentName: 'fragmentFoo', }), - { __typename: 'Foo', e: 4, f: 5, g: 6, h: { __typename: 'Bar', i: 7, j: 8, k: 9 } }, + { + __typename: 'Foo', + e: 4, + f: 5, + g: 6, + h: { __typename: 'Bar', i: 7, j: 8, k: 9 }, + }, ); assert.deepEqual<{} | null>( proxy.readFragment({ id: 'bar', - fragment: gql`fragment fragmentFoo on Foo { e f g h { i j k } } fragment fragmentBar on Bar { i j k }`, + fragment: gql` + fragment fragmentFoo on Foo { + e + f + g + h { + i + j + k + } + } + + fragment fragmentBar on Bar { + i + j + k + } + `, fragmentName: 'fragmentBar', }), { __typename: 'Bar', i: 7, j: 8, k: 9 }, @@ -940,76 +1582,131 @@ describe('TransactionDataProxy', () => { }); it('will read some data from the store with variables', () => { - const proxy = new TransactionDataProxy({ - 'foo': { - __typename: 'Foo', - 'field({"literal":true,"value":42})': 1, - 'field({"literal":false,"value":42})': 2, + const proxy = new TransactionDataProxy( + { + foo: { + __typename: 'Foo', + 'field({"literal":true,"value":42})': 1, + 'field({"literal":false,"value":42})': 2, + }, }, - }, { addTypename: true }); + { 
addTypename: true }, + ); - assert.deepEqual<{} | null>(proxy.readFragment({ - id: 'foo', - fragment: gql` - fragment foo on Foo { - a: field(literal: true, value: 42) - b: field(literal: $literal, value: $value) - } - `, - variables: { - literal: false, - value: 42, - }, - }), { a: 1, b: 2, __typename: 'Foo' }); + assert.deepEqual<{} | null>( + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment foo on Foo { + a: field(literal: true, value: 42) + b: field(literal: $literal, value: $value) + } + `, + variables: { + literal: false, + value: 42, + }, + }), + { a: 1, b: 2, __typename: 'Foo' }, + ); }); it('will return null when an id that can’t be found is provided', () => { const client1 = new TransactionDataProxy({}, {}); - const client2 = new TransactionDataProxy({ - 'bar': { __typename: 'Type1', a: 1, b: 2, c: 3 }, - }, {}); - const client3 = new TransactionDataProxy({ - 'foo': { __typename: 'Type1', a: 1, b: 2, c: 3 }, - }, {}); - - assert.equal(client1.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), null); - assert.equal(client2.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), null); + const client2 = new TransactionDataProxy( + { + bar: { __typename: 'Type1', a: 1, b: 2, c: 3 }, + }, + {}, + ); + const client3 = new TransactionDataProxy( + { + foo: { __typename: 'Type1', a: 1, b: 2, c: 3 }, + }, + {}, + ); + + assert.equal( + client1.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + null, + ); + assert.equal( + client2.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + null, + ); assert.deepEqual<{} | null>( - client3.readFragment({ id: 'foo', fragment: gql`fragment fooFragment on Foo { a b c }` }), - { a: 1, b: 2, c: 3 }); + client3.readFragment({ + id: 'foo', + fragment: gql` + fragment fooFragment on Foo { + a + b + c + } + `, + }), + { a: 1, b: 2, c: 3 }, + ); }); it('will read data using custom resolvers', () => { - const proxy = new TransactionDataProxy({ - 'ROOT_QUERY': { - __typename: 'Query', - }, - foo: { - __typename: 'Query', - id: 'foo', + const proxy = new TransactionDataProxy( + { + ROOT_QUERY: { + __typename: 'Query', + }, + foo: { + __typename: 'Query', + id: 'foo', + }, + bar: { + __typename: 'Thing', + id: 'bar', + a: 1, + b: '2', + c: null, + }, }, - bar: { - __typename: 'Thing', - id: 'bar', - a: 1, - b: '2', - c: null, - }, - }, { - dataIdFromObject: (object: any) => object.id, - customResolvers: { - Query: { - thing: (_, args) => toIdValue(args.id), + { + dataIdFromObject: (object: any) => object.id, + customResolvers: { + Query: { + thing: (_, args) => toIdValue(args.id), + }, }, + addTypename: true, }, - addTypename: true, - }); + ); const queryResult = proxy.readFragment({ id: 'foo', - fragment: gql`fragment fooFragment on Query { - thing(id: "bar") { a b c } - }`, + fragment: gql` + fragment fooFragment on Query { + thing(id: "bar") { + a + b + c + } + } + `, }); assert.deepEqual<{} | null>(queryResult, { @@ -1034,19 +1731,57 @@ describe('TransactionDataProxy', () => { proxy.writeQuery({ data: { a: 1, b: 2, c: 3 }, - query: gql`{ a b c }`, + query: gql` + { + a + b + c + } + `, }); proxy.writeQuery({ data: { foo: { d: 4, e: 5, bar: { f: 6, g: 7 } } }, - query: gql`{ foo(id: $id) { d e bar { f g } } }`, + query: gql` + { + foo(id: $id) { + d + e + bar { + f + g + } + } + } + `, variables: { id: 7 }, }); const writes = proxy.finish(); - const document1 = 
addTypenameToDocument(gql`{ a b c }`); - const document2 = addTypenameToDocument(gql`{ foo(id: $id) { d e bar { f g } } }`); + const document1 = addTypenameToDocument( + gql` + { + a + b + c + } + `, + ); + const document2 = addTypenameToDocument( + gql` + { + foo(id: $id) { + d + e + bar { + f + g + } + } + } + `, + ); assert.deepEqual(writes, [ { rootId: 'ROOT_QUERY', @@ -1082,15 +1817,35 @@ describe('TransactionDataProxy', () => { proxy.writeFragment({ data: { a: 1, b: 2, c: 3 }, id: 'foo', - fragment: gql`fragment fragment1 on Foo { a b c }`, + fragment: gql` + fragment fragment1 on Foo { + a + b + c + } + `, }); proxy.writeFragment({ data: { foo: { d: 4, e: 5, bar: { f: 6, g: 7 } } }, id: 'bar', fragment: gql` - fragment fragment1 on Foo { a b c } - fragment fragment2 on Bar { foo(id: $id) { d e bar { f g } } } + fragment fragment1 on Foo { + a + b + c + } + + fragment fragment2 on Bar { + foo(id: $id) { + d + e + bar { + f + g + } + } + } `, fragmentName: 'fragment2', variables: { id: 7 }, @@ -1099,30 +1854,74 @@ describe('TransactionDataProxy', () => { const writes = proxy.finish(); assert.equal(writes.length, 2); - assert.deepEqual(Object.keys(writes[0]), ['rootId', 'result', 'document', 'operationName', 'variables']); + assert.deepEqual(Object.keys(writes[0]), [ + 'rootId', + 'result', + 'document', + 'operationName', + 'variables', + ]); assert.equal(writes[0].rootId, 'foo'); assert.deepEqual(writes[0].result, { a: 1, b: 2, c: 3 }); assert.deepEqual(writes[0].variables, {}); - assert.equal(print(writes[0].document), print(gql` - { ...fragment1 } - fragment fragment1 on Foo { a b c } - `)); - assert.deepEqual(Object.keys(writes[1]), ['rootId', 'result', 'document', 'operationName', 'variables']); + assert.equal( + print(writes[0].document), + print(gql` + { + ...fragment1 + } + + fragment fragment1 on Foo { + a + b + c + } + `), + ); + assert.deepEqual(Object.keys(writes[1]), [ + 'rootId', + 'result', + 'document', + 'operationName', + 'variables', + ]); assert.equal(writes[1].rootId, 'bar'); - assert.deepEqual(writes[1].result, { foo: { d: 4, e: 5, bar: { f: 6, g: 7 } } }); + assert.deepEqual(writes[1].result, { + foo: { d: 4, e: 5, bar: { f: 6, g: 7 } }, + }); assert.deepEqual(writes[1].variables, { id: 7 }); - assert.equal(print(writes[1].document), print(gql` - { ...fragment2 } - fragment fragment1 on Foo { a b c } - fragment fragment2 on Bar { foo(id: $id) { d e bar { f g } } } - `)); + assert.equal( + print(writes[1].document), + print(gql` + { + ...fragment2 + } + + fragment fragment1 on Foo { + a + b + c + } + + fragment fragment2 on Bar { + foo(id: $id) { + d + e + bar { + f + g + } + } + } + `), + ); }); }); describe('write then read', () => { it('will write data locally which will then be read back', () => { const data: any = { - 'foo': { + foo: { __typename: 'Foo', a: 1, b: 2, @@ -1144,45 +1943,139 @@ describe('TransactionDataProxy', () => { const proxy = new TransactionDataProxy(data, { addTypename: true }); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'foo', fragment: gql`fragment x on Foo { a b c bar { d e f } }` }), - { __typename: 'Foo', a: 1, b: 2, c: 3, bar: { __typename: 'Bar', d: 4, e: 5, f: 6 } }, + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment x on Foo { + a + b + c + bar { + d + e + f + } + } + `, + }), + { + __typename: 'Foo', + a: 1, + b: 2, + c: 3, + bar: { __typename: 'Bar', d: 4, e: 5, f: 6 }, + }, ); proxy.writeFragment({ id: 'foo', - fragment: gql`fragment x on Foo { a }`, + fragment: gql` + fragment x on Foo { + a + } + `, 
data: { a: 7 }, }); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'foo', fragment: gql`fragment x on Foo { a b c bar { d e f } }` }), - { __typename: 'Foo', a: 7, b: 2, c: 3, bar: { __typename: 'Bar', d: 4, e: 5, f: 6 } }, + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment x on Foo { + a + b + c + bar { + d + e + f + } + } + `, + }), + { + __typename: 'Foo', + a: 7, + b: 2, + c: 3, + bar: { __typename: 'Bar', d: 4, e: 5, f: 6 }, + }, ); proxy.writeFragment({ id: 'foo', - fragment: gql`fragment x on Foo { bar { d } }`, + fragment: gql` + fragment x on Foo { + bar { + d + } + } + `, data: { __typename: 'Foo', bar: { __typename: 'Bar', d: 8 } }, }); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'foo', fragment: gql`fragment x on Foo { a b c bar { d e f } }` }), - { __typename: 'Foo', a: 7, b: 2, c: 3, bar: { __typename: 'Bar', d: 8, e: 5, f: 6 } }, + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment x on Foo { + a + b + c + bar { + d + e + f + } + } + `, + }), + { + __typename: 'Foo', + a: 7, + b: 2, + c: 3, + bar: { __typename: 'Bar', d: 8, e: 5, f: 6 }, + }, ); proxy.writeFragment({ id: '$foo.bar', - fragment: gql`fragment y on Bar { e }`, + fragment: gql` + fragment y on Bar { + e + } + `, data: { __typename: 'Bar', e: 9 }, }); assert.deepEqual<{} | null>( - proxy.readFragment({ id: 'foo', fragment: gql`fragment x on Foo { a b c bar { d e f } }` }), - { __typename: 'Foo', a: 7, b: 2, c: 3, bar: { __typename: 'Bar', d: 8, e: 9, f: 6 } }, + proxy.readFragment({ + id: 'foo', + fragment: gql` + fragment x on Foo { + a + b + c + bar { + d + e + f + } + } + `, + }), + { + __typename: 'Foo', + a: 7, + b: 2, + c: 3, + bar: { __typename: 'Bar', d: 8, e: 9, f: 6 }, + }, ); assert.deepEqual((proxy as any).data, { - 'foo': { + foo: { __typename: 'Foo', a: 7, b: 2, @@ -1202,7 +2095,7 @@ describe('TransactionDataProxy', () => { }); assert.deepEqual(data, { - 'foo': { + foo: { __typename: 'Foo', a: 1, b: 2, @@ -1224,20 +2117,71 @@ describe('TransactionDataProxy', () => { it('will write data to a specific id', () => { const data = {}; - const proxy = new TransactionDataProxy(data, { dataIdFromObject : (o: any) => o.id, addTypename: true }); + const proxy = new TransactionDataProxy(data, { + dataIdFromObject: (o: any) => o.id, + addTypename: true, + }); proxy.writeQuery({ - query: gql`{ a b foo { c d bar { id e f } } }`, - data: { a: 1, b: 2, foo: { __typename: 'Foo', c: 3, d: 4, bar: { __typename: 'Bar', id: 'foobar', e: 5, f: 6 } } }, + query: gql` + { + a + b + foo { + c + d + bar { + id + e + f + } + } + } + `, + data: { + a: 1, + b: 2, + foo: { + __typename: 'Foo', + c: 3, + d: 4, + bar: { __typename: 'Bar', id: 'foobar', e: 5, f: 6 }, + }, + }, }); assert.deepEqual<{} | null>( - proxy.readQuery({ query: gql`{ a b foo { c d bar { id e f } } }` }), - { a: 1, b: 2, foo: { __typename: 'Foo', c: 3, d: 4, bar: { __typename: 'Bar', id: 'foobar', e: 5, f: 6 } } }, + proxy.readQuery({ + query: gql` + { + a + b + foo { + c + d + bar { + id + e + f + } + } + } + `, + }), + { + a: 1, + b: 2, + foo: { + __typename: 'Foo', + c: 3, + d: 4, + bar: { __typename: 'Bar', id: 'foobar', e: 5, f: 6 }, + }, + }, ); assert.deepEqual((proxy as any).data, { - 'ROOT_QUERY': { + ROOT_QUERY: { a: 1, b: 2, foo: { @@ -1256,7 +2200,7 @@ describe('TransactionDataProxy', () => { generated: false, }, }, - 'foobar': { + foobar: { __typename: 'Bar', id: 'foobar', e: 5, diff --git a/test/queryTransform.ts b/test/queryTransform.ts index 9fd8c437175..dac451ee0d1 100644 --- 
a/test/queryTransform.ts +++ b/test/queryTransform.ts @@ -1,10 +1,6 @@ -import { - addTypenameToDocument, -} from '../src/queries/queryTransform'; +import { addTypenameToDocument } from '../src/queries/queryTransform'; -import { - getQueryDefinition, -} from '../src/queries/getFromAST'; +import { getQueryDefinition } from '../src/queries/getFromAST'; import { print } from 'graphql/language/printer'; import gql from 'graphql-tag'; @@ -74,84 +70,96 @@ describe('query transforms', () => { it('should not screw up on a FragmentSpread within the query AST', () => { const testQuery = gql` - query withFragments { - user(id: 4) { - friends(first: 10) { - ...friendFields + query withFragments { + user(id: 4) { + friends(first: 10) { + ...friendFields + } } } - }`; + `; const expectedQuery = getQueryDefinition(gql` - query withFragments { - user(id: 4) { - friends(first: 10) { - ...friendFields + query withFragments { + user(id: 4) { + friends(first: 10) { + ...friendFields + __typename + } __typename } - __typename } - } `); const modifiedQuery = addTypenameToDocument(testQuery); - assert.equal(print(expectedQuery), print(getQueryDefinition(modifiedQuery))); + assert.equal( + print(expectedQuery), + print(getQueryDefinition(modifiedQuery)), + ); }); it('should modify all definitions in a document', () => { const testQuery = gql` - query withFragments { - user(id: 4) { - friends(first: 10) { - ...friendFields + query withFragments { + user(id: 4) { + friends(first: 10) { + ...friendFields + } } } - } - fragment friendFields on User { - firstName - lastName - }`; + + fragment friendFields on User { + firstName + lastName + } + `; const newQueryDoc = addTypenameToDocument(testQuery); const expectedQuery = gql` - query withFragments { - user(id: 4) { - friends(first: 10) { - ...friendFields + query withFragments { + user(id: 4) { + friends(first: 10) { + ...friendFields + __typename + } __typename } + } + + fragment friendFields on User { + firstName + lastName __typename } - } - fragment friendFields on User { - firstName - lastName - __typename - }`; + `; assert.equal(print(expectedQuery), print(newQueryDoc)); }); it('should be able to apply a QueryTransformer correctly', () => { const testQuery = gql` - query { - author { - firstName - lastName + query { + author { + firstName + lastName + } } - }`; + `; const expectedQuery = getQueryDefinition(gql` - query { - author { - firstName - lastName - __typename + query { + author { + firstName + lastName + __typename + } } - } `); const modifiedQuery = addTypenameToDocument(testQuery); - assert.equal(print(expectedQuery), print(getQueryDefinition(modifiedQuery))); + assert.equal( + print(expectedQuery), + print(getQueryDefinition(modifiedQuery)), + ); }); it('should be able to apply a MutationTransformer correctly', () => { @@ -161,7 +169,8 @@ describe('query transforms', () => { firstName lastName } - }`; + } + `; const expectedQuery = gql` mutation { createAuthor(firstName: "John", lastName: "Smith") { @@ -169,81 +178,84 @@ describe('query transforms', () => { lastName __typename } - }`; + } + `; const modifiedQuery = addTypenameToDocument(testQuery); assert.equal(print(expectedQuery), print(modifiedQuery)); - }); - it('should add typename fields correctly on this one query' , () => { + it('should add typename fields correctly on this one query', () => { const testQuery = gql` - query Feed($type: FeedType!) 
{ - # Eventually move this into a no fetch query right on the entry - # since we literally just need this info to determine whether to - # show upvote/downvote buttons - currentUser { + query Feed($type: FeedType!) { + # Eventually move this into a no fetch query right on the entry + # since we literally just need this info to determine whether to + # show upvote/downvote buttons + currentUser { + login + } + feed(type: $type) { + createdAt + score + commentCount + id + postedBy { login + html_url } - feed(type: $type) { - createdAt - score - commentCount - id - postedBy { - login - html_url - } - - repository { - name - full_name - description - html_url - stargazers_count - open_issues_count - created_at - owner { - avatar_url - } + repository { + name + full_name + description + html_url + stargazers_count + open_issues_count + created_at + owner { + avatar_url } } - }`; + } + } + `; const expectedQuery = getQueryDefinition(gql` query Feed($type: FeedType!) { - currentUser { + currentUser { + login + __typename + } + feed(type: $type) { + createdAt + score + commentCount + id + postedBy { login + html_url __typename } - feed(type: $type) { - createdAt - score - commentCount - id - postedBy { - login - html_url - __typename - } - - repository { - name - full_name - description - html_url - stargazers_count - open_issues_count - created_at - owner { - avatar_url - __typename - } + repository { + name + full_name + description + html_url + stargazers_count + open_issues_count + created_at + owner { + avatar_url __typename } __typename } - }`); + __typename + } + } + `); const modifiedQuery = addTypenameToDocument(testQuery); - assert.equal(print(expectedQuery), print(getQueryDefinition(modifiedQuery))); + assert.equal( + print(expectedQuery), + print(getQueryDefinition(modifiedQuery)), + ); }); }); diff --git a/test/readFromStore.ts b/test/readFromStore.ts index 04af69aaa20..7693a191a09 100644 --- a/test/readFromStore.ts +++ b/test/readFromStore.ts @@ -1,13 +1,9 @@ import { assert } from 'chai'; import { assign, omit } from 'lodash'; -import { - readQueryFromStore, -} from '../src/data/readFromStore'; +import { readQueryFromStore } from '../src/data/readFromStore'; -import { - withError, -} from './util/wrap'; +import { withError } from './util/wrap'; import { NormalizedCache, @@ -17,22 +13,24 @@ import { JsonValue, } from '../src/data/storeUtils'; -import { - HeuristicFragmentMatcher, -} from '../src/data/fragmentMatcher'; +import { HeuristicFragmentMatcher } from '../src/data/fragmentMatcher'; const fragmentMatcherFunction = new HeuristicFragmentMatcher().match; import gql from 'graphql-tag'; - describe('reading from the store', () => { it('rejects malformed queries', () => { assert.throws(() => { readQueryFromStore({ store: {}, query: gql` - query { name } - query { address } + query { + name + } + + query { + address + } `, }); }, /exactly one/); @@ -41,7 +39,9 @@ describe('reading from the store', () => { readQueryFromStore({ store: {}, query: gql` - fragment x on y { name } + fragment x on y { + name + } `, }); }, /contain a query/); @@ -56,14 +56,14 @@ describe('reading from the store', () => { } as StoreObject; const store = { - 'ROOT_QUERY': result, + ROOT_QUERY: result, } as NormalizedCache; const queryResult = readQueryFromStore({ store, query: gql` query { - stringField, + stringField numberField } `, @@ -79,9 +79,9 @@ describe('reading from the store', () => { it('runs a basic query with arguments', () => { const query = gql` query { - id, - stringField(arg: $stringArg), - 
numberField(intArg: $intArg, floatArg: $floatArg), + id + stringField(arg: $stringArg) + numberField(intArg: $intArg, floatArg: $floatArg) nullField } `; @@ -93,7 +93,7 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'abcd', nullField: null, 'numberField({"intArg":5,"floatArg":3.14})': 5, @@ -118,13 +118,13 @@ describe('reading from the store', () => { it('runs a basic query with default values for arguments', () => { const query = gql` query someBigQuery( - $stringArg: String = "This is a default string!", - $intArg: Int = 0, - $floatArg: Float, - ){ - id, - stringField(arg: $stringArg), - numberField(intArg: $intArg, floatArg: $floatArg), + $stringArg: String = "This is a default string!" + $intArg: Int = 0 + $floatArg: Float + ) { + id + stringField(arg: $stringArg) + numberField(intArg: $intArg, floatArg: $floatArg) nullField } `; @@ -134,7 +134,7 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'abcd', nullField: null, 'numberField({"intArg":0,"floatArg":3.14})': 5, @@ -171,13 +171,17 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedObj')), { - nestedObj: { - type: 'id', - id: 'abcde', - generated: false, - }, - } as StoreObject), + ROOT_QUERY: assign( + {}, + assign({}, omit(result, 'nestedObj')), + { + nestedObj: { + type: 'id', + id: 'abcde', + generated: false, + }, + } as StoreObject, + ), abcde: result.nestedObj, } as NormalizedCache; @@ -185,10 +189,10 @@ describe('reading from the store', () => { store, query: gql` { - stringField, - numberField, + stringField + numberField nestedObj { - stringField, + stringField numberField } } @@ -228,14 +232,18 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedObj', 'deepNestedObj')), { - __typename: 'Query', - nestedObj: { - type: 'id', - id: 'abcde', - generated: false, - }, - } as StoreObject), + ROOT_QUERY: assign( + {}, + assign({}, omit(result, 'nestedObj', 'deepNestedObj')), + { + __typename: 'Query', + nestedObj: { + type: 'id', + id: 'abcde', + generated: false, + }, + } as StoreObject, + ), abcde: assign({}, result.nestedObj, { deepNestedObj: { type: 'id', @@ -250,9 +258,9 @@ describe('reading from the store', () => { store, query: gql` { - stringField, - numberField, - nullField, + stringField + numberField + nullField ... on Query { nestedObj { stringField @@ -302,13 +310,15 @@ describe('reading from the store', () => { it('runs a nested query with proper fragment fields in arrays', () => { return withError(() => { const store = { - 'ROOT_QUERY': { + ROOT_QUERY: { __typename: 'Query', nestedObj: { type: 'id', id: 'abcde', generated: false }, } as StoreObject, abcde: { id: 'abcde', - innerArray: [{ type: 'id', generated: true, id: 'abcde.innerArray.0' } as any], + innerArray: [ + { type: 'id', generated: true, id: 'abcde.innerArray.0' } as any, + ], } as StoreObject, 'abcde.innerArray.0': { id: 'abcdef', @@ -322,17 +332,26 @@ describe('reading from the store', () => { { ... on DummyQuery { nestedObj { - innerArray { id otherField } + innerArray { + id + otherField + } } } ... on Query { nestedObj { - innerArray { id someField } + innerArray { + id + someField + } } } ... 
on DummyQuery2 { nestedObj { - innerArray { id otherField2 } + innerArray { + id + otherField2 + } } } } @@ -342,7 +361,7 @@ describe('reading from the store', () => { assert.deepEqual<{}>(queryResult, { nestedObj: { - innerArray: [{id: 'abcdef', someField: 3}], + innerArray: [{ id: 'abcdef', someField: 3 }], }, }); }, /IntrospectionFragmentMatcher/); @@ -369,7 +388,7 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedArray')), { + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedArray')), { nestedArray: [ { type: 'id', generated: true, id: 'abcd.nestedArray.0' } as IdValue, { type: 'id', generated: true, id: 'abcd.nestedArray.1' } as IdValue, @@ -383,10 +402,10 @@ describe('reading from the store', () => { store, query: gql` { - stringField, - numberField, + stringField + numberField nestedArray { - stringField, + stringField numberField } } @@ -427,7 +446,7 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedArray')), { + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedArray')), { nestedArray: [ null, { type: 'id', generated: true, id: 'abcd.nestedArray.1' } as IdValue, @@ -440,10 +459,10 @@ describe('reading from the store', () => { store, query: gql` { - stringField, - numberField, + stringField + numberField nestedArray { - stringField, + stringField numberField } } @@ -482,24 +501,21 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedArray')), { - nestedArray: [ - null, - { type: 'id', generated: false, id: 'abcde' }, - ], + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedArray')), { + nestedArray: [null, { type: 'id', generated: false, id: 'abcde' }], }) as StoreObject, - 'abcde': result.nestedArray[1], + abcde: result.nestedArray[1], } as NormalizedCache; const queryResult = readQueryFromStore({ store, query: gql` { - stringField, - numberField, + stringField + numberField nestedArray { - id, - stringField, + id + stringField numberField } } @@ -529,14 +545,14 @@ describe('reading from the store', () => { nullField: null, } as StoreObject; - const store = { 'ROOT_QUERY': result } as NormalizedCache; + const store = { ROOT_QUERY: result } as NormalizedCache; assert.throws(() => { readQueryFromStore({ store, query: gql` { - stringField, + stringField missingField } `, @@ -554,17 +570,19 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedObj')), { nestedObj: null }) as StoreObject, + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedObj')), { + nestedObj: null, + }) as StoreObject, } as NormalizedCache; const queryResult = readQueryFromStore({ store, query: gql` { - stringField, - numberField, + stringField + numberField nestedObj { - stringField, + stringField numberField } } @@ -589,18 +607,20 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'simpleArray')), { simpleArray: { - type: 'json', - json: result.simpleArray, - } as JsonValue }) as StoreObject, + ROOT_QUERY: assign({}, assign({}, omit(result, 'simpleArray')), { + simpleArray: { + type: 'json', + json: result.simpleArray, + } as JsonValue, + }) as StoreObject, } as NormalizedCache; const queryResult = readQueryFromStore({ store, query: gql` { - stringField, - numberField, + stringField + numberField simpleArray } `, @@ -624,18 +644,20 @@ describe('reading 
from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'simpleArray')), { simpleArray: { - type: 'json', - json: result.simpleArray, - } as JsonValue }) as StoreObject, + ROOT_QUERY: assign({}, assign({}, omit(result, 'simpleArray')), { + simpleArray: { + type: 'json', + json: result.simpleArray, + } as JsonValue, + }) as StoreObject, } as NormalizedCache; const queryResult = readQueryFromStore({ store, query: gql` { - stringField, - numberField, + stringField + numberField simpleArray } `, @@ -659,7 +681,7 @@ describe('reading from the store', () => { } as StoreObject; const store = { - 'ROOT_QUERY': result, + ROOT_QUERY: result, } as NormalizedCache; const queryResult = readQueryFromStore({ @@ -674,7 +696,8 @@ describe('reading from the store', () => { config: { customResolvers: { Thing: { - computedField: (obj, args) => obj.stringField + obj.numberField + args['extra'], + computedField: (obj, args) => + obj.stringField + obj.numberField + args['extra'], }, }, }, @@ -697,7 +720,7 @@ describe('reading from the store', () => { } as StoreObject; const store = { - 'ROOT_QUERY': result, + ROOT_QUERY: result, } as NormalizedCache; const queryResult = readQueryFromStore({ @@ -712,7 +735,8 @@ describe('reading from the store', () => { config: { customResolvers: { Query: { - computedField: (obj, args) => obj.stringField + obj.numberField + args['extra'], + computedField: (obj, args) => + obj.stringField + obj.numberField + args['extra'], }, }, }, @@ -748,14 +772,18 @@ describe('reading from the store', () => { }; const store = { - 'ROOT_QUERY': assign({}, assign({}, omit(data, 'nestedObj', 'deepNestedObj')), { - __typename: 'Query', - nestedObj: { - type: 'id', - id: 'abcde', - generated: false, - } as IdValue, - }) as StoreObject, + ROOT_QUERY: assign( + {}, + assign({}, omit(data, 'nestedObj', 'deepNestedObj')), + { + __typename: 'Query', + nestedObj: { + type: 'id', + id: 'abcde', + generated: false, + } as IdValue, + }, + ) as StoreObject, abcde: assign({}, data.nestedObj, { deepNestedObj: { type: 'id', @@ -815,17 +843,17 @@ describe('reading from the store', () => { it('properly handles the connection directive', () => { const store: NormalizedCache = { - 'ROOT_QUERY': { - 'abc': [ + ROOT_QUERY: { + abc: [ { - 'generated': true, - 'id': 'ROOT_QUERY.abc.0', - 'type': 'id', + generated: true, + id: 'ROOT_QUERY.abc.0', + type: 'id', }, ], }, 'ROOT_QUERY.abc.0': { - 'name': 'efgh', + name: 'efgh', }, }; @@ -840,15 +868,12 @@ describe('reading from the store', () => { `, }); - assert.deepEqual<{}>( - queryResult, - { - 'books': [ - { - 'name': 'efgh', - }, - ], - }, - ); + assert.deepEqual<{}>(queryResult, { + books: [ + { + name: 'efgh', + }, + ], + }); }); }); diff --git a/test/roundtrip.ts b/test/roundtrip.ts index 9bb6b96c9ae..df3d8767971 100644 --- a/test/roundtrip.ts +++ b/test/roundtrip.ts @@ -7,228 +7,250 @@ import { createFragmentMap, } from '../src/queries/getFromAST'; -import { - DocumentNode, -} from 'graphql'; +import { DocumentNode } from 'graphql'; import gql from 'graphql-tag'; import { withWarning, withError } from './util/wrap'; -import { - HeuristicFragmentMatcher, -} from '../src/data/fragmentMatcher'; +import { HeuristicFragmentMatcher } from '../src/data/fragmentMatcher'; const fragmentMatcherFunction = new HeuristicFragmentMatcher().match; describe('roundtrip', () => { it('real graphql result', () => { - storeRoundtrip(gql` - { - people_one(id: "1") { - name + storeRoundtrip( + gql` + { + people_one(id: "1") { + name + } } - } - `, { 
- people_one: { - name: 'Luke Skywalker', + `, + { + people_one: { + name: 'Luke Skywalker', + }, }, - }); + ); }); it('multidimensional array (#776)', () => { - storeRoundtrip(gql` - { - rows { - value + storeRoundtrip( + gql` + { + rows { + value + } } - } - `, { - rows: [ - [ - { value: 1 }, - { value: 2 }, - ], - [ - { value: 3 }, - { value: 4 }, - ], - ], - }); + `, + { + rows: [[{ value: 1 }, { value: 2 }], [{ value: 3 }, { value: 4 }]], + }, + ); }); - it('array with null values (#1551)', () => { - storeRoundtrip(gql` - { - list { - value + it('array with null values (#1551)', () => { + storeRoundtrip( + gql` + { + list { + value + } } - } - `, { - list: [ - null, - { value: 1 }, - ], - }); + `, + { + list: [null, { value: 1 }], + }, + ); }); it('enum arguments', () => { - storeRoundtrip(gql` - { - hero(episode: JEDI) { - name + storeRoundtrip( + gql` + { + hero(episode: JEDI) { + name + } } - } - `, { - hero: { - name: 'Luke Skywalker', + `, + { + hero: { + name: 'Luke Skywalker', + }, }, - }); + ); }); it('with an alias', () => { - storeRoundtrip(gql` + storeRoundtrip( + gql` + { + luke: people_one(id: "1") { + name + } + vader: people_one(id: "4") { + name + } + } + `, { - luke: people_one(id: "1") { - name, + luke: { + name: 'Luke Skywalker', + }, + vader: { + name: 'Darth Vader', }, - vader: people_one(id: "4") { - name, - } - } - `, { - luke: { - name: 'Luke Skywalker', - }, - vader: { - name: 'Darth Vader', }, - }); + ); }); it('with variables', () => { - storeRoundtrip(gql` + storeRoundtrip( + gql` + { + luke: people_one(id: $lukeId) { + name + } + vader: people_one(id: $vaderId) { + name + } + } + `, { - luke: people_one(id: $lukeId) { - name, + luke: { + name: 'Luke Skywalker', + }, + vader: { + name: 'Darth Vader', }, - vader: people_one(id: $vaderId) { - name, - } - } - `, { - luke: { - name: 'Luke Skywalker', }, - vader: { - name: 'Darth Vader', + { + lukeId: '1', + vaderId: '4', }, - }, { - lukeId: '1', - vaderId: '4', - }); + ); }); it('with GraphQLJSON scalar type', () => { - storeRoundtrip(gql` - { - updateClub { - uid, - name, - settings + storeRoundtrip( + gql` + { + updateClub { + uid + name + settings + } } - } - `, { - updateClub: { - uid: '1d7f836018fc11e68d809dfee940f657', - name: 'Eple', - settings: { - name: 'eple', - currency: 'AFN', - calendarStretch: 2, - defaultPreAllocationPeriod: 1, - confirmationEmailCopy: null, - emailDomains: null, + `, + { + updateClub: { + uid: '1d7f836018fc11e68d809dfee940f657', + name: 'Eple', + settings: { + name: 'eple', + currency: 'AFN', + calendarStretch: 2, + defaultPreAllocationPeriod: 1, + confirmationEmailCopy: null, + emailDomains: null, + }, }, }, - }); + ); }); describe('directives', () => { it('should be able to query with skip directive true', () => { - storeRoundtrip(gql` - query { - fortuneCookie @skip(if: true) - } - `, {}); + storeRoundtrip( + gql` + query { + fortuneCookie @skip(if: true) + } + `, + {}, + ); }); it('should be able to query with skip directive false', () => { - storeRoundtrip(gql` - query { - fortuneCookie @skip(if: false) - } - `, {fortuneCookie: 'live long and prosper'}); + storeRoundtrip( + gql` + query { + fortuneCookie @skip(if: false) + } + `, + { fortuneCookie: 'live long and prosper' }, + ); }); }); describe('fragments', () => { it('should work on null fields', () => { - storeRoundtrip(gql` - query { - field { - ... on Obj { - stuff + storeRoundtrip( + gql` + query { + field { + ... 
on Obj { + stuff + } } } - } - `, { - field: null, - }); + `, + { + field: null, + }, + ); }); it('should work on basic inline fragments', () => { - storeRoundtrip(gql` - query { - field { - __typename - ... on Obj { - stuff + storeRoundtrip( + gql` + query { + field { + __typename + ... on Obj { + stuff + } } } - } - `, { - field: { - __typename: 'Obj', - stuff: 'Result', + `, + { + field: { + __typename: 'Obj', + stuff: 'Result', + }, }, - }); + ); }); it('should resolve on union types with inline fragments without typenames with warning', () => { return withWarning(() => { - storeRoundtrip(gql` - query { - all_people { - name - ... on Jedi { - side - } - ... on Droid { - model + storeRoundtrip( + gql` + query { + all_people { + name + ... on Jedi { + side + } + ... on Droid { + model + } } } - }`, { - all_people: [ - { - name: 'Luke Skywalker', - side: 'bright', - }, - { - name: 'R2D2', - model: 'astromech', - }, - ], - }); + `, + { + all_people: [ + { + name: 'Luke Skywalker', + side: 'bright', + }, + { + name: 'R2D2', + model: 'astromech', + }, + ], + }, + ); }, /using fragments/); }); @@ -236,201 +258,233 @@ describe('roundtrip', () => { // However, the user may have written this result with client.writeQuery. it('should throw an error on two of the same inline fragment types', () => { return assert.throws(() => { - storeRoundtrip(gql` - query { - all_people { - __typename - name - ... on Jedi { - side - } - ... on Jedi { - rank + storeRoundtrip( + gql` + query { + all_people { + __typename + name + ... on Jedi { + side + } + ... on Jedi { + rank + } } } - }`, { - all_people: [ - { - __typename: 'Jedi', - name: 'Luke Skywalker', - side: 'bright', - }, - ], - }); + `, + { + all_people: [ + { + __typename: 'Jedi', + name: 'Luke Skywalker', + side: 'bright', + }, + ], + }, + ); }, /Can\'t find field rank on object/); }); it('should resolve fields it can on interface with non matching inline fragments', () => { return withError(() => { - storeRoundtrip(gql` - query { - dark_forces { - __typename - name - ... on Droid { - model + storeRoundtrip( + gql` + query { + dark_forces { + __typename + name + ... 
on Droid { + model + } } } - }`, { - dark_forces: [ - { - __typename: 'Droid', - name: '8t88', - model: '88', - }, - { - __typename: 'Darth', - name: 'Anakin Skywalker', - }, - ], - }); + `, + { + dark_forces: [ + { + __typename: 'Droid', + name: '8t88', + model: '88', + }, + { + __typename: 'Darth', + name: 'Anakin Skywalker', + }, + ], + }, + ); }, /IntrospectionFragmentMatcher/); }); it('should resolve on union types with spread fragments', () => { return withError(() => { - storeRoundtrip(gql` - fragment jediFragment on Jedi { - side - } + storeRoundtrip( + gql` + fragment jediFragment on Jedi { + side + } - fragment droidFragment on Droid { - model - } + fragment droidFragment on Droid { + model + } - query { - all_people { - __typename - name - ...jediFragment - ...droidFragment + query { + all_people { + __typename + name + ...jediFragment + ...droidFragment + } } - }`, { - all_people: [ - { - __typename: 'Jedi', - name: 'Luke Skywalker', - side: 'bright', - }, - { - __typename: 'Droid', - name: 'R2D2', - model: 'astromech', - }, - ], - }); + `, + { + all_people: [ + { + __typename: 'Jedi', + name: 'Luke Skywalker', + side: 'bright', + }, + { + __typename: 'Droid', + name: 'R2D2', + model: 'astromech', + }, + ], + }, + ); }, /IntrospectionFragmentMatcher/); }); it('should work with a fragment on the actual interface or union', () => { return withError(() => { - storeRoundtrip(gql` - fragment jediFragment on Character { - side - } + storeRoundtrip( + gql` + fragment jediFragment on Character { + side + } - fragment droidFragment on Droid { - model - } + fragment droidFragment on Droid { + model + } - query { - all_people { - name - __typename - ...jediFragment - ...droidFragment + query { + all_people { + name + __typename + ...jediFragment + ...droidFragment + } } - }`, { - all_people: [ - { - __typename: 'Jedi', - name: 'Luke Skywalker', - side: 'bright', - }, - { - __typename: 'Droid', - name: 'R2D2', - model: 'astromech', - }, - ], - }); + `, + { + all_people: [ + { + __typename: 'Jedi', + name: 'Luke Skywalker', + side: 'bright', + }, + { + __typename: 'Droid', + name: 'R2D2', + model: 'astromech', + }, + ], + }, + ); }, /IntrospectionFragmentMatcher/); }); it('should throw on error on two of the same spread fragment types', () => { - assert.throws(() => - storeRoundtrip(gql` - fragment jediSide on Jedi { - side - } - fragment jediRank on Jedi { - rank - } - query { - all_people { - __typename - name - ...jediSide - ...jediRank - } - }`, { - all_people: [ + assert.throws( + () => + storeRoundtrip( + gql` + fragment jediSide on Jedi { + side + } + + fragment jediRank on Jedi { + rank + } + + query { + all_people { + __typename + name + ...jediSide + ...jediRank + } + } + `, { - __typename: 'Jedi', - name: 'Luke Skywalker', - side: 'bright', + all_people: [ + { + __typename: 'Jedi', + name: 'Luke Skywalker', + side: 'bright', + }, + ], }, - ], - }) - , /Can\'t find field rank on object/); + ), + /Can\'t find field rank on object/, + ); }); it('should resolve on @include and @skip with inline fragments', () => { - storeRoundtrip(gql` - query { - person { - name - __typename - ... on Jedi @include(if: true) { - side - } - ... on Droid @skip(if: true) { - model + storeRoundtrip( + gql` + query { + person { + name + __typename + ... on Jedi @include(if: true) { + side + } + ... 
on Droid @skip(if: true) { + model + } } } - }`, { - person: { - __typename: 'Jedi', - name: 'Luke Skywalker', - side: 'bright', + `, + { + person: { + __typename: 'Jedi', + name: 'Luke Skywalker', + side: 'bright', + }, }, - }); + ); }); it('should resolve on @include and @skip with spread fragments', () => { - storeRoundtrip(gql` - fragment jediFragment on Jedi { - side - } + storeRoundtrip( + gql` + fragment jediFragment on Jedi { + side + } - fragment droidFragment on Droid { - model - } + fragment droidFragment on Droid { + model + } - query { - person { - name - __typename - ...jediFragment @include(if: true) - ...droidFragment @skip(if: true) + query { + person { + name + __typename + ...jediFragment @include(if: true) + ...droidFragment @skip(if: true) + } } - }`, { - person: { - __typename: 'Jedi', - name: 'Luke Skywalker', - side: 'bright', + `, + { + person: { + __typename: 'Jedi', + name: 'Luke Skywalker', + side: 'bright', + }, }, - }); + ); }); }); }); diff --git a/test/scheduler.ts b/test/scheduler.ts index 5e6bbfd3c9b..db1c2c4a47f 100644 --- a/test/scheduler.ts +++ b/test/scheduler.ts @@ -1,12 +1,8 @@ import { QueryScheduler } from '../src/scheduler/scheduler'; import { assert } from 'chai'; -import { - QueryManager, -} from '../src/core/QueryManager'; +import { QueryManager } from '../src/core/QueryManager'; import { WatchQueryOptions } from '../src/core/watchQueryOptions'; -import { - createApolloStore, -} from '../src/store'; +import { createApolloStore } from '../src/store'; import mockNetworkInterface from './mocks/mockNetworkInterface'; import { NetworkStatus } from '../src/queries/networkStatus'; import gql from 'graphql-tag'; @@ -32,7 +28,8 @@ describe('QueryScheduler', () => { firstName lastName } - }`; + } + `; const queryOptions: WatchQueryOptions = { query, }; @@ -41,19 +38,20 @@ describe('QueryScheduler', () => { }); }); - it('should correctly start polling queries', (done) => { + it('should correctly start polling queries', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const data = { - 'author': { - 'firstName': 'John', - 'lastName': 'Smith', + author: { + firstName: 'John', + lastName: 'Smith', }, }; const queryOptions = { @@ -61,12 +59,10 @@ describe('QueryScheduler', () => { pollInterval: 80, }; - const networkInterface = mockNetworkInterface( - { - request: queryOptions, - result: { data }, - }, - ); + const networkInterface = mockNetworkInterface({ + request: queryOptions, + result: { data }, + }); const queryManager = new QueryManager({ networkInterface: networkInterface, store: createApolloStore(), @@ -77,9 +73,13 @@ describe('QueryScheduler', () => { queryManager, }); let timesFired = 0; - const queryId = scheduler.startPollingQuery(queryOptions, 'fake-id', (queryStoreValue) => { - timesFired += 1; - }); + const queryId = scheduler.startPollingQuery( + queryOptions, + 'fake-id', + queryStoreValue => { + timesFired += 1; + }, + ); setTimeout(() => { assert.isAtLeast(timesFired, 0); scheduler.stopPollingQuery(queryId); @@ -87,32 +87,31 @@ describe('QueryScheduler', () => { }, 120); }); - it('should correctly stop polling queries', (done) => { + it('should correctly stop polling queries', done => { const query = gql` query { someAlias: author { firstName lastName } - }`; + } + `; const data = { - 'someAlias': { - 'firstName': 'John', - 'lastName': 'Smith', + someAlias: { + firstName: 'John', + lastName: 'Smith', }, }; const queryOptions = { query, pollInterval: 20, }; - const networkInterface = mockNetworkInterface( - { 
- request: { - query: queryOptions.query, - }, - result: { data }, + const networkInterface = mockNetworkInterface({ + request: { + query: queryOptions.query, }, - ); + result: { data }, + }); const queryManager = new QueryManager({ networkInterface: networkInterface, store: createApolloStore(), @@ -122,12 +121,16 @@ describe('QueryScheduler', () => { queryManager, }); let timesFired = 0; - let queryId = scheduler.startPollingQuery(queryOptions, 'fake-id', (queryStoreValue) => { - if (queryStoreValue.networkStatus !== NetworkStatus.poll) { - timesFired += 1; - scheduler.stopPollingQuery(queryId); - } - }); + let queryId = scheduler.startPollingQuery( + queryOptions, + 'fake-id', + queryStoreValue => { + if (queryStoreValue.networkStatus !== NetworkStatus.poll) { + timesFired += 1; + scheduler.stopPollingQuery(queryId); + } + }, + ); setTimeout(() => { assert.equal(timesFired, 1); @@ -135,30 +138,29 @@ describe('QueryScheduler', () => { }, 170); }); - it('should register a query and return an observable that can be unsubscribed', (done) => { + it('should register a query and return an observable that can be unsubscribed', done => { const myQuery = gql` query { someAuthorAlias: author { firstName lastName } - }`; + } + `; const data = { - 'someAuthorAlias': { - 'firstName': 'John', - 'lastName': 'Smith', + someAuthorAlias: { + firstName: 'John', + lastName: 'Smith', }, }; const queryOptions = { query: myQuery, pollInterval: 20, }; - const networkInterface = mockNetworkInterface( - { - request: queryOptions, - result: { data }, - }, - ); + const networkInterface = mockNetworkInterface({ + request: queryOptions, + result: { data }, + }); const queryManager = new QueryManager({ networkInterface, store: createApolloStore(), @@ -184,25 +186,24 @@ describe('QueryScheduler', () => { }, 100); }); - it('should handle network errors on polling queries correctly', (done) => { + it('should handle network errors on polling queries correctly', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const error = new Error('something went terribly wrong'); const queryOptions = { query, pollInterval: 80, }; - const networkInterface = mockNetworkInterface( - { - request: queryOptions, - error, - }, - ); + const networkInterface = mockNetworkInterface({ + request: queryOptions, + error, + }); const queryManager = new QueryManager({ networkInterface, store: createApolloStore(), @@ -225,25 +226,24 @@ describe('QueryScheduler', () => { }); }); - it('should handle graphql errors on polling queries correctly', (done) => { + it('should handle graphql errors on polling queries correctly', done => { const query = gql` query { author { firstName lastName } - }`; + } + `; const errors = [new Error('oh no something went wrong')]; const queryOptions = { query, pollInterval: 80, }; - const networkInterface = mockNetworkInterface( - { - request: queryOptions, - result: { errors }, - }, - ); + const networkInterface = mockNetworkInterface({ + request: queryOptions, + result: { errors }, + }); const queryManager = new QueryManager({ networkInterface, store: createApolloStore(), @@ -262,25 +262,24 @@ describe('QueryScheduler', () => { }); }); - it('should not fire another query if one with the same id is in flight', (done) => { + it('should not fire another query if one with the same id is in flight', done => { const query = gql` query B { fortuneCookie - }`; + } + `; const data = { - 'fortuneCookie': 'you will live a long life', + fortuneCookie: 'you will live a long life', }; const queryOptions = { 
query, pollInterval: 10, }; - const networkInterface = mockNetworkInterface( - { - request: queryOptions, - result: { data }, - delay: 20000, - }, - ); + const networkInterface = mockNetworkInterface({ + request: queryOptions, + result: { data }, + delay: 20000, + }); const queryManager = new QueryManager({ networkInterface, store: createApolloStore(), @@ -301,20 +300,19 @@ describe('QueryScheduler', () => { const query = gql` query { fortuneCookie - }`; + } + `; const data = { - 'fortuneCookie': 'live long and prosper', + fortuneCookie: 'live long and prosper', }; const queryOptions = { query, pollInterval: 10000, }; - const networkInterface = mockNetworkInterface( - { - request: queryOptions, - result: { data }, - }, - ); + const networkInterface = mockNetworkInterface({ + request: queryOptions, + result: { data }, + }); const queryManager = new QueryManager({ networkInterface, store: createApolloStore(), @@ -326,8 +324,13 @@ describe('QueryScheduler', () => { const queryId = 'fake-id'; scheduler.addQueryOnInterval(queryId, queryOptions); assert.equal(Object.keys(scheduler.intervalQueries).length, 1); - assert.equal(Object.keys(scheduler.intervalQueries)[0], queryOptions.pollInterval.toString()); - const queries = (scheduler.intervalQueries)[queryOptions.pollInterval.toString()]; + assert.equal( + Object.keys(scheduler.intervalQueries)[0], + queryOptions.pollInterval.toString(), + ); + const queries = (scheduler.intervalQueries)[ + queryOptions.pollInterval.toString() + ]; assert.equal(queries.length, 1); assert.equal(queries[0], queryId); }); @@ -336,17 +339,19 @@ describe('QueryScheduler', () => { const query1 = gql` query { fortuneCookie - }`; + } + `; const data1 = { - 'fortuneCookie': 'live long and prosper', + fortuneCookie: 'live long and prosper', }; const query2 = gql` - query { - author { - firstName - lastName + query { + author { + firstName + lastName + } } - }`; + `; const data2 = { author: { firstName: 'Dhaivat', @@ -404,14 +409,15 @@ describe('QueryScheduler', () => { assert.deepEqual(scheduler.registeredQueries[queryIds[1]], queryOptions2); }); - it('should remove queries from the interval list correctly', (done) => { + it('should remove queries from the interval list correctly', done => { const query = gql` - query { - author { - firstName - lastName + query { + author { + firstName + lastName + } } - }`; + `; const data = { author: { firstName: 'John', @@ -419,12 +425,10 @@ describe('QueryScheduler', () => { }, }; const queryManager = new QueryManager({ - networkInterface: mockNetworkInterface( - { - request: { query }, - result: { data }, - }, - ), + networkInterface: mockNetworkInterface({ + request: { query }, + result: { data }, + }), store: createApolloStore(), reduxRootSelector: defaultReduxRootSelector, addTypename: false, @@ -433,7 +437,10 @@ describe('QueryScheduler', () => { queryManager, }); let timesFired = 0; - const observable = scheduler.registerPollingQuery({ query, pollInterval: 10 }); + const observable = scheduler.registerPollingQuery({ + query, + pollInterval: 10, + }); const subscription = observable.subscribe({ next(result) { timesFired += 1; @@ -449,30 +456,29 @@ describe('QueryScheduler', () => { }, 100); }); - it('should correctly start new polling query after removing old one', (done) => { + it('should correctly start new polling query after removing old one', done => { const query = gql` query { someAlias: author { firstName lastName } - }`; + } + `; const data = { - 'someAlias': { - 'firstName': 'John', - 'lastName': 'Smith', + someAlias: { 
+ firstName: 'John', + lastName: 'Smith', }, }; const queryOptions = { query, pollInterval: 20, }; - const networkInterface = mockNetworkInterface( - { - request: queryOptions, - result: { data }, - }, - ); + const networkInterface = mockNetworkInterface({ + request: queryOptions, + result: { data }, + }); const queryManager = new QueryManager({ networkInterface: networkInterface, store: createApolloStore(), @@ -483,18 +489,26 @@ describe('QueryScheduler', () => { queryManager, }); let timesFired = 0; - let queryId = scheduler.startPollingQuery(queryOptions, 'fake-id', (queryStoreValue) => { - scheduler.stopPollingQuery(queryId); - }); + let queryId = scheduler.startPollingQuery( + queryOptions, + 'fake-id', + queryStoreValue => { + scheduler.stopPollingQuery(queryId); + }, + ); setTimeout(() => { - let queryId2 = scheduler.startPollingQuery(queryOptions, 'fake-id2', (queryStoreValue) => { - timesFired += 1; - }); + let queryId2 = scheduler.startPollingQuery( + queryOptions, + 'fake-id2', + queryStoreValue => { + timesFired += 1; + }, + ); assert.equal(scheduler.intervalQueries[20].length, 1); setTimeout(() => { - assert.isAtLeast(timesFired, 1); - scheduler.stopPollingQuery(queryId2); - done(); + assert.isAtLeast(timesFired, 1); + scheduler.stopPollingQuery(queryId2); + done(); }, 300); }, 200); }); diff --git a/test/store.ts b/test/store.ts index 8397dacd54b..295980aac00 100644 --- a/test/store.ts +++ b/test/store.ts @@ -2,13 +2,9 @@ import * as chai from 'chai'; const { assert } = chai; import gql from 'graphql-tag'; -import { - Store, - createApolloStore, - ReducerError, -} from '../src/store'; +import { Store, createApolloStore, ReducerError } from '../src/store'; -import {getOperationName} from '../src/queries/getFromAST'; +import { getOperationName } from '../src/queries/getFromAST'; describe('createApolloStore', () => { it('does not require any arguments', () => { @@ -18,14 +14,11 @@ describe('createApolloStore', () => { it('has a default root key', () => { const store = createApolloStore(); - assert.deepEqual( - store.getState()['apollo'], - { - data: {}, - optimistic: [], - reducerError: null, - }, - ); + assert.deepEqual(store.getState()['apollo'], { + data: {}, + optimistic: [], + reducerError: null, + }); }); it('can take a custom root key', () => { @@ -33,14 +26,11 @@ describe('createApolloStore', () => { reduxRootKey: 'test', }); - assert.deepEqual( - store.getState()['test'], - { - data: {}, - optimistic: [], - reducerError: null, - }, - ); + assert.deepEqual(store.getState()['test'], { + data: {}, + optimistic: [], + reducerError: null, + }); }); it('can be rehydrated from the server', () => { @@ -49,7 +39,7 @@ describe('createApolloStore', () => { data: { 'test.0': true, }, - optimistic: ([] as any[]), + optimistic: [] as any[], }, }; @@ -76,13 +66,15 @@ describe('createApolloStore', () => { data: { 'test.0': true, }, - optimistic: ([] as any[]), + optimistic: [] as any[], }, }; - assert.throws(() => createApolloStore({ - initialState, - })); + assert.throws(() => + createApolloStore({ + initialState, + }), + ); }); it('throws an error if state contains a non-empty mutations field', () => { @@ -93,13 +85,15 @@ describe('createApolloStore', () => { data: { 'test.0': true, }, - optimistic: ([] as any[]), + optimistic: [] as any[], }, }; - assert.throws(() => createApolloStore({ - initialState, - })); + assert.throws(() => + createApolloStore({ + initialState, + }), + ); }); it('reset itself', () => { @@ -112,8 +106,8 @@ describe('createApolloStore', () => { }; const 
emptyState: Store = { - data: { }, - optimistic: ([] as any[]), + data: {}, + optimistic: [] as any[], reducerError: null, }; @@ -130,7 +124,11 @@ describe('createApolloStore', () => { }); it('can reset itself and keep the observable query ids', () => { - const queryDocument = gql` query { abc }`; + const queryDocument = gql` + query { + abc + } + `; const initialState = { apollo: { @@ -138,13 +136,13 @@ describe('createApolloStore', () => { 'test.0': true, 'test.1': true, }, - optimistic: ([] as any[]), + optimistic: [] as any[], }, }; const emptyState: Store = { data: {}, - optimistic: ([] as any[]), + optimistic: [] as any[], reducerError: null, }; @@ -175,12 +173,12 @@ describe('createApolloStore', () => { assert.deepEqual(store.getState().apollo, emptyState); }); - it('can\'t crash the reducer', () => { + it("can't crash the reducer", () => { const initialState = { apollo: { data: {}, - optimistic: ([] as any[]), - reducerError: (null as Error | null), + optimistic: [] as any[], + reducerError: null as Error | null, }, }; @@ -200,17 +198,21 @@ describe('createApolloStore', () => { variables, operationName: 'Increment', mutationId: '1', - optimisticResponse: {data: {incrementer: {counter: 1}}}, + optimisticResponse: { data: { incrementer: { counter: 1 } } }, }); store.dispatch({ type: 'APOLLO_MUTATION_RESULT', - result: {data: {incrementer: {counter: 1}}}, + result: { data: { incrementer: { counter: 1 } } }, document: mutation, operationName: 'Increment', variables, mutationId: '1', - extraReducers: [() => { throw new Error('test!!!'); }], + extraReducers: [ + () => { + throw new Error('test!!!'); + }, + ], }); assert(/test!!!/.test(store.getState().apollo.reducerError.error)); @@ -223,7 +225,7 @@ describe('createApolloStore', () => { mutationId: '1', action: { type: 'APOLLO_MUTATION_RESULT', - result: {data: {data: {incrementer: {counter: 1}}}}, + result: { data: { data: { incrementer: { counter: 1 } } } }, document: mutation, operationName: 'Increment', variables: {}, @@ -234,7 +236,7 @@ describe('createApolloStore', () => { }, }, ], - reducerError: (null as ReducerError | null), + reducerError: null as ReducerError | null, }; store.dispatch({ diff --git a/test/subscribeToMore.ts b/test/subscribeToMore.ts index 38822f7288c..965e4cc93de 100644 --- a/test/subscribeToMore.ts +++ b/test/subscribeToMore.ts @@ -1,9 +1,7 @@ import * as chai from 'chai'; const { assert } = chai; -import { - mockSubscriptionNetworkInterface, -} from './mocks/mockNetworkInterface'; +import { mockSubscriptionNetworkInterface } from './mocks/mockNetworkInterface'; import ApolloClient from '../src'; @@ -27,9 +25,10 @@ describe('subscribeToMore', () => { const req1 = { request: { query }, result }; - const results = ['Dahivat Pandya', 'Amanda Liu'].map( - name => ({ result: { data: { name: name } }, delay: 10 }), - ); + const results = ['Dahivat Pandya', 'Amanda Liu'].map(name => ({ + result: { data: { name: name } }, + delay: 10, + })); const sub1 = { request: { @@ -62,7 +61,7 @@ describe('subscribeToMore', () => { const results3 = [ { error: new Error('You cant touch this'), delay: 10 }, - { result: { data: { name: 'Amanda Liu' }}, delay: 10 }, + { result: { data: { name: 'Amanda Liu' } }, delay: 10 }, ]; const sub3 = { @@ -77,9 +76,10 @@ describe('subscribeToMore', () => { results: [...results3], }; - const results4 = ['Vyacheslav Kim', 'Changping Chen'].map( - name => ({ result: { data: { name: name }}, delay: 10 }), - ); + const results4 = ['Vyacheslav Kim', 'Changping Chen'].map(name => ({ + result: { data: 
{ name: name } }, + delay: 10, + })); const sub4 = { request: { @@ -93,7 +93,7 @@ describe('subscribeToMore', () => { results: [...results4], }; - it('triggers new result from subscription data', (done) => { + it('triggers new result from subscription data', done => { let latestResult: any = null; const networkInterface = mockSubscriptionNetworkInterface([sub1], req1); let counter = 0; @@ -127,10 +127,12 @@ describe('subscribeToMore', () => { setTimeout(() => { sub.unsubscribe(); assert.equal(counter, 3); - assert.deepEqual( - latestResult, - { data: { entry: { value: 'Amanda Liu' } }, loading: false, networkStatus: 7, stale: false }, - ); + assert.deepEqual(latestResult, { + data: { entry: { value: 'Amanda Liu' } }, + loading: false, + networkStatus: 7, + stale: false, + }); done(); }, 50); @@ -139,8 +141,7 @@ describe('subscribeToMore', () => { } }); - - it('calls error callback on error', (done) => { + it('calls error callback on error', done => { let latestResult: any = null; const networkInterface = mockSubscriptionNetworkInterface([sub2], req1); let counter = 0; @@ -171,16 +172,20 @@ describe('subscribeToMore', () => { updateQuery: (prev, { subscriptionData }) => { return { entry: { value: subscriptionData.data.name } }; }, - onError: (err) => { errorCount += 1; }, + onError: err => { + errorCount += 1; + }, }); setTimeout(() => { sub.unsubscribe(); assert.equal(counter, 2); - assert.deepEqual( - latestResult, - { data: { entry: { value: 'Amanda Liu' } }, loading: false, networkStatus: 7, stale: false }, - ); + assert.deepEqual(latestResult, { + data: { entry: { value: 'Amanda Liu' } }, + loading: false, + networkStatus: 7, + stale: false, + }); assert.equal(errorCount, 1); done(); }, 50); @@ -190,7 +195,7 @@ describe('subscribeToMore', () => { } }); - it('prints unhandled subscription errors to the console', (done) => { + it('prints unhandled subscription errors to the console', done => { let latestResult: any = null; const networkInterface = mockSubscriptionNetworkInterface([sub3], req1); let counter = 0; @@ -212,7 +217,9 @@ describe('subscribeToMore', () => { let errorCount = 0; const consoleErr = console.error; - console.error = (err: Error) => { errorCount += 1; }; + console.error = (err: Error) => { + errorCount += 1; + }; obsHandle.subscribeToMore({ document: gql` @@ -228,10 +235,12 @@ describe('subscribeToMore', () => { setTimeout(() => { sub.unsubscribe(); assert.equal(counter, 2); - assert.deepEqual( - latestResult, - { data: { entry: { value: 'Amanda Liu' } }, loading: false, networkStatus: 7, stale: false }, - ); + assert.deepEqual(latestResult, { + data: { entry: { value: 'Amanda Liu' } }, + loading: false, + networkStatus: 7, + stale: false, + }); assert.equal(errorCount, 1); console.error = consoleErr; done(); @@ -242,7 +251,7 @@ describe('subscribeToMore', () => { } }); - it('updates new result from subscription via a reducer in watchQuery options', (done) => { + it('updates new result from subscription via a reducer in watchQuery options', done => { let latestResult: any = null; const networkInterface = mockSubscriptionNetworkInterface([sub4], req1); let counter = 0; @@ -255,7 +264,10 @@ describe('subscribeToMore', () => { const obsHandle = client.watchQuery({ query, reducer: (previousResult, action) => { - if (action.type === 'APOLLO_SUBSCRIPTION_RESULT' && action.operationName === 'newValues') { + if ( + action.type === 'APOLLO_SUBSCRIPTION_RESULT' && + action.operationName === 'newValues' + ) { if (action.result.data) { return { entry: { value: 
action.result.data.name } }; } @@ -281,10 +293,12 @@ describe('subscribeToMore', () => { setTimeout(() => { sub.unsubscribe(); assert.equal(counter, 3); - assert.deepEqual( - latestResult, - { data: { entry: { value: 'Changping Chen' } }, loading: false, networkStatus: 7, stale: false }, - ); + assert.deepEqual(latestResult, { + data: { entry: { value: 'Changping Chen' } }, + loading: false, + networkStatus: 7, + stale: false, + }); done(); }, 50); diff --git a/test/tests.ts b/test/tests.ts index 8e0fe8bf905..9048229ddfe 100644 --- a/test/tests.ts +++ b/test/tests.ts @@ -15,13 +15,16 @@ import { QueryManager } from '../src/core/QueryManager'; process.env.NODE_ENV = 'test'; QueryManager.EMIT_REDUX_ACTIONS = false; -declare function require(name: string): any; +declare function require(name: string): any require('source-map-support').install(); console.warn = console.error = (...messages: string[]) => { - console.log(`==> Error in test: Tried to log warning or error with message: -`, ...messages); - if ( (!process.env.CI) && (!process.env.COV) ) { + console.log( + `==> Error in test: Tried to log warning or error with message: +`, + ...messages, + ); + if (!process.env.CI && !process.env.COV) { process.exit(1); } }; diff --git a/test/util/observableToPromise.ts b/test/util/observableToPromise.ts index af8d5d287b2..71224618f89 100644 --- a/test/util/observableToPromise.ts +++ b/test/util/observableToPromise.ts @@ -12,10 +12,10 @@ import { Subscription } from '../../src/util/Observable'; * @param errorCallbacks an expected set of errors */ export type Options = { - observable: ObservableQuery, - shouldResolve?: boolean, - wait?: number, - errorCallbacks?: ((error: Error) => any)[], + observable: ObservableQuery; + shouldResolve?: boolean; + wait?: number; + errorCallbacks?: ((error: Error) => any)[]; }; export type ResultCallback = ((result: ApolloQueryResult) => any); @@ -24,15 +24,10 @@ export type ResultCallback = ((result: ApolloQueryResult) => any); // ensuring it is called exactly N times, resolving once it has done so. // Optionally takes a timeout, which it will wait X ms after the Nth callback // to ensure it is not called again. 
-export function observableToPromiseAndSubscription({ - observable, - shouldResolve = true, - wait = -1, - errorCallbacks = [], - }: Options, - ...cbs: ResultCallback[], -): { promise: Promise, subscription: Subscription } { - +export function observableToPromiseAndSubscription( + { observable, shouldResolve = true, wait = -1, errorCallbacks = [] }: Options, + ...cbs: ResultCallback[] +): { promise: Promise; subscription: Subscription } { let subscription: Subscription = null as never; const promise = new Promise((resolve, reject) => { let errorIndex = 0; @@ -98,7 +93,7 @@ export function observableToPromiseAndSubscription({ export default function( options: Options, - ...cbs: ResultCallback[], + ...cbs: ResultCallback[] ): Promise { return observableToPromiseAndSubscription(options, ...cbs).promise; } diff --git a/test/util/subscribeAndCount.ts b/test/util/subscribeAndCount.ts index 75aef8b7230..e1bbc1f1e85 100644 --- a/test/util/subscribeAndCount.ts +++ b/test/util/subscribeAndCount.ts @@ -4,8 +4,11 @@ import { Subscription } from '../../src/util/Observable'; import wrap from './wrap'; -export default function subscribeAndCount(done: MochaDone, observable: ObservableQuery, - cb: (handleCount: number, result: ApolloQueryResult) => any): Subscription { +export default function subscribeAndCount( + done: MochaDone, + observable: ObservableQuery, + cb: (handleCount: number, result: ApolloQueryResult) => any, +): Subscription { let handleCount = 0; const subscription = observable.subscribe({ next: result => { diff --git a/test/util/wrap.ts b/test/util/wrap.ts index 67f174f4454..b82c0408dc6 100644 --- a/test/util/wrap.ts +++ b/test/util/wrap.ts @@ -3,7 +3,9 @@ const { assert } = chai; // I'm not sure why mocha doesn't provide something like this, you can't // always use promises -export default (done: MochaDone, cb: (...args: any[]) => any) => (...args: any[]) => { +export default (done: MochaDone, cb: (...args: any[]) => any) => ( + ...args: any[] +) => { try { return cb(...args); } catch (e) { @@ -15,9 +17,9 @@ export function withWarning(func: Function, regex: RegExp) { let message: string = null as never; const oldWarn = console.warn; - console.warn = (m: string) => message = m; + console.warn = (m: string) => (message = m); - return Promise.resolve(func()).then((val) => { + return Promise.resolve(func()).then(val => { assert.match(message, regex); console.warn = oldWarn; return val; @@ -28,13 +30,12 @@ export function withError(func: Function, regex: RegExp) { let message: string = null as never; const oldError = console.error; - console.error = (m: string) => message = m; + console.error = (m: string) => (message = m); try { const result = func(); assert.match(message, regex); return result; - } finally { console.error = oldError; } diff --git a/test/writeToStore.ts b/test/writeToStore.ts index 874612bda46..c4ab1906376 100644 --- a/test/writeToStore.ts +++ b/test/writeToStore.ts @@ -19,15 +19,9 @@ import { writeSelectionSetToStore, } from '../src/data/writeToStore'; -import { - storeKeyNameFromField, -} from '../src/data/storeUtils'; +import { storeKeyNameFromField } from '../src/data/storeUtils'; -import { - NormalizedCache, - StoreObject, - IdValue, -} from '../src/data/storeUtils'; +import { NormalizedCache, StoreObject, IdValue } from '../src/data/storeUtils'; import { HeuristicFragmentMatcher, @@ -39,15 +33,15 @@ import { createFragmentMap, } from '../src/queries/getFromAST'; -const getIdField = ({id}: {id: string}) => id; +const getIdField = ({ id }: { id: string }) => id; 
describe('writing to the store', () => { it('properly normalizes a trivial item', () => { const query = gql` { - id, - stringField, - numberField, + id + stringField + numberField nullField } `; @@ -59,20 +53,23 @@ describe('writing to the store', () => { nullField: null, }; - assert.deepEqual(writeQueryToStore({ - query, - result: cloneDeep(result), - }), { - 'ROOT_QUERY': result, - }); + assert.deepEqual( + writeQueryToStore({ + query, + result: cloneDeep(result), + }), + { + ROOT_QUERY: result, + }, + ); }); it('properly normalizes an aliased field', () => { const query = gql` { - id, - aliasedField: stringField, - numberField, + id + aliasedField: stringField + numberField nullField } `; @@ -90,7 +87,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'abcd', stringField: 'This is a string!', numberField: 5, @@ -102,10 +99,10 @@ describe('writing to the store', () => { it('properly normalizes a aliased fields with arguments', () => { const query = gql` { - id, - aliasedField1: stringField(arg: 1), - aliasedField2: stringField(arg: 2), - numberField, + id + aliasedField1: stringField(arg: 1) + aliasedField2: stringField(arg: 2) + numberField nullField } `; @@ -124,7 +121,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'abcd', 'stringField({"arg":1})': 'The arg was 1!', 'stringField({"arg":2})': 'The arg was 2!', @@ -137,9 +134,9 @@ describe('writing to the store', () => { it('properly normalizes a query with variables', () => { const query = gql` { - id, - stringField(arg: $stringArg), - numberField(intArg: $intArg, floatArg: $floatArg), + id + stringField(arg: $stringArg) + numberField(intArg: $intArg, floatArg: $floatArg) nullField } `; @@ -164,7 +161,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'abcd', nullField: null, 'numberField({"intArg":5,"floatArg":3.14})': 5, @@ -175,10 +172,14 @@ describe('writing to the store', () => { it('properly normalizes a query with default values', () => { const query = gql` - query someBigQuery($stringArg: String = "This is a default string!", $intArg: Int, $floatArg: Float){ - id, - stringField(arg: $stringArg), - numberField(intArg: $intArg, floatArg: $floatArg), + query someBigQuery( + $stringArg: String = "This is a default string!" 
+ $intArg: Int + $floatArg: Float + ) { + id + stringField(arg: $stringArg) + numberField(intArg: $intArg, floatArg: $floatArg) nullField } `; @@ -202,7 +203,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'abcd', nullField: null, 'numberField({"intArg":5,"floatArg":3.14})': 5, @@ -214,14 +215,14 @@ describe('writing to the store', () => { it('properly normalizes a nested object with an ID', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + nullField nestedObj { - id, - stringField, - numberField, + id + stringField + numberField nullField } } @@ -240,32 +241,35 @@ describe('writing to the store', () => { }, }; - assert.deepEqual(writeQueryToStore({ - query, - result: cloneDeep(result), - dataIdFromObject: getIdField, - }), { - 'ROOT_QUERY': assign<{}>({}, assign({}, omit(result, 'nestedObj')), { - nestedObj: { - type: 'id', - id: result.nestedObj.id, - generated: false, - }, + assert.deepEqual( + writeQueryToStore({ + query, + result: cloneDeep(result), + dataIdFromObject: getIdField, }), - [result.nestedObj.id]: result.nestedObj, - }); + { + ROOT_QUERY: assign<{}>({}, assign({}, omit(result, 'nestedObj')), { + nestedObj: { + type: 'id', + id: result.nestedObj.id, + generated: false, + }, + }), + [result.nestedObj.id]: result.nestedObj, + }, + ); }); it('properly normalizes a nested object without an ID', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + nullField nestedObj { - stringField, - numberField, + stringField + numberField nullField } } @@ -283,31 +287,34 @@ describe('writing to the store', () => { }, }; - assert.deepEqual(writeQueryToStore({ - query, - result: cloneDeep(result), - }), { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedObj')), { - nestedObj: { - type: 'id', - id: `$ROOT_QUERY.nestedObj`, - generated: true, - }, + assert.deepEqual( + writeQueryToStore({ + query, + result: cloneDeep(result), }), - [`$ROOT_QUERY.nestedObj`]: result.nestedObj, - }); + { + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedObj')), { + nestedObj: { + type: 'id', + id: `$ROOT_QUERY.nestedObj`, + generated: true, + }, + }), + [`$ROOT_QUERY.nestedObj`]: result.nestedObj, + }, + ); }); it('properly normalizes a nested object with arguments but without an ID', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + nullField nestedObj(arg: "val") { - stringField, - numberField, + stringField + numberField nullField } } @@ -325,32 +332,35 @@ describe('writing to the store', () => { }, }; - assert.deepEqual(writeQueryToStore({ - query, - result: cloneDeep(result), - }), { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedObj')), { - 'nestedObj({"arg":"val"})': { - type: 'id', - id: `$ROOT_QUERY.nestedObj({"arg":"val"})`, - generated: true, - }, + assert.deepEqual( + writeQueryToStore({ + query, + result: cloneDeep(result), }), - [`$ROOT_QUERY.nestedObj({"arg":"val"})`]: result.nestedObj, - }); + { + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedObj')), { + 'nestedObj({"arg":"val"})': { + type: 'id', + id: `$ROOT_QUERY.nestedObj({"arg":"val"})`, + generated: true, + }, + }), + [`$ROOT_QUERY.nestedObj({"arg":"val"})`]: result.nestedObj, + }, + ); }); it('properly normalizes a nested array with IDs', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + 
nullField nestedArray { - id, - stringField, - numberField, + id + stringField + numberField nullField } } @@ -377,34 +387,37 @@ describe('writing to the store', () => { ], }; - assert.deepEqual(writeQueryToStore({ - query, - result: cloneDeep(result), - dataIdFromObject: getIdField, - }), { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedArray')), { - nestedArray: result.nestedArray.map((obj: any) => ({ - type: 'id', - id: obj.id, - generated: false, - })), + assert.deepEqual( + writeQueryToStore({ + query, + result: cloneDeep(result), + dataIdFromObject: getIdField, }), - [result.nestedArray[0].id]: result.nestedArray[0], - [result.nestedArray[1].id]: result.nestedArray[1], - }); + { + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedArray')), { + nestedArray: result.nestedArray.map((obj: any) => ({ + type: 'id', + id: obj.id, + generated: false, + })), + }), + [result.nestedArray[0].id]: result.nestedArray[0], + [result.nestedArray[1].id]: result.nestedArray[1], + }, + ); }); it('properly normalizes a nested array with IDs and a null', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + nullField nestedArray { - id, - stringField, - numberField, + id + stringField + numberField nullField } } @@ -426,31 +439,34 @@ describe('writing to the store', () => { ], }; - assert.deepEqual(writeQueryToStore({ - query, - result: cloneDeep(result), - dataIdFromObject: getIdField, - }), { - 'ROOT_QUERY': assign<{}>({}, assign({}, omit(result, 'nestedArray')), { - nestedArray: [ - { type: 'id', id: result.nestedArray[0].id, generated: false }, - null, - ], + assert.deepEqual( + writeQueryToStore({ + query, + result: cloneDeep(result), + dataIdFromObject: getIdField, }), - [result.nestedArray[0].id]: result.nestedArray[0], - }); + { + ROOT_QUERY: assign<{}>({}, assign({}, omit(result, 'nestedArray')), { + nestedArray: [ + { type: 'id', id: result.nestedArray[0].id, generated: false }, + null, + ], + }), + [result.nestedArray[0].id]: result.nestedArray[0], + }, + ); }); it('properly normalizes a nested array without IDs', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + nullField nestedArray { - stringField, - numberField, + stringField + numberField nullField } } @@ -481,7 +497,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedArray')), { + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedArray')), { nestedArray: [ { type: 'id', generated: true, id: `ROOT_QUERY.nestedArray.0` }, { type: 'id', generated: true, id: `ROOT_QUERY.nestedArray.1` }, @@ -495,13 +511,13 @@ describe('writing to the store', () => { it('properly normalizes a nested array without IDs and a null item', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + nullField nestedArray { - stringField, - numberField, + stringField + numberField nullField } } @@ -528,7 +544,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedArray')), { + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedArray')), { nestedArray: [ null, { type: 'id', generated: true, id: `ROOT_QUERY.nestedArray.1` }, @@ -541,10 +557,10 @@ describe('writing to the store', () => { it('properly normalizes an array of non-objects', () => { const query = gql` { - id, - stringField, - numberField, - 
nullField, + id + stringField + numberField + nullField simpleArray } `; @@ -564,10 +580,10 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': assign<{}>({}, assign({}, omit(result, 'simpleArray')), { + ROOT_QUERY: assign<{}>({}, assign({}, omit(result, 'simpleArray')), { simpleArray: { type: 'json', - 'json': [ + json: [ result.simpleArray[0], result.simpleArray[1], result.simpleArray[2], @@ -580,10 +596,10 @@ describe('writing to the store', () => { it('properly normalizes an array of non-objects with null', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + nullField simpleArray } `; @@ -602,7 +618,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': assign<{}>({}, assign({}, omit(result, 'simpleArray')), { + ROOT_QUERY: assign<{}>({}, assign({}, omit(result, 'simpleArray')), { simpleArray: { type: 'json', json: [ @@ -618,7 +634,7 @@ describe('writing to the store', () => { it('properly normalizes an object occurring in different graphql paths twice', () => { const query = gql` { - id, + id object1 { id stringField @@ -649,7 +665,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'a', object1: { type: 'id', @@ -662,7 +678,7 @@ describe('writing to the store', () => { generated: false, }, }, - 'aa': { + aa: { id: 'aa', stringField: 'string', numberField: 1, @@ -673,7 +689,7 @@ describe('writing to the store', () => { it('properly normalizes an object occurring in different graphql array paths twice', () => { const query = gql` { - id, + id array1 { id stringField @@ -695,22 +711,26 @@ describe('writing to the store', () => { const result: any = { id: 'a', - array1: [{ - id: 'aa', - stringField: 'string', - obj: { - id: 'aaa', + array1: [ + { + id: 'aa', stringField: 'string', + obj: { + id: 'aaa', + stringField: 'string', + }, }, - }], - array2: [{ - id: 'ab', - stringField: 'string2', - obj: { - id: 'aaa', - numberField: 1, + ], + array2: [ + { + id: 'ab', + stringField: 'string2', + obj: { + id: 'aaa', + numberField: 1, + }, }, - }], + ], }; const normalized = writeQueryToStore({ @@ -720,20 +740,24 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'a', - array1: [{ - type: 'id', - id: 'aa', - generated: false, - }], - array2: [{ - type: 'id', - id: 'ab', - generated: false, - }], + array1: [ + { + type: 'id', + id: 'aa', + generated: false, + }, + ], + array2: [ + { + type: 'id', + id: 'ab', + generated: false, + }, + ], }, - 'aa': { + aa: { id: 'aa', stringField: 'string', obj: { @@ -742,7 +766,7 @@ describe('writing to the store', () => { generated: false, }, }, - 'ab': { + ab: { id: 'ab', stringField: 'string2', obj: { @@ -751,7 +775,7 @@ describe('writing to the store', () => { generated: false, }, }, - 'aaa': { + aaa: { id: 'aaa', stringField: 'string', numberField: 1, @@ -762,7 +786,7 @@ describe('writing to the store', () => { it('properly normalizes an object occurring in the same graphql array path twice', () => { const query = gql` { - id, + id array1 { id stringField @@ -806,7 +830,7 @@ describe('writing to the store', () => { }); assert.deepEqual(normalized, { - 'ROOT_QUERY': { + ROOT_QUERY: { id: 'a', array1: [ { @@ -821,7 +845,7 @@ describe('writing to the store', () => { }, ], }, - 'aa': { + aa: { id: 'aa', stringField: 'string', obj: { @@ -830,7 +854,7 @@ describe('writing to the store', () 
=> { generated: false, }, }, - 'ab': { + ab: { id: 'ab', stringField: 'string2', obj: { @@ -839,7 +863,7 @@ describe('writing to the store', () => { generated: false, }, }, - 'aaa': { + aaa: { id: 'aaa', stringField: 'string', numberField: 1, @@ -850,8 +874,8 @@ describe('writing to the store', () => { it('merges nodes', () => { const query = gql` { - id, - numberField, + id + numberField nullField } `; @@ -870,8 +894,8 @@ describe('writing to the store', () => { const query2 = gql` { - id, - stringField, + id + stringField nullField } `; @@ -890,21 +914,21 @@ describe('writing to the store', () => { }); assert.deepEqual(store2, { - 'ROOT_QUERY': assign({}, result, result2), + ROOT_QUERY: assign({}, result, result2), }); }); it('properly normalizes a nested object that returns null', () => { const query = gql` { - id, - stringField, - numberField, - nullField, + id + stringField + numberField + nullField nestedObj { - id, - stringField, - numberField, + id + stringField + numberField nullField } } @@ -918,14 +942,17 @@ describe('writing to the store', () => { nestedObj: null, }; - assert.deepEqual(writeQueryToStore({ - query, - result: cloneDeep(result), - }), { - 'ROOT_QUERY': assign({}, assign({}, omit(result, 'nestedObj')), { - nestedObj: null, + assert.deepEqual( + writeQueryToStore({ + query, + result: cloneDeep(result), }), - }); + { + ROOT_QUERY: assign({}, assign({}, omit(result, 'nestedObj')), { + nestedObj: null, + }), + }, + ); }); it('properly normalizes an object with an ID when no extension is passed', () => { @@ -945,59 +972,100 @@ describe('writing to the store', () => { }, }; - assert.deepEqual(writeQueryToStore({ - query, - result: cloneDeep(result), - }), { - 'ROOT_QUERY': { - 'people_one({"id":"5"})': { - type: 'id', - id: '$ROOT_QUERY.people_one({"id":"5"})', - generated: true, + assert.deepEqual( + writeQueryToStore({ + query, + result: cloneDeep(result), + }), + { + ROOT_QUERY: { + 'people_one({"id":"5"})': { + type: 'id', + id: '$ROOT_QUERY.people_one({"id":"5"})', + generated: true, + }, + }, + '$ROOT_QUERY.people_one({"id":"5"})': { + id: 'abcd', + stringField: 'This is a string!', }, }, - '$ROOT_QUERY.people_one({"id":"5"})': { - 'id': 'abcd', - 'stringField': 'This is a string!', - }, - }); + ); }); it('consistently serialize different types of input when passed inlined or as variable', () => { const testData = [ { - mutation: gql`mutation mut($in: Int!) { mut(inline: 5, variable: $in) { id } }`, + mutation: gql` + mutation mut($in: Int!) { + mut(inline: 5, variable: $in) { + id + } + } + `, variables: { in: 5 }, expected: 'mut({"inline":5,"variable":5})', }, { - mutation: gql`mutation mut($in: Float!) { mut(inline: 5.5, variable: $in) { id } }`, + mutation: gql` + mutation mut($in: Float!) { + mut(inline: 5.5, variable: $in) { + id + } + } + `, variables: { in: 5.5 }, expected: 'mut({"inline":5.5,"variable":5.5})', }, { - mutation: gql`mutation mut($in: String!) { mut(inline: "abc", variable: $in) { id } }`, + mutation: gql` + mutation mut($in: String!) { + mut(inline: "abc", variable: $in) { + id + } + } + `, variables: { in: 'abc' }, expected: 'mut({"inline":"abc","variable":"abc"})', }, { - mutation: gql`mutation mut($in: Array!) { mut(inline: [1, 2], variable: $in) { id } }`, + mutation: gql` + mutation mut($in: Array!) { + mut(inline: [1, 2], variable: $in) { + id + } + } + `, variables: { in: [1, 2] }, expected: 'mut({"inline":[1,2],"variable":[1,2]})', }, { - mutation: gql`mutation mut($in: Object!) 
{ mut(inline: {a: 1}, variable: $in) { id } }`, + mutation: gql` + mutation mut($in: Object!) { + mut(inline: { a: 1 }, variable: $in) { + id + } + } + `, variables: { in: { a: 1 } }, expected: 'mut({"inline":{"a":1},"variable":{"a":1}})', }, { - mutation: gql`mutation mut($in: Boolean!) { mut(inline: true, variable: $in) { id } }`, + mutation: gql` + mutation mut($in: Boolean!) { + mut(inline: true, variable: $in) { + id + } + } + `, variables: { in: true }, expected: 'mut({"inline":true,"variable":true})', }, ]; - function isOperationDefinition(definition: DefinitionNode): definition is OperationDefinitionNode { + function isOperationDefinition( + definition: DefinitionNode, + ): definition is OperationDefinitionNode { return definition.kind === 'OperationDefinition'; } @@ -1005,41 +1073,41 @@ describe('writing to the store', () => { return selection.kind === 'Field'; } - testData.forEach((data) => { - data.mutation.definitions.forEach((definition: OperationDefinitionNode) => { - if (isOperationDefinition(definition)) { - definition.selectionSet.selections.forEach((selection) => { - if (isField(selection)) { - assert.equal(storeKeyNameFromField(selection, data.variables), data.expected); - } - }); - } - }); + testData.forEach(data => { + data.mutation.definitions.forEach( + (definition: OperationDefinitionNode) => { + if (isOperationDefinition(definition)) { + definition.selectionSet.selections.forEach(selection => { + if (isField(selection)) { + assert.equal( + storeKeyNameFromField(selection, data.variables), + data.expected, + ); + } + }); + } + }, + ); }); }); it('properly normalizes a mutation with object or array parameters and variables', () => { const mutation = gql` - mutation some_mutation( - $nil: ID, - $in: Object - ) { + mutation some_mutation($nil: ID, $in: Object) { some_mutation( input: { - id: "5", - arr: [1,{a:"b"}], - obj: {a:"b"}, - num: 5.5, - nil: $nil, + id: "5" + arr: [1, { a: "b" }] + obj: { a: "b" } + num: 5.5 + nil: $nil bo: true - }, + } ) { - id, + id } - some_mutation_with_variables( - input: $in, - ) { - id, + some_mutation_with_variables(input: $in) { + id } } `; @@ -1065,36 +1133,41 @@ describe('writing to the store', () => { }, }; - function isOperationDefinition(value: ASTNode): value is OperationDefinitionNode { + function isOperationDefinition( + value: ASTNode, + ): value is OperationDefinitionNode { return value.kind === 'OperationDefinition'; } mutation.definitions.map((def: OperationDefinitionNode) => { if (isOperationDefinition(def)) { - assert.deepEqual(writeSelectionSetToStore({ - dataId: '5', - selectionSet: def.selectionSet, - result: cloneDeep(result), - context: { - store: {}, - variables, - dataIdFromObject: () => '5', - }, - }), { - '5': { - 'some_mutation({"input":{"id":"5","arr":[1,{"a":"b"}],"obj":{"a":"b"},"num":5.5,"nil":null,"bo":true}})': { - type: 'id', - id: '5', - generated: false, + assert.deepEqual( + writeSelectionSetToStore({ + dataId: '5', + selectionSet: def.selectionSet, + result: cloneDeep(result), + context: { + store: {}, + variables, + dataIdFromObject: () => '5', }, - 'some_mutation_with_variables({"input":{"id":"5","arr":[1,{"a":"b"}],"obj":{"a":"b"},"num":5.5,"nil":null,"bo":true}})': { - type: 'id', - id: '5', - generated: false, + }), + { + '5': { + 'some_mutation({"input":{"id":"5","arr":[1,{"a":"b"}],"obj":{"a":"b"},"num":5.5,"nil":null,"bo":true}})': { + type: 'id', + id: '5', + generated: false, + }, + 
'some_mutation_with_variables({"input":{"id":"5","arr":[1,{"a":"b"}],"obj":{"a":"b"},"num":5.5,"nil":null,"bo":true}})': { + type: 'id', + id: '5', + generated: false, + }, + id: 'id', }, - 'id': 'id', }, - }); + ); } else { throw 'No operation definition found'; } @@ -1116,7 +1189,8 @@ describe('writing to the store', () => { firstName lastName } - }`; + } + `; const data = { author: { firstName: 'John', @@ -1133,10 +1207,13 @@ describe('writing to the store', () => { }, '$ROOT_QUERY.author': data.author, }; - assert.deepEqual(writeQueryToStore({ - result: data, - query, - }), expStore); + assert.deepEqual( + writeQueryToStore({ + result: data, + query, + }), + expStore, + ); }); it('should correctly escape real ids', () => { @@ -1147,7 +1224,8 @@ describe('writing to the store', () => { id __typename } - }`; + } + `; const data = { author: { firstName: 'John', @@ -1169,11 +1247,14 @@ describe('writing to the store', () => { __typename: data.author.__typename, }, }; - assert.deepEqual(writeQueryToStore({ - result: data, - query, - dataIdFromObject, - }), expStore); + assert.deepEqual( + writeQueryToStore({ + result: data, + query, + dataIdFromObject, + }), + expStore, + ); }); it('should correctly escape json blobs', () => { @@ -1184,7 +1265,8 @@ describe('writing to the store', () => { id __typename } - }`; + } + `; const data = { author: { info: { @@ -1211,11 +1293,14 @@ describe('writing to the store', () => { }, }, }; - assert.deepEqual(writeQueryToStore({ - result: data, - query, - dataIdFromObject, - }), expStore); + assert.deepEqual( + writeQueryToStore({ + result: data, + query, + dataIdFromObject, + }), + expStore, + ); }); }); @@ -1246,7 +1331,8 @@ describe('writing to the store', () => { firstName lastName } - }`; + } + `; const queryWithId = gql` query { author { @@ -1254,14 +1340,15 @@ describe('writing to the store', () => { id __typename } - }`; + } + `; const expStoreWithoutId = { '$ROOT_QUERY.author': { firstName: 'John', lastName: 'Smith', }, ROOT_QUERY: { - 'author': { + author: { type: 'id', id: '$ROOT_QUERY.author', generated: true, @@ -1269,7 +1356,7 @@ describe('writing to the store', () => { }, }; const expStoreWithId = { - 'Author__129': { + Author__129: { firstName: 'John', lastName: 'Smith', id: '129', @@ -1303,7 +1390,8 @@ describe('writing to the store', () => { query { ...notARealFragment fortuneCookie - }`; + } + `; const result: any = { fortuneCookie: 'Star Wars unit tests are boring', }; @@ -1318,9 +1406,9 @@ describe('writing to the store', () => { it('does not change object references if the value is the same', () => { const query = gql` { - id, - stringField, - numberField, + id + stringField + numberField nullField } `; @@ -1342,7 +1430,7 @@ describe('writing to the store', () => { store: assign({}, store) as NormalizedCache, }); - Object.keys(store).forEach((field) => { + Object.keys(store).forEach(field => { assert.equal(store[field], newStore[field], 'references are the same'); }); }); @@ -1403,7 +1491,6 @@ describe('writing to the store', () => { }, /Missing field description/); }); - it('should warn when it receives the wrong data inside a fragment (using an introspection matcher)', () => { const fragmentMatcherFunction = new IntrospectionFragmentMatcher({ introspectionQueryResultData: { @@ -1433,11 +1520,11 @@ describe('writing to the store', () => { } fragment TodoFragment on Todo { - ...on ShoppingCartItem { + ... on ShoppingCartItem { price __typename } - ...on TaskItem { + ... 
on TaskItem { date __typename } @@ -1487,19 +1574,25 @@ describe('writing to the store', () => { it('throws when trying to write an object without id that was previously queried with id', () => { const store = { - 'ROOT_QUERY': assign({}, { - __typename: 'Query', - item: { - type: 'id', + ROOT_QUERY: assign( + {}, + { + __typename: 'Query', + item: { + type: 'id', + id: 'abcd', + generated: false, + } as IdValue, + }, + ) as StoreObject, + abcd: assign( + {}, + { id: 'abcd', - generated: false, - } as IdValue, - }) as StoreObject, - abcd: assign({}, { - id: 'abcd', - __typename: 'Item', - stringField: 'This is a string!', - }) as StoreObject, + __typename: 'Item', + stringField: 'This is a string!', + }, + ) as StoreObject, } as NormalizedCache; assert.throws(() => { @@ -1583,22 +1676,19 @@ describe('writing to the store', () => { store, }); - assert.deepEqual( - store, - { - 'ROOT_QUERY': { - 'abc': [ - { - 'generated': true, - 'id': 'ROOT_QUERY.abc.0', - 'type': 'id', - }, - ], - }, - 'ROOT_QUERY.abc.0': { - 'name': 'efgh', - }, + assert.deepEqual(store, { + ROOT_QUERY: { + abc: [ + { + generated: true, + id: 'ROOT_QUERY.abc.0', + type: 'id', + }, + ], }, - ); + 'ROOT_QUERY.abc.0': { + name: 'efgh', + }, + }); }); }); diff --git a/tslint.json b/tslint.json index 1dd308f1608..4be502c91dd 100644 --- a/tslint.json +++ b/tslint.json @@ -1,27 +1,12 @@ { "rules": { - "align": [ - false, - "parameters", - "arguments", - "statements" - ], "ban": false, "class-name": true, - "curly": true, "eofline": true, "forin": true, - "indent": [ - true, - "spaces" - ], "interface-name": false, "jsdoc-format": true, "label-position": true, - "max-line-length": [ - true, - 140 - ], "member-access": true, "member-ordering": [ true, @@ -60,32 +45,8 @@ "no-var-keyword": true, "no-var-requires": true, "object-literal-sort-keys": false, - "one-line": [ - true, - "check-open-brace", - "check-catch", - "check-else", - "check-finally", - "check-whitespace" - ], - "quotemark": [ - true, - "single", - "avoid-escape" - ], "radix": true, - "semicolon": [ - true, - "always" - ], "switch-default": true, - "trailing-comma": [ - true, - { - "multiline": "always", - "singleline": "never" - } - ], "triple-equals": [ true, "allow-null-check" @@ -99,36 +60,11 @@ "variable-declaration", "member-variable-declaration" ], - "typedef-whitespace": [ - true, - { - "call-signature": "nospace", - "index-signature": "nospace", - "parameter": "nospace", - "property-declaration": "nospace", - "variable-declaration": "nospace" - }, - { - "call-signature": "space", - "index-signature": "space", - "parameter": "space", - "property-declaration": "space", - "variable-declaration": "space" - } - ], "variable-name": [ true, "check-format", "allow-leading-underscore", "ban-keywords" - ], - "whitespace": [ - true, - "check-branch", - "check-decl", - "check-operator", - "check-separator", - "check-type" ] } } From 380f22229a4770939fe32f5989db7c8a0ed56210 Mon Sep 17 00:00:00 2001 From: James Baxley Date: Fri, 14 Jul 2017 13:08:38 -0700 Subject: [PATCH 2/4] update changelog --- CHANGELOG.md | 1 + package.json | 5 +---- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 617fdde148e..a20d8ca1703 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Change log ### vNEXT +- added prettier to manage formatting of project [PR #1904](https://github.com/apollographql/apollo-client/pull/1904) ### v.1.9.0-0 - Remove query tracking from the Redux store. 
Query status tracking is now handled outside of Redux in the QueryStore class. [PR #1859](https://github.com/apollographql/apollo-client/pull/1859) diff --git a/package.json b/package.json index 92b1b154fa0..16caf0b57d7 100644 --- a/package.json +++ b/package.json @@ -54,10 +54,7 @@ } }, "lint-staged": { - "*.ts*": [ - "prettier --trailing-comma all --single-quote --write", - "git add" - ] + "*.ts*": ["prettier --trailing-comma all --single-quote --write", "git add"] }, "pre-commit": "lint-staged", "keywords": [ From f7cf5525ecda7536270b5db70a8731a4d53ee061 Mon Sep 17 00:00:00 2001 From: James Baxley Date: Fri, 14 Jul 2017 13:09:54 -0700 Subject: [PATCH 3/4] update test checker in danger --- dangerfile.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/dangerfile.ts b/dangerfile.ts index 96e5c6a69bc..d9df2b20a0f 100644 --- a/dangerfile.ts +++ b/dangerfile.ts @@ -47,7 +47,7 @@ const createLink = (href: string, text: string): string => const raiseIssueAboutPaths = ( type: Function, paths: string[], - codeToInclude: string + codeToInclude: string, ) => { if (paths.length > 0) { const files = linkableFiles(paths); @@ -83,8 +83,8 @@ if (!isBot) { const hasAppChanges = modifiedAppFiles.length > 0; - const testChanges = modifiedAppFiles.filter( - filepath => filepath.includes('__tests__') || filepath.includes('test') + const testChanges = modifiedAppFiles.filter(filepath => + filepath.includes('test'), ); const hasTestChanges = testChanges.length > 0; @@ -103,7 +103,7 @@ if (!isBot) { // Warn if there are library changes, but not tests if (hasAppChanges && !hasTestChanges) { warn( - "There are library changes, but not tests. That's OK as long as you're refactoring existing code" + "There are library changes, but not tests. That's OK as long as you're refactoring existing code", ); } @@ -122,6 +122,6 @@ if (!isBot) { // Politely ask for their name in the authors file message('Please add your name and email to the AUTHORS file (optional)'); message( - 'If this was a change that affects the external API, please update the docs and post a link to the PR in the discussion' + 'If this was a change that affects the external API, please update the docs and post a link to the PR in the discussion', ); } From cfcd31cdce9b1369c53dad95336fbf2e46f1919f Mon Sep 17 00:00:00 2001 From: James Baxley Date: Fri, 14 Jul 2017 13:18:24 -0700 Subject: [PATCH 4/4] fix dangerfile test lookup --- dangerfile.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/dangerfile.ts b/dangerfile.ts index d9df2b20a0f..22a570fe88e 100644 --- a/dangerfile.ts +++ b/dangerfile.ts @@ -18,7 +18,7 @@ const filesOnly = (file: string) => // Custom subsets of known files const modifiedAppFiles = modified - .filter(p => includes(p, 'src/')) + .filter(p => includes(p, 'src/') || includes(p, 'test/')) .filter(p => filesOnly(p) && typescriptOnly(p)); // Takes a list of file paths, and converts it into clickable links @@ -97,9 +97,6 @@ if (!isBot) { warn(':exclamation: Big PR'); } - // XXX add in License header - // https://github.com/facebook/jest/blob/master/dangerfile.js#L58 - // Warn if there are library changes, but not tests if (hasAppChanges && !hasTestChanges) { warn(