-
Notifications
You must be signed in to change notification settings - Fork 19
/
Copy pathindex.ts
162 lines (138 loc) · 5.56 KB
/
index.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
import { Trie } from "@wry/trie";
import { Cache } from "./cache";
import { Entry, AnyEntry } from "./entry";
import { parentEntrySlot } from "./context";
// These helper functions are important for making optimism work with
// asynchronous code. In order to register parent-child dependencies,
// optimism needs to know about any currently active parent computations.
// In ordinary synchronous code, the parent context is implicit in the
// execution stack, but asynchronous code requires some extra guidance in
// order to propagate context from one async task segment to the next.
// These bindings simply re-export the context helpers from "./context".
export {
  bindContext,
  noContext,
  setTimeout,
  asyncFromGen,
} from "./context";
// A lighter-weight dependency, similar to OptimisticWrapperFunction, except
// with only one argument, no makeCacheKey, no wrapped function to recompute,
// and no result value. Useful for representing dependency leaves in the graph
// of computation. Subscriptions are supported. Re-exported from "./dep".
export { dep, OptimisticDependencyFunction } from "./dep";
// Since the Cache uses a Map internally, any value or object reference can
// be safely used as a key, though common types include object and string.
// Deliberately `any` rather than `unknown`: keys flow through user-supplied
// makeCacheKey functions and the Trie without narrowing.
export type TCacheKey = any;
// The defaultMakeCacheKey function is remarkably powerful, because it gives
// a unique object for any shallow-identical list of arguments. If you need
// to implement a custom makeCacheKey function, you may find it helpful to
// delegate the final work to defaultMakeCacheKey, which is why we export it
// here. However, you may want to avoid defaultMakeCacheKey if your runtime
// does not support WeakMap, or you have the ability to return a string key.
// In those cases, just write your own custom makeCacheKey functions.

// Module-level trie that interns argument lists. The boolean argument tells
// the Trie whether it may use WeakMap internally (feature-detected here), so
// object keys can be garbage-collected when no longer referenced.
const keyTrie = new Trie<TCacheKey>(typeof WeakMap === "function");

/**
 * Returns a single cache key object that is `===`-identical for any two
 * calls whose argument lists are shallow-identical (same length, same
 * elements by reference/value).
 */
export function defaultMakeCacheKey(...args: any[]) {
  return keyTrie.lookupArray(args);
}
// If you're paranoid about memory leaks, or you want to avoid using WeakMap
// under the hood, but you still need the behavior of defaultMakeCacheKey,
// import this constructor to create your own tries.
// Note: re-exported under the legacy KeyTrie name for backwards compatibility.
export { Trie as KeyTrie };
/**
 * The type of the function returned by wrap(): callable exactly like the
 * original function, with extra cache-management methods attached.
 * TKeyArgs defaults to TArgs, but differs when a keyArgs option transforms
 * the raw arguments before key creation.
 */
export type OptimisticWrapperFunction<
  TArgs extends any[],
  TResult,
  TKeyArgs extends any[] = TArgs,
> = ((...args: TArgs) => TResult) & {
  // The .dirty(...) method of an optimistic function takes exactly the
  // same parameter types as the original function.
  dirty: (...args: TKeyArgs) => void;
  // Examine the current value without recomputing it.
  peek: (...args: TKeyArgs) => TResult | undefined;
  // Remove the entry from the cache, dirtying any parent entries.
  forget: (...args: TKeyArgs) => boolean;
};
/**
 * Options accepted by wrap(). All fields are optional; see wrap() for the
 * defaults applied when a field is omitted.
 */
export type OptimisticWrapOptions<
  TArgs extends any[],
  TKeyArgs extends any[] = TArgs,
> = {
  // The maximum number of cache entries that should be retained before the
  // cache begins evicting the oldest ones.
  max?: number;
  // Transform the raw arguments to some other type of array, which will then
  // be passed to makeCacheKey.
  keyArgs?: (...args: TArgs) => TKeyArgs;
  // The makeCacheKey function takes the same arguments that were passed to
  // the wrapper function and returns a single value that can be used as a key
  // in a Map to identify the cached result.
  makeCacheKey?: (...args: TKeyArgs) => TCacheKey;
  // If provided, the subscribe function should either return an unsubscribe
  // function or return nothing.
  subscribe?: (...args: TArgs) => void | (() => any);
};
// Module-level registry of caches that have been written to since the last
// cleanup pass; wrap()'s optimistic function adds to it and drains it (via
// cache.clean()) once no parent computation is active.
const caches = new Set<Cache<TCacheKey, AnyEntry>>();
/**
 * Wraps originalFunction with a dependency-tracking memoization cache.
 *
 * The returned function computes a cache key from its arguments (via
 * options.keyArgs/makeCacheKey, defaulting to defaultMakeCacheKey), reuses
 * or creates an Entry per key, and returns entry.recompute(...), which only
 * reruns originalFunction when the entry is dirty. The wrapper also exposes
 * .dirty(...), .peek(...), and .forget(...) keyed by the same arguments.
 *
 * Fix over the previous version: `cache.get(key)!` used a non-null
 * assertion that was immediately contradicted by the `if (!entry)` check;
 * the assertion is removed — the assignment in the branch narrows `entry`
 * to Entry for the code that follows, so no `!` is needed.
 */
export function wrap<
  TArgs extends any[],
  TResult,
  TKeyArgs extends any[] = TArgs,
>(
  originalFunction: (...args: TArgs) => TResult,
  options: OptimisticWrapOptions<TArgs, TKeyArgs> = Object.create(null),
) {
  const cache = new Cache<TCacheKey, Entry<TArgs, TResult>>(
    // Retain up to 2^16 entries by default before LRU eviction kicks in;
    // evicted entries are disposed so their subscriptions get cleaned up.
    options.max || Math.pow(2, 16),
    entry => entry.dispose(),
  );

  const keyArgs = options.keyArgs;
  const makeCacheKey = options.makeCacheKey || defaultMakeCacheKey;

  // Uses `arguments` + .apply rather than rest parameters so the live
  // arguments object can be forwarded without allocating an intermediate
  // array on the cheap paths.
  function optimistic(): TResult {
    const key = makeCacheKey.apply(
      null,
      keyArgs ? keyArgs.apply(null, arguments as any) : arguments as any
    );

    // A key of undefined means "do not cache this call": bypass the cache
    // and invoke the original function directly.
    if (key === void 0) {
      return originalFunction.apply(null, arguments as any);
    }

    let entry = cache.get(key);
    if (!entry) {
      cache.set(key, entry = new Entry(originalFunction));
      entry.subscribe = options.subscribe;
    }

    const value = entry.recompute(
      Array.prototype.slice.call(arguments) as TArgs,
    );

    // Move this entry to the front of the least-recently used queue,
    // since we just finished computing its value.
    cache.set(key, entry);

    caches.add(cache);

    // Clean up any excess entries in the cache, but only if there is no
    // active parent entry, meaning we're not in the middle of a larger
    // computation that might be flummoxed by the cleaning.
    if (! parentEntrySlot.hasValue()) {
      caches.forEach(cache => cache.clean());
      caches.clear();
    }

    return value;
  }

  // Shared lookup for dirty/peek: returns the Entry for the given key
  // arguments, or undefined if the key is undefined or not cached.
  function lookup(): Entry<TArgs, TResult> | undefined {
    const key = makeCacheKey.apply(null, arguments as any);
    if (key !== void 0) {
      return cache.get(key);
    }
  }

  // Mark the cached entry (if any) dirty so the next call recomputes it.
  optimistic.dirty = function () {
    const entry = lookup.apply(null, arguments as any);
    if (entry) {
      entry.setDirty();
    }
  };

  // Examine the current cached value (if any) without recomputing it.
  optimistic.peek = function () {
    const entry = lookup.apply(null, arguments as any);
    if (entry) {
      return entry.peek();
    }
  };

  // Delete the entry for these key arguments; true if one was present.
  optimistic.forget = function () {
    const key = makeCacheKey.apply(null, arguments as any);
    return key !== void 0 && cache.delete(key);
  };

  return optimistic as OptimisticWrapperFunction<TArgs, TResult, TKeyArgs>;
}