import type {
AnyFunction,
DefaultMemoizeFields,
EqualityFn,
Simplify
} from './types'
import type { NOT_FOUND_TYPE } from './utils'
import { NOT_FOUND } from './utils'
// Cache implementation based on Erik Rasmussen's `lru-memoize`:
// https://github.com/erikras/lru-memoize
interface Entry {
key: unknown
value: unknown
}
interface Cache {
get(key: unknown): unknown | NOT_FOUND_TYPE
put(key: unknown, value: unknown): void
getEntries(): Entry[]
clear(): void
}
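// A cache that holds at most one entry. The stored key is compared to the
// incoming key with the provided `equals` function.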
function createSingletonCache(equals: EqualityFn): Cache {
let entry: Entry | undefined
return {
get(key: unknown) {
if (entry && equals(entry.key, key)) {
return entry.value
}
return NOT_FOUND
},
put(key: unknown, value: unknown) {
entry = { key, value }
},
getEntries() {
return entry ? [entry] : []
},
clear() {
entry = undefined
}
}
}
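// An array-backed LRU cache. The most recently used entry is kept at the
// front of the array, and entries beyond `maxSize` are dropped from the end.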
function createLruCache(maxSize: number, equals: EqualityFn): Cache {
let entries: Entry[] = []
function get(key: unknown) {
const cacheIndex = entries.findIndex(entry => equals(key, entry.key))
// We found a cached entry
if (cacheIndex > -1) {
const entry = entries[cacheIndex]
// Cached entry not at top of cache, move it to the top
if (cacheIndex > 0) {
entries.splice(cacheIndex, 1)
entries.unshift(entry)
}
return entry.value
}
// No entry found in cache, return sentinel
return NOT_FOUND
}
function put(key: unknown, value: unknown) {
if (get(key) === NOT_FOUND) {
// TODO Is unshift slow?
entries.unshift({ key, value })
if (entries.length > maxSize) {
entries.pop()
}
}
}
function getEntries() {
return entries
}
function clear() {
entries = []
}
return { get, put, getEntries, clear }
}
/**
* Runs a simple reference equality check.
 * This is what {@linkcode lruMemoize lruMemoize} uses by default.
*
* **Note**: This function was previously known as `defaultEqualityCheck`.
*
* @public
*/
export const referenceEqualityCheck: EqualityFn = (a, b) => a === b
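/**
 * Wraps an equality check in a comparator that shallowly compares two
 * argument lists: same length, and every position passes `equalityCheck`.
 */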
export function createCacheKeyComparator(equalityCheck: EqualityFn) {
return function areArgumentsShallowlyEqual(
prev: unknown[] | IArguments | null,
next: unknown[] | IArguments | null
): boolean {
if (prev === null || next === null || prev.length !== next.length) {
return false
}
// Do this in a for loop (and not a `forEach` or an `every`) so we can determine equality as fast as possible.
const { length } = prev
for (let i = 0; i < length; i++) {
if (!equalityCheck(prev[i], next[i])) {
return false
}
}
return true
}
}
/**
* Options for configuring the behavior of a function memoized with
* LRU (Least Recently Used) caching.
*
* @template Result - The type of the return value of the memoized function.
*
* @public
*/
export interface LruMemoizeOptions<Result = any> {
/**
* Function used to compare the individual arguments of the
* provided calculation function.
*
* @default referenceEqualityCheck
*/
equalityCheck?: EqualityFn
/**
* If provided, used to compare a newly generated output value against
* previous values in the cache. If a match is found,
* the old value is returned. This addresses the common
* ```ts
* todos.map(todo => todo.id)
* ```
* use case, where an update to another field in the original data causes
* a recalculation due to changed references, but the output is still
* effectively the same.
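   *
   * @example
   * A usage sketch (illustrative only; `shallowArrayEquals` is a hypothetical
   * helper defined for this example, not part of this file):
   * ```ts
   * // Reuse the previous result when the new array has the same contents.
   * const shallowArrayEquals = (a: unknown[], b: unknown[]) =>
   *   a.length === b.length && a.every((item, index) => item === b[index])
   *
   * const selectTodoIds = lruMemoize(
   *   (todos: { id: number; text: string }[]) => todos.map(todo => todo.id),
   *   { resultEqualityCheck: shallowArrayEquals }
   * )
   * ```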
*
* @since 4.1.0
*/
resultEqualityCheck?: EqualityFn<Result>
/**
* The maximum size of the cache used by the selector.
* A size greater than 1 means the selector will use an
* LRU (Least Recently Used) cache, allowing for the caching of multiple
* results based on different sets of arguments.
*
* @default 1
*/
maxSize?: number
}
/**
* Creates a memoized version of a function with an optional
* LRU (Least Recently Used) cache. The memoized function uses a cache to
* store computed values. Depending on the `maxSize` option, it will use
* either a singleton cache (for a single entry) or an
* LRU cache (for multiple entries).
*
* **Note**: This function was previously known as `defaultMemoize`.
*
* @param func - The function to be memoized.
* @param equalityCheckOrOptions - Either an equality check function or an options object.
* @returns A memoized function with a `.clearCache()` method attached.
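 *
 * @example
 * A minimal usage sketch (illustrative; the memoized function and the
 * `maxSize` value are arbitrary, not taken from this file):
 * ```ts
 * const sum = (a: number, b: number) => a + b
 * const memoizedSum = lruMemoize(sum, { maxSize: 10 })
 *
 * memoizedSum(1, 2) // runs `sum` and caches the result
 * memoizedSum(1, 2) // returns the cached result for the same arguments
 * memoizedSum.clearCache() // empties the cache and resets the results count
 * ```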
*
* @template Func - The type of the function that is memoized.
*
* @see {@link https://reselect.js.org/api/lruMemoize `lruMemoize`}
*
* @public
*/
export function lruMemoize<Func extends AnyFunction>(
func: Func,
equalityCheckOrOptions?: EqualityFn | LruMemoizeOptions<ReturnType<Func>>
) {
const providedOptions =
typeof equalityCheckOrOptions === 'object'
? equalityCheckOrOptions
: { equalityCheck: equalityCheckOrOptions }
const {
equalityCheck = referenceEqualityCheck,
maxSize = 1,
resultEqualityCheck
} = providedOptions
const comparator = createCacheKeyComparator(equalityCheck)
let resultsCount = 0
const cache =
maxSize <= 1
? createSingletonCache(comparator)
: createLruCache(maxSize, comparator)
function memoized() {
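    // The `arguments` object itself is the cache key; the comparator created
    // above compares it to stored keys one argument at a time.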
let value = cache.get(arguments) as ReturnType<Func>
if (value === NOT_FOUND) {
// apply arguments instead of spreading for performance.
// @ts-ignore
value = func.apply(null, arguments) as ReturnType<Func>
resultsCount++
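      // If a previously cached value is equivalent per `resultEqualityCheck`,
      // reuse that existing reference instead of the newly computed value.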
if (resultEqualityCheck) {
const entries = cache.getEntries()
const matchingEntry = entries.find(entry =>
resultEqualityCheck(entry.value as ReturnType<Func>, value)
)
if (matchingEntry) {
value = matchingEntry.value as ReturnType<Func>
          if (resultsCount !== 0) {
            resultsCount--
          }
}
}
cache.put(arguments, value)
}
return value
}
memoized.clearCache = () => {
cache.clear()
memoized.resetResultsCount()
}
memoized.resultsCount = () => resultsCount
memoized.resetResultsCount = () => {
resultsCount = 0
}
return memoized as Func & Simplify<DefaultMemoizeFields>
}