-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path async.ts
More file actions
166 lines (149 loc) · 5.5 KB
/
async.ts
File metadata and controls
166 lines (149 loc) · 5.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
/**
* @fileoverview `memoizeAsync` — async-aware memoizer with the same
* LRU+TTL contract as `memoize`, plus thundering-herd dedup. Concurrent
* callers join an in-flight promise instead of starting fresh fetches,
* and the cache timestamp is refreshed on resolution so a slow fn can't
* land a value that's already "expired".
*/
import { debugLog } from '../debug/output'
import { DateNow } from '../primordials/date'
import { MapCtor } from '../primordials/map-set'
import { PromiseResolve } from '../primordials/promise'
import { cacheRegistry, defaultKeyGen } from './_internal'
import type { CacheEntry, MemoizeOptions } from './types'
/**
* Memoize an async function.
* Similar to memoize() but handles promises properly.
*
* @param fn - Async function to memoize
* @param options - Memoization options
* @returns Memoized version of the async function
*
* @example
* import { memoizeAsync } from '@socketsecurity/lib/memo/async'
*
* const fetchUser = memoizeAsync(async (id: string) => {
* const response = await fetch(`/api/users/${id}`)
* return response.json()
* }, { ttl: 300000, name: 'fetchUser' })
*
* await fetchUser('123') // Fetches from API
* await fetchUser('123') // Returns cached result
*/
export function memoizeAsync<Args extends unknown[], Result>(
  fn: (...args: Args) => Promise<Result>,
  options: MemoizeOptions<Args> = {},
): (...args: Args) => Promise<Result> {
  const {
    keyGen = (...args) => defaultKeyGen(args),
    maxSize = Number.POSITIVE_INFINITY,
    name = fn.name || 'anonymous',
    ttl = Number.POSITIVE_INFINITY,
  } = options
  // LRU via Map insertion-order: see `memoize()` above for the full
  // rationale. Key lifecycle on bump: `cache.delete(key)` +
  // `cache.set(key, entry)` moves the entry to the tail in O(1).
  const cache = new MapCtor<string, CacheEntry<Promise<Result>>>()
  // Register for global clearing.
  cacheRegistry.push(() => {
    cache.clear()
  })
  // Evict the least-recently-used entry (the Map's head) when the cache
  // is at capacity, making room for one incoming `cache.set`.
  function evictLRU(): void {
    if (cache.size >= maxSize) {
      const oldest = cache.keys().next().value
      /* c8 ignore next 8 - cache.size >= maxSize guarantees keys().next()
         yields a defined value; the undefined branch is defensive. */
      if (oldest !== undefined) {
        cache.delete(oldest)
        debugLog(`[memoizeAsync:${name}] clear`, {
          key: oldest,
          reason: 'LRU',
        })
      }
    }
  }
  // True when `entry` is older than the configured TTL. Infinite TTL
  // short-circuits so `DateNow()` isn't called on every hit.
  function isExpired(entry: CacheEntry<Promise<Result>>): boolean {
    // ttl===Infinity arm fires for callers who pass that explicitly;
    // most tests use a finite ttl.
    /* c8 ignore next 3 */
    if (ttl === Number.POSITIVE_INFINITY) {
      return false
    }
    return DateNow() - entry.timestamp > ttl
  }
  // Bump an existing cache entry to the tail (most-recently-used) in
  // O(1). Caller must have already verified `cache.has(key)`.
  function bumpRecency(key: string, entry: CacheEntry<Promise<Result>>): void {
    cache.delete(key)
    cache.set(key, entry)
  }
  // Track in-flight refreshes to prevent thundering herd on TTL expiry.
  const refreshing = new MapCtor<string, Promise<Result>>()
  return async function memoized(...args: Args): Promise<Result> {
    const key = keyGen(...args)
    const cached = cache.get(key)
    // Cache-hit, expired-with-inflight (stale-dedup), and cold-dedup
    // sub-arms all tested but not always paired in a single run.
    /* c8 ignore start */
    if (cached) {
      if (!isExpired(cached)) {
        cached.hits++
        bumpRecency(key, cached)
        debugLog(`[memoizeAsync:${name}] hit`, { key, hits: cached.hits })
        return await cached.value
      }
      const inflight = refreshing.get(key)
      if (inflight) {
        debugLog(`[memoizeAsync:${name}] stale-dedup`, { key })
        bumpRecency(key, cached)
        return await inflight
      }
      cache.delete(key)
    }
    const inflightCold = refreshing.get(key)
    if (inflightCold) {
      debugLog(`[memoizeAsync:${name}] cold-dedup`, { key })
      return await inflightCold
    }
    /* c8 ignore stop */
    debugLog(`[memoizeAsync:${name}] miss`, { key })
    // Create promise and cache it immediately (for deduplication).
    // The async IIFE is what gets stored in `refreshing` and `cache`,
    // so concurrent callers can join the same in-flight computation
    // before it resolves — that's the dedup contract.
    const promise = (async () => {
      try {
        const result = await fn(...args)
        refreshing.delete(key)
        // Success — refresh the timestamp so the freshly-computed
        // value isn't immediately classified as expired. The
        // timestamp was previously set when the fetch *started*;
        // under a slow fn this meant `isExpired` could fire right
        // as the value landed, and every subsequent call past TTL
        // recomputed because the stale-dedup branch had nothing to
        // join (`refreshing` was emptied here first).
        const entry = cache.get(key)
        if (entry) {
          entry.value = PromiseResolve(result)
          entry.timestamp = DateNow()
        }
        return result
      } catch (error) {
        refreshing.delete(key)
        // Failure — remove from cache to allow retry.
        cache.delete(key)
        debugLog(`[memoizeAsync:${name}] error`, { key, error })
        throw error
      }
    })()
    refreshing.set(key, promise)
    evictLRU()
    cache.set(key, {
      value: promise,
      // Use the `DateNow` primordial (not the patchable global
      // `Date.now`) so this timestamp is sourced identically to the
      // ones `isExpired` compares against and the resolution-path
      // refresh writes — a monkey-patched `Date.now` must not be able
      // to skew TTL accounting for the miss path alone.
      timestamp: DateNow(),
      hits: 0,
    })
    debugLog(`[memoizeAsync:${name}] set`, { key, cacheSize: cache.size })
    return await promise
  }
}