Skip to content

Instantly share code, notes, and snippets.

@camflan
Last active May 26, 2023 00:50
Show Gist options
  • Save camflan/0d3feb01aef09fff6e086e717cd2b3c1 to your computer and use it in GitHub Desktop.
A handy HOF for creating cacheable resources
// Default serializer for turning a loader's argument list into a cache key.
const DEFAULT_CACHE_KEY_FN = JSON.stringify;

/**
 * Options accepted by `makeCacheReader`.
 */
export type MakeCacheReaderOptions<LoaderArgs extends unknown[]> = {
  /** Serializes loader args into a cache key. Defaults to `JSON.stringify`. */
  makeCacheKey?: (args: LoaderArgs) => string;
  /** Existing map to use for promise caching; a fresh Map is created if omitted. */
  cacheMap?: Map<string, Promise<unknown>>;
};
/**
 * HOF for creating cacheable resources.
 *
 * Wraps an async loading function and caches the *Promise* it returns,
 * keyed by the serialized arguments. If there is already a Promise cached
 * for the same key, that Promise is returned, so concurrent callers share
 * a single in-flight request. All consumers can `await` or `.then` the
 * Promise to access the resolved value.
 *
 * Rejected Promises are evicted from the cache so a later call can retry
 * instead of replaying a cached failure forever.
 *
 * @param loaderFn - Fetches the uncached resource (hits an API, reads a DB, etc.)
 * @param options - Optional cache-key serializer and backing cache map.
 * @returns A function with the same call signature as `loaderFn` that reads
 *   through the cache.
 *
 * @example
 * // provide the function that will return an item that isn't in the
 * // cache (hits api, fetches from db, etc)
 * const readUser = makeCacheReader(getUser);
 *
 * // this will hit the API
 * const patient = await readUser(3215454644);
 *
 * // this will use the cache
 * const patient2 = await readUser(3215454644);
 *
 * // all of these resolve from at most one request per distinct id
 * const patients = await Promise.all([1, 2, 3, 4].map(id => readUser(id)));
 */
export function makeCacheReader<ItemType, LoaderArgs extends unknown[]>(
  loaderFn: (...args: LoaderArgs) => Promise<ItemType>,
  {
    makeCacheKey = DEFAULT_CACHE_KEY_FN,
    cacheMap,
  }: MakeCacheReaderOptions<LoaderArgs> = {}
) {
  const promiseCache = cacheMap ?? new Map<string, Promise<ItemType>>();

  return async (...args: LoaderArgs) => {
    const cacheKey = makeCacheKey(args);

    const cached = promiseCache.get(cacheKey);
    if (cached !== undefined) {
      // The map may be a shared Promise<unknown> cache; the prefix/key
      // discipline is what guarantees this entry holds an ItemType.
      return cached as Promise<ItemType>;
    }

    // Save the Promise itself (not the resolved value) so parallel tasks
    // retrieve the same in-flight request and get a consistent Promise API.
    const item = loaderFn(...args);
    promiseCache.set(cacheKey, item);

    // Bug fix: don't cache failures. If the load rejects, drop the entry
    // (only if it is still ours — it may have been replaced) so the next
    // call can retry. This side chain handles the rejection itself, so it
    // adds no unhandled-rejection noise; callers still see the rejection
    // on the returned Promise.
    item.catch(() => {
      if (promiseCache.get(cacheKey) === item) {
        promiseCache.delete(cacheKey);
      }
    });

    return item;
  };
}
/**
 * HOF for creating globally cacheable resources.
 *
 * Creates a global version of makeCacheReader. The returned
 * makeGlobalCacheReader HOF reads from 1 globally shared cache,
 * segmented by a cachePrefix - provided as the first arg.
 *
 * @param existingCache - Optional pre-populated map to use as the shared cache.
 * @returns A `makeGlobalCacheReader(cachePrefix, loaderFn, makeCacheKey?)`
 *   factory whose readers all share one backing Map.
 *
 * @example
 * const makeGlobalCacheReader = makeGlobalPromiseCache();
 *
 * // provide a caching prefix as well as the function that will return
 * // an item that isn't in the cache (hits api, fetches from db, etc)
 * const readUser = makeGlobalCacheReader("user", getUser);
 *
 * // this will hit the API
 * const patient = await readUser(3215454644);
 *
 * // this will use the cache
 * const patient2 = await readUser(3215454644);
 *
 * // all of these resolve from at most one request per distinct id
 * const patients = await Promise.all([1, 2, 3, 4].map(id => readUser(id)));
 */
export function makeGlobalPromiseCache<
  PromiseCache extends Map<string, Promise<unknown>> = Map<
    string,
    Promise<unknown>
  >
>(existingCache?: PromiseCache) {
  const __GLOBAL_CACHE = existingCache ?? (new Map() as PromiseCache);

  // Returns a global version of makeCacheReader which takes a keyPrefix
  // for segmenting cached values within the shared map.
  //
  // Bug fix: cachePrefix was typed `Map<string, Promise<unknown>> | string`,
  // but a Map interpolated into the key template stringifies to
  // "[object Object]", silently collapsing every Map-prefixed reader into
  // one shared segment. Nothing treated the Map differently, so the union
  // member is removed; only the documented string usage is kept.
  return function makeGlobalCacheReader<ItemType, LoaderArgs extends unknown[]>(
    cachePrefix: string,
    loaderFn: (...args: LoaderArgs) => Promise<ItemType>,
    makeCacheKey: MakeCacheReaderOptions<LoaderArgs>['makeCacheKey'] = DEFAULT_CACHE_KEY_FN
  ) {
    // Namespace every key so readers with different prefixes never collide
    // even though they share a single Map.
    const makeGlobalCacheKey = (args: LoaderArgs) =>
      `${cachePrefix}:${makeCacheKey(args)}`;

    return makeCacheReader(loaderFn, {
      makeCacheKey: makeGlobalCacheKey,
      cacheMap: __GLOBAL_CACHE,
    });
  };
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment