]>
Commit | Line | Data |
---|---|---|
20bafcb6 C |
// Thanks: https://github.com/kwhitley/apicache
// We duplicated the library because it is unmaintained and prevented us from upgrading to recent NodeJS versions
3 | ||
41fb13c3 | 4 | import express from 'express' |
20bafcb6 C |
5 | import { OutgoingHttpHeaders } from 'http' |
6 | import { isTestInstance, parseDurationToMs } from '@server/helpers/core-utils' | |
7 | import { logger } from '@server/helpers/logger' | |
8 | import { Redis } from '@server/lib/redis' | |
e5d91a9b | 9 | import { asyncMiddleware } from '@server/middlewares' |
b2111066 | 10 | import { HttpStatusCode } from '@shared/models' |
20bafcb6 C |
11 | |
12 | export interface APICacheOptions { | |
13 | headerBlacklist?: string[] | |
14 | excludeStatus?: HttpStatusCode[] | |
15 | } | |
16 | ||
17 | interface CacheObject { | |
18 | status: number | |
19 | headers: OutgoingHttpHeaders | |
20 | data: any | |
21 | encoding: BufferEncoding | |
22 | timestamp: number | |
23 | } | |
24 | ||
25 | export class ApiCache { | |
26 | ||
27 | private readonly options: APICacheOptions | |
28 | private readonly timers: { [ id: string ]: NodeJS.Timeout } = {} | |
29 | ||
cb0eda56 AG |
30 | private readonly index = { |
31 | groups: [] as string[], | |
32 | all: [] as string[] | |
33 | } | |
34 | ||
35 | // Cache keys per group | |
36 | private groups: { [groupIndex: string]: string[] } = {} | |
20bafcb6 | 37 | |
ed14d1eb C |
38 | private readonly seed: number |
39 | ||
20bafcb6 | 40 | constructor (options: APICacheOptions) { |
ed14d1eb C |
41 | this.seed = new Date().getTime() |
42 | ||
20bafcb6 C |
43 | this.options = { |
44 | headerBlacklist: [], | |
45 | excludeStatus: [], | |
46 | ||
47 | ...options | |
48 | } | |
49 | } | |
50 | ||
51 | buildMiddleware (strDuration: string) { | |
52 | const duration = parseDurationToMs(strDuration) | |
53 | ||
e5d91a9b C |
54 | return asyncMiddleware( |
55 | async (req: express.Request, res: express.Response, next: express.NextFunction) => { | |
cb0eda56 | 56 | const key = this.getCacheKey(req) |
e5d91a9b | 57 | const redis = Redis.Instance.getClient() |
20bafcb6 | 58 | |
e5d91a9b | 59 | if (!Redis.Instance.isConnected()) return this.makeResponseCacheable(res, next, key, duration) |
20bafcb6 | 60 | |
e5d91a9b | 61 | try { |
564b9b55 | 62 | const obj = await redis.hgetall(key) |
e5d91a9b | 63 | if (obj?.response) { |
20bafcb6 C |
64 | return this.sendCachedResponse(req, res, JSON.parse(obj.response), duration) |
65 | } | |
66 | ||
67 | return this.makeResponseCacheable(res, next, key, duration) | |
e5d91a9b C |
68 | } catch (err) { |
69 | return this.makeResponseCacheable(res, next, key, duration) | |
70 | } | |
20bafcb6 | 71 | } |
e5d91a9b | 72 | ) |
20bafcb6 C |
73 | } |
74 | ||
cb0eda56 AG |
75 | clearGroupSafe (group: string) { |
76 | const run = async () => { | |
77 | const cacheKeys = this.groups[group] | |
78 | if (!cacheKeys) return | |
79 | ||
80 | for (const key of cacheKeys) { | |
81 | try { | |
82 | await this.clear(key) | |
83 | } catch (err) { | |
84 | logger.error('Cannot clear ' + key, { err }) | |
85 | } | |
86 | } | |
87 | ||
88 | delete this.groups[group] | |
89 | } | |
90 | ||
91 | void run() | |
92 | } | |
93 | ||
94 | private getCacheKey (req: express.Request) { | |
ed14d1eb | 95 | return Redis.Instance.getPrefix() + 'api-cache-' + this.seed + '-' + req.originalUrl |
cb0eda56 AG |
96 | } |
97 | ||
20bafcb6 C |
98 | private shouldCacheResponse (response: express.Response) { |
99 | if (!response) return false | |
100 | if (this.options.excludeStatus.includes(response.statusCode)) return false | |
101 | ||
102 | return true | |
103 | } | |
104 | ||
cb0eda56 | 105 | private addIndexEntries (key: string, res: express.Response) { |
20bafcb6 | 106 | this.index.all.unshift(key) |
cb0eda56 AG |
107 | |
108 | const groups = res.locals.apicacheGroups || [] | |
109 | ||
110 | for (const group of groups) { | |
111 | if (!this.groups[group]) this.groups[group] = [] | |
112 | ||
113 | this.groups[group].push(key) | |
114 | } | |
20bafcb6 C |
115 | } |
116 | ||
117 | private filterBlacklistedHeaders (headers: OutgoingHttpHeaders) { | |
118 | return Object.keys(headers) | |
119 | .filter(key => !this.options.headerBlacklist.includes(key)) | |
120 | .reduce((acc, header) => { | |
121 | acc[header] = headers[header] | |
122 | ||
123 | return acc | |
124 | }, {}) | |
125 | } | |
126 | ||
127 | private createCacheObject (status: number, headers: OutgoingHttpHeaders, data: any, encoding: BufferEncoding) { | |
128 | return { | |
129 | status, | |
130 | headers: this.filterBlacklistedHeaders(headers), | |
131 | data, | |
132 | encoding, | |
133 | ||
134 | // Seconds since epoch, used to properly decrement max-age headers in cached responses. | |
135 | timestamp: new Date().getTime() / 1000 | |
136 | } as CacheObject | |
137 | } | |
138 | ||
e5d91a9b | 139 | private async cacheResponse (key: string, value: object, duration: number) { |
20bafcb6 C |
140 | const redis = Redis.Instance.getClient() |
141 | ||
e5d91a9b C |
142 | if (Redis.Instance.isConnected()) { |
143 | await Promise.all([ | |
564b9b55 | 144 | redis.hset(key, 'response', JSON.stringify(value)), |
145 | redis.hset(key, 'duration', duration + ''), | |
20bafcb6 | 146 | redis.expire(key, duration / 1000) |
e5d91a9b | 147 | ]) |
20bafcb6 C |
148 | } |
149 | ||
150 | // add automatic cache clearing from duration, includes max limit on setTimeout | |
e5d91a9b C |
151 | this.timers[key] = setTimeout(() => { |
152 | this.clear(key) | |
153 | .catch(err => logger.error('Cannot clear Redis key %s.', key, { err })) | |
154 | }, Math.min(duration, 2147483647)) | |
20bafcb6 C |
155 | } |
156 | ||
157 | private accumulateContent (res: express.Response, content: any) { | |
158 | if (!content) return | |
159 | ||
160 | if (typeof content === 'string') { | |
161 | res.locals.apicache.content = (res.locals.apicache.content || '') + content | |
162 | return | |
163 | } | |
164 | ||
165 | if (Buffer.isBuffer(content)) { | |
166 | let oldContent = res.locals.apicache.content | |
167 | ||
168 | if (typeof oldContent === 'string') { | |
169 | oldContent = Buffer.from(oldContent) | |
170 | } | |
171 | ||
172 | if (!oldContent) { | |
173 | oldContent = Buffer.alloc(0) | |
174 | } | |
175 | ||
176 | res.locals.apicache.content = Buffer.concat( | |
177 | [ oldContent, content ], | |
178 | oldContent.length + content.length | |
179 | ) | |
180 | ||
181 | return | |
182 | } | |
183 | ||
184 | res.locals.apicache.content = content | |
185 | } | |
186 | ||
187 | private makeResponseCacheable (res: express.Response, next: express.NextFunction, key: string, duration: number) { | |
188 | const self = this | |
189 | ||
190 | res.locals.apicache = { | |
191 | write: res.write, | |
192 | writeHead: res.writeHead, | |
193 | end: res.end, | |
194 | cacheable: true, | |
195 | content: undefined, | |
b2111066 | 196 | headers: undefined |
20bafcb6 C |
197 | } |
198 | ||
199 | // Patch express | |
200 | res.writeHead = function () { | |
201 | if (self.shouldCacheResponse(res)) { | |
202 | res.setHeader('cache-control', 'max-age=' + (duration / 1000).toFixed(0)) | |
203 | } else { | |
204 | res.setHeader('cache-control', 'no-cache, no-store, must-revalidate') | |
205 | } | |
206 | ||
207 | res.locals.apicache.headers = Object.assign({}, res.getHeaders()) | |
208 | return res.locals.apicache.writeHead.apply(this, arguments as any) | |
209 | } | |
210 | ||
211 | res.write = function (chunk: any) { | |
212 | self.accumulateContent(res, chunk) | |
213 | return res.locals.apicache.write.apply(this, arguments as any) | |
214 | } | |
215 | ||
216 | res.end = function (content: any, encoding: BufferEncoding) { | |
217 | if (self.shouldCacheResponse(res)) { | |
218 | self.accumulateContent(res, content) | |
219 | ||
220 | if (res.locals.apicache.cacheable && res.locals.apicache.content) { | |
cb0eda56 | 221 | self.addIndexEntries(key, res) |
20bafcb6 C |
222 | |
223 | const headers = res.locals.apicache.headers || res.getHeaders() | |
224 | const cacheObject = self.createCacheObject( | |
225 | res.statusCode, | |
226 | headers, | |
227 | res.locals.apicache.content, | |
228 | encoding | |
229 | ) | |
230 | self.cacheResponse(key, cacheObject, duration) | |
e5d91a9b | 231 | .catch(err => logger.error('Cannot cache response', { err })) |
20bafcb6 C |
232 | } |
233 | } | |
234 | ||
235 | res.locals.apicache.end.apply(this, arguments as any) | |
236 | } as any | |
237 | ||
238 | next() | |
239 | } | |
240 | ||
241 | private sendCachedResponse (request: express.Request, response: express.Response, cacheObject: CacheObject, duration: number) { | |
242 | const headers = response.getHeaders() | |
243 | ||
244 | if (isTestInstance()) { | |
245 | Object.assign(headers, { | |
246 | 'x-api-cache-cached': 'true' | |
247 | }) | |
248 | } | |
249 | ||
250 | Object.assign(headers, this.filterBlacklistedHeaders(cacheObject.headers || {}), { | |
251 | // Set properly decremented max-age header | |
252 | // This ensures that max-age is in sync with the cache expiration | |
253 | 'cache-control': | |
254 | 'max-age=' + | |
255 | Math.max( | |
256 | 0, | |
257 | (duration / 1000 - (new Date().getTime() / 1000 - cacheObject.timestamp)) | |
258 | ).toFixed(0) | |
259 | }) | |
260 | ||
261 | // unstringify buffers | |
262 | let data = cacheObject.data | |
263 | if (data && data.type === 'Buffer') { | |
264 | data = typeof data.data === 'number' | |
265 | ? Buffer.alloc(data.data) | |
266 | : Buffer.from(data.data) | |
267 | } | |
268 | ||
269 | // Test Etag against If-None-Match for 304 | |
270 | const cachedEtag = cacheObject.headers.etag | |
271 | const requestEtag = request.headers['if-none-match'] | |
272 | ||
273 | if (requestEtag && cachedEtag === requestEtag) { | |
274 | response.writeHead(304, headers) | |
275 | return response.end() | |
276 | } | |
277 | ||
278 | response.writeHead(cacheObject.status || 200, headers) | |
279 | ||
280 | return response.end(data, cacheObject.encoding) | |
281 | } | |
282 | ||
e5d91a9b | 283 | private async clear (target: string) { |
20bafcb6 C |
284 | const redis = Redis.Instance.getClient() |
285 | ||
286 | if (target) { | |
287 | clearTimeout(this.timers[target]) | |
288 | delete this.timers[target] | |
289 | ||
290 | try { | |
e5d91a9b | 291 | await redis.del(target) |
20bafcb6 C |
292 | } catch (err) { |
293 | logger.error('Cannot delete %s in redis cache.', target, { err }) | |
294 | } | |
295 | ||
296 | this.index.all = this.index.all.filter(key => key !== target) | |
297 | } else { | |
298 | for (const key of this.index.all) { | |
299 | clearTimeout(this.timers[key]) | |
300 | delete this.timers[key] | |
301 | ||
302 | try { | |
e5d91a9b | 303 | await redis.del(key) |
20bafcb6 C |
304 | } catch (err) { |
305 | logger.error('Cannot delete %s in redis cache.', key, { err }) | |
306 | } | |
307 | } | |
308 | ||
309 | this.index.all = [] | |
310 | } | |
311 | ||
312 | return this.index | |
313 | } | |
314 | } |