diff options
Diffstat (limited to 'server/middlewares/cache/shared/api-cache.ts')
-rw-r--r-- | server/middlewares/cache/shared/api-cache.ts | 314 |
1 files changed, 0 insertions, 314 deletions
diff --git a/server/middlewares/cache/shared/api-cache.ts b/server/middlewares/cache/shared/api-cache.ts deleted file mode 100644 index b50b7dce4..000000000 --- a/server/middlewares/cache/shared/api-cache.ts +++ /dev/null | |||
@@ -1,314 +0,0 @@ | |||
1 | // Thanks: https://github.com/kwhitley/apicache | ||
2 | // We duplicated the library because it is unmaintained and prevents us from upgrading to recent NodeJS versions | ||
3 | |||
4 | import express from 'express' | ||
5 | import { OutgoingHttpHeaders } from 'http' | ||
6 | import { isTestInstance, parseDurationToMs } from '@server/helpers/core-utils' | ||
7 | import { logger } from '@server/helpers/logger' | ||
8 | import { Redis } from '@server/lib/redis' | ||
9 | import { asyncMiddleware } from '@server/middlewares' | ||
10 | import { HttpStatusCode } from '@shared/models' | ||
11 | |||
// Options accepted by ApiCache
export interface APICacheOptions {
  // Response header names that must not be stored in, nor replayed from, the cache
  headerBlacklist?: string[]
  // HTTP status codes whose responses must never be cached
  excludeStatus?: HttpStatusCode[]
}
16 | |||
// Serialized representation of a cached HTTP response (stored as JSON in Redis)
interface CacheObject {
  status: number
  headers: OutgoingHttpHeaders
  // Response body: a string, or a JSON-serialized Buffer ({ type: 'Buffer', data: ... })
  data: any
  encoding: BufferEncoding
  // Seconds since epoch at cache time, used to decrement max-age when replaying
  timestamp: number
}
24 | |||
25 | export class ApiCache { | ||
26 | |||
27 | private readonly options: APICacheOptions | ||
28 | private readonly timers: { [ id: string ]: NodeJS.Timeout } = {} | ||
29 | |||
30 | private readonly index = { | ||
31 | groups: [] as string[], | ||
32 | all: [] as string[] | ||
33 | } | ||
34 | |||
35 | // Cache keys per group | ||
36 | private groups: { [groupIndex: string]: string[] } = {} | ||
37 | |||
38 | private readonly seed: number | ||
39 | |||
40 | constructor (options: APICacheOptions) { | ||
41 | this.seed = new Date().getTime() | ||
42 | |||
43 | this.options = { | ||
44 | headerBlacklist: [], | ||
45 | excludeStatus: [], | ||
46 | |||
47 | ...options | ||
48 | } | ||
49 | } | ||
50 | |||
51 | buildMiddleware (strDuration: string) { | ||
52 | const duration = parseDurationToMs(strDuration) | ||
53 | |||
54 | return asyncMiddleware( | ||
55 | async (req: express.Request, res: express.Response, next: express.NextFunction) => { | ||
56 | const key = this.getCacheKey(req) | ||
57 | const redis = Redis.Instance.getClient() | ||
58 | |||
59 | if (!Redis.Instance.isConnected()) return this.makeResponseCacheable(res, next, key, duration) | ||
60 | |||
61 | try { | ||
62 | const obj = await redis.hgetall(key) | ||
63 | if (obj?.response) { | ||
64 | return this.sendCachedResponse(req, res, JSON.parse(obj.response), duration) | ||
65 | } | ||
66 | |||
67 | return this.makeResponseCacheable(res, next, key, duration) | ||
68 | } catch (err) { | ||
69 | return this.makeResponseCacheable(res, next, key, duration) | ||
70 | } | ||
71 | } | ||
72 | ) | ||
73 | } | ||
74 | |||
75 | clearGroupSafe (group: string) { | ||
76 | const run = async () => { | ||
77 | const cacheKeys = this.groups[group] | ||
78 | if (!cacheKeys) return | ||
79 | |||
80 | for (const key of cacheKeys) { | ||
81 | try { | ||
82 | await this.clear(key) | ||
83 | } catch (err) { | ||
84 | logger.error('Cannot clear ' + key, { err }) | ||
85 | } | ||
86 | } | ||
87 | |||
88 | delete this.groups[group] | ||
89 | } | ||
90 | |||
91 | void run() | ||
92 | } | ||
93 | |||
94 | private getCacheKey (req: express.Request) { | ||
95 | return Redis.Instance.getPrefix() + 'api-cache-' + this.seed + '-' + req.originalUrl | ||
96 | } | ||
97 | |||
98 | private shouldCacheResponse (response: express.Response) { | ||
99 | if (!response) return false | ||
100 | if (this.options.excludeStatus.includes(response.statusCode)) return false | ||
101 | |||
102 | return true | ||
103 | } | ||
104 | |||
105 | private addIndexEntries (key: string, res: express.Response) { | ||
106 | this.index.all.unshift(key) | ||
107 | |||
108 | const groups = res.locals.apicacheGroups || [] | ||
109 | |||
110 | for (const group of groups) { | ||
111 | if (!this.groups[group]) this.groups[group] = [] | ||
112 | |||
113 | this.groups[group].push(key) | ||
114 | } | ||
115 | } | ||
116 | |||
117 | private filterBlacklistedHeaders (headers: OutgoingHttpHeaders) { | ||
118 | return Object.keys(headers) | ||
119 | .filter(key => !this.options.headerBlacklist.includes(key)) | ||
120 | .reduce((acc, header) => { | ||
121 | acc[header] = headers[header] | ||
122 | |||
123 | return acc | ||
124 | }, {}) | ||
125 | } | ||
126 | |||
127 | private createCacheObject (status: number, headers: OutgoingHttpHeaders, data: any, encoding: BufferEncoding) { | ||
128 | return { | ||
129 | status, | ||
130 | headers: this.filterBlacklistedHeaders(headers), | ||
131 | data, | ||
132 | encoding, | ||
133 | |||
134 | // Seconds since epoch, used to properly decrement max-age headers in cached responses. | ||
135 | timestamp: new Date().getTime() / 1000 | ||
136 | } as CacheObject | ||
137 | } | ||
138 | |||
139 | private async cacheResponse (key: string, value: object, duration: number) { | ||
140 | const redis = Redis.Instance.getClient() | ||
141 | |||
142 | if (Redis.Instance.isConnected()) { | ||
143 | await Promise.all([ | ||
144 | redis.hset(key, 'response', JSON.stringify(value)), | ||
145 | redis.hset(key, 'duration', duration + ''), | ||
146 | redis.expire(key, duration / 1000) | ||
147 | ]) | ||
148 | } | ||
149 | |||
150 | // add automatic cache clearing from duration, includes max limit on setTimeout | ||
151 | this.timers[key] = setTimeout(() => { | ||
152 | this.clear(key) | ||
153 | .catch(err => logger.error('Cannot clear Redis key %s.', key, { err })) | ||
154 | }, Math.min(duration, 2147483647)) | ||
155 | } | ||
156 | |||
157 | private accumulateContent (res: express.Response, content: any) { | ||
158 | if (!content) return | ||
159 | |||
160 | if (typeof content === 'string') { | ||
161 | res.locals.apicache.content = (res.locals.apicache.content || '') + content | ||
162 | return | ||
163 | } | ||
164 | |||
165 | if (Buffer.isBuffer(content)) { | ||
166 | let oldContent = res.locals.apicache.content | ||
167 | |||
168 | if (typeof oldContent === 'string') { | ||
169 | oldContent = Buffer.from(oldContent) | ||
170 | } | ||
171 | |||
172 | if (!oldContent) { | ||
173 | oldContent = Buffer.alloc(0) | ||
174 | } | ||
175 | |||
176 | res.locals.apicache.content = Buffer.concat( | ||
177 | [ oldContent, content ], | ||
178 | oldContent.length + content.length | ||
179 | ) | ||
180 | |||
181 | return | ||
182 | } | ||
183 | |||
184 | res.locals.apicache.content = content | ||
185 | } | ||
186 | |||
187 | private makeResponseCacheable (res: express.Response, next: express.NextFunction, key: string, duration: number) { | ||
188 | const self = this | ||
189 | |||
190 | res.locals.apicache = { | ||
191 | write: res.write, | ||
192 | writeHead: res.writeHead, | ||
193 | end: res.end, | ||
194 | cacheable: true, | ||
195 | content: undefined, | ||
196 | headers: undefined | ||
197 | } | ||
198 | |||
199 | // Patch express | ||
200 | res.writeHead = function () { | ||
201 | if (self.shouldCacheResponse(res)) { | ||
202 | res.setHeader('cache-control', 'max-age=' + (duration / 1000).toFixed(0)) | ||
203 | } else { | ||
204 | res.setHeader('cache-control', 'no-cache, no-store, must-revalidate') | ||
205 | } | ||
206 | |||
207 | res.locals.apicache.headers = Object.assign({}, res.getHeaders()) | ||
208 | return res.locals.apicache.writeHead.apply(this, arguments as any) | ||
209 | } | ||
210 | |||
211 | res.write = function (chunk: any) { | ||
212 | self.accumulateContent(res, chunk) | ||
213 | return res.locals.apicache.write.apply(this, arguments as any) | ||
214 | } | ||
215 | |||
216 | res.end = function (content: any, encoding: BufferEncoding) { | ||
217 | if (self.shouldCacheResponse(res)) { | ||
218 | self.accumulateContent(res, content) | ||
219 | |||
220 | if (res.locals.apicache.cacheable && res.locals.apicache.content) { | ||
221 | self.addIndexEntries(key, res) | ||
222 | |||
223 | const headers = res.locals.apicache.headers || res.getHeaders() | ||
224 | const cacheObject = self.createCacheObject( | ||
225 | res.statusCode, | ||
226 | headers, | ||
227 | res.locals.apicache.content, | ||
228 | encoding | ||
229 | ) | ||
230 | self.cacheResponse(key, cacheObject, duration) | ||
231 | .catch(err => logger.error('Cannot cache response', { err })) | ||
232 | } | ||
233 | } | ||
234 | |||
235 | res.locals.apicache.end.apply(this, arguments as any) | ||
236 | } as any | ||
237 | |||
238 | next() | ||
239 | } | ||
240 | |||
241 | private sendCachedResponse (request: express.Request, response: express.Response, cacheObject: CacheObject, duration: number) { | ||
242 | const headers = response.getHeaders() | ||
243 | |||
244 | if (isTestInstance()) { | ||
245 | Object.assign(headers, { | ||
246 | 'x-api-cache-cached': 'true' | ||
247 | }) | ||
248 | } | ||
249 | |||
250 | Object.assign(headers, this.filterBlacklistedHeaders(cacheObject.headers || {}), { | ||
251 | // Set properly decremented max-age header | ||
252 | // This ensures that max-age is in sync with the cache expiration | ||
253 | 'cache-control': | ||
254 | 'max-age=' + | ||
255 | Math.max( | ||
256 | 0, | ||
257 | (duration / 1000 - (new Date().getTime() / 1000 - cacheObject.timestamp)) | ||
258 | ).toFixed(0) | ||
259 | }) | ||
260 | |||
261 | // unstringify buffers | ||
262 | let data = cacheObject.data | ||
263 | if (data && data.type === 'Buffer') { | ||
264 | data = typeof data.data === 'number' | ||
265 | ? Buffer.alloc(data.data) | ||
266 | : Buffer.from(data.data) | ||
267 | } | ||
268 | |||
269 | // Test Etag against If-None-Match for 304 | ||
270 | const cachedEtag = cacheObject.headers.etag | ||
271 | const requestEtag = request.headers['if-none-match'] | ||
272 | |||
273 | if (requestEtag && cachedEtag === requestEtag) { | ||
274 | response.writeHead(304, headers) | ||
275 | return response.end() | ||
276 | } | ||
277 | |||
278 | response.writeHead(cacheObject.status || 200, headers) | ||
279 | |||
280 | return response.end(data, cacheObject.encoding) | ||
281 | } | ||
282 | |||
283 | private async clear (target: string) { | ||
284 | const redis = Redis.Instance.getClient() | ||
285 | |||
286 | if (target) { | ||
287 | clearTimeout(this.timers[target]) | ||
288 | delete this.timers[target] | ||
289 | |||
290 | try { | ||
291 | await redis.del(target) | ||
292 | } catch (err) { | ||
293 | logger.error('Cannot delete %s in redis cache.', target, { err }) | ||
294 | } | ||
295 | |||
296 | this.index.all = this.index.all.filter(key => key !== target) | ||
297 | } else { | ||
298 | for (const key of this.index.all) { | ||
299 | clearTimeout(this.timers[key]) | ||
300 | delete this.timers[key] | ||
301 | |||
302 | try { | ||
303 | await redis.del(key) | ||
304 | } catch (err) { | ||
305 | logger.error('Cannot delete %s in redis cache.', key, { err }) | ||
306 | } | ||
307 | } | ||
308 | |||
309 | this.index.all = [] | ||
310 | } | ||
311 | |||
312 | return this.index | ||
313 | } | ||
314 | } | ||