Diffstat (limited to 'server/middlewares/cache/shared')
-rw-r--r-- | server/middlewares/cache/shared/api-cache.ts | 269
-rw-r--r-- | server/middlewares/cache/shared/index.ts     |   1
2 files changed, 270 insertions, 0 deletions
diff --git a/server/middlewares/cache/shared/api-cache.ts b/server/middlewares/cache/shared/api-cache.ts
new file mode 100644
index 000000000..f9f7b1b67
--- /dev/null
+++ b/server/middlewares/cache/shared/api-cache.ts
@@ -0,0 +1,269 @@
// Thanks: https://github.com/kwhitley/apicache
// We duplicated the library because it is unmaintained and prevented us from upgrading to recent Node.js versions

import * as express from 'express'
import { OutgoingHttpHeaders } from 'http'
import { isTestInstance, parseDurationToMs } from '@server/helpers/core-utils'
import { logger } from '@server/helpers/logger'
import { Redis } from '@server/lib/redis'
import { HttpStatusCode } from '@shared/models'

export interface APICacheOptions {
  headerBlacklist?: string[]
  excludeStatus?: HttpStatusCode[]
}

interface CacheObject {
  status: number
  headers: OutgoingHttpHeaders
  data: any
  encoding: BufferEncoding
  timestamp: number
}

export class ApiCache {

  private readonly options: APICacheOptions
  private readonly timers: { [ id: string ]: NodeJS.Timeout } = {}

  private index: { all: string[] } = { all: [] }

  constructor (options: APICacheOptions) {
    this.options = {
      headerBlacklist: [],
      excludeStatus: [],

      ...options
    }
  }

  buildMiddleware (strDuration: string) {
    const duration = parseDurationToMs(strDuration)

    return (req: express.Request, res: express.Response, next: express.NextFunction) => {
      const key = Redis.Instance.getPrefix() + 'api-cache-' + req.originalUrl
      const redis = Redis.Instance.getClient()

      if (!redis.connected) return this.makeResponseCacheable(res, next, key, duration)

      try {
        redis.hgetall(key, (err, obj) => {
          if (!err && obj && obj.response) {
            return this.sendCachedResponse(req, res, JSON.parse(obj.response), duration)
          }

          return this.makeResponseCacheable(res, next, key, duration)
        })
      } catch (err) {
        return this.makeResponseCacheable(res, next, key, duration)
      }
    }
  }

  private shouldCacheResponse (response: express.Response) {
    if (!response) return false
    if (this.options.excludeStatus.includes(response.statusCode)) return false

    return true
  }

  private addIndexEntries (key: string) {
    this.index.all.unshift(key)
  }

  private filterBlacklistedHeaders (headers: OutgoingHttpHeaders) {
    return Object.keys(headers)
      .filter(key => !this.options.headerBlacklist.includes(key))
      .reduce((acc, header) => {
        acc[header] = headers[header]

        return acc
      }, {})
  }

  private createCacheObject (status: number, headers: OutgoingHttpHeaders, data: any, encoding: BufferEncoding) {
    return {
      status,
      headers: this.filterBlacklistedHeaders(headers),
      data,
      encoding,

      // Seconds since epoch, used to properly decrement max-age headers in cached responses.
      timestamp: new Date().getTime() / 1000
    } as CacheObject
  }

  private cacheResponse (key: string, value: object, duration: number) {
    const redis = Redis.Instance.getClient()

    if (redis.connected) {
      try {
        redis.hset(key, 'response', JSON.stringify(value))
        redis.hset(key, 'duration', duration + '')
        redis.expire(key, duration / 1000)
      } catch (err) {
        logger.error('Cannot set cache in redis.', { err })
      }
    }

    // Automatically clear this entry after the configured duration (setTimeout delay is capped at its 2^31 - 1 ms maximum)
    this.timers[key] = setTimeout(() => this.clear(key), Math.min(duration, 2147483647))
  }

  private accumulateContent (res: express.Response, content: any) {
    if (!content) return

    if (typeof content === 'string') {
      res.locals.apicache.content = (res.locals.apicache.content || '') + content
      return
    }

    if (Buffer.isBuffer(content)) {
      let oldContent = res.locals.apicache.content

      if (typeof oldContent === 'string') {
        oldContent = Buffer.from(oldContent)
      }

      if (!oldContent) {
        oldContent = Buffer.alloc(0)
      }

      res.locals.apicache.content = Buffer.concat(
        [ oldContent, content ],
        oldContent.length + content.length
      )

      return
    }

    res.locals.apicache.content = content
  }

  private makeResponseCacheable (res: express.Response, next: express.NextFunction, key: string, duration: number) {
    const self = this

    res.locals.apicache = {
      write: res.write,
      writeHead: res.writeHead,
      end: res.end,
      cacheable: true,
      content: undefined,
      headers: {}
    }

    // Patch express
    res.writeHead = function () {
      if (self.shouldCacheResponse(res)) {
        res.setHeader('cache-control', 'max-age=' + (duration / 1000).toFixed(0))
      } else {
        res.setHeader('cache-control', 'no-cache, no-store, must-revalidate')
      }

      res.locals.apicache.headers = Object.assign({}, res.getHeaders())
      return res.locals.apicache.writeHead.apply(this, arguments as any)
    }

    res.write = function (chunk: any) {
      self.accumulateContent(res, chunk)
      return res.locals.apicache.write.apply(this, arguments as any)
    }

    res.end = function (content: any, encoding: BufferEncoding) {
      if (self.shouldCacheResponse(res)) {
        self.accumulateContent(res, content)

        if (res.locals.apicache.cacheable && res.locals.apicache.content) {
          self.addIndexEntries(key)

          const headers = res.locals.apicache.headers || res.getHeaders()
          const cacheObject = self.createCacheObject(
            res.statusCode,
            headers,
            res.locals.apicache.content,
            encoding
          )
          self.cacheResponse(key, cacheObject, duration)
        }
      }

      res.locals.apicache.end.apply(this, arguments as any)
    } as any

    next()
  }

  private sendCachedResponse (request: express.Request, response: express.Response, cacheObject: CacheObject, duration: number) {
    const headers = response.getHeaders()

    if (isTestInstance()) {
      Object.assign(headers, {
        'x-api-cache-cached': 'true'
      })
    }

    Object.assign(headers, this.filterBlacklistedHeaders(cacheObject.headers || {}), {
      // Set properly decremented max-age header
      // This ensures that max-age is in sync with the cache expiration
      'cache-control':
        'max-age=' +
        Math.max(
          0,
          (duration / 1000 - (new Date().getTime() / 1000 - cacheObject.timestamp))
        ).toFixed(0)
    })

    // Unstringify buffers: restore Buffer instances that were serialized by JSON.stringify
    let data = cacheObject.data
    if (data && data.type === 'Buffer') {
      data = typeof data.data === 'number'
        ? Buffer.alloc(data.data)
        : Buffer.from(data.data)
    }

    // Test Etag against If-None-Match for 304
    const cachedEtag = cacheObject.headers.etag
    const requestEtag = request.headers['if-none-match']

    if (requestEtag && cachedEtag === requestEtag) {
      response.writeHead(304, headers)
      return response.end()
    }

    response.writeHead(cacheObject.status || 200, headers)

    return response.end(data, cacheObject.encoding)
  }

  private clear (target: string) {
    const redis = Redis.Instance.getClient()

    if (target) {
      clearTimeout(this.timers[target])
      delete this.timers[target]

      try {
        redis.del(target)
      } catch (err) {
        logger.error('Cannot delete %s in redis cache.', target, { err })
      }

      this.index.all = this.index.all.filter(key => key !== target)
    } else {
      for (const key of this.index.all) {
        clearTimeout(this.timers[key])
        delete this.timers[key]

        try {
          redis.del(key)
        } catch (err) {
          logger.error('Cannot delete %s in redis cache.', key, { err })
        }
      }

      this.index.all = []
    }

    return this.index
  }
}
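
A minimal wiring sketch of how this class might be attached to an Express router. It is illustrative only, not part of this commit: the route path, the '5 minutes' duration string, the relative import path and the excluded status codes (enum member names assumed) are examples, and it presumes Redis.Instance has been initialized before requests reach the middleware.

import * as express from 'express'
import { HttpStatusCode } from '@shared/models'
import { ApiCache } from './shared'

// Hypothetical configuration: never persist cookies in the cache, never cache 403/404 responses
const cache = new ApiCache({
  headerBlacklist: [ 'set-cookie' ],
  excludeStatus: [ HttpStatusCode.FORBIDDEN_403, HttpStatusCode.NOT_FOUND_404 ]
})

const router = express.Router()

// Responses are stored in Redis under "<prefix>api-cache-<originalUrl>"
// and expire after the parsed duration
router.get('/example', cache.buildMiddleware('5 minutes'), (req, res) => {
  res.json({ hello: 'world' })
})
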
diff --git a/server/middlewares/cache/shared/index.ts b/server/middlewares/cache/shared/index.ts
new file mode 100644
index 000000000..c707eaf7a
--- /dev/null
+++ b/server/middlewares/cache/shared/index.ts
@@ -0,0 +1 @@
export * from './api-cache' | |||
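
The barrel file simply re-exports the cache implementation, so callers can import it from the directory rather than the concrete module. A tiny, hypothetical consumer (the relative path depends on where the importing file lives):

import { ApiCache, APICacheOptions } from './shared'

// Build a reusable middleware instance from the shared export
const options: APICacheOptions = { headerBlacklist: [ 'set-cookie' ] }
const apiCacheMiddleware = new ApiCache(options).buildMiddleware('1 hour')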