]>
Commit | Line | Data |
---|---|---|
1 | import express from 'express' | |
2 | import { readdir, readFile } from 'fs-extra' | |
3 | import { join } from 'path' | |
4 | import { isArray } from '@server/helpers/custom-validators/misc' | |
5 | import { logger, mtimeSortFilesDesc } from '@server/helpers/logger' | |
6 | import { LogLevel } from '../../../../shared/models/server/log-level.type' | |
7 | import { UserRight } from '../../../../shared/models/users' | |
8 | import { CONFIG } from '../../../initializers/config' | |
9 | import { AUDIT_LOG_FILENAME, LOG_FILENAME, MAX_LOGS_OUTPUT_CHARACTERS } from '../../../initializers/constants' | |
10 | import { asyncMiddleware, authenticate, ensureUserHasRight } from '../../../middlewares' | |
11 | import { getAuditLogsValidator, getLogsValidator } from '../../../middlewares/validators/logs' | |
12 | ||
13 | const logsRouter = express.Router() | |
14 | ||
15 | logsRouter.get('/logs', | |
16 | authenticate, | |
17 | ensureUserHasRight(UserRight.MANAGE_LOGS), | |
18 | getLogsValidator, | |
19 | asyncMiddleware(getLogs) | |
20 | ) | |
21 | ||
22 | logsRouter.get('/audit-logs', | |
23 | authenticate, | |
24 | ensureUserHasRight(UserRight.MANAGE_LOGS), | |
25 | getAuditLogsValidator, | |
26 | asyncMiddleware(getAuditLogs) | |
27 | ) | |
28 | ||
29 | // --------------------------------------------------------------------------- | |
30 | ||
31 | export { | |
32 | logsRouter | |
33 | } | |
34 | ||
35 | // --------------------------------------------------------------------------- | |
36 | ||
37 | const auditLogNameFilter = generateLogNameFilter(AUDIT_LOG_FILENAME) | |
38 | async function getAuditLogs (req: express.Request, res: express.Response) { | |
39 | const output = await generateOutput({ | |
40 | startDateQuery: req.query.startDate, | |
41 | endDateQuery: req.query.endDate, | |
42 | level: 'audit', | |
43 | nameFilter: auditLogNameFilter | |
44 | }) | |
45 | ||
46 | return res.json(output).end() | |
47 | } | |
48 | ||
49 | const logNameFilter = generateLogNameFilter(LOG_FILENAME) | |
50 | async function getLogs (req: express.Request, res: express.Response) { | |
51 | const output = await generateOutput({ | |
52 | startDateQuery: req.query.startDate, | |
53 | endDateQuery: req.query.endDate, | |
54 | level: req.query.level || 'info', | |
55 | tagsOneOf: req.query.tagsOneOf, | |
56 | nameFilter: logNameFilter | |
57 | }) | |
58 | ||
59 | return res.json(output) | |
60 | } | |
61 | ||
// Aggregates matching log entries from the log directory, reading the most
// recently modified files first, until the requested time window is passed
// or the MAX_LOGS_OUTPUT_CHARACTERS budget is spent.
async function generateOutput (options: {
  startDateQuery: string
  endDateQuery?: string

  level: LogLevel
  nameFilter: RegExp
  tagsOneOf?: string[]
}) {
  const { startDateQuery, level, nameFilter } = options

  // Normalize tags to a Set for O(1) membership tests; undefined disables
  // tag filtering entirely
  const tagsOneOf = Array.isArray(options.tagsOneOf) && options.tagsOneOf.length !== 0
    ? new Set(options.tagsOneOf)
    : undefined

  const logFiles = await readdir(CONFIG.STORAGE.LOG_DIR)
  // Most recently modified files first, so the newest entries are read first
  const sortedLogFiles = await mtimeSortFilesDesc(logFiles, CONFIG.STORAGE.LOG_DIR)
  let currentSize = 0

  const startDate = new Date(startDateQuery)
  // A missing end date means "up to now"
  const endDate = options.endDateQuery ? new Date(options.endDateQuery) : new Date()

  let output: string[] = []

  for (const meta of sortedLogFiles) {
    // Skip files belonging to the other log stream (standard vs audit)
    if (nameFilter.exec(meta.file) === null) continue

    const path = join(CONFIG.STORAGE.LOG_DIR, meta.file)
    logger.debug('Opening %s to fetch logs.', path)

    const result = await getOutputFromFile({ path, startDate, endDate, level, currentSize, tagsOneOf })
    if (!result.output) break

    // This file is older than the ones already collected, so prepend its
    // entries to keep the overall output in chronological order
    output = result.output.concat(output)
    currentSize = result.currentSize

    // Stop once the size budget is exhausted, or once the oldest entry seen
    // is already before the requested start date (older files won't match)
    if (currentSize > MAX_LOGS_OUTPUT_CHARACTERS || (result.logTime && result.logTime < startDate.getTime())) break
  }

  return output
}
102 | ||
// Scans a single log file backwards (newest line first) and collects the
// JSON entries matching the requested window/level/tags, honouring the
// global size budget carried in options.currentSize.
async function getOutputFromFile (options: {
  path: string
  startDate: Date
  endDate: Date
  level: LogLevel
  currentSize: number
  tagsOneOf: Set<string>
}) {
  const { path, startDate, endDate, level, tagsOneOf } = options

  const startTime = startDate.getTime()
  const endTime = endDate.getTime()
  let currentSize = options.currentSize

  // Timestamp of the last successfully parsed line; returned so the caller
  // can stop walking older files once it goes before the start date
  let logTime: number

  // Relative severity ranking; 'audit' sits below 'debug' so an 'audit'
  // query matches every entry found in audit files
  const logsLevel: { [ id in LogLevel ]: number } = {
    audit: -1,
    debug: 0,
    info: 1,
    warn: 2,
    error: 3
  }

  const content = await readFile(path)
  const lines = content.toString().split('\n')
  const output: any[] = []

  // Iterate from the end: lines are appended chronologically, so the newest
  // entries come first when reading in reverse
  for (let i = lines.length - 1; i >= 0; i--) {
    const line = lines[i]
    let log: any

    try {
      log = JSON.parse(line)
    } catch {
      // Maybe there are multiple \n at the end of the file
      continue
    }

    logTime = new Date(log.timestamp).getTime()
    if (
      logTime >= startTime &&
      logTime <= endTime &&
      logsLevel[log.level] >= logsLevel[level] &&
      (!tagsOneOf || lineHasTag(log, tagsOneOf))
    ) {
      output.push(log)

      currentSize += line.length

      // The size budget is shared across files: stop as soon as it overflows
      if (currentSize > MAX_LOGS_OUTPUT_CHARACTERS) break
    } else if (logTime < startTime) {
      // Older than the window: every remaining (older) line is out too
      break
    }
  }

  // Reverse so this file's entries are returned in chronological order
  return { currentSize, output: output.reverse(), logTime }
}
161 | ||
162 | function lineHasTag (line: { tags?: string }, tagsOneOf: Set<string>) { | |
163 | if (!isArray(line.tags)) return false | |
164 | ||
165 | for (const lineTag of line.tags) { | |
166 | if (tagsOneOf.has(lineTag)) return true | |
167 | } | |
168 | ||
169 | return false | |
170 | } | |
171 | ||
172 | function generateLogNameFilter (baseName: string) { | |
173 | return new RegExp('^' + baseName.replace(/\.log$/, '') + '\\d*.log$') | |
174 | } |