// NOTE(review): removed blame-table export artifacts ("Commit | Line | Data" header)
// that corrupted this file; the per-line "N | ... | |" wrappers are stripped below.
import * as ffmpeg from 'fluent-ffmpeg'
import { readFile, remove, writeFile } from 'fs-extra'
import { dirname, join } from 'path'
import { getTargetBitrate, VideoResolution } from '../../shared/models/videos'
import { checkFFmpegEncoders } from '../initializers/checker-before-init'
import { CONFIG } from '../initializers/config'
import { FFMPEG_NICE, VIDEO_TRANSCODING_FPS } from '../initializers/constants'
import { processImage } from './image-utils'
import { logger } from './logger'
10 | ||
11 | function computeResolutionsToTranscode (videoFileHeight: number) { | |
12 | const resolutionsEnabled: number[] = [] | |
13 | const configResolutions = CONFIG.TRANSCODING.RESOLUTIONS | |
14 | ||
15 | // Put in the order we want to proceed jobs | |
16 | const resolutions = [ | |
17 | VideoResolution.H_480P, | |
18 | VideoResolution.H_360P, | |
19 | VideoResolution.H_720P, | |
20 | VideoResolution.H_240P, | |
21 | VideoResolution.H_1080P | |
22 | ] | |
23 | ||
24 | for (const resolution of resolutions) { | |
25 | if (configResolutions[ resolution + 'p' ] === true && videoFileHeight > resolution) { | |
26 | resolutionsEnabled.push(resolution) | |
27 | } | |
28 | } | |
29 | ||
30 | return resolutionsEnabled | |
31 | } | |
32 | ||
33 | async function getVideoFileSize (path: string) { | |
34 | const videoStream = await getVideoFileStream(path) | |
35 | ||
36 | return { | |
37 | width: videoStream.width, | |
38 | height: videoStream.height | |
39 | } | |
40 | } | |
41 | ||
42 | async function getVideoFileResolution (path: string) { | |
43 | const size = await getVideoFileSize(path) | |
44 | ||
45 | return { | |
46 | videoFileResolution: Math.min(size.height, size.width), | |
47 | isPortraitMode: size.height > size.width | |
48 | } | |
49 | } | |
50 | ||
51 | async function getVideoFileFPS (path: string) { | |
52 | const videoStream = await getVideoFileStream(path) | |
53 | ||
54 | for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) { | |
55 | const valuesText: string = videoStream[key] | |
56 | if (!valuesText) continue | |
57 | ||
58 | const [ frames, seconds ] = valuesText.split('/') | |
59 | if (!frames || !seconds) continue | |
60 | ||
61 | const result = parseInt(frames, 10) / parseInt(seconds, 10) | |
62 | if (result > 0) return Math.round(result) | |
63 | } | |
64 | ||
65 | return 0 | |
66 | } | |
67 | ||
68 | async function getVideoFileBitrate (path: string) { | |
69 | return new Promise<number>((res, rej) => { | |
70 | ffmpeg.ffprobe(path, (err, metadata) => { | |
71 | if (err) return rej(err) | |
72 | ||
73 | return res(metadata.format.bit_rate) | |
74 | }) | |
75 | }) | |
76 | } | |
77 | ||
78 | function getDurationFromVideoFile (path: string) { | |
79 | return new Promise<number>((res, rej) => { | |
80 | ffmpeg.ffprobe(path, (err, metadata) => { | |
81 | if (err) return rej(err) | |
82 | ||
83 | return res(Math.floor(metadata.format.duration)) | |
84 | }) | |
85 | }) | |
86 | } | |
87 | ||
88 | async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) { | |
89 | const pendingImageName = 'pending-' + imageName | |
90 | ||
91 | const options = { | |
92 | filename: pendingImageName, | |
93 | count: 1, | |
94 | folder | |
95 | } | |
96 | ||
97 | const pendingImagePath = join(folder, pendingImageName) | |
98 | ||
99 | try { | |
100 | await new Promise<string>((res, rej) => { | |
101 | ffmpeg(fromPath, { niceness: FFMPEG_NICE.THUMBNAIL }) | |
102 | .on('error', rej) | |
103 | .on('end', () => res(imageName)) | |
104 | .thumbnail(options) | |
105 | }) | |
106 | ||
107 | const destination = join(folder, imageName) | |
108 | await processImage(pendingImagePath, destination, size) | |
109 | } catch (err) { | |
110 | logger.error('Cannot generate image from video %s.', fromPath, { err }) | |
111 | ||
112 | try { | |
113 | await remove(pendingImagePath) | |
114 | } catch (err) { | |
115 | logger.debug('Cannot remove pending image path after generation error.', { err }) | |
116 | } | |
117 | } | |
118 | } | |
119 | ||
/**
 * Options accepted by {@link transcode}.
 */
type TranscodeOptions = {
  inputPath: string
  outputPath: string
  resolution: VideoResolution
  // When true, the resize size is built as '<resolution>x?' instead of '?x<resolution>'
  isPortraitMode?: boolean

  // When set, produce an HLS playlist (stream copy into a single fmp4 segment)
  // instead of an x264 mp4
  hlsPlaylist?: {
    videoFilename: string
  }
}
130 | ||
131 | function transcode (options: TranscodeOptions) { | |
132 | return new Promise<void>(async (res, rej) => { | |
133 | try { | |
134 | let command = ffmpeg(options.inputPath, { niceness: FFMPEG_NICE.TRANSCODING }) | |
135 | .output(options.outputPath) | |
136 | ||
137 | if (options.hlsPlaylist) { | |
138 | command = await buildHLSCommand(command, options) | |
139 | } else { | |
140 | command = await buildx264Command(command, options) | |
141 | } | |
142 | ||
143 | if (CONFIG.TRANSCODING.THREADS > 0) { | |
144 | // if we don't set any threads ffmpeg will chose automatically | |
145 | command = command.outputOption('-threads ' + CONFIG.TRANSCODING.THREADS) | |
146 | } | |
147 | ||
148 | command | |
149 | .on('error', (err, stdout, stderr) => { | |
150 | logger.error('Error in transcoding job.', { stdout, stderr }) | |
151 | return rej(err) | |
152 | }) | |
153 | .on('end', () => { | |
154 | return onTranscodingSuccess(options) | |
155 | .then(() => res()) | |
156 | .catch(err => rej(err)) | |
157 | }) | |
158 | .run() | |
159 | } catch (err) { | |
160 | return rej(err) | |
161 | } | |
162 | }) | |
163 | } | |
164 | ||
165 | // --------------------------------------------------------------------------- | |
166 | ||
167 | export { | |
168 | getVideoFileSize, | |
169 | getVideoFileResolution, | |
170 | getDurationFromVideoFile, | |
171 | generateImageFromVideoFile, | |
172 | transcode, | |
173 | getVideoFileFPS, | |
174 | computeResolutionsToTranscode, | |
175 | audio, | |
176 | getVideoFileBitrate | |
177 | } | |
178 | ||
179 | // --------------------------------------------------------------------------- | |
180 | ||
181 | async function buildx264Command (command: ffmpeg.FfmpegCommand, options: TranscodeOptions) { | |
182 | let fps = await getVideoFileFPS(options.inputPath) | |
183 | // On small/medium resolutions, limit FPS | |
184 | if ( | |
185 | options.resolution !== undefined && | |
186 | options.resolution < VIDEO_TRANSCODING_FPS.KEEP_ORIGIN_FPS_RESOLUTION_MIN && | |
187 | fps > VIDEO_TRANSCODING_FPS.AVERAGE | |
188 | ) { | |
189 | fps = VIDEO_TRANSCODING_FPS.AVERAGE | |
190 | } | |
191 | ||
192 | command = await presetH264(command, options.resolution, fps) | |
193 | ||
194 | if (options.resolution !== undefined) { | |
195 | // '?x720' or '720x?' for example | |
196 | const size = options.isPortraitMode === true ? `${options.resolution}x?` : `?x${options.resolution}` | |
197 | command = command.size(size) | |
198 | } | |
199 | ||
200 | if (fps) { | |
201 | // Hard FPS limits | |
202 | if (fps > VIDEO_TRANSCODING_FPS.MAX) fps = VIDEO_TRANSCODING_FPS.MAX | |
203 | else if (fps < VIDEO_TRANSCODING_FPS.MIN) fps = VIDEO_TRANSCODING_FPS.MIN | |
204 | ||
205 | command = command.withFPS(fps) | |
206 | } | |
207 | ||
208 | return command | |
209 | } | |
210 | ||
211 | async function buildHLSCommand (command: ffmpeg.FfmpegCommand, options: TranscodeOptions) { | |
212 | const videoPath = getHLSVideoPath(options) | |
213 | ||
214 | command = await presetCopy(command) | |
215 | ||
216 | command = command.outputOption('-hls_time 4') | |
217 | .outputOption('-hls_list_size 0') | |
218 | .outputOption('-hls_playlist_type vod') | |
219 | .outputOption('-hls_segment_filename ' + videoPath) | |
220 | .outputOption('-hls_segment_type fmp4') | |
221 | .outputOption('-f hls') | |
222 | .outputOption('-hls_flags single_file') | |
223 | ||
224 | return command | |
225 | } | |
226 | ||
227 | function getHLSVideoPath (options: TranscodeOptions) { | |
228 | return `${dirname(options.outputPath)}/${options.hlsPlaylist.videoFilename}` | |
229 | } | |
230 | ||
231 | async function onTranscodingSuccess (options: TranscodeOptions) { | |
232 | if (!options.hlsPlaylist) return | |
233 | ||
234 | // Fix wrong mapping with some ffmpeg versions | |
235 | const fileContent = await readFile(options.outputPath) | |
236 | ||
237 | const videoFileName = options.hlsPlaylist.videoFilename | |
238 | const videoFilePath = getHLSVideoPath(options) | |
239 | ||
240 | const newContent = fileContent.toString() | |
241 | .replace(`#EXT-X-MAP:URI="${videoFilePath}",`, `#EXT-X-MAP:URI="${videoFileName}",`) | |
242 | ||
243 | await writeFile(options.outputPath, newContent) | |
244 | } | |
245 | ||
246 | function getVideoFileStream (path: string) { | |
247 | return new Promise<any>((res, rej) => { | |
248 | ffmpeg.ffprobe(path, (err, metadata) => { | |
249 | if (err) return rej(err) | |
250 | ||
251 | const videoStream = metadata.streams.find(s => s.codec_type === 'video') | |
252 | if (!videoStream) return rej(new Error('Cannot find video stream of ' + path)) | |
253 | ||
254 | return res(videoStream) | |
255 | }) | |
256 | }) | |
257 | } | |
258 | ||
/**
 * A slightly customised version of the 'veryfast' x264 preset
 *
 * The veryfast preset is right in the sweet spot of performance
 * and quality. Superfast and ultrafast will give you better
 * performance, but then quality is noticeably worse.
 */
async function presetH264VeryFast (command: ffmpeg.FfmpegCommand, resolution: VideoResolution, fps: number): Promise<ffmpeg.FfmpegCommand> {
  let localCommand = await presetH264(command, resolution, fps)
  // NOTE(review): '--aq-mode'/'--aq-strength' are x264 CLI spellings; ffmpeg
  // usually takes these via '-x264-params aq-mode=2:aq-strength=1.3'. Verify
  // ffmpeg actually honours these options before relying on them.
  localCommand = localCommand.outputOption('-preset:v veryfast')
    .outputOption([ '--aq-mode=2', '--aq-strength=1.3' ])
  /*
    MAIN reference: https://slhck.info/video/2017/03/01/rate-control.html
    Our target situation is closer to a livestream than a stream,
    since we want to reduce as much as possible the encoding burden,
    although not to the point of a livestream where there is a hard
    constraint on the frames per second to be encoded.

    why '--aq-mode=2 --aq-strength=1.3' instead of '-profile:v main'?
    Make up for most of the loss of grain and macroblocking
    with less computing power.
  */

  return localCommand
}
284 | ||
285 | /** | |
286 | * A preset optimised for a stillimage audio video | |
287 | */ | |
288 | async function presetStillImageWithAudio ( | |
289 | command: ffmpeg.FfmpegCommand, | |
290 | resolution: VideoResolution, | |
291 | fps: number | |
292 | ): Promise<ffmpeg.FfmpegCommand> { | |
293 | let localCommand = await presetH264VeryFast(command, resolution, fps) | |
294 | localCommand = localCommand.outputOption('-tune stillimage') | |
295 | ||
296 | return localCommand | |
297 | } | |
298 | ||
/**
 * A toolbox to play with audio
 */
namespace audio {
  /**
   * Probe `option` (a file path or an ffmpeg command) and resolve with the
   * probed file's absolute path plus its first audio stream, if any.
   * Rejects only when ffprobe itself fails.
   */
  export const get = (option: ffmpeg.FfmpegCommand | string) => {
    // without position, ffprobe considers the last input only
    // we make it consider the first input only
    // if you pass a file path to pos, then ffprobe acts on that file directly
    return new Promise<{ absolutePath: string, audioStream?: any }>((res, rej) => {

      function parseFfprobe (err: any, data: ffmpeg.FfprobeData) {
        if (err) return rej(err)

        if ('streams' in data) {
          const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio')
          if (audioStream) {
            return res({
              absolutePath: data.format.filename,
              audioStream
            })
          }
        }

        // No audio stream found: resolve without one
        return res({ absolutePath: data.format.filename })
      }

      if (typeof option === 'string') {
        return ffmpeg.ffprobe(option, parseFfprobe)
      }

      return option.ffprobe(parseFfprobe)
    })
  }

  /**
   * Output kbit/s ceilings per input audio codec, keyed by ffprobe
   * 'codec_name' (see the lookup in presetH264).
   */
  export namespace bitrate {
    // Ceiling used for AAC output, in kbit/s
    const baseKbitrate = 384

    // NOTE(review): multiplying by 8000 converts kBytes to bits; kbit -> bit
    // would be * 1000. As written, the thresholds below are 8x higher than
    // their names suggest (callers compare against ffprobe 'bit_rate', which
    // is in bit/s) — confirm intent before changing.
    const toBits = (kbits: number): number => { return kbits * 8000 }

    export const aac = (bitrate: number): number => {
      switch (true) {
        case bitrate > toBits(baseKbitrate):
          return baseKbitrate
        default:
          return -1 // we interpret it as a signal to copy the audio stream as is
      }
    }

    export const mp3 = (bitrate: number): number => {
      /*
        a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac.
        That's why, when using aac, we can go to lower kbit/sec. The equivalences
        made here are not made to be accurate, especially with good mp3 encoders.
      */
      switch (true) {
        case bitrate <= toBits(192):
          return 128
        case bitrate <= toBits(384):
          return 256
        default:
          return baseKbitrate
      }
    }
  }
}
364 | ||
/**
 * Standard profile, with variable bitrate audio and faststart.
 *
 * As for the audio, quality '5' is the highest and ensures 96-112kbps/channel
 * See https://trac.ffmpeg.org/wiki/Encode/AAC#fdk_vbr
 */
async function presetH264 (command: ffmpeg.FfmpegCommand, resolution: VideoResolution, fps: number): Promise<ffmpeg.FfmpegCommand> {
  let localCommand = command
    .format('mp4')
    .videoCodec('libx264')
    .outputOption('-level 3.1') // 3.1 is the minimal ressource allocation for our highest supported resolution
    .outputOption('-b_strategy 1') // NOTE: b-strategy 1 - heuristic algorythm, 16 is optimal B-frames for it
    .outputOption('-bf 16') // NOTE: Why 16: https://github.com/Chocobozzz/PeerTube/pull/774. b-strategy 2 -> B-frames<16
    .outputOption('-pix_fmt yuv420p') // allows import of source material with incompatible pixel formats (e.g. MJPEG video)
    .outputOption('-map_metadata -1') // strip all metadata
    .outputOption('-movflags faststart')

  // Probe the input for an audio stream (ffprobe on the command's first input)
  const parsedAudio = await audio.get(localCommand)

  if (!parsedAudio.audioStream) {
    localCommand = localCommand.noAudio()
  } else if ((await checkFFmpegEncoders()).get('libfdk_aac')) { // we favor VBR, if a good AAC encoder is available
    localCommand = localCommand
      .audioCodec('libfdk_aac')
      .audioQuality(5)
  } else {
    // we try to reduce the ceiling bitrate by making rough correspondances of bitrates
    // of course this is far from perfect, but it might save some space in the end

    // Look up a ceiling function by the stream's codec name (see audio.bitrate);
    // unknown codecs fall through with the audio settings untouched
    const audioCodecName = parsedAudio.audioStream[ 'codec_name' ]
    let bitrate: number
    if (audio.bitrate[ audioCodecName ]) {
      localCommand = localCommand.audioCodec('aac')

      bitrate = audio.bitrate[ audioCodecName ](parsedAudio.audioStream[ 'bit_rate' ])
      // -1 is the "copy the audio stream as is" signal from audio.bitrate.aac
      if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate)
    }
  }

  // Constrained Encoding (VBV)
  // https://slhck.info/video/2017/03/01/rate-control.html
  // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate
  const targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)
  localCommand = localCommand.outputOptions([`-maxrate ${ targetBitrate }`, `-bufsize ${ targetBitrate * 2 }`])

  // Keyframe interval of 2 seconds for faster seeking and resolution switching.
  // https://streaminglearningcenter.com/blogs/whats-the-right-keyframe-interval.html
  // https://superuser.com/a/908325
  localCommand = localCommand.outputOption(`-g ${ fps * 2 }`)

  return localCommand
}
416 | ||
417 | async function presetCopy (command: ffmpeg.FfmpegCommand): Promise<ffmpeg.FfmpegCommand> { | |
418 | return command | |
419 | .format('mp4') | |
420 | .videoCodec('copy') | |
421 | .audioCodec('copy') | |
422 | } |