]>
Commit | Line | Data |
---|---|---|
1 | import * as ffmpeg from 'fluent-ffmpeg' | |
2 | import { dirname, join } from 'path' | |
3 | import { getMaxBitrate, getTargetBitrate, VideoResolution } from '../../shared/models/videos' | |
4 | import { FFMPEG_NICE, VIDEO_TRANSCODING_FPS } from '../initializers/constants' | |
5 | import { processImage } from './image-utils' | |
6 | import { logger } from './logger' | |
7 | import { checkFFmpegEncoders } from '../initializers/checker-before-init' | |
8 | import { readFile, remove, writeFile } from 'fs-extra' | |
9 | import { CONFIG } from '../initializers/config' | |
10 | ||
11 | /** | |
12 | * A toolbox to play with audio | |
13 | */ | |
14 | namespace audio { | |
15 | export const get = (videoPath: string) => { | |
16 | // without position, ffprobe considers the last input only | |
17 | // we make it consider the first input only | |
18 | // if you pass a file path to pos, then ffprobe acts on that file directly | |
19 | return new Promise<{ absolutePath: string, audioStream?: any }>((res, rej) => { | |
20 | ||
21 | function parseFfprobe (err: any, data: ffmpeg.FfprobeData) { | |
22 | if (err) return rej(err) | |
23 | ||
24 | if ('streams' in data) { | |
25 | const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio') | |
26 | if (audioStream) { | |
27 | return res({ | |
28 | absolutePath: data.format.filename, | |
29 | audioStream | |
30 | }) | |
31 | } | |
32 | } | |
33 | ||
34 | return res({ absolutePath: data.format.filename }) | |
35 | } | |
36 | ||
37 | return ffmpeg.ffprobe(videoPath, parseFfprobe) | |
38 | }) | |
39 | } | |
40 | ||
41 | export namespace bitrate { | |
42 | const baseKbitrate = 384 | |
43 | ||
44 | const toBits = (kbits: number) => kbits * 8000 | |
45 | ||
46 | export const aac = (bitrate: number): number => { | |
47 | switch (true) { | |
48 | case bitrate > toBits(baseKbitrate): | |
49 | return baseKbitrate | |
50 | ||
51 | default: | |
52 | return -1 // we interpret it as a signal to copy the audio stream as is | |
53 | } | |
54 | } | |
55 | ||
56 | export const mp3 = (bitrate: number): number => { | |
57 | /* | |
58 | a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac. | |
59 | That's why, when using aac, we can go to lower kbit/sec. The equivalences | |
60 | made here are not made to be accurate, especially with good mp3 encoders. | |
61 | */ | |
62 | switch (true) { | |
63 | case bitrate <= toBits(192): | |
64 | return 128 | |
65 | ||
66 | case bitrate <= toBits(384): | |
67 | return 256 | |
68 | ||
69 | default: | |
70 | return baseKbitrate | |
71 | } | |
72 | } | |
73 | } | |
74 | } | |
75 | ||
76 | function computeResolutionsToTranscode (videoFileHeight: number) { | |
77 | const resolutionsEnabled: number[] = [] | |
78 | const configResolutions = CONFIG.TRANSCODING.RESOLUTIONS | |
79 | ||
80 | // Put in the order we want to proceed jobs | |
81 | const resolutions = [ | |
82 | VideoResolution.H_NOVIDEO, | |
83 | VideoResolution.H_480P, | |
84 | VideoResolution.H_360P, | |
85 | VideoResolution.H_720P, | |
86 | VideoResolution.H_240P, | |
87 | VideoResolution.H_1080P, | |
88 | VideoResolution.H_4K | |
89 | ] | |
90 | ||
91 | for (const resolution of resolutions) { | |
92 | if (configResolutions[resolution + 'p'] === true && videoFileHeight > resolution) { | |
93 | resolutionsEnabled.push(resolution) | |
94 | } | |
95 | } | |
96 | ||
97 | return resolutionsEnabled | |
98 | } | |
99 | ||
100 | async function getVideoStreamSize (path: string) { | |
101 | const videoStream = await getVideoStreamFromFile(path) | |
102 | ||
103 | return videoStream === null | |
104 | ? { width: 0, height: 0 } | |
105 | : { width: videoStream.width, height: videoStream.height } | |
106 | } | |
107 | ||
108 | async function getVideoStreamCodec (path: string) { | |
109 | const videoStream = await getVideoStreamFromFile(path) | |
110 | ||
111 | if (!videoStream) return '' | |
112 | ||
113 | const videoCodec = videoStream.codec_tag_string | |
114 | ||
115 | const baseProfileMatrix = { | |
116 | High: '6400', | |
117 | Main: '4D40', | |
118 | Baseline: '42E0' | |
119 | } | |
120 | ||
121 | let baseProfile = baseProfileMatrix[videoStream.profile] | |
122 | if (!baseProfile) { | |
123 | logger.warn('Cannot get video profile codec of %s.', path, { videoStream }) | |
124 | baseProfile = baseProfileMatrix['High'] // Fallback | |
125 | } | |
126 | ||
127 | const level = videoStream.level.toString(16) | |
128 | ||
129 | return `${videoCodec}.${baseProfile}${level}` | |
130 | } | |
131 | ||
132 | async function getAudioStreamCodec (path: string) { | |
133 | const { audioStream } = await audio.get(path) | |
134 | ||
135 | if (!audioStream) return '' | |
136 | ||
137 | const audioCodec = audioStream.codec_name | |
138 | if (audioCodec === 'aac') return 'mp4a.40.2' | |
139 | ||
140 | logger.warn('Cannot get audio codec of %s.', path, { audioStream }) | |
141 | ||
142 | return 'mp4a.40.2' // Fallback | |
143 | } | |
144 | ||
145 | async function getVideoFileResolution (path: string) { | |
146 | const size = await getVideoStreamSize(path) | |
147 | ||
148 | return { | |
149 | videoFileResolution: Math.min(size.height, size.width), | |
150 | isPortraitMode: size.height > size.width | |
151 | } | |
152 | } | |
153 | ||
154 | async function getVideoFileFPS (path: string) { | |
155 | const videoStream = await getVideoStreamFromFile(path) | |
156 | if (videoStream === null) return 0 | |
157 | ||
158 | for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) { | |
159 | const valuesText: string = videoStream[key] | |
160 | if (!valuesText) continue | |
161 | ||
162 | const [ frames, seconds ] = valuesText.split('/') | |
163 | if (!frames || !seconds) continue | |
164 | ||
165 | const result = parseInt(frames, 10) / parseInt(seconds, 10) | |
166 | if (result > 0) return Math.round(result) | |
167 | } | |
168 | ||
169 | return 0 | |
170 | } | |
171 | ||
172 | async function getVideoFileBitrate (path: string) { | |
173 | return new Promise<number>((res, rej) => { | |
174 | ffmpeg.ffprobe(path, (err, metadata) => { | |
175 | if (err) return rej(err) | |
176 | ||
177 | return res(metadata.format.bit_rate) | |
178 | }) | |
179 | }) | |
180 | } | |
181 | ||
182 | function getDurationFromVideoFile (path: string) { | |
183 | return new Promise<number>((res, rej) => { | |
184 | ffmpeg.ffprobe(path, (err, metadata) => { | |
185 | if (err) return rej(err) | |
186 | ||
187 | return res(Math.floor(metadata.format.duration)) | |
188 | }) | |
189 | }) | |
190 | } | |
191 | ||
192 | async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) { | |
193 | const pendingImageName = 'pending-' + imageName | |
194 | ||
195 | const options = { | |
196 | filename: pendingImageName, | |
197 | count: 1, | |
198 | folder | |
199 | } | |
200 | ||
201 | const pendingImagePath = join(folder, pendingImageName) | |
202 | ||
203 | try { | |
204 | await new Promise<string>((res, rej) => { | |
205 | ffmpeg(fromPath, { niceness: FFMPEG_NICE.THUMBNAIL }) | |
206 | .on('error', rej) | |
207 | .on('end', () => res(imageName)) | |
208 | .thumbnail(options) | |
209 | }) | |
210 | ||
211 | const destination = join(folder, imageName) | |
212 | await processImage(pendingImagePath, destination, size) | |
213 | } catch (err) { | |
214 | logger.error('Cannot generate image from video %s.', fromPath, { err }) | |
215 | ||
216 | try { | |
217 | await remove(pendingImagePath) | |
218 | } catch (err) { | |
219 | logger.debug('Cannot remove pending image path after generation error.', { err }) | |
220 | } | |
221 | } | |
222 | } | |
223 | ||
// The transcoding flavours supported by transcode() below
type TranscodeOptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio'

// Fields shared by every transcoding mode
interface BaseTranscodeOptions {
  type: TranscodeOptionsType
  inputPath: string
  outputPath: string
  resolution: VideoResolution
  isPortraitMode?: boolean
}

// 'hls': writes a VOD HLS playlist at outputPath plus a single fMP4
// segment file (see buildHLSCommand)
interface HLSTranscodeOptions extends BaseTranscodeOptions {
  type: 'hls'
  copyCodecs: boolean
  hlsPlaylist: {
    videoFilename: string
  }
}

// 'quick-transcode': copies both codecs, only strips metadata and adds faststart
interface QuickTranscodeOptions extends BaseTranscodeOptions {
  type: 'quick-transcode'
}

// 'video': full x264 re-encode (see buildx264Command)
interface VideoTranscodeOptions extends BaseTranscodeOptions {
  type: 'video'
}

// 'merge-audio': builds a video from a looped still image + the audio file
interface MergeAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'merge-audio'
  audioPath: string
}

// 'only-audio': keeps the audio stream only, drops the video
interface OnlyAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'only-audio'
}

// Discriminated union on the 'type' field
type TranscodeOptions =
  HLSTranscodeOptions
  | VideoTranscodeOptions
  | MergeAudioTranscodeOptions
  | OnlyAudioTranscodeOptions
  | QuickTranscodeOptions
265 | ||
266 | function transcode (options: TranscodeOptions) { | |
267 | return new Promise<void>(async (res, rej) => { | |
268 | try { | |
269 | let command = ffmpeg(options.inputPath, { niceness: FFMPEG_NICE.TRANSCODING }) | |
270 | .output(options.outputPath) | |
271 | ||
272 | if (options.type === 'quick-transcode') { | |
273 | command = buildQuickTranscodeCommand(command) | |
274 | } else if (options.type === 'hls') { | |
275 | command = await buildHLSCommand(command, options) | |
276 | } else if (options.type === 'merge-audio') { | |
277 | command = await buildAudioMergeCommand(command, options) | |
278 | } else if (options.type === 'only-audio') { | |
279 | command = buildOnlyAudioCommand(command, options) | |
280 | } else { | |
281 | command = await buildx264Command(command, options) | |
282 | } | |
283 | ||
284 | if (CONFIG.TRANSCODING.THREADS > 0) { | |
285 | // if we don't set any threads ffmpeg will chose automatically | |
286 | command = command.outputOption('-threads ' + CONFIG.TRANSCODING.THREADS) | |
287 | } | |
288 | ||
289 | command | |
290 | .on('error', (err, stdout, stderr) => { | |
291 | logger.error('Error in transcoding job.', { stdout, stderr }) | |
292 | return rej(err) | |
293 | }) | |
294 | .on('end', () => { | |
295 | return fixHLSPlaylistIfNeeded(options) | |
296 | .then(() => res()) | |
297 | .catch(err => rej(err)) | |
298 | }) | |
299 | .run() | |
300 | } catch (err) { | |
301 | return rej(err) | |
302 | } | |
303 | }) | |
304 | } | |
305 | ||
306 | async function canDoQuickTranscode (path: string): Promise<boolean> { | |
307 | // NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway) | |
308 | const videoStream = await getVideoStreamFromFile(path) | |
309 | const parsedAudio = await audio.get(path) | |
310 | const fps = await getVideoFileFPS(path) | |
311 | const bitRate = await getVideoFileBitrate(path) | |
312 | const resolution = await getVideoFileResolution(path) | |
313 | ||
314 | // check video params | |
315 | if (videoStream == null) return false | |
316 | if (videoStream['codec_name'] !== 'h264') return false | |
317 | if (videoStream['pix_fmt'] !== 'yuv420p') return false | |
318 | if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false | |
319 | if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false | |
320 | ||
321 | // check audio params (if audio stream exists) | |
322 | if (parsedAudio.audioStream) { | |
323 | if (parsedAudio.audioStream['codec_name'] !== 'aac') return false | |
324 | ||
325 | const maxAudioBitrate = audio.bitrate['aac'](parsedAudio.audioStream['bit_rate']) | |
326 | if (maxAudioBitrate !== -1 && parsedAudio.audioStream['bit_rate'] > maxAudioBitrate) return false | |
327 | } | |
328 | ||
329 | return true | |
330 | } | |
331 | ||
332 | function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number { | |
333 | return VIDEO_TRANSCODING_FPS[type].slice(0) | |
334 | .sort((a, b) => fps % a - fps % b)[0] | |
335 | } | |
336 | ||
337 | // --------------------------------------------------------------------------- | |
338 | ||
// Public API of this module
export {
  getVideoStreamCodec,
  getAudioStreamCodec,
  getVideoStreamSize,
  getVideoFileResolution,
  getDurationFromVideoFile,
  generateImageFromVideoFile,
  TranscodeOptions,
  TranscodeOptionsType,
  transcode,
  getVideoFileFPS,
  computeResolutionsToTranscode,
  audio,
  getVideoFileBitrate,
  canDoQuickTranscode
}
355 | ||
356 | // --------------------------------------------------------------------------- | |
357 | ||
358 | async function buildx264Command (command: ffmpeg.FfmpegCommand, options: TranscodeOptions) { | |
359 | let fps = await getVideoFileFPS(options.inputPath) | |
360 | if ( | |
361 | // On small/medium resolutions, limit FPS | |
362 | options.resolution !== undefined && | |
363 | options.resolution < VIDEO_TRANSCODING_FPS.KEEP_ORIGIN_FPS_RESOLUTION_MIN && | |
364 | fps > VIDEO_TRANSCODING_FPS.AVERAGE | |
365 | ) { | |
366 | // Get closest standard framerate by modulo: downsampling has to be done to a divisor of the nominal fps value | |
367 | fps = getClosestFramerateStandard(fps, 'STANDARD') | |
368 | } | |
369 | ||
370 | command = await presetH264(command, options.inputPath, options.resolution, fps) | |
371 | ||
372 | if (options.resolution !== undefined) { | |
373 | // '?x720' or '720x?' for example | |
374 | const size = options.isPortraitMode === true ? `${options.resolution}x?` : `?x${options.resolution}` | |
375 | command = command.size(size) | |
376 | } | |
377 | ||
378 | if (fps) { | |
379 | // Hard FPS limits | |
380 | if (fps > VIDEO_TRANSCODING_FPS.MAX) fps = getClosestFramerateStandard(fps, 'HD_STANDARD') | |
381 | else if (fps < VIDEO_TRANSCODING_FPS.MIN) fps = VIDEO_TRANSCODING_FPS.MIN | |
382 | ||
383 | command = command.withFPS(fps) | |
384 | } | |
385 | ||
386 | return command | |
387 | } | |
388 | ||
389 | async function buildAudioMergeCommand (command: ffmpeg.FfmpegCommand, options: MergeAudioTranscodeOptions) { | |
390 | command = command.loop(undefined) | |
391 | ||
392 | command = await presetH264VeryFast(command, options.audioPath, options.resolution) | |
393 | ||
394 | command = command.input(options.audioPath) | |
395 | .videoFilter('scale=trunc(iw/2)*2:trunc(ih/2)*2') // Avoid "height not divisible by 2" error | |
396 | .outputOption('-tune stillimage') | |
397 | .outputOption('-shortest') | |
398 | ||
399 | return command | |
400 | } | |
401 | ||
402 | function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, options: OnlyAudioTranscodeOptions) { | |
403 | command = presetOnlyAudio(command) | |
404 | ||
405 | return command | |
406 | } | |
407 | ||
408 | function buildQuickTranscodeCommand (command: ffmpeg.FfmpegCommand) { | |
409 | command = presetCopy(command) | |
410 | ||
411 | command = command.outputOption('-map_metadata -1') // strip all metadata | |
412 | .outputOption('-movflags faststart') | |
413 | ||
414 | return command | |
415 | } | |
416 | ||
417 | async function buildHLSCommand (command: ffmpeg.FfmpegCommand, options: HLSTranscodeOptions) { | |
418 | const videoPath = getHLSVideoPath(options) | |
419 | ||
420 | if (options.copyCodecs) command = presetCopy(command) | |
421 | else command = await buildx264Command(command, options) | |
422 | ||
423 | command = command.outputOption('-hls_time 4') | |
424 | .outputOption('-hls_list_size 0') | |
425 | .outputOption('-hls_playlist_type vod') | |
426 | .outputOption('-hls_segment_filename ' + videoPath) | |
427 | .outputOption('-hls_segment_type fmp4') | |
428 | .outputOption('-f hls') | |
429 | .outputOption('-hls_flags single_file') | |
430 | ||
431 | return command | |
432 | } | |
433 | ||
434 | function getHLSVideoPath (options: HLSTranscodeOptions) { | |
435 | return `${dirname(options.outputPath)}/${options.hlsPlaylist.videoFilename}` | |
436 | } | |
437 | ||
438 | async function fixHLSPlaylistIfNeeded (options: TranscodeOptions) { | |
439 | if (options.type !== 'hls') return | |
440 | ||
441 | const fileContent = await readFile(options.outputPath) | |
442 | ||
443 | const videoFileName = options.hlsPlaylist.videoFilename | |
444 | const videoFilePath = getHLSVideoPath(options) | |
445 | ||
446 | // Fix wrong mapping with some ffmpeg versions | |
447 | const newContent = fileContent.toString() | |
448 | .replace(`#EXT-X-MAP:URI="${videoFilePath}",`, `#EXT-X-MAP:URI="${videoFileName}",`) | |
449 | ||
450 | await writeFile(options.outputPath, newContent) | |
451 | } | |
452 | ||
453 | function getVideoStreamFromFile (path: string) { | |
454 | return new Promise<any>((res, rej) => { | |
455 | ffmpeg.ffprobe(path, (err, metadata) => { | |
456 | if (err) return rej(err) | |
457 | ||
458 | const videoStream = metadata.streams.find(s => s.codec_type === 'video') | |
459 | return res(videoStream || null) | |
460 | }) | |
461 | }) | |
462 | } | |
463 | ||
464 | /** | |
465 | * A slightly customised version of the 'veryfast' x264 preset | |
466 | * | |
467 | * The veryfast preset is right in the sweet spot of performance | |
468 | * and quality. Superfast and ultrafast will give you better | |
469 | * performance, but then quality is noticeably worse. | |
470 | */ | |
471 | async function presetH264VeryFast (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) { | |
472 | let localCommand = await presetH264(command, input, resolution, fps) | |
473 | ||
474 | localCommand = localCommand.outputOption('-preset:v veryfast') | |
475 | ||
476 | /* | |
477 | MAIN reference: https://slhck.info/video/2017/03/01/rate-control.html | |
478 | Our target situation is closer to a livestream than a stream, | |
479 | since we want to reduce as much a possible the encoding burden, | |
480 | although not to the point of a livestream where there is a hard | |
481 | constraint on the frames per second to be encoded. | |
482 | */ | |
483 | ||
484 | return localCommand | |
485 | } | |
486 | ||
/**
 * Standard profile, with variable bitrate audio and faststart.
 *
 * As for the audio, quality '5' is the highest and ensures 96-112kbps/channel
 * See https://trac.ffmpeg.org/wiki/Encode/AAC#fdk_vbr
 *
 * @param command fluent-ffmpeg command to extend
 * @param input path of the input file, probed here for its audio stream
 * @param resolution target resolution, used to compute the bitrate ceiling
 * @param fps target framerate; when set, enables VBV rate control and a 2s keyframe interval
 * @returns the extended command
 *
 * NOTE(review): the fluent-call/branch order below maps directly to the
 * ffmpeg argument list — keep it as is.
 */
async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) {
  let localCommand = command
    .format('mp4')
    .videoCodec('libx264')
    .outputOption('-level 3.1') // 3.1 is the minimal resource allocation for our highest supported resolution
    .outputOption('-b_strategy 1') // NOTE: b-strategy 1 - heuristic algorithm, 16 is optimal B-frames for it
    .outputOption('-bf 16') // NOTE: Why 16: https://github.com/Chocobozzz/PeerTube/pull/774. b-strategy 2 -> B-frames<16
    .outputOption('-pix_fmt yuv420p') // allows import of source material with incompatible pixel formats (e.g. MJPEG video)
    .outputOption('-map_metadata -1') // strip all metadata
    .outputOption('-movflags faststart')

  // Probe the input to decide how to handle its audio stream
  const parsedAudio = await audio.get(input)

  if (!parsedAudio.audioStream) {
    // No audio stream in the input: drop audio entirely
    localCommand = localCommand.noAudio()
  } else if ((await checkFFmpegEncoders()).get('libfdk_aac')) { // we favor VBR, if a good AAC encoder is available
    localCommand = localCommand
      .audioCodec('libfdk_aac')
      .audioQuality(5)
  } else {
    // we try to reduce the ceiling bitrate by making rough matches of bitrates
    // of course this is far from perfect, but it might save some space in the end
    localCommand = localCommand.audioCodec('aac')

    const audioCodecName = parsedAudio.audioStream['codec_name']

    if (audio.bitrate[audioCodecName]) {
      // -1 means "copy as is" (see audio.bitrate): do not force a bitrate then
      const bitrate = audio.bitrate[audioCodecName](parsedAudio.audioStream['bit_rate'])
      if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate)
    }
  }

  if (fps) {
    // Constrained Encoding (VBV)
    // https://slhck.info/video/2017/03/01/rate-control.html
    // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate
    const targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)
    localCommand = localCommand.outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ])

    // Keyframe interval of 2 seconds for faster seeking and resolution switching.
    // https://streaminglearningcenter.com/blogs/whats-the-right-keyframe-interval.html
    // https://superuser.com/a/908325
    localCommand = localCommand.outputOption(`-g ${fps * 2}`)
  }

  return localCommand
}
540 | ||
541 | function presetCopy (command: ffmpeg.FfmpegCommand): ffmpeg.FfmpegCommand { | |
542 | return command | |
543 | .format('mp4') | |
544 | .videoCodec('copy') | |
545 | .audioCodec('copy') | |
546 | } | |
547 | ||
548 | function presetOnlyAudio (command: ffmpeg.FfmpegCommand): ffmpeg.FfmpegCommand { | |
549 | return command | |
550 | .format('mp4') | |
551 | .audioCodec('copy') | |
552 | .noVideo() | |
553 | } |