]>
Commit | Line | Data |
---|---|---|
1 | import * as ffmpeg from 'fluent-ffmpeg' | |
2 | import { dirname, join } from 'path' | |
3 | import { getTargetBitrate, getMaxBitrate, VideoResolution } from '../../shared/models/videos' | |
4 | import { FFMPEG_NICE, VIDEO_TRANSCODING_FPS } from '../initializers/constants' | |
5 | import { processImage } from './image-utils' | |
6 | import { logger } from './logger' | |
7 | import { checkFFmpegEncoders } from '../initializers/checker-before-init' | |
8 | import { readFile, remove, writeFile } from 'fs-extra' | |
9 | import { CONFIG } from '../initializers/config' | |
10 | ||
11 | function computeResolutionsToTranscode (videoFileHeight: number) { | |
12 | const resolutionsEnabled: number[] = [] | |
13 | const configResolutions = CONFIG.TRANSCODING.RESOLUTIONS | |
14 | ||
15 | // Put in the order we want to proceed jobs | |
16 | const resolutions = [ | |
17 | VideoResolution.H_NOVIDEO, | |
18 | VideoResolution.H_480P, | |
19 | VideoResolution.H_360P, | |
20 | VideoResolution.H_720P, | |
21 | VideoResolution.H_240P, | |
22 | VideoResolution.H_1080P, | |
23 | VideoResolution.H_4K | |
24 | ] | |
25 | ||
26 | for (const resolution of resolutions) { | |
27 | if (configResolutions[ resolution + 'p' ] === true && videoFileHeight > resolution) { | |
28 | resolutionsEnabled.push(resolution) | |
29 | } | |
30 | } | |
31 | ||
32 | return resolutionsEnabled | |
33 | } | |
34 | ||
35 | async function getVideoStreamSize (path: string) { | |
36 | const videoStream = await getVideoStreamFromFile(path) | |
37 | ||
38 | return videoStream === null | |
39 | ? { width: 0, height: 0 } | |
40 | : { width: videoStream.width, height: videoStream.height } | |
41 | } | |
42 | ||
43 | async function getVideoStreamCodec (path: string) { | |
44 | const videoStream = await getVideoStreamFromFile(path) | |
45 | ||
46 | if (!videoStream) return '' | |
47 | ||
48 | const videoCodec = videoStream.codec_tag_string | |
49 | ||
50 | const baseProfileMatrix = { | |
51 | 'High': '6400', | |
52 | 'Main': '4D40', | |
53 | 'Baseline': '42E0' | |
54 | } | |
55 | ||
56 | let baseProfile = baseProfileMatrix[videoStream.profile] | |
57 | if (!baseProfile) { | |
58 | logger.warn('Cannot get video profile codec of %s.', path, { videoStream }) | |
59 | baseProfile = baseProfileMatrix['High'] // Fallback | |
60 | } | |
61 | ||
62 | const level = videoStream.level.toString(16) | |
63 | ||
64 | return `${videoCodec}.${baseProfile}${level}` | |
65 | } | |
66 | ||
67 | async function getAudioStreamCodec (path: string) { | |
68 | const { audioStream } = await audio.get(path) | |
69 | ||
70 | if (!audioStream) return '' | |
71 | ||
72 | const audioCodec = audioStream.codec_name | |
73 | if (audioCodec === 'aac') return 'mp4a.40.2' | |
74 | ||
75 | logger.warn('Cannot get audio codec of %s.', path, { audioStream }) | |
76 | ||
77 | return 'mp4a.40.2' // Fallback | |
78 | } | |
79 | ||
80 | async function getVideoFileResolution (path: string) { | |
81 | const size = await getVideoStreamSize(path) | |
82 | ||
83 | return { | |
84 | videoFileResolution: Math.min(size.height, size.width), | |
85 | isPortraitMode: size.height > size.width | |
86 | } | |
87 | } | |
88 | ||
89 | async function getVideoFileFPS (path: string) { | |
90 | const videoStream = await getVideoStreamFromFile(path) | |
91 | if (videoStream === null) return 0 | |
92 | ||
93 | for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) { | |
94 | const valuesText: string = videoStream[ key ] | |
95 | if (!valuesText) continue | |
96 | ||
97 | const [ frames, seconds ] = valuesText.split('/') | |
98 | if (!frames || !seconds) continue | |
99 | ||
100 | const result = parseInt(frames, 10) / parseInt(seconds, 10) | |
101 | if (result > 0) return Math.round(result) | |
102 | } | |
103 | ||
104 | return 0 | |
105 | } | |
106 | ||
107 | async function getVideoFileBitrate (path: string) { | |
108 | return new Promise<number>((res, rej) => { | |
109 | ffmpeg.ffprobe(path, (err, metadata) => { | |
110 | if (err) return rej(err) | |
111 | ||
112 | return res(metadata.format.bit_rate) | |
113 | }) | |
114 | }) | |
115 | } | |
116 | ||
117 | function getDurationFromVideoFile (path: string) { | |
118 | return new Promise<number>((res, rej) => { | |
119 | ffmpeg.ffprobe(path, (err, metadata) => { | |
120 | if (err) return rej(err) | |
121 | ||
122 | return res(Math.floor(metadata.format.duration)) | |
123 | }) | |
124 | }) | |
125 | } | |
126 | ||
127 | async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) { | |
128 | const pendingImageName = 'pending-' + imageName | |
129 | ||
130 | const options = { | |
131 | filename: pendingImageName, | |
132 | count: 1, | |
133 | folder | |
134 | } | |
135 | ||
136 | const pendingImagePath = join(folder, pendingImageName) | |
137 | ||
138 | try { | |
139 | await new Promise<string>((res, rej) => { | |
140 | ffmpeg(fromPath, { niceness: FFMPEG_NICE.THUMBNAIL }) | |
141 | .on('error', rej) | |
142 | .on('end', () => res(imageName)) | |
143 | .thumbnail(options) | |
144 | }) | |
145 | ||
146 | const destination = join(folder, imageName) | |
147 | await processImage(pendingImagePath, destination, size) | |
148 | } catch (err) { | |
149 | logger.error('Cannot generate image from video %s.', fromPath, { err }) | |
150 | ||
151 | try { | |
152 | await remove(pendingImagePath) | |
153 | } catch (err) { | |
154 | logger.debug('Cannot remove pending image path after generation error.', { err }) | |
155 | } | |
156 | } | |
157 | } | |
158 | ||
// Discriminant for the TranscodeOptions union below
type TranscodeOptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio'

// Options shared by every kind of transcoding job
interface BaseTranscodeOptions {
  type: TranscodeOptionsType
  inputPath: string
  outputPath: string
  resolution: VideoResolution
  isPortraitMode?: boolean
}

// Produce an HLS playlist plus a single fragmented-mp4 segment file
interface HLSTranscodeOptions extends BaseTranscodeOptions {
  type: 'hls'
  copyCodecs: boolean
  hlsPlaylist: {
    videoFilename: string
  }
}

// Remux only: streams are copied, metadata stripped (see buildQuickTranscodeCommand)
interface QuickTranscodeOptions extends BaseTranscodeOptions {
  type: 'quick-transcode'
}

// Standard H.264 re-encode at the requested resolution
interface VideoTranscodeOptions extends BaseTranscodeOptions {
  type: 'video'
}

// Merge a still image (inputPath) with an audio track into a video
interface MergeAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'merge-audio'
  audioPath: string
}

// Keep only the (copied) audio stream
interface OnlyAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'only-audio'
}

// Discriminated union consumed by transcode()
type TranscodeOptions = HLSTranscodeOptions
  | VideoTranscodeOptions
  | MergeAudioTranscodeOptions
  | OnlyAudioTranscodeOptions
  | QuickTranscodeOptions
199 | ||
200 | function transcode (options: TranscodeOptions) { | |
201 | return new Promise<void>(async (res, rej) => { | |
202 | try { | |
203 | let command = ffmpeg(options.inputPath, { niceness: FFMPEG_NICE.TRANSCODING }) | |
204 | .output(options.outputPath) | |
205 | ||
206 | if (options.type === 'quick-transcode') { | |
207 | command = await buildQuickTranscodeCommand(command) | |
208 | } else if (options.type === 'hls') { | |
209 | command = await buildHLSCommand(command, options) | |
210 | } else if (options.type === 'merge-audio') { | |
211 | command = await buildAudioMergeCommand(command, options) | |
212 | } else if (options.type === 'only-audio') { | |
213 | command = await buildOnlyAudioCommand(command, options) | |
214 | } else { | |
215 | command = await buildx264Command(command, options) | |
216 | } | |
217 | ||
218 | if (CONFIG.TRANSCODING.THREADS > 0) { | |
219 | // if we don't set any threads ffmpeg will chose automatically | |
220 | command = command.outputOption('-threads ' + CONFIG.TRANSCODING.THREADS) | |
221 | } | |
222 | ||
223 | command | |
224 | .on('error', (err, stdout, stderr) => { | |
225 | logger.error('Error in transcoding job.', { stdout, stderr }) | |
226 | return rej(err) | |
227 | }) | |
228 | .on('end', () => { | |
229 | return fixHLSPlaylistIfNeeded(options) | |
230 | .then(() => res()) | |
231 | .catch(err => rej(err)) | |
232 | }) | |
233 | .run() | |
234 | } catch (err) { | |
235 | return rej(err) | |
236 | } | |
237 | }) | |
238 | } | |
239 | ||
240 | async function canDoQuickTranscode (path: string): Promise<boolean> { | |
241 | // NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway) | |
242 | const videoStream = await getVideoStreamFromFile(path) | |
243 | const parsedAudio = await audio.get(path) | |
244 | const fps = await getVideoFileFPS(path) | |
245 | const bitRate = await getVideoFileBitrate(path) | |
246 | const resolution = await getVideoFileResolution(path) | |
247 | ||
248 | // check video params | |
249 | if (videoStream == null) return false | |
250 | if (videoStream[ 'codec_name' ] !== 'h264') return false | |
251 | if (videoStream[ 'pix_fmt' ] !== 'yuv420p') return false | |
252 | if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false | |
253 | if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false | |
254 | ||
255 | // check audio params (if audio stream exists) | |
256 | if (parsedAudio.audioStream) { | |
257 | if (parsedAudio.audioStream[ 'codec_name' ] !== 'aac') return false | |
258 | ||
259 | const maxAudioBitrate = audio.bitrate[ 'aac' ](parsedAudio.audioStream[ 'bit_rate' ]) | |
260 | if (maxAudioBitrate !== -1 && parsedAudio.audioStream[ 'bit_rate' ] > maxAudioBitrate) return false | |
261 | } | |
262 | ||
263 | return true | |
264 | } | |
265 | ||
266 | function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number { | |
267 | return VIDEO_TRANSCODING_FPS[type].slice(0) | |
268 | .sort((a, b) => fps % a - fps % b)[0] | |
269 | } | |
270 | ||
271 | // --------------------------------------------------------------------------- | |
272 | ||
// Public API of this module; everything below the next separator is internal
export {
  getVideoStreamCodec,
  getAudioStreamCodec,
  getVideoStreamSize,
  getVideoFileResolution,
  getDurationFromVideoFile,
  generateImageFromVideoFile,
  TranscodeOptions,
  TranscodeOptionsType,
  transcode,
  getVideoFileFPS,
  computeResolutionsToTranscode,
  audio,
  getVideoFileBitrate,
  canDoQuickTranscode
}
289 | ||
290 | // --------------------------------------------------------------------------- | |
291 | ||
292 | async function buildx264Command (command: ffmpeg.FfmpegCommand, options: TranscodeOptions) { | |
293 | let fps = await getVideoFileFPS(options.inputPath) | |
294 | if ( | |
295 | // On small/medium resolutions, limit FPS | |
296 | options.resolution !== undefined && | |
297 | options.resolution < VIDEO_TRANSCODING_FPS.KEEP_ORIGIN_FPS_RESOLUTION_MIN && | |
298 | fps > VIDEO_TRANSCODING_FPS.AVERAGE | |
299 | ) { | |
300 | // Get closest standard framerate by modulo: downsampling has to be done to a divisor of the nominal fps value | |
301 | fps = getClosestFramerateStandard(fps, 'STANDARD') | |
302 | } | |
303 | ||
304 | command = await presetH264(command, options.inputPath, options.resolution, fps) | |
305 | ||
306 | if (options.resolution !== undefined) { | |
307 | // '?x720' or '720x?' for example | |
308 | const size = options.isPortraitMode === true ? `${options.resolution}x?` : `?x${options.resolution}` | |
309 | command = command.size(size) | |
310 | } | |
311 | ||
312 | if (fps) { | |
313 | // Hard FPS limits | |
314 | if (fps > VIDEO_TRANSCODING_FPS.MAX) fps = getClosestFramerateStandard(fps, 'HD_STANDARD') | |
315 | else if (fps < VIDEO_TRANSCODING_FPS.MIN) fps = VIDEO_TRANSCODING_FPS.MIN | |
316 | ||
317 | command = command.withFPS(fps) | |
318 | } | |
319 | ||
320 | return command | |
321 | } | |
322 | ||
323 | async function buildAudioMergeCommand (command: ffmpeg.FfmpegCommand, options: MergeAudioTranscodeOptions) { | |
324 | command = command.loop(undefined) | |
325 | ||
326 | command = await presetH264VeryFast(command, options.audioPath, options.resolution) | |
327 | ||
328 | command = command.input(options.audioPath) | |
329 | .videoFilter('scale=trunc(iw/2)*2:trunc(ih/2)*2') // Avoid "height not divisible by 2" error | |
330 | .outputOption('-tune stillimage') | |
331 | .outputOption('-shortest') | |
332 | ||
333 | return command | |
334 | } | |
335 | ||
336 | async function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, options: OnlyAudioTranscodeOptions) { | |
337 | command = await presetOnlyAudio(command) | |
338 | ||
339 | return command | |
340 | } | |
341 | ||
342 | async function buildQuickTranscodeCommand (command: ffmpeg.FfmpegCommand) { | |
343 | command = await presetCopy(command) | |
344 | ||
345 | command = command.outputOption('-map_metadata -1') // strip all metadata | |
346 | .outputOption('-movflags faststart') | |
347 | ||
348 | return command | |
349 | } | |
350 | ||
351 | async function buildHLSCommand (command: ffmpeg.FfmpegCommand, options: HLSTranscodeOptions) { | |
352 | const videoPath = getHLSVideoPath(options) | |
353 | ||
354 | if (options.copyCodecs) command = await presetCopy(command) | |
355 | else command = await buildx264Command(command, options) | |
356 | ||
357 | command = command.outputOption('-hls_time 4') | |
358 | .outputOption('-hls_list_size 0') | |
359 | .outputOption('-hls_playlist_type vod') | |
360 | .outputOption('-hls_segment_filename ' + videoPath) | |
361 | .outputOption('-hls_segment_type fmp4') | |
362 | .outputOption('-f hls') | |
363 | .outputOption('-hls_flags single_file') | |
364 | ||
365 | return command | |
366 | } | |
367 | ||
368 | function getHLSVideoPath (options: HLSTranscodeOptions) { | |
369 | return `${dirname(options.outputPath)}/${options.hlsPlaylist.videoFilename}` | |
370 | } | |
371 | ||
372 | async function fixHLSPlaylistIfNeeded (options: TranscodeOptions) { | |
373 | if (options.type !== 'hls') return | |
374 | ||
375 | const fileContent = await readFile(options.outputPath) | |
376 | ||
377 | const videoFileName = options.hlsPlaylist.videoFilename | |
378 | const videoFilePath = getHLSVideoPath(options) | |
379 | ||
380 | // Fix wrong mapping with some ffmpeg versions | |
381 | const newContent = fileContent.toString() | |
382 | .replace(`#EXT-X-MAP:URI="${videoFilePath}",`, `#EXT-X-MAP:URI="${videoFileName}",`) | |
383 | ||
384 | await writeFile(options.outputPath, newContent) | |
385 | } | |
386 | ||
387 | function getVideoStreamFromFile (path: string) { | |
388 | return new Promise<any>((res, rej) => { | |
389 | ffmpeg.ffprobe(path, (err, metadata) => { | |
390 | if (err) return rej(err) | |
391 | ||
392 | const videoStream = metadata.streams.find(s => s.codec_type === 'video') | |
393 | return res(videoStream || null) | |
394 | }) | |
395 | }) | |
396 | } | |
397 | ||
398 | /** | |
399 | * A slightly customised version of the 'veryfast' x264 preset | |
400 | * | |
401 | * The veryfast preset is right in the sweet spot of performance | |
402 | * and quality. Superfast and ultrafast will give you better | |
403 | * performance, but then quality is noticeably worse. | |
404 | */ | |
405 | async function presetH264VeryFast (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) { | |
406 | let localCommand = await presetH264(command, input, resolution, fps) | |
407 | ||
408 | localCommand = localCommand.outputOption('-preset:v veryfast') | |
409 | ||
410 | /* | |
411 | MAIN reference: https://slhck.info/video/2017/03/01/rate-control.html | |
412 | Our target situation is closer to a livestream than a stream, | |
413 | since we want to reduce as much a possible the encoding burden, | |
414 | although not to the point of a livestream where there is a hard | |
415 | constraint on the frames per second to be encoded. | |
416 | */ | |
417 | ||
418 | return localCommand | |
419 | } | |
420 | ||
421 | /** | |
422 | * A toolbox to play with audio | |
423 | */ | |
424 | namespace audio { | |
425 | export const get = (videoPath: string) => { | |
426 | // without position, ffprobe considers the last input only | |
427 | // we make it consider the first input only | |
428 | // if you pass a file path to pos, then ffprobe acts on that file directly | |
429 | return new Promise<{ absolutePath: string, audioStream?: any }>((res, rej) => { | |
430 | ||
431 | function parseFfprobe (err: any, data: ffmpeg.FfprobeData) { | |
432 | if (err) return rej(err) | |
433 | ||
434 | if ('streams' in data) { | |
435 | const audioStream = data.streams.find(stream => stream[ 'codec_type' ] === 'audio') | |
436 | if (audioStream) { | |
437 | return res({ | |
438 | absolutePath: data.format.filename, | |
439 | audioStream | |
440 | }) | |
441 | } | |
442 | } | |
443 | ||
444 | return res({ absolutePath: data.format.filename }) | |
445 | } | |
446 | ||
447 | return ffmpeg.ffprobe(videoPath, parseFfprobe) | |
448 | }) | |
449 | } | |
450 | ||
451 | export namespace bitrate { | |
452 | const baseKbitrate = 384 | |
453 | ||
454 | const toBits = (kbits: number) => kbits * 8000 | |
455 | ||
456 | export const aac = (bitrate: number): number => { | |
457 | switch (true) { | |
458 | case bitrate > toBits(baseKbitrate): | |
459 | return baseKbitrate | |
460 | ||
461 | default: | |
462 | return -1 // we interpret it as a signal to copy the audio stream as is | |
463 | } | |
464 | } | |
465 | ||
466 | export const mp3 = (bitrate: number): number => { | |
467 | /* | |
468 | a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac. | |
469 | That's why, when using aac, we can go to lower kbit/sec. The equivalences | |
470 | made here are not made to be accurate, especially with good mp3 encoders. | |
471 | */ | |
472 | switch (true) { | |
473 | case bitrate <= toBits(192): | |
474 | return 128 | |
475 | ||
476 | case bitrate <= toBits(384): | |
477 | return 256 | |
478 | ||
479 | default: | |
480 | return baseKbitrate | |
481 | } | |
482 | } | |
483 | } | |
484 | } | |
485 | ||
/**
 * Standard profile, with variable bitrate audio and faststart.
 *
 * As for the audio, quality '5' is the highest and ensures 96-112kbps/channel
 * See https://trac.ffmpeg.org/wiki/Encode/AAC#fdk_vbr
 *
 * @param command fluent-ffmpeg command to configure (mutated and returned)
 * @param input   path of the media whose audio stream is probed
 * @param resolution target resolution, used to derive the VBV bitrate cap
 * @param fps     target framerate; when set, enables VBV constraints and a
 *                2-second keyframe interval
 */
async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) {
  let localCommand = command
    .format('mp4')
    .videoCodec('libx264')
    .outputOption('-level 3.1') // 3.1 is the minimal resource allocation for our highest supported resolution
    .outputOption('-b_strategy 1') // NOTE: b-strategy 1 - heuristic algorithm, 16 is optimal B-frames for it
    .outputOption('-bf 16') // NOTE: Why 16: https://github.com/Chocobozzz/PeerTube/pull/774. b-strategy 2 -> B-frames<16
    .outputOption('-pix_fmt yuv420p') // allows import of source material with incompatible pixel formats (e.g. MJPEG video)
    .outputOption('-map_metadata -1') // strip all metadata
    .outputOption('-movflags faststart')

  // Probe the input's audio stream to pick an audio encoding strategy
  const parsedAudio = await audio.get(input)

  if (!parsedAudio.audioStream) {
    // No audio in the source: do not emit an audio stream at all
    localCommand = localCommand.noAudio()
  } else if ((await checkFFmpegEncoders()).get('libfdk_aac')) { // we favor VBR, if a good AAC encoder is available
    localCommand = localCommand
      .audioCodec('libfdk_aac')
      .audioQuality(5)
  } else {
    // we try to reduce the ceiling bitrate by making rough matches of bitrates
    // of course this is far from perfect, but it might save some space in the end
    localCommand = localCommand.audioCodec('aac')

    const audioCodecName = parsedAudio.audioStream[ 'codec_name' ]

    // audio.bitrate only knows some codecs; unknown codecs keep ffmpeg defaults
    if (audio.bitrate[ audioCodecName ]) {
      const bitrate = audio.bitrate[ audioCodecName ](parsedAudio.audioStream[ 'bit_rate' ])
      // -1 means "copy-level bitrate": leave the encoder's default ceiling
      if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate)
    }
  }

  if (fps) {
    // Constrained Encoding (VBV)
    // https://slhck.info/video/2017/03/01/rate-control.html
    // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate
    const targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)
    localCommand = localCommand.outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ])

    // Keyframe interval of 2 seconds for faster seeking and resolution switching.
    // https://streaminglearningcenter.com/blogs/whats-the-right-keyframe-interval.html
    // https://superuser.com/a/908325
    localCommand = localCommand.outputOption(`-g ${fps * 2}`)
  }

  return localCommand
}
539 | ||
540 | async function presetCopy (command: ffmpeg.FfmpegCommand): Promise<ffmpeg.FfmpegCommand> { | |
541 | return command | |
542 | .format('mp4') | |
543 | .videoCodec('copy') | |
544 | .audioCodec('copy') | |
545 | } | |
546 | ||
547 | async function presetOnlyAudio (command: ffmpeg.FfmpegCommand): Promise<ffmpeg.FfmpegCommand> { | |
548 | return command | |
549 | .format('mp4') | |
550 | .audioCodec('copy') | |
551 | .noVideo() | |
552 | } |