| author | Chocobozzz <me@florianbigard.com> | 2020-11-20 17:16:55 +0100 |
|---|---|---|
| committer | Chocobozzz <chocobozzz@cpy.re> | 2020-11-25 10:07:51 +0100 |
| commit | daf6e4801052d3ca6be2fafd20bae2323b1ce175 (patch) | |
| tree | a136af611c2543c461ce3fd126ddb7cb1e37a0c2 /server/helpers | |
| parent | 123f61933611f326ea5a5e8c2ea253ee8720e4f0 (diff) | |
Split ffmpeg utils with ffprobe utils
Diffstat (limited to 'server/helpers')
| -rw-r--r-- | server/helpers/ffmpeg-utils.ts | 310 |
| -rw-r--r-- | server/helpers/ffprobe-utils.ts | 249 |

2 files changed, 282 insertions(+), 277 deletions(-)
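This commit moves the read-only ffprobe helpers out of `server/helpers/ffmpeg-utils.ts` into the new `server/helpers/ffprobe-utils.ts`, leaving the encoding entry points behind. As a minimal sketch of how calling code is expected to consume the split (the `inspectBeforeTranscode` helper and its import paths are illustrative, not part of the commit):

```typescript
// Sketch only: read-only probing now comes from ffprobe-utils,
// while transcode() and its option types stay in ffmpeg-utils.
import { canDoQuickTranscode, getVideoFileFPS, getVideoFileResolution } from './server/helpers/ffprobe-utils'

async function inspectBeforeTranscode (inputPath: string) {
  const fps = await getVideoFileFPS(inputPath)
  const { videoFileResolution, isPortraitMode } = await getVideoFileResolution(inputPath)
  const canQuickTranscode = await canDoQuickTranscode(inputPath)

  // A real caller would now build a TranscodeOptions object and pass
  // these values to transcode() from ffmpeg-utils
  return { fps, videoFileResolution, isPortraitMode, canQuickTranscode }
}
```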
diff --git a/server/helpers/ffmpeg-utils.ts b/server/helpers/ffmpeg-utils.ts
index 66b9d2e44..df3926658 100644
--- a/server/helpers/ffmpeg-utils.ts
+++ b/server/helpers/ffmpeg-utils.ts
@@ -1,201 +1,14 @@
1 | import * as ffmpeg from 'fluent-ffmpeg' | 1 | import * as ffmpeg from 'fluent-ffmpeg' |
2 | import { readFile, remove, writeFile } from 'fs-extra' | 2 | import { readFile, remove, writeFile } from 'fs-extra' |
3 | import { dirname, join } from 'path' | 3 | import { dirname, join } from 'path' |
4 | import { VideoFileMetadata } from '@shared/models/videos/video-file-metadata' | 4 | import { getTargetBitrate, VideoResolution } from '../../shared/models/videos' |
5 | import { getMaxBitrate, getTargetBitrate, VideoResolution } from '../../shared/models/videos' | ||
6 | import { checkFFmpegEncoders } from '../initializers/checker-before-init' | 5 | import { checkFFmpegEncoders } from '../initializers/checker-before-init' |
7 | import { CONFIG } from '../initializers/config' | 6 | import { CONFIG } from '../initializers/config' |
8 | import { FFMPEG_NICE, VIDEO_LIVE, VIDEO_TRANSCODING_FPS } from '../initializers/constants' | 7 | import { FFMPEG_NICE, VIDEO_LIVE, VIDEO_TRANSCODING_FPS } from '../initializers/constants' |
8 | import { getAudioStream, getClosestFramerateStandard, getMaxAudioBitrate, getVideoFileFPS } from './ffprobe-utils' | ||
9 | import { processImage } from './image-utils' | 9 | import { processImage } from './image-utils' |
10 | import { logger } from './logger' | 10 | import { logger } from './logger' |
11 | 11 | ||
12 | /** | ||
13 | * A toolbox to play with audio | ||
14 | */ | ||
15 | namespace audio { | ||
16 | export const get = (videoPath: string) => { | ||
17 | // without position, ffprobe considers the last input only | ||
18 | // we make it consider the first input only | ||
19 | // if you pass a file path to pos, then ffprobe acts on that file directly | ||
20 | return new Promise<{ absolutePath: string, audioStream?: any }>((res, rej) => { | ||
21 | |||
22 | function parseFfprobe (err: any, data: ffmpeg.FfprobeData) { | ||
23 | if (err) return rej(err) | ||
24 | |||
25 | if ('streams' in data) { | ||
26 | const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio') | ||
27 | if (audioStream) { | ||
28 | return res({ | ||
29 | absolutePath: data.format.filename, | ||
30 | audioStream | ||
31 | }) | ||
32 | } | ||
33 | } | ||
34 | |||
35 | return res({ absolutePath: data.format.filename }) | ||
36 | } | ||
37 | |||
38 | return ffmpeg.ffprobe(videoPath, parseFfprobe) | ||
39 | }) | ||
40 | } | ||
41 | |||
42 | export namespace bitrate { | ||
43 | const baseKbitrate = 384 | ||
44 | |||
45 | const toBits = (kbits: number) => kbits * 8000 | ||
46 | |||
47 | export const aac = (bitrate: number): number => { | ||
48 | switch (true) { | ||
49 | case bitrate > toBits(baseKbitrate): | ||
50 | return baseKbitrate | ||
51 | |||
52 | default: | ||
53 | return -1 // we interpret it as a signal to copy the audio stream as is | ||
54 | } | ||
55 | } | ||
56 | |||
57 | export const mp3 = (bitrate: number): number => { | ||
58 | /* | ||
59 | a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac. | ||
60 | That's why, when using aac, we can go to lower kbit/sec. The equivalences | ||
61 | made here are not made to be accurate, especially with good mp3 encoders. | ||
62 | */ | ||
63 | switch (true) { | ||
64 | case bitrate <= toBits(192): | ||
65 | return 128 | ||
66 | |||
67 | case bitrate <= toBits(384): | ||
68 | return 256 | ||
69 | |||
70 | default: | ||
71 | return baseKbitrate | ||
72 | } | ||
73 | } | ||
74 | } | ||
75 | } | ||
76 | |||
77 | function computeResolutionsToTranscode (videoFileResolution: number, type: 'vod' | 'live') { | ||
78 | const configResolutions = type === 'vod' | ||
79 | ? CONFIG.TRANSCODING.RESOLUTIONS | ||
80 | : CONFIG.LIVE.TRANSCODING.RESOLUTIONS | ||
81 | |||
82 | const resolutionsEnabled: number[] = [] | ||
83 | |||
84 | // Put in the order we want to proceed jobs | ||
85 | const resolutions = [ | ||
86 | VideoResolution.H_NOVIDEO, | ||
87 | VideoResolution.H_480P, | ||
88 | VideoResolution.H_360P, | ||
89 | VideoResolution.H_720P, | ||
90 | VideoResolution.H_240P, | ||
91 | VideoResolution.H_1080P, | ||
92 | VideoResolution.H_4K | ||
93 | ] | ||
94 | |||
95 | for (const resolution of resolutions) { | ||
96 | if (configResolutions[resolution + 'p'] === true && videoFileResolution > resolution) { | ||
97 | resolutionsEnabled.push(resolution) | ||
98 | } | ||
99 | } | ||
100 | |||
101 | return resolutionsEnabled | ||
102 | } | ||
103 | |||
104 | async function getVideoStreamSize (path: string) { | ||
105 | const videoStream = await getVideoStreamFromFile(path) | ||
106 | |||
107 | return videoStream === null | ||
108 | ? { width: 0, height: 0 } | ||
109 | : { width: videoStream.width, height: videoStream.height } | ||
110 | } | ||
111 | |||
112 | async function getVideoStreamCodec (path: string) { | ||
113 | const videoStream = await getVideoStreamFromFile(path) | ||
114 | |||
115 | if (!videoStream) return '' | ||
116 | |||
117 | const videoCodec = videoStream.codec_tag_string | ||
118 | |||
119 | const baseProfileMatrix = { | ||
120 | High: '6400', | ||
121 | Main: '4D40', | ||
122 | Baseline: '42E0' | ||
123 | } | ||
124 | |||
125 | let baseProfile = baseProfileMatrix[videoStream.profile] | ||
126 | if (!baseProfile) { | ||
127 | logger.warn('Cannot get video profile codec of %s.', path, { videoStream }) | ||
128 | baseProfile = baseProfileMatrix['High'] // Fallback | ||
129 | } | ||
130 | |||
131 | let level = videoStream.level.toString(16) | ||
132 | if (level.length === 1) level = `0${level}` | ||
133 | |||
134 | return `${videoCodec}.${baseProfile}${level}` | ||
135 | } | ||
136 | |||
137 | async function getAudioStreamCodec (path: string) { | ||
138 | const { audioStream } = await audio.get(path) | ||
139 | |||
140 | if (!audioStream) return '' | ||
141 | |||
142 | const audioCodec = audioStream.codec_name | ||
143 | if (audioCodec === 'aac') return 'mp4a.40.2' | ||
144 | |||
145 | logger.warn('Cannot get audio codec of %s.', path, { audioStream }) | ||
146 | |||
147 | return 'mp4a.40.2' // Fallback | ||
148 | } | ||
149 | |||
150 | async function getVideoFileResolution (path: string) { | ||
151 | const size = await getVideoStreamSize(path) | ||
152 | |||
153 | return { | ||
154 | videoFileResolution: Math.min(size.height, size.width), | ||
155 | isPortraitMode: size.height > size.width | ||
156 | } | ||
157 | } | ||
158 | |||
159 | async function getVideoFileFPS (path: string) { | ||
160 | const videoStream = await getVideoStreamFromFile(path) | ||
161 | if (videoStream === null) return 0 | ||
162 | |||
163 | for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) { | ||
164 | const valuesText: string = videoStream[key] | ||
165 | if (!valuesText) continue | ||
166 | |||
167 | const [ frames, seconds ] = valuesText.split('/') | ||
168 | if (!frames || !seconds) continue | ||
169 | |||
170 | const result = parseInt(frames, 10) / parseInt(seconds, 10) | ||
171 | if (result > 0) return Math.round(result) | ||
172 | } | ||
173 | |||
174 | return 0 | ||
175 | } | ||
176 | |||
177 | async function getMetadataFromFile <T> (path: string, cb = metadata => metadata) { | ||
178 | return new Promise<T>((res, rej) => { | ||
179 | ffmpeg.ffprobe(path, (err, metadata) => { | ||
180 | if (err) return rej(err) | ||
181 | |||
182 | return res(cb(new VideoFileMetadata(metadata))) | ||
183 | }) | ||
184 | }) | ||
185 | } | ||
186 | |||
187 | async function getVideoFileBitrate (path: string) { | ||
188 | return getMetadataFromFile<number>(path, metadata => metadata.format.bit_rate) | ||
189 | } | ||
190 | |||
191 | function getDurationFromVideoFile (path: string) { | ||
192 | return getMetadataFromFile<number>(path, metadata => Math.floor(metadata.format.duration)) | ||
193 | } | ||
194 | |||
195 | function getVideoStreamFromFile (path: string) { | ||
196 | return getMetadataFromFile<any>(path, metadata => metadata.streams.find(s => s.codec_type === 'video') || null) | ||
197 | } | ||
198 | |||
199 | async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) { | 12 | async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) { |
200 | const pendingImageName = 'pending-' + imageName | 13 | const pendingImageName = 'pending-' + imageName |
201 | 14 | ||
@@ -228,6 +41,10 @@ async function generateImageFromVideoFile (fromPath: string, folder: string, ima
228 | } | 41 | } |
229 | } | 42 | } |
230 | 43 | ||
44 | // --------------------------------------------------------------------------- | ||
45 | // Transcode meta function | ||
46 | // --------------------------------------------------------------------------- | ||
47 | |||
231 | type TranscodeOptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio' | 48 | type TranscodeOptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio' |
232 | 49 | ||
233 | interface BaseTranscodeOptions { | 50 | interface BaseTranscodeOptions { |
@@ -270,72 +87,27 @@ type TranscodeOptions =
270 | | OnlyAudioTranscodeOptions | 87 | | OnlyAudioTranscodeOptions |
271 | | QuickTranscodeOptions | 88 | | QuickTranscodeOptions |
272 | 89 | ||
273 | function transcode (options: TranscodeOptions) { | 90 | const builders: { |
91 | [ type in TranscodeOptionsType ]: (c: ffmpeg.FfmpegCommand, o?: TranscodeOptions) => Promise<ffmpeg.FfmpegCommand> | ffmpeg.FfmpegCommand | ||
92 | } = { | ||
93 | 'quick-transcode': buildQuickTranscodeCommand, | ||
94 | 'hls': buildHLSVODCommand, | ||
95 | 'merge-audio': buildAudioMergeCommand, | ||
96 | 'only-audio': buildOnlyAudioCommand, | ||
97 | 'video': buildx264Command | ||
98 | } | ||
99 | |||
100 | async function transcode (options: TranscodeOptions) { | ||
274 | logger.debug('Will run transcode.', { options }) | 101 | logger.debug('Will run transcode.', { options }) |
275 | 102 | ||
276 | return new Promise<void>(async (res, rej) => { | 103 | let command = getFFmpeg(options.inputPath) |
277 | try { | 104 | .output(options.outputPath) |
278 | let command = getFFmpeg(options.inputPath) | ||
279 | .output(options.outputPath) | ||
280 | |||
281 | if (options.type === 'quick-transcode') { | ||
282 | command = buildQuickTranscodeCommand(command) | ||
283 | } else if (options.type === 'hls') { | ||
284 | command = await buildHLSVODCommand(command, options) | ||
285 | } else if (options.type === 'merge-audio') { | ||
286 | command = await buildAudioMergeCommand(command, options) | ||
287 | } else if (options.type === 'only-audio') { | ||
288 | command = buildOnlyAudioCommand(command, options) | ||
289 | } else { | ||
290 | command = await buildx264Command(command, options) | ||
291 | } | ||
292 | |||
293 | command | ||
294 | .on('error', (err, stdout, stderr) => { | ||
295 | logger.error('Error in transcoding job.', { stdout, stderr }) | ||
296 | return rej(err) | ||
297 | }) | ||
298 | .on('end', () => { | ||
299 | return fixHLSPlaylistIfNeeded(options) | ||
300 | .then(() => res()) | ||
301 | .catch(err => rej(err)) | ||
302 | }) | ||
303 | .run() | ||
304 | } catch (err) { | ||
305 | return rej(err) | ||
306 | } | ||
307 | }) | ||
308 | } | ||
309 | 105 | ||
310 | async function canDoQuickTranscode (path: string): Promise<boolean> { | 106 | command = await builders[options.type](command, options) |
311 | // NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway) | ||
312 | const videoStream = await getVideoStreamFromFile(path) | ||
313 | const parsedAudio = await audio.get(path) | ||
314 | const fps = await getVideoFileFPS(path) | ||
315 | const bitRate = await getVideoFileBitrate(path) | ||
316 | const resolution = await getVideoFileResolution(path) | ||
317 | |||
318 | // check video params | ||
319 | if (videoStream == null) return false | ||
320 | if (videoStream['codec_name'] !== 'h264') return false | ||
321 | if (videoStream['pix_fmt'] !== 'yuv420p') return false | ||
322 | if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false | ||
323 | if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false | ||
324 | |||
325 | // check audio params (if audio stream exists) | ||
326 | if (parsedAudio.audioStream) { | ||
327 | if (parsedAudio.audioStream['codec_name'] !== 'aac') return false | ||
328 | |||
329 | const maxAudioBitrate = audio.bitrate['aac'](parsedAudio.audioStream['bit_rate']) | ||
330 | if (maxAudioBitrate !== -1 && parsedAudio.audioStream['bit_rate'] > maxAudioBitrate) return false | ||
331 | } | ||
332 | 107 | ||
333 | return true | 108 | await runCommand(command) |
334 | } | ||
335 | 109 | ||
336 | function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number { | 110 | await fixHLSPlaylistIfNeeded(options) |
337 | return VIDEO_TRANSCODING_FPS[type].slice(0) | ||
338 | .sort((a, b) => fps % a - fps % b)[0] | ||
339 | } | 111 | } |
340 | 112 | ||
341 | function convertWebPToJPG (path: string, destination: string): Promise<void> { | 113 | function convertWebPToJPG (path: string, destination: string): Promise<void> { |
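In the hunk above, the if/else chain inside `transcode` is replaced by a `builders` lookup table keyed by `TranscodeOptionsType`, and the promise wrapper moves into `runCommand`. A self-contained sketch of that dispatch pattern, using simplified stand-in types rather than the real PeerTube ones:

```typescript
// Stand-in types: the real code uses ffmpeg.FfmpegCommand and TranscodeOptions
type JobType = 'hls' | 'quick-transcode' | 'video'

interface Command { args: string[] }
interface Options { type: JobType, inputPath: string, outputPath: string }

// One builder per job type; builders may be sync or async, as in the diff
const builders: { [type in JobType]: (c: Command, o: Options) => Command | Promise<Command> } = {
  'quick-transcode': c => ({ args: [ ...c.args, '-c', 'copy' ] }),
  'hls': async c => ({ args: [ ...c.args, '-hls_playlist_type', 'vod' ] }),
  'video': c => ({ args: [ ...c.args, '-c:v', 'libx264' ] })
}

async function buildCommand (options: Options) {
  let command: Command = { args: [ '-i', options.inputPath ] }

  // Same shape as the new transcode(): pick the builder by type and await it,
  // then hand the finished command to a single runner (runCommand in the diff)
  command = await builders[options.type](command, options)

  return command
}
```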
@@ -484,12 +256,11 @@ async function hlsPlaylistToFragmentedMP4 (hlsDirectory: string, segmentFiles: s
484 | } | 256 | } |
485 | 257 | ||
486 | async function runCommand (command: ffmpeg.FfmpegCommand, onEnd?: Function) { | 258 | async function runCommand (command: ffmpeg.FfmpegCommand, onEnd?: Function) { |
487 | command.run() | ||
488 | |||
489 | return new Promise<string>((res, rej) => { | 259 | return new Promise<string>((res, rej) => { |
490 | command.on('error', err => { | 260 | command.on('error', (err, stdout, stderr) => { |
491 | if (onEnd) onEnd() | 261 | if (onEnd) onEnd() |
492 | 262 | ||
263 | logger.error('Error in transcoding job.', { stdout, stderr }) | ||
493 | rej(err) | 264 | rej(err) |
494 | }) | 265 | }) |
495 | 266 | ||
@@ -498,32 +269,23 @@ async function runCommand (command: ffmpeg.FfmpegCommand, onEnd?: Function) {
498 | 269 | ||
499 | res() | 270 | res() |
500 | }) | 271 | }) |
272 | |||
273 | command.run() | ||
501 | }) | 274 | }) |
502 | } | 275 | } |
503 | 276 | ||
504 | // --------------------------------------------------------------------------- | 277 | // --------------------------------------------------------------------------- |
505 | 278 | ||
506 | export { | 279 | export { |
507 | getVideoStreamCodec, | ||
508 | getAudioStreamCodec, | ||
509 | runLiveMuxing, | 280 | runLiveMuxing, |
510 | convertWebPToJPG, | 281 | convertWebPToJPG, |
511 | processGIF, | 282 | processGIF, |
512 | getVideoStreamSize, | ||
513 | getVideoFileResolution, | ||
514 | getMetadataFromFile, | ||
515 | getDurationFromVideoFile, | ||
516 | runLiveTranscoding, | 283 | runLiveTranscoding, |
517 | generateImageFromVideoFile, | 284 | generateImageFromVideoFile, |
518 | TranscodeOptions, | 285 | TranscodeOptions, |
519 | TranscodeOptionsType, | 286 | TranscodeOptionsType, |
520 | transcode, | 287 | transcode, |
521 | getVideoFileFPS, | 288 | hlsPlaylistToFragmentedMP4 |
522 | computeResolutionsToTranscode, | ||
523 | audio, | ||
524 | hlsPlaylistToFragmentedMP4, | ||
525 | getVideoFileBitrate, | ||
526 | canDoQuickTranscode | ||
527 | } | 289 | } |
528 | 290 | ||
529 | // --------------------------------------------------------------------------- | 291 | // --------------------------------------------------------------------------- |
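The reworked `runCommand` above registers the `error` and `end` handlers before calling `command.run()`, and now logs stdout/stderr on failure, so the promise cannot miss an event fired right at startup. A generic sketch of that promisification pattern over a plain EventEmitter (not fluent-ffmpeg itself):

```typescript
import { EventEmitter } from 'events'

// Illustration only: wrap an emitter-style task in a promise.
// Listeners are attached first; the work is started last, mirroring the
// reordered command.run() in the diff.
function runTask (task: EventEmitter & { run: () => void }, onEnd?: () => void) {
  return new Promise<void>((res, rej) => {
    task.on('error', (err: Error) => {
      if (onEnd) onEnd()
      rej(err)
    })

    task.on('end', () => {
      if (onEnd) onEnd()
      res()
    })

    // Start only after every listener is in place
    task.run()
  })
}
```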
@@ -595,7 +357,7 @@ async function buildAudioMergeCommand (command: ffmpeg.FfmpegCommand, options: M
595 | return command | 357 | return command |
596 | } | 358 | } |
597 | 359 | ||
598 | function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, options: OnlyAudioTranscodeOptions) { | 360 | function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, _options: OnlyAudioTranscodeOptions) { |
599 | command = presetOnlyAudio(command) | 361 | command = presetOnlyAudio(command) |
600 | 362 | ||
601 | return command | 363 | return command |
@@ -684,7 +446,7 @@ async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolut
684 | 446 | ||
685 | addDefaultX264Params(localCommand) | 447 | addDefaultX264Params(localCommand) |
686 | 448 | ||
687 | const parsedAudio = await audio.get(input) | 449 | const parsedAudio = await getAudioStream(input) |
688 | 450 | ||
689 | if (!parsedAudio.audioStream) { | 451 | if (!parsedAudio.audioStream) { |
690 | localCommand = localCommand.noAudio() | 452 | localCommand = localCommand.noAudio() |
@@ -699,22 +461,16 @@ async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolut
699 | 461 | ||
700 | const audioCodecName = parsedAudio.audioStream['codec_name'] | 462 | const audioCodecName = parsedAudio.audioStream['codec_name'] |
701 | 463 | ||
702 | if (audio.bitrate[audioCodecName]) { | 464 | const bitrate = getMaxAudioBitrate(audioCodecName, parsedAudio.bitrate) |
703 | const bitrate = audio.bitrate[audioCodecName](parsedAudio.audioStream['bit_rate']) | 465 | |
704 | if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate) | 466 | if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate) |
705 | } | ||
706 | } | 467 | } |
707 | 468 | ||
708 | if (fps) { | 469 | if (fps) { |
709 | // Constrained Encoding (VBV) | 470 | // Constrained Encoding (VBV) |
710 | // https://slhck.info/video/2017/03/01/rate-control.html | 471 | // https://slhck.info/video/2017/03/01/rate-control.html |
711 | // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate | 472 | // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate |
712 | let targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS) | 473 | const targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS) |
713 | |||
714 | // Don't transcode to an higher bitrate than the original file | ||
715 | const fileBitrate = await getVideoFileBitrate(input) | ||
716 | targetBitrate = Math.min(targetBitrate, fileBitrate) | ||
717 | |||
718 | localCommand = localCommand.outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ]) | 474 | localCommand = localCommand.outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ]) |
719 | 475 | ||
720 | // Keyframe interval of 2 seconds for faster seeking and resolution switching. | 476 | // Keyframe interval of 2 seconds for faster seeking and resolution switching. |
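In the presetH264 hunks above, the removed `audio.bitrate` namespace is replaced by `getMaxAudioBitrate` from ffprobe-utils. Its return contract, per the code: a positive number (in kbit/s) caps the audio bitrate, `-1` signals the input stream is small enough to keep as is, and `undefined` means the codec is not handled. A hedged sketch of reading that contract from caller code (the helper and the ffmpeg flags below are illustrative, not from the diff):

```typescript
import { getMaxAudioBitrate } from './ffprobe-utils'

// Hypothetical helper: translate getMaxAudioBitrate's return value into
// output options, following the interpretation used in the diff
function audioOutputOptions (codecName: string, inputBitrate: number): string[] {
  const maxBitrate = getMaxAudioBitrate(codecName, inputBitrate)

  if (maxBitrate === undefined) return []        // codec not handled: leave defaults
  if (maxBitrate === -1) return [ '-c:a copy' ]  // already low enough: keep the stream as is
  return [ `-b:a ${maxBitrate}k` ]               // otherwise cap the bitrate
}
```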
diff --git a/server/helpers/ffprobe-utils.ts b/server/helpers/ffprobe-utils.ts
new file mode 100644
index 000000000..6159d3963
--- /dev/null
+++ b/server/helpers/ffprobe-utils.ts
@@ -0,0 +1,249 @@
1 | import * as ffmpeg from 'fluent-ffmpeg' | ||
2 | import { VideoFileMetadata } from '@shared/models/videos/video-file-metadata' | ||
3 | import { getMaxBitrate, VideoResolution } from '../../shared/models/videos' | ||
4 | import { CONFIG } from '../initializers/config' | ||
5 | import { VIDEO_TRANSCODING_FPS } from '../initializers/constants' | ||
6 | import { logger } from './logger' | ||
7 | |||
8 | function ffprobePromise (path: string) { | ||
9 | return new Promise<ffmpeg.FfprobeData>((res, rej) => { | ||
10 | ffmpeg.ffprobe(path, (err, data) => { | ||
11 | if (err) return rej(err) | ||
12 | |||
13 | return res(data) | ||
14 | }) | ||
15 | }) | ||
16 | } | ||
17 | |||
18 | async function getAudioStream (videoPath: string, existingProbe?: ffmpeg.FfprobeData) { | ||
19 | // without position, ffprobe considers the last input only | ||
20 | // we make it consider the first input only | ||
21 | // if you pass a file path to pos, then ffprobe acts on that file directly | ||
22 | const data = existingProbe || await ffprobePromise(videoPath) | ||
23 | |||
24 | if (Array.isArray(data.streams)) { | ||
25 | const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio') | ||
26 | |||
27 | if (audioStream) { | ||
28 | return { | ||
29 | absolutePath: data.format.filename, | ||
30 | audioStream, | ||
31 | bitrate: parseInt(audioStream['bit_rate'] + '', 10) | ||
32 | } | ||
33 | } | ||
34 | } | ||
35 | |||
36 | return { absolutePath: data.format.filename } | ||
37 | } | ||
38 | |||
39 | function getMaxAudioBitrate (type: 'aac' | 'mp3' | string, bitrate: number) { | ||
40 | const baseKbitrate = 384 | ||
41 | const toBits = (kbits: number) => kbits * 8000 | ||
42 | |||
43 | if (type === 'aac') { | ||
44 | switch (true) { | ||
45 | case bitrate > toBits(baseKbitrate): | ||
46 | return baseKbitrate | ||
47 | |||
48 | default: | ||
49 | return -1 // we interpret it as a signal to copy the audio stream as is | ||
50 | } | ||
51 | } | ||
52 | |||
53 | if (type === 'mp3') { | ||
54 | /* | ||
55 | a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac. | ||
56 | That's why, when using aac, we can go to lower kbit/sec. The equivalences | ||
57 | made here are not made to be accurate, especially with good mp3 encoders. | ||
58 | */ | ||
59 | switch (true) { | ||
60 | case bitrate <= toBits(192): | ||
61 | return 128 | ||
62 | |||
63 | case bitrate <= toBits(384): | ||
64 | return 256 | ||
65 | |||
66 | default: | ||
67 | return baseKbitrate | ||
68 | } | ||
69 | } | ||
70 | |||
71 | return undefined | ||
72 | } | ||
73 | |||
74 | async function getVideoStreamSize (path: string, existingProbe?: ffmpeg.FfprobeData) { | ||
75 | const videoStream = await getVideoStreamFromFile(path, existingProbe) | ||
76 | |||
77 | return videoStream === null | ||
78 | ? { width: 0, height: 0 } | ||
79 | : { width: videoStream.width, height: videoStream.height } | ||
80 | } | ||
81 | |||
82 | async function getVideoStreamCodec (path: string) { | ||
83 | const videoStream = await getVideoStreamFromFile(path) | ||
84 | |||
85 | if (!videoStream) return '' | ||
86 | |||
87 | const videoCodec = videoStream.codec_tag_string | ||
88 | |||
89 | const baseProfileMatrix = { | ||
90 | High: '6400', | ||
91 | Main: '4D40', | ||
92 | Baseline: '42E0' | ||
93 | } | ||
94 | |||
95 | let baseProfile = baseProfileMatrix[videoStream.profile] | ||
96 | if (!baseProfile) { | ||
97 | logger.warn('Cannot get video profile codec of %s.', path, { videoStream }) | ||
98 | baseProfile = baseProfileMatrix['High'] // Fallback | ||
99 | } | ||
100 | |||
101 | let level = videoStream.level.toString(16) | ||
102 | if (level.length === 1) level = `0${level}` | ||
103 | |||
104 | return `${videoCodec}.${baseProfile}${level}` | ||
105 | } | ||
106 | |||
107 | async function getAudioStreamCodec (path: string, existingProbe?: ffmpeg.FfprobeData) { | ||
108 | const { audioStream } = await getAudioStream(path, existingProbe) | ||
109 | |||
110 | if (!audioStream) return '' | ||
111 | |||
112 | const audioCodec = audioStream.codec_name | ||
113 | if (audioCodec === 'aac') return 'mp4a.40.2' | ||
114 | |||
115 | logger.warn('Cannot get audio codec of %s.', path, { audioStream }) | ||
116 | |||
117 | return 'mp4a.40.2' // Fallback | ||
118 | } | ||
119 | |||
120 | async function getVideoFileResolution (path: string, existingProbe?: ffmpeg.FfprobeData) { | ||
121 | const size = await getVideoStreamSize(path, existingProbe) | ||
122 | |||
123 | return { | ||
124 | videoFileResolution: Math.min(size.height, size.width), | ||
125 | isPortraitMode: size.height > size.width | ||
126 | } | ||
127 | } | ||
128 | |||
129 | async function getVideoFileFPS (path: string, existingProbe?: ffmpeg.FfprobeData) { | ||
130 | const videoStream = await getVideoStreamFromFile(path, existingProbe) | ||
131 | if (videoStream === null) return 0 | ||
132 | |||
133 | for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) { | ||
134 | const valuesText: string = videoStream[key] | ||
135 | if (!valuesText) continue | ||
136 | |||
137 | const [ frames, seconds ] = valuesText.split('/') | ||
138 | if (!frames || !seconds) continue | ||
139 | |||
140 | const result = parseInt(frames, 10) / parseInt(seconds, 10) | ||
141 | if (result > 0) return Math.round(result) | ||
142 | } | ||
143 | |||
144 | return 0 | ||
145 | } | ||
146 | |||
147 | async function getMetadataFromFile (path: string, existingProbe?: ffmpeg.FfprobeData) { | ||
148 | const metadata = existingProbe || await ffprobePromise(path) | ||
149 | |||
150 | return new VideoFileMetadata(metadata) | ||
151 | } | ||
152 | |||
153 | async function getVideoFileBitrate (path: string, existingProbe?: ffmpeg.FfprobeData) { | ||
154 | const metadata = await getMetadataFromFile(path, existingProbe) | ||
155 | |||
156 | return metadata.format.bit_rate as number | ||
157 | } | ||
158 | |||
159 | async function getDurationFromVideoFile (path: string, existingProbe?: ffmpeg.FfprobeData) { | ||
160 | const metadata = await getMetadataFromFile(path, existingProbe) | ||
161 | |||
162 | return Math.floor(metadata.format.duration) | ||
163 | } | ||
164 | |||
165 | async function getVideoStreamFromFile (path: string, existingProbe?: ffmpeg.FfprobeData) { | ||
166 | const metadata = await getMetadataFromFile(path, existingProbe) | ||
167 | |||
168 | return metadata.streams.find(s => s.codec_type === 'video') || null | ||
169 | } | ||
170 | |||
171 | function computeResolutionsToTranscode (videoFileResolution: number, type: 'vod' | 'live') { | ||
172 | const configResolutions = type === 'vod' | ||
173 | ? CONFIG.TRANSCODING.RESOLUTIONS | ||
174 | : CONFIG.LIVE.TRANSCODING.RESOLUTIONS | ||
175 | |||
176 | const resolutionsEnabled: number[] = [] | ||
177 | |||
178 | // Put in the order we want to proceed jobs | ||
179 | const resolutions = [ | ||
180 | VideoResolution.H_NOVIDEO, | ||
181 | VideoResolution.H_480P, | ||
182 | VideoResolution.H_360P, | ||
183 | VideoResolution.H_720P, | ||
184 | VideoResolution.H_240P, | ||
185 | VideoResolution.H_1080P, | ||
186 | VideoResolution.H_4K | ||
187 | ] | ||
188 | |||
189 | for (const resolution of resolutions) { | ||
190 | if (configResolutions[resolution + 'p'] === true && videoFileResolution > resolution) { | ||
191 | resolutionsEnabled.push(resolution) | ||
192 | } | ||
193 | } | ||
194 | |||
195 | return resolutionsEnabled | ||
196 | } | ||
197 | |||
198 | async function canDoQuickTranscode (path: string): Promise<boolean> { | ||
199 | const probe = await ffprobePromise(path) | ||
200 | |||
201 | // NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway) | ||
202 | const videoStream = await getVideoStreamFromFile(path, probe) | ||
203 | const parsedAudio = await getAudioStream(path, probe) | ||
204 | const fps = await getVideoFileFPS(path, probe) | ||
205 | const bitRate = await getVideoFileBitrate(path, probe) | ||
206 | const resolution = await getVideoFileResolution(path, probe) | ||
207 | |||
208 | // check video params | ||
209 | if (videoStream == null) return false | ||
210 | if (videoStream['codec_name'] !== 'h264') return false | ||
211 | if (videoStream['pix_fmt'] !== 'yuv420p') return false | ||
212 | if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false | ||
213 | if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false | ||
214 | |||
215 | // check audio params (if audio stream exists) | ||
216 | if (parsedAudio.audioStream) { | ||
217 | if (parsedAudio.audioStream['codec_name'] !== 'aac') return false | ||
218 | |||
219 | const audioBitrate = parsedAudio.bitrate | ||
220 | |||
221 | const maxAudioBitrate = getMaxAudioBitrate('aac', audioBitrate) | ||
222 | if (maxAudioBitrate !== -1 && audioBitrate > maxAudioBitrate) return false | ||
223 | } | ||
224 | |||
225 | return true | ||
226 | } | ||
227 | |||
228 | function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number { | ||
229 | return VIDEO_TRANSCODING_FPS[type].slice(0) | ||
230 | .sort((a, b) => fps % a - fps % b)[0] | ||
231 | } | ||
232 | |||
233 | // --------------------------------------------------------------------------- | ||
234 | |||
235 | export { | ||
236 | getVideoStreamCodec, | ||
237 | getAudioStreamCodec, | ||
238 | getVideoStreamSize, | ||
239 | getVideoFileResolution, | ||
240 | getMetadataFromFile, | ||
241 | getMaxAudioBitrate, | ||
242 | getDurationFromVideoFile, | ||
243 | getAudioStream, | ||
244 | getVideoFileFPS, | ||
245 | getClosestFramerateStandard, | ||
246 | computeResolutionsToTranscode, | ||
247 | getVideoFileBitrate, | ||
248 | canDoQuickTranscode | ||
249 | } | ||
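Every getter in the new file accepts an optional `existingProbe`, so several checks can share one ffprobe run, which is exactly what `canDoQuickTranscode` does. A minimal caller-side sketch of the same reuse (the `probeSummary` helper is hypothetical; `ffprobePromise` is not exported here, so the probe is obtained through fluent-ffmpeg directly):

```typescript
import * as ffmpeg from 'fluent-ffmpeg'
import { getAudioStream, getVideoFileFPS, getVideoFileResolution } from './ffprobe-utils'

// Hypothetical helper: probe the file once, then pass the same FfprobeData
// to every getter so ffprobe is not spawned repeatedly
async function probeSummary (path: string) {
  const probe = await new Promise<ffmpeg.FfprobeData>((res, rej) => {
    ffmpeg.ffprobe(path, (err, data) => err ? rej(err) : res(data))
  })

  return {
    fps: await getVideoFileFPS(path, probe),
    resolution: await getVideoFileResolution(path, probe),
    audio: await getAudioStream(path, probe)
  }
}
```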