]>
Commit | Line | Data |
---|---|---|
1 | import * as ffmpeg from 'fluent-ffmpeg' | |
2 | import { readFile, remove, writeFile } from 'fs-extra' | |
3 | import { dirname, join } from 'path' | |
4 | import { VideoFileMetadata } from '@shared/models/videos/video-file-metadata' | |
5 | import { getMaxBitrate, getTargetBitrate, VideoResolution } from '../../shared/models/videos' | |
6 | import { checkFFmpegEncoders } from '../initializers/checker-before-init' | |
7 | import { CONFIG } from '../initializers/config' | |
8 | import { FFMPEG_NICE, VIDEO_LIVE, VIDEO_TRANSCODING_FPS } from '../initializers/constants' | |
9 | import { processImage } from './image-utils' | |
10 | import { logger } from './logger' | |
11 | ||
12 | /** | |
13 | * A toolbox to play with audio | |
14 | */ | |
15 | namespace audio { | |
16 | export const get = (videoPath: string) => { | |
17 | // without position, ffprobe considers the last input only | |
18 | // we make it consider the first input only | |
19 | // if you pass a file path to pos, then ffprobe acts on that file directly | |
20 | return new Promise<{ absolutePath: string, audioStream?: any }>((res, rej) => { | |
21 | ||
22 | function parseFfprobe (err: any, data: ffmpeg.FfprobeData) { | |
23 | if (err) return rej(err) | |
24 | ||
25 | if ('streams' in data) { | |
26 | const audioStream = data.streams.find(stream => stream['codec_type'] === 'audio') | |
27 | if (audioStream) { | |
28 | return res({ | |
29 | absolutePath: data.format.filename, | |
30 | audioStream | |
31 | }) | |
32 | } | |
33 | } | |
34 | ||
35 | return res({ absolutePath: data.format.filename }) | |
36 | } | |
37 | ||
38 | return ffmpeg.ffprobe(videoPath, parseFfprobe) | |
39 | }) | |
40 | } | |
41 | ||
42 | export namespace bitrate { | |
43 | const baseKbitrate = 384 | |
44 | ||
45 | const toBits = (kbits: number) => kbits * 8000 | |
46 | ||
47 | export const aac = (bitrate: number): number => { | |
48 | switch (true) { | |
49 | case bitrate > toBits(baseKbitrate): | |
50 | return baseKbitrate | |
51 | ||
52 | default: | |
53 | return -1 // we interpret it as a signal to copy the audio stream as is | |
54 | } | |
55 | } | |
56 | ||
57 | export const mp3 = (bitrate: number): number => { | |
58 | /* | |
59 | a 192kbit/sec mp3 doesn't hold as much information as a 192kbit/sec aac. | |
60 | That's why, when using aac, we can go to lower kbit/sec. The equivalences | |
61 | made here are not made to be accurate, especially with good mp3 encoders. | |
62 | */ | |
63 | switch (true) { | |
64 | case bitrate <= toBits(192): | |
65 | return 128 | |
66 | ||
67 | case bitrate <= toBits(384): | |
68 | return 256 | |
69 | ||
70 | default: | |
71 | return baseKbitrate | |
72 | } | |
73 | } | |
74 | } | |
75 | } | |
76 | ||
77 | function computeResolutionsToTranscode (videoFileResolution: number, type: 'vod' | 'live') { | |
78 | const configResolutions = type === 'vod' | |
79 | ? CONFIG.TRANSCODING.RESOLUTIONS | |
80 | : CONFIG.LIVE.TRANSCODING.RESOLUTIONS | |
81 | ||
82 | const resolutionsEnabled: number[] = [] | |
83 | ||
84 | // Put in the order we want to proceed jobs | |
85 | const resolutions = [ | |
86 | VideoResolution.H_NOVIDEO, | |
87 | VideoResolution.H_480P, | |
88 | VideoResolution.H_360P, | |
89 | VideoResolution.H_720P, | |
90 | VideoResolution.H_240P, | |
91 | VideoResolution.H_1080P, | |
92 | VideoResolution.H_4K | |
93 | ] | |
94 | ||
95 | for (const resolution of resolutions) { | |
96 | if (configResolutions[resolution + 'p'] === true && videoFileResolution > resolution) { | |
97 | resolutionsEnabled.push(resolution) | |
98 | } | |
99 | } | |
100 | ||
101 | return resolutionsEnabled | |
102 | } | |
103 | ||
104 | async function getVideoStreamSize (path: string) { | |
105 | const videoStream = await getVideoStreamFromFile(path) | |
106 | ||
107 | return videoStream === null | |
108 | ? { width: 0, height: 0 } | |
109 | : { width: videoStream.width, height: videoStream.height } | |
110 | } | |
111 | ||
112 | async function getVideoStreamCodec (path: string) { | |
113 | const videoStream = await getVideoStreamFromFile(path) | |
114 | ||
115 | if (!videoStream) return '' | |
116 | ||
117 | const videoCodec = videoStream.codec_tag_string | |
118 | ||
119 | const baseProfileMatrix = { | |
120 | High: '6400', | |
121 | Main: '4D40', | |
122 | Baseline: '42E0' | |
123 | } | |
124 | ||
125 | let baseProfile = baseProfileMatrix[videoStream.profile] | |
126 | if (!baseProfile) { | |
127 | logger.warn('Cannot get video profile codec of %s.', path, { videoStream }) | |
128 | baseProfile = baseProfileMatrix['High'] // Fallback | |
129 | } | |
130 | ||
131 | let level = videoStream.level.toString(16) | |
132 | if (level.length === 1) level = `0${level}` | |
133 | ||
134 | return `${videoCodec}.${baseProfile}${level}` | |
135 | } | |
136 | ||
137 | async function getAudioStreamCodec (path: string) { | |
138 | const { audioStream } = await audio.get(path) | |
139 | ||
140 | if (!audioStream) return '' | |
141 | ||
142 | const audioCodec = audioStream.codec_name | |
143 | if (audioCodec === 'aac') return 'mp4a.40.2' | |
144 | ||
145 | logger.warn('Cannot get audio codec of %s.', path, { audioStream }) | |
146 | ||
147 | return 'mp4a.40.2' // Fallback | |
148 | } | |
149 | ||
150 | async function getVideoFileResolution (path: string) { | |
151 | const size = await getVideoStreamSize(path) | |
152 | ||
153 | return { | |
154 | videoFileResolution: Math.min(size.height, size.width), | |
155 | isPortraitMode: size.height > size.width | |
156 | } | |
157 | } | |
158 | ||
159 | async function getVideoFileFPS (path: string) { | |
160 | const videoStream = await getVideoStreamFromFile(path) | |
161 | if (videoStream === null) return 0 | |
162 | ||
163 | for (const key of [ 'avg_frame_rate', 'r_frame_rate' ]) { | |
164 | const valuesText: string = videoStream[key] | |
165 | if (!valuesText) continue | |
166 | ||
167 | const [ frames, seconds ] = valuesText.split('/') | |
168 | if (!frames || !seconds) continue | |
169 | ||
170 | const result = parseInt(frames, 10) / parseInt(seconds, 10) | |
171 | if (result > 0) return Math.round(result) | |
172 | } | |
173 | ||
174 | return 0 | |
175 | } | |
176 | ||
177 | async function getMetadataFromFile <T> (path: string, cb = metadata => metadata) { | |
178 | return new Promise<T>((res, rej) => { | |
179 | ffmpeg.ffprobe(path, (err, metadata) => { | |
180 | if (err) return rej(err) | |
181 | ||
182 | return res(cb(new VideoFileMetadata(metadata))) | |
183 | }) | |
184 | }) | |
185 | } | |
186 | ||
187 | async function getVideoFileBitrate (path: string) { | |
188 | return getMetadataFromFile<number>(path, metadata => metadata.format.bit_rate) | |
189 | } | |
190 | ||
191 | function getDurationFromVideoFile (path: string) { | |
192 | return getMetadataFromFile<number>(path, metadata => Math.floor(metadata.format.duration)) | |
193 | } | |
194 | ||
195 | function getVideoStreamFromFile (path: string) { | |
196 | return getMetadataFromFile<any>(path, metadata => metadata.streams.find(s => s.codec_type === 'video') || null) | |
197 | } | |
198 | ||
199 | async function generateImageFromVideoFile (fromPath: string, folder: string, imageName: string, size: { width: number, height: number }) { | |
200 | const pendingImageName = 'pending-' + imageName | |
201 | ||
202 | const options = { | |
203 | filename: pendingImageName, | |
204 | count: 1, | |
205 | folder | |
206 | } | |
207 | ||
208 | const pendingImagePath = join(folder, pendingImageName) | |
209 | ||
210 | try { | |
211 | await new Promise<string>((res, rej) => { | |
212 | ffmpeg(fromPath, { niceness: FFMPEG_NICE.THUMBNAIL }) | |
213 | .on('error', rej) | |
214 | .on('end', () => res(imageName)) | |
215 | .thumbnail(options) | |
216 | }) | |
217 | ||
218 | const destination = join(folder, imageName) | |
219 | await processImage(pendingImagePath, destination, size) | |
220 | } catch (err) { | |
221 | logger.error('Cannot generate image from video %s.', fromPath, { err }) | |
222 | ||
223 | try { | |
224 | await remove(pendingImagePath) | |
225 | } catch (err) { | |
226 | logger.debug('Cannot remove pending image path after generation error.', { err }) | |
227 | } | |
228 | } | |
229 | } | |
230 | ||
// Discriminator tag for the transcoding profiles supported by transcode()
type TranscodeOptionsType = 'hls' | 'quick-transcode' | 'video' | 'merge-audio' | 'only-audio'

// Options common to every transcoding profile
interface BaseTranscodeOptions {
  type: TranscodeOptionsType
  inputPath: string
  outputPath: string
  resolution: VideoResolution
  isPortraitMode?: boolean
}

// Produce an HLS playlist; codecs may be copied instead of re-encoded
interface HLSTranscodeOptions extends BaseTranscodeOptions {
  type: 'hls'
  copyCodecs: boolean
  hlsPlaylist: {
    videoFilename: string
  }
}

// Remux only: the input is already compatible (see canDoQuickTranscode)
interface QuickTranscodeOptions extends BaseTranscodeOptions {
  type: 'quick-transcode'
}

// Standard x264 transcode to the target resolution
interface VideoTranscodeOptions extends BaseTranscodeOptions {
  type: 'video'
}

// Merge a looping visual input with a separate audio file
interface MergeAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'merge-audio'
  audioPath: string
}

// Keep only the audio stream of the input
interface OnlyAudioTranscodeOptions extends BaseTranscodeOptions {
  type: 'only-audio'
}

// Discriminated union: narrow on the 'type' tag
type TranscodeOptions =
  HLSTranscodeOptions
  | VideoTranscodeOptions
  | MergeAudioTranscodeOptions
  | OnlyAudioTranscodeOptions
  | QuickTranscodeOptions
272 | ||
273 | function transcode (options: TranscodeOptions) { | |
274 | return new Promise<void>(async (res, rej) => { | |
275 | try { | |
276 | let command = getFFmpeg(options.inputPath) | |
277 | .output(options.outputPath) | |
278 | ||
279 | if (options.type === 'quick-transcode') { | |
280 | command = buildQuickTranscodeCommand(command) | |
281 | } else if (options.type === 'hls') { | |
282 | command = await buildHLSVODCommand(command, options) | |
283 | } else if (options.type === 'merge-audio') { | |
284 | command = await buildAudioMergeCommand(command, options) | |
285 | } else if (options.type === 'only-audio') { | |
286 | command = buildOnlyAudioCommand(command, options) | |
287 | } else { | |
288 | command = await buildx264Command(command, options) | |
289 | } | |
290 | ||
291 | command | |
292 | .on('error', (err, stdout, stderr) => { | |
293 | logger.error('Error in transcoding job.', { stdout, stderr }) | |
294 | return rej(err) | |
295 | }) | |
296 | .on('end', () => { | |
297 | return fixHLSPlaylistIfNeeded(options) | |
298 | .then(() => res()) | |
299 | .catch(err => rej(err)) | |
300 | }) | |
301 | .run() | |
302 | } catch (err) { | |
303 | return rej(err) | |
304 | } | |
305 | }) | |
306 | } | |
307 | ||
308 | async function canDoQuickTranscode (path: string): Promise<boolean> { | |
309 | // NOTE: This could be optimized by running ffprobe only once (but it runs fast anyway) | |
310 | const videoStream = await getVideoStreamFromFile(path) | |
311 | const parsedAudio = await audio.get(path) | |
312 | const fps = await getVideoFileFPS(path) | |
313 | const bitRate = await getVideoFileBitrate(path) | |
314 | const resolution = await getVideoFileResolution(path) | |
315 | ||
316 | // check video params | |
317 | if (videoStream == null) return false | |
318 | if (videoStream['codec_name'] !== 'h264') return false | |
319 | if (videoStream['pix_fmt'] !== 'yuv420p') return false | |
320 | if (fps < VIDEO_TRANSCODING_FPS.MIN || fps > VIDEO_TRANSCODING_FPS.MAX) return false | |
321 | if (bitRate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)) return false | |
322 | ||
323 | // check audio params (if audio stream exists) | |
324 | if (parsedAudio.audioStream) { | |
325 | if (parsedAudio.audioStream['codec_name'] !== 'aac') return false | |
326 | ||
327 | const maxAudioBitrate = audio.bitrate['aac'](parsedAudio.audioStream['bit_rate']) | |
328 | if (maxAudioBitrate !== -1 && parsedAudio.audioStream['bit_rate'] > maxAudioBitrate) return false | |
329 | } | |
330 | ||
331 | return true | |
332 | } | |
333 | ||
334 | function getClosestFramerateStandard (fps: number, type: 'HD_STANDARD' | 'STANDARD'): number { | |
335 | return VIDEO_TRANSCODING_FPS[type].slice(0) | |
336 | .sort((a, b) => fps % a - fps % b)[0] | |
337 | } | |
338 | ||
339 | function convertWebPToJPG (path: string, destination: string): Promise<void> { | |
340 | return new Promise<void>(async (res, rej) => { | |
341 | try { | |
342 | const command = ffmpeg(path).output(destination) | |
343 | ||
344 | command.on('error', (err, stdout, stderr) => { | |
345 | logger.error('Error in ffmpeg webp convert process.', { stdout, stderr }) | |
346 | return rej(err) | |
347 | }) | |
348 | .on('end', () => res()) | |
349 | .run() | |
350 | } catch (err) { | |
351 | return rej(err) | |
352 | } | |
353 | }) | |
354 | } | |
355 | ||
/**
 * Starts a live transcoding ffmpeg process that re-encodes the RTMP input
 * into one HLS rendition per requested resolution, and returns the running
 * command so the caller can monitor/kill it.
 */
function runLiveTranscoding (rtmpUrl: string, outPath: string, resolutions: number[], deleteSegments: boolean) {
  const command = getFFmpeg(rtmpUrl)
  // Reduce input buffering latency for live ingestion
  command.inputOption('-fflags nobuffer')

  // -var_stream_map entries, one "v:i,a:i" pair per rendition
  const varStreamMap: string[] = []

  // Split the input video once, then scale each branch to its target height
  command.complexFilter([
    {
      inputs: '[v:0]',
      filter: 'split',
      options: resolutions.length,
      outputs: resolutions.map(r => `vtemp${r}`)
    },

    ...resolutions.map(r => ({
      inputs: `vtemp${r}`,
      filter: 'scale',
      options: `w=-2:h=${r}`, // w=-2 keeps aspect ratio with an even width
      outputs: `vout${r}`
    }))
  ])

  const liveFPS = VIDEO_TRANSCODING_FPS.AVERAGE

  command.withFps(liveFPS)

  // x264 options shared by all renditions (see addDefaultX264Params for VOD equivalents)
  command.outputOption('-b_strategy 1')
  command.outputOption('-bf 16')
  command.outputOption('-preset superfast')
  command.outputOption('-level 3.1')
  command.outputOption('-map_metadata -1')
  command.outputOption('-pix_fmt yuv420p')
  command.outputOption('-max_muxing_queue_size 1024')

  // Map each scaled branch to its own x264 output with a resolution-derived bitrate;
  // the first input audio stream is re-mapped and AAC-encoded for every rendition
  for (let i = 0; i < resolutions.length; i++) {
    const resolution = resolutions[i]

    command.outputOption(`-map [vout${resolution}]`)
    command.outputOption(`-c:v:${i} libx264`)
    command.outputOption(`-b:v:${i} ${getTargetBitrate(resolution, liveFPS, VIDEO_TRANSCODING_FPS)}`)

    command.outputOption(`-map a:0`)
    command.outputOption(`-c:a:${i} aac`)

    varStreamMap.push(`v:${i},a:${i}`)
  }

  addDefaultLiveHLSParams(command, outPath, deleteSegments)

  command.outputOption('-var_stream_map', varStreamMap.join(' '))

  command.run()

  return command
}
411 | ||
412 | function runLiveMuxing (rtmpUrl: string, outPath: string, deleteSegments: boolean) { | |
413 | const command = getFFmpeg(rtmpUrl) | |
414 | command.inputOption('-fflags nobuffer') | |
415 | ||
416 | command.outputOption('-c:v copy') | |
417 | command.outputOption('-c:a copy') | |
418 | command.outputOption('-map 0:a?') | |
419 | command.outputOption('-map 0:v?') | |
420 | ||
421 | addDefaultLiveHLSParams(command, outPath, deleteSegments) | |
422 | ||
423 | command.run() | |
424 | ||
425 | return command | |
426 | } | |
427 | ||
428 | async function hlsPlaylistToFragmentedMP4 (hlsDirectory: string, segmentFiles: string[], outputPath: string) { | |
429 | const concatFile = 'concat.txt' | |
430 | const concatFilePath = join(hlsDirectory, concatFile) | |
431 | const content = segmentFiles.map(f => 'file ' + f) | |
432 | .join('\n') | |
433 | ||
434 | await writeFile(concatFilePath, content + '\n') | |
435 | ||
436 | const command = getFFmpeg(concatFilePath) | |
437 | command.inputOption('-safe 0') | |
438 | command.inputOption('-f concat') | |
439 | ||
440 | command.outputOption('-c copy') | |
441 | command.output(outputPath) | |
442 | ||
443 | command.run() | |
444 | ||
445 | function cleaner () { | |
446 | remove(concatFile) | |
447 | .catch(err => logger.error('Cannot remove concat file in %s.', hlsDirectory, { err })) | |
448 | } | |
449 | ||
450 | return new Promise<string>((res, rej) => { | |
451 | command.on('error', err => { | |
452 | cleaner() | |
453 | ||
454 | rej(err) | |
455 | }) | |
456 | ||
457 | command.on('end', () => { | |
458 | cleaner() | |
459 | ||
460 | res() | |
461 | }) | |
462 | }) | |
463 | } | |
464 | ||
465 | // --------------------------------------------------------------------------- | |
466 | ||
// Public API of this ffmpeg helper module
export {
  getVideoStreamCodec,
  getAudioStreamCodec,
  runLiveMuxing,
  convertWebPToJPG,
  getVideoStreamSize,
  getVideoFileResolution,
  getMetadataFromFile,
  getDurationFromVideoFile,
  runLiveTranscoding,
  generateImageFromVideoFile,
  TranscodeOptions,
  TranscodeOptionsType,
  transcode,
  getVideoFileFPS,
  computeResolutionsToTranscode,
  audio,
  hlsPlaylistToFragmentedMP4,
  getVideoFileBitrate,
  canDoQuickTranscode
}
488 | ||
489 | // --------------------------------------------------------------------------- | |
490 | ||
491 | function addDefaultX264Params (command: ffmpeg.FfmpegCommand) { | |
492 | command.outputOption('-level 3.1') // 3.1 is the minimal resource allocation for our highest supported resolution | |
493 | .outputOption('-b_strategy 1') // NOTE: b-strategy 1 - heuristic algorithm, 16 is optimal B-frames for it | |
494 | .outputOption('-bf 16') // NOTE: Why 16: https://github.com/Chocobozzz/PeerTube/pull/774. b-strategy 2 -> B-frames<16 | |
495 | .outputOption('-pix_fmt yuv420p') // allows import of source material with incompatible pixel formats (e.g. MJPEG video) | |
496 | .outputOption('-map_metadata -1') // strip all metadata | |
497 | } | |
498 | ||
499 | function addDefaultLiveHLSParams (command: ffmpeg.FfmpegCommand, outPath: string, deleteSegments: boolean) { | |
500 | command.outputOption('-hls_time ' + VIDEO_LIVE.SEGMENT_TIME) | |
501 | command.outputOption('-hls_list_size ' + VIDEO_LIVE.SEGMENTS_LIST_SIZE) | |
502 | ||
503 | if (deleteSegments === true) { | |
504 | command.outputOption('-hls_flags delete_segments') | |
505 | } | |
506 | ||
507 | command.outputOption(`-hls_segment_filename ${join(outPath, '%v-%d.ts')}`) | |
508 | command.outputOption('-master_pl_name master.m3u8') | |
509 | command.outputOption(`-f hls`) | |
510 | ||
511 | command.output(join(outPath, '%v.m3u8')) | |
512 | } | |
513 | ||
/**
 * Configures the command for an x264 encode: applies the H.264 preset,
 * sizes the output, and clamps the frame rate.
 *
 * NOTE(review): the hard MIN/MAX fps clamp below runs AFTER presetH264 has
 * already derived the target bitrate from the pre-clamp fps — confirm this
 * ordering is intentional before reordering anything here.
 */
async function buildx264Command (command: ffmpeg.FfmpegCommand, options: TranscodeOptions) {
  let fps = await getVideoFileFPS(options.inputPath)
  if (
    // On small/medium resolutions, limit FPS
    options.resolution !== undefined &&
    options.resolution < VIDEO_TRANSCODING_FPS.KEEP_ORIGIN_FPS_RESOLUTION_MIN &&
    fps > VIDEO_TRANSCODING_FPS.AVERAGE
  ) {
    // Get closest standard framerate by modulo: downsampling has to be done to a divisor of the nominal fps value
    fps = getClosestFramerateStandard(fps, 'STANDARD')
  }

  command = await presetH264(command, options.inputPath, options.resolution, fps)

  if (options.resolution !== undefined) {
    // '?x720' or '720x?' for example — '?' lets ffmpeg keep the aspect ratio
    const size = options.isPortraitMode === true ? `${options.resolution}x?` : `?x${options.resolution}`
    command = command.size(size)
  }

  if (fps) {
    // Hard FPS limits
    if (fps > VIDEO_TRANSCODING_FPS.MAX) fps = getClosestFramerateStandard(fps, 'HD_STANDARD')
    else if (fps < VIDEO_TRANSCODING_FPS.MIN) fps = VIDEO_TRANSCODING_FPS.MIN

    command = command.withFPS(fps)
  }

  return command
}
544 | ||
/**
 * Configures the command to merge the (visual) input with a separate audio
 * file: the first input is looped, the audio file is added as a second
 * input, and '-shortest' stops the output when the audio ends.
 */
async function buildAudioMergeCommand (command: ffmpeg.FfmpegCommand, options: MergeAudioTranscodeOptions) {
  // loop(undefined) loops the first input indefinitely; bounded by -shortest below
  command = command.loop(undefined)

  command = await presetH264VeryFast(command, options.audioPath, options.resolution)

  command = command.input(options.audioPath)
    .videoFilter('scale=trunc(iw/2)*2:trunc(ih/2)*2') // Avoid "height not divisible by 2" error
    .outputOption('-tune stillimage')
    .outputOption('-shortest')

  return command
}
557 | ||
558 | function buildOnlyAudioCommand (command: ffmpeg.FfmpegCommand, options: OnlyAudioTranscodeOptions) { | |
559 | command = presetOnlyAudio(command) | |
560 | ||
561 | return command | |
562 | } | |
563 | ||
564 | function buildQuickTranscodeCommand (command: ffmpeg.FfmpegCommand) { | |
565 | command = presetCopy(command) | |
566 | ||
567 | command = command.outputOption('-map_metadata -1') // strip all metadata | |
568 | .outputOption('-movflags faststart') | |
569 | ||
570 | return command | |
571 | } | |
572 | ||
573 | async function buildHLSVODCommand (command: ffmpeg.FfmpegCommand, options: HLSTranscodeOptions) { | |
574 | const videoPath = getHLSVideoPath(options) | |
575 | ||
576 | if (options.copyCodecs) command = presetCopy(command) | |
577 | else if (options.resolution === VideoResolution.H_NOVIDEO) command = presetOnlyAudio(command) | |
578 | else command = await buildx264Command(command, options) | |
579 | ||
580 | command = command.outputOption('-hls_time 4') | |
581 | .outputOption('-hls_list_size 0') | |
582 | .outputOption('-hls_playlist_type vod') | |
583 | .outputOption('-hls_segment_filename ' + videoPath) | |
584 | .outputOption('-hls_segment_type fmp4') | |
585 | .outputOption('-f hls') | |
586 | .outputOption('-hls_flags single_file') | |
587 | ||
588 | return command | |
589 | } | |
590 | ||
591 | function getHLSVideoPath (options: HLSTranscodeOptions) { | |
592 | return `${dirname(options.outputPath)}/${options.hlsPlaylist.videoFilename}` | |
593 | } | |
594 | ||
595 | async function fixHLSPlaylistIfNeeded (options: TranscodeOptions) { | |
596 | if (options.type !== 'hls') return | |
597 | ||
598 | const fileContent = await readFile(options.outputPath) | |
599 | ||
600 | const videoFileName = options.hlsPlaylist.videoFilename | |
601 | const videoFilePath = getHLSVideoPath(options) | |
602 | ||
603 | // Fix wrong mapping with some ffmpeg versions | |
604 | const newContent = fileContent.toString() | |
605 | .replace(`#EXT-X-MAP:URI="${videoFilePath}",`, `#EXT-X-MAP:URI="${videoFileName}",`) | |
606 | ||
607 | await writeFile(options.outputPath, newContent) | |
608 | } | |
609 | ||
610 | /** | |
611 | * A slightly customised version of the 'veryfast' x264 preset | |
612 | * | |
613 | * The veryfast preset is right in the sweet spot of performance | |
614 | * and quality. Superfast and ultrafast will give you better | |
615 | * performance, but then quality is noticeably worse. | |
616 | */ | |
617 | async function presetH264VeryFast (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) { | |
618 | let localCommand = await presetH264(command, input, resolution, fps) | |
619 | ||
620 | localCommand = localCommand.outputOption('-preset:v veryfast') | |
621 | ||
622 | /* | |
623 | MAIN reference: https://slhck.info/video/2017/03/01/rate-control.html | |
624 | Our target situation is closer to a livestream than a stream, | |
625 | since we want to reduce as much a possible the encoding burden, | |
626 | although not to the point of a livestream where there is a hard | |
627 | constraint on the frames per second to be encoded. | |
628 | */ | |
629 | ||
630 | return localCommand | |
631 | } | |
632 | ||
/**
 * Standard profile, with variable bitrate audio and faststart.
 *
 * As for the audio, quality '5' is the highest and ensures 96-112kbps/channel
 * See https://trac.ffmpeg.org/wiki/Encode/AAC#fdk_vbr
 *
 * Probes the input's audio stream to decide between dropping audio,
 * libfdk_aac VBR (when the encoder is available) and plain aac with a
 * bitrate ceiling. When fps is given, also applies VBV rate control and a
 * 2-second keyframe interval.
 */
async function presetH264 (command: ffmpeg.FfmpegCommand, input: string, resolution: VideoResolution, fps?: number) {
  let localCommand = command
    .format('mp4')
    .videoCodec('libx264')
    .outputOption('-movflags faststart')

  addDefaultX264Params(localCommand)

  const parsedAudio = await audio.get(input)

  if (!parsedAudio.audioStream) {
    // No audio in the input: drop the audio track entirely
    localCommand = localCommand.noAudio()
  } else if ((await checkFFmpegEncoders()).get('libfdk_aac')) { // we favor VBR, if a good AAC encoder is available
    localCommand = localCommand
      .audioCodec('libfdk_aac')
      .audioQuality(5)
  } else {
    // we try to reduce the ceiling bitrate by making rough matches of bitrates
    // of course this is far from perfect, but it might save some space in the end
    localCommand = localCommand.audioCodec('aac')

    const audioCodecName = parsedAudio.audioStream['codec_name']

    // audio.bitrate only has entries for known codecs ('aac', 'mp3');
    // -1 means "copy-level" bitrate, so no explicit ceiling is set
    if (audio.bitrate[audioCodecName]) {
      const bitrate = audio.bitrate[audioCodecName](parsedAudio.audioStream['bit_rate'])
      if (bitrate !== undefined && bitrate !== -1) localCommand = localCommand.audioBitrate(bitrate)
    }
  }

  if (fps) {
    // Constrained Encoding (VBV)
    // https://slhck.info/video/2017/03/01/rate-control.html
    // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate
    const targetBitrate = getTargetBitrate(resolution, fps, VIDEO_TRANSCODING_FPS)
    localCommand = localCommand.outputOptions([ `-maxrate ${targetBitrate}`, `-bufsize ${targetBitrate * 2}` ])

    // Keyframe interval of 2 seconds for faster seeking and resolution switching.
    // https://streaminglearningcenter.com/blogs/whats-the-right-keyframe-interval.html
    // https://superuser.com/a/908325
    localCommand = localCommand.outputOption(`-g ${fps * 2}`)
  }

  return localCommand
}
683 | ||
684 | function presetCopy (command: ffmpeg.FfmpegCommand): ffmpeg.FfmpegCommand { | |
685 | return command | |
686 | .format('mp4') | |
687 | .videoCodec('copy') | |
688 | .audioCodec('copy') | |
689 | } | |
690 | ||
691 | function presetOnlyAudio (command: ffmpeg.FfmpegCommand): ffmpeg.FfmpegCommand { | |
692 | return command | |
693 | .format('mp4') | |
694 | .audioCodec('copy') | |
695 | .noVideo() | |
696 | } | |
697 | ||
698 | function getFFmpeg (input: string) { | |
699 | // We set cwd explicitly because ffmpeg appears to create temporary files when trancoding which fails in read-only file systems | |
700 | const command = ffmpeg(input, { niceness: FFMPEG_NICE.TRANSCODING, cwd: CONFIG.STORAGE.TMP_DIR }) | |
701 | ||
702 | if (CONFIG.TRANSCODING.THREADS > 0) { | |
703 | // If we don't set any threads ffmpeg will chose automatically | |
704 | command.outputOption('-threads ' + CONFIG.TRANSCODING.THREADS) | |
705 | } | |
706 | ||
707 | return command | |
708 | } |