From: Felix Ableitner
Date: Mon, 8 Oct 2018 14:26:04 +0000 (-0500)
Subject: Set bitrate limits for transcoding (fixes #638) (#1135)
X-Git-Tag: v1.1.0-alpha.1~33
X-Git-Url: https://git.immae.eu/?a=commitdiff_plain;h=edb4ffc7e0b13659d7c73b120f2c87b27e4c26a1;p=github%2FChocobozzz%2FPeerTube.git

Set bitrate limits for transcoding (fixes #638) (#1135)

* Set bitrate limits for transcoding (fixes #638)
* added optimization script and test, changed stuff
* fix test, improve docs
* re-add optimize-old-videos script
* added documentation
* Don't optimize videos without valid UUID, or redundancy videos
* move getUUIDFromFilename
* fix tests?
* update torrent and file size, some more fixes/improvements
* use higher bitrate for high fps video, adjust bitrates
* add test video
* don't throw error if resolution is undefined
* generate test fixture on the fly
* use random noise video for bitrate test, add promise
* shorten test video to avoid timeout
* use existing function to optimize video
* various fixes
* increase test timeout
* limit test fixture size, add link
* test fixes
* add await
* more test fixes, add -b:v parameter
* replace ffmpeg wiki link
* fix ffmpeg params
* fix unit test
* add test fixture to .gitignore
* add video transcoding fps model
* add missing file
---

diff --git a/.gitignore b/.gitignore
index 22478c444..a31da70a9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,7 @@
 /test4/
 /test5/
 /test6/
+/server/tests/fixtures/video_high_bitrate_1080p.mp4
 
 # Production
 /storage/
diff --git a/package.json b/package.json
index 80d5a04ac..034b40cbc 100644
--- a/package.json
+++ b/package.json
@@ -51,6 +51,7 @@
     "generate-api-doc": "scripty",
     "parse-log": "node ./dist/scripts/parse-log.js",
     "prune-storage": "node ./dist/scripts/prune-storage.js",
+    "optimize-old-videos": "node ./dist/scripts/optimize-old-videos.js",
     "postinstall": "cd client && yarn install --pure-lockfile",
     "tsc": "tsc",
     "spectacle-docs": "node_modules/spectacle-docs/bin/spectacle.js",
diff --git a/scripts/help.sh b/scripts/help.sh
index 8ac090139..bc38bdb40 100755
--- a/scripts/help.sh
+++ b/scripts/help.sh
@@ -18,6 +18,7 @@ printf " reset-password -- -u [user] -> Reset the password of user [user]\n"
 printf " create-transcoding-job -- -v [video UUID] \n"
 printf " -> Create a transcoding job for a particular video\n"
 printf " prune-storage -> Delete (after confirmation) unknown video files/thumbnails/previews... (due to a bad video deletion, transcoding job not finished...)\n"
+printf " optimize-old-videos -> Re-transcode videos that have a high bitrate, to make them suitable for streaming over slow connections\n"
 printf " dev -> Watch, run the livereload and run the server so that you can develop the application\n"
 printf " start -> Run the server\n"
 printf " update-host -> Upgrade scheme/host in torrent files according to the webserver configuration (config/ folder)\n"
diff --git a/scripts/optimize-old-videos.ts b/scripts/optimize-old-videos.ts
new file mode 100644
index 000000000..ab44acfbe
--- /dev/null
+++ b/scripts/optimize-old-videos.ts
@@ -0,0 +1,36 @@
+import { join } from 'path'
+import { readdir } from 'fs-extra'
+import { CONFIG, VIDEO_TRANSCODING_FPS } from '../server/initializers/constants'
+import { getVideoFileResolution, getVideoFileBitrate, getVideoFileFPS } from '../server/helpers/ffmpeg-utils'
+import { getMaxBitrate } from '../shared/models/videos'
+import { VideoRedundancyModel } from '../server/models/redundancy/video-redundancy'
+import { VideoModel } from '../server/models/video/video'
+import { getUUIDFromFilename } from '../server/helpers/utils'
+import { optimizeVideofile } from '../server/lib/video-transcoding'
+
+run()
+  .then(() => process.exit(0))
+  .catch(err => {
+    console.error(err)
+    process.exit(-1)
+  })
+
+async function run () {
+  const files = await readdir(CONFIG.STORAGE.VIDEOS_DIR)
+  for (const file of files) {
+    const inputPath = join(CONFIG.STORAGE.VIDEOS_DIR, file)
+    const videoBitrate = await getVideoFileBitrate(inputPath)
+    const fps = await getVideoFileFPS(inputPath)
+    const resolution = await getVideoFileResolution(inputPath)
+    const uuid = getUUIDFromFilename(file)
+
+    const isLocalVideo = await VideoRedundancyModel.isLocalByVideoUUIDExists(uuid)
+    const isMaxBitrateExceeded =
+      videoBitrate > getMaxBitrate(resolution.videoFileResolution, fps, VIDEO_TRANSCODING_FPS)
+    if (uuid && isLocalVideo && isMaxBitrateExceeded) {
+      const videoModel = await VideoModel.loadByUUIDWithFile(uuid)
+      await optimizeVideofile(videoModel, inputPath)
+    }
+  }
+  console.log('Finished optimizing videos')
+}
diff --git a/scripts/prune-storage.ts b/scripts/prune-storage.ts
index 4088fa700..4ab0b4863 100755
--- a/scripts/prune-storage.ts
+++ b/scripts/prune-storage.ts
@@ -5,6 +5,7 @@ import { VideoModel } from '../server/models/video/video'
 import { initDatabaseModels } from '../server/initializers'
 import { remove, readdir } from 'fs-extra'
 import { VideoRedundancyModel } from '../server/models/redundancy/video-redundancy'
+import { getUUIDFromFilename } from '../server/helpers/utils'
 
 run()
   .then(() => process.exit(0))
@@ -82,15 +83,6 @@ async function pruneDirectory (directory: string, onlyOwned = false) {
   return toDelete
 }
 
-function getUUIDFromFilename (filename: string) {
-  const regex = /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/
-  const result = filename.match(regex)
-
-  if (!result || Array.isArray(result) === false) return null
-
-  return result[0]
-}
-
 async function askConfirmation () {
   return new Promise((res, rej) => {
     prompt.start()
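Note on the gate used by the script above: getVideoFileBitrate reports the file's bitrate in bits per second (via ffprobe), and getMaxBitrate allows twice the target bitrate for the file's resolution and frame rate. A minimal illustrative sketch, assuming the constants introduced in this patch and import paths relative to the scripts/ directory (the concrete numbers are only an example):

    import { VideoResolution, getMaxBitrate } from '../shared/models/videos'
    import { VIDEO_TRANSCODING_FPS } from '../server/initializers/constants'

    // A 1080p/30fps file: the target is 3300 kbps, so the ceiling is twice that
    const limit = getMaxBitrate(VideoResolution.H_1080P, 30, VIDEO_TRANSCODING_FPS) // 6 600 000 bits/s

    const measuredBitrate = 8000000 // e.g. what getVideoFileBitrate returned for the file
    const isMaxBitrateExceeded = measuredBitrate > limit // true, so this file would be re-transcoded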
diff --git a/server/helpers/ffmpeg-utils.ts b/server/helpers/ffmpeg-utils.ts
index 22bc25476..8e4471173 100644
--- a/server/helpers/ffmpeg-utils.ts
+++ b/server/helpers/ffmpeg-utils.ts
@@ -1,6 +1,6 @@
 import * as ffmpeg from 'fluent-ffmpeg'
 import { join } from 'path'
-import { VideoResolution } from '../../shared/models/videos'
+import { VideoResolution, getTargetBitrate } from '../../shared/models/videos'
 import { CONFIG, FFMPEG_NICE, VIDEO_TRANSCODING_FPS } from '../initializers'
 import { processImage } from './image-utils'
 import { logger } from './logger'
@@ -55,6 +55,16 @@ async function getVideoFileFPS (path: string) {
   return 0
 }
 
+async function getVideoFileBitrate (path: string) {
+  return new Promise((res, rej) => {
+    ffmpeg.ffprobe(path, (err, metadata) => {
+      if (err) return rej(err)
+
+      return res(metadata.format.bit_rate)
+    })
+  })
+}
+
 function getDurationFromVideoFile (path: string) {
   return new Promise((res, rej) => {
     ffmpeg.ffprobe(path, (err, metadata) => {
@@ -138,6 +148,12 @@ function transcode (options: TranscodeOptions) {
     command = command.withFPS(fps)
   }
 
+  // Constrained Encoding (VBV)
+  // https://slhck.info/video/2017/03/01/rate-control.html
+  // https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate
+  const targetBitrate = getTargetBitrate(options.resolution, fps, VIDEO_TRANSCODING_FPS)
+  command.outputOptions([`-b:v ${ targetBitrate }`, `-maxrate ${ targetBitrate }`, `-bufsize ${ targetBitrate * 2 }`])
+
   command
     .on('error', (err, stdout, stderr) => {
       logger.error('Error in transcoding job.', { stdout, stderr })
@@ -157,7 +173,8 @@ export {
   transcode,
   getVideoFileFPS,
   computeResolutionsToTranscode,
-  audio
+  audio,
+  getVideoFileBitrate
 }
 
 // ---------------------------------------------------------------------------
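The outputOptions call added to transcode() maps directly onto ffmpeg's -b:v, -maxrate and -bufsize flags, which together implement constrained (VBV) encoding. A small, self-contained fluent-ffmpeg sketch of the same idea, with an illustrative 720p/30fps target and made-up file names (not part of the patch):

    import * as ffmpeg from 'fluent-ffmpeg'

    const targetBitrate = 1750 * 1000 // bits per second; the 720p/30fps target from getTargetBitrate

    ffmpeg('input.mp4')
      .outputOptions([
        `-b:v ${ targetBitrate }`,        // average bitrate the encoder aims for
        `-maxrate ${ targetBitrate }`,    // hard ceiling enforced by the rate controller
        `-bufsize ${ targetBitrate * 2 }` // VBV buffer that absorbs short-term peaks
      ])
      .output('output.mp4')
      .on('error', err => console.error(err))
      .on('end', () => console.log('transcoding finished'))
      .run()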
diff --git a/server/helpers/utils.ts b/server/helpers/utils.ts
index 6228fec04..39afb4e7b 100644
--- a/server/helpers/utils.ts
+++ b/server/helpers/utils.ts
@@ -77,6 +77,20 @@ async function getVersion () {
   return require('../../../package.json').version
 }
 
+/**
+ * From a filename like "ede4cba5-742b-46fa-a388-9a6eb3a3aeb3.mp4", returns
+ * only the "ede4cba5-742b-46fa-a388-9a6eb3a3aeb3" part. If the filename does
+ * not contain a UUID, returns null.
+ */
+function getUUIDFromFilename (filename: string) {
+  const regex = /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/
+  const result = filename.match(regex)
+
+  if (!result || Array.isArray(result) === false) return null
+
+  return result[0]
+}
+
 // ---------------------------------------------------------------------------
 
 export {
@@ -86,5 +100,6 @@ export {
   getSecureTorrentName,
   getServerActor,
   getVersion,
-  generateVideoTmpPath
+  generateVideoTmpPath,
+  getUUIDFromFilename
 }
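Since transcoded files are stored under names of the form '<video UUID>-<resolution>.mp4' (the transcoding test below builds paths exactly this way), this helper can recover the UUID from any file name in the videos directory. A short usage sketch with made-up file names:

    import { getUUIDFromFilename } from '../server/helpers/utils'

    getUUIDFromFilename('ede4cba5-742b-46fa-a388-9a6eb3a3aeb3-720.mp4')
    // -> 'ede4cba5-742b-46fa-a388-9a6eb3a3aeb3'

    // Files without a UUID are rejected, which lets optimize-old-videos.ts skip
    // anything it does not recognise
    getUUIDFromFilename('not-a-video.mp4') // -> null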
diff --git a/server/initializers/constants.ts b/server/initializers/constants.ts
index 1a3b52015..a3e5f5dd2 100644
--- a/server/initializers/constants.ts
+++ b/server/initializers/constants.ts
@@ -3,7 +3,7 @@ import { dirname, join } from 'path'
 import { JobType, VideoRateType, VideoState, VideosRedundancy } from '../../shared/models'
 import { ActivityPubActorType } from '../../shared/models/activitypub'
 import { FollowState } from '../../shared/models/actors'
-import { VideoAbuseState, VideoImportState, VideoPrivacy } from '../../shared/models/videos'
+import { VideoAbuseState, VideoImportState, VideoPrivacy, VideoTranscodingFPS } from '../../shared/models/videos'
 // Do not use barrels, remain constants as independent as possible
 import { buildPath, isTestInstance, parseDuration, root, sanitizeHost, sanitizeUrl } from '../helpers/core-utils'
 import { NSFWPolicyType } from '../../shared/models/videos/nsfw-policy.type'
@@ -393,7 +393,7 @@ const RATES_LIMIT = {
 }
 
 let VIDEO_VIEW_LIFETIME = 60000 * 60 // 1 hour
-const VIDEO_TRANSCODING_FPS = {
+const VIDEO_TRANSCODING_FPS: VideoTranscodingFPS = {
   MIN: 10,
   AVERAGE: 30,
   MAX: 60,
diff --git a/server/lib/activitypub/crawl.ts b/server/lib/activitypub/crawl.ts
index 55912341c..db9ce3293 100644
--- a/server/lib/activitypub/crawl.ts
+++ b/server/lib/activitypub/crawl.ts
@@ -1,7 +1,7 @@
 import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT } from '../../initializers'
 import { doRequest } from '../../helpers/requests'
 import { logger } from '../../helpers/logger'
-import Bluebird = require('bluebird')
+import * as Bluebird from 'bluebird'
 
 async function crawlCollectionPage <T> (uri: string, handler: (items: T[]) => Promise<any> | Bluebird<any>) {
   logger.info('Crawling ActivityPub data on %s.', uri)
diff --git a/server/lib/job-queue/handlers/video-file.ts b/server/lib/job-queue/handlers/video-file.ts
index 1463c93fc..adc0a2a15 100644
--- a/server/lib/job-queue/handlers/video-file.ts
+++ b/server/lib/job-queue/handlers/video-file.ts
@@ -8,7 +8,7 @@ import { retryTransactionWrapper } from '../../../helpers/database-utils'
 import { sequelizeTypescript } from '../../../initializers'
 import * as Bluebird from 'bluebird'
 import { computeResolutionsToTranscode } from '../../../helpers/ffmpeg-utils'
-import { importVideoFile, transcodeOriginalVideofile, optimizeOriginalVideofile } from '../../video-transcoding'
+import { importVideoFile, transcodeOriginalVideofile, optimizeVideofile } from '../../video-transcoding'
 
 export type VideoFilePayload = {
   videoUUID: string
@@ -56,7 +56,7 @@ async function processVideoFile (job: Bull.Job) {
 
     await retryTransactionWrapper(onVideoFileTranscoderOrImportSuccess, video)
   } else {
-    await optimizeOriginalVideofile(video)
+    await optimizeVideofile(video)
 
     await retryTransactionWrapper(onVideoFileOptimizerSuccess, video, payload.isNewVideo)
   }
diff --git a/server/lib/video-transcoding.ts b/server/lib/video-transcoding.ts
index bf3ff78c2..04cadf74b 100644
--- a/server/lib/video-transcoding.ts
+++ b/server/lib/video-transcoding.ts
@@ -1,5 +1,5 @@
 import { CONFIG } from '../initializers'
-import { join, extname } from 'path'
+import { join, extname, basename } from 'path'
 import { getVideoFileFPS, getVideoFileResolution, transcode } from '../helpers/ffmpeg-utils'
 import { copy, remove, rename, stat } from 'fs-extra'
 import { logger } from '../helpers/logger'
@@ -7,11 +7,16 @@ import { VideoResolution } from '../../shared/models/videos'
 import { VideoFileModel } from '../models/video/video-file'
 import { VideoModel } from '../models/video/video'
 
-async function optimizeOriginalVideofile (video: VideoModel) {
+async function optimizeVideofile (video: VideoModel, videoInputPath?: string) {
   const videosDirectory = CONFIG.STORAGE.VIDEOS_DIR
   const newExtname = '.mp4'
-  const inputVideoFile = video.getOriginalFile()
-  const videoInputPath = join(videosDirectory, video.getVideoFilename(inputVideoFile))
+  let inputVideoFile = null
+  if (videoInputPath == null) {
+    inputVideoFile = video.getOriginalFile()
+    videoInputPath = join(videosDirectory, video.getVideoFilename(inputVideoFile))
+  } else {
+    inputVideoFile = basename(videoInputPath)
+  }
   const videoTranscodedPath = join(videosDirectory, video.id + '-transcoded' + newExtname)
 
   const transcodeOptions = {
@@ -124,7 +129,7 @@ async function importVideoFile (video: VideoModel, inputFilePath: string) {
 }
 
 export {
-  optimizeOriginalVideofile,
+  optimizeVideofile,
   transcodeOriginalVideofile,
   importVideoFile
 }
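With the rename, optimizeVideofile serves both callers: the job queue handler keeps the old behaviour (the input path is derived from the video's original file), while the new script passes the path of the file it found on disk. The two call sites, as they appear elsewhere in this patch:

    // server/lib/job-queue/handlers/video-file.ts
    await optimizeVideofile(video)

    // scripts/optimize-old-videos.ts
    await optimizeVideofile(videoModel, inputPath)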
diff --git a/server/tests/api/videos/video-transcoder.ts b/server/tests/api/videos/video-transcoder.ts
index 0f83d4d57..ec554ed19 100644
--- a/server/tests/api/videos/video-transcoder.ts
+++ b/server/tests/api/videos/video-transcoder.ts
@@ -4,8 +4,8 @@ import * as chai from 'chai'
 import 'mocha'
 import { omit } from 'lodash'
 import * as ffmpeg from 'fluent-ffmpeg'
-import { VideoDetails, VideoState } from '../../../../shared/models/videos'
-import { getVideoFileFPS, audio } from '../../../helpers/ffmpeg-utils'
+import { VideoDetails, VideoState, getMaxBitrate, VideoResolution } from '../../../../shared/models/videos'
+import { getVideoFileFPS, audio, getVideoFileBitrate, getVideoFileResolution } from '../../../helpers/ffmpeg-utils'
 import {
   buildAbsoluteFixturePath,
   doubleFollow,
@@ -20,8 +20,10 @@ import {
   uploadVideo,
   webtorrentAdd
 } from '../../utils'
-import { join } from 'path'
+import { join, basename } from 'path'
 import { waitJobs } from '../../utils/server/jobs'
+import { remove } from 'fs-extra'
+import { VIDEO_TRANSCODING_FPS } from '../../../../server/initializers/constants'
 
 const expect = chai.expect
 
@@ -228,7 +230,7 @@ describe('Test video transcoding', function () {
     }
   })
 
-  it('Should wait transcoding before publishing the video', async function () {
+  it('Should wait for transcoding before publishing the video', async function () {
     this.timeout(80000)
 
     {
@@ -281,7 +283,59 @@
     }
   })
 
+  const tempFixturePath = buildAbsoluteFixturePath('video_high_bitrate_1080p.mp4')
+  it('Should respect maximum bitrate values', async function () {
+    this.timeout(160000)
+
+    {
+      // Generate a random, high bitrate video on the fly, so we don't have to include
+      // a large file in the repo. The video needs to have a certain minimum length so
+      // that FFmpeg properly applies bitrate limits.
+      // https://stackoverflow.com/a/15795112
+      await new Promise(async (res, rej) => {
+        ffmpeg()
+          .outputOptions(['-f rawvideo', '-video_size 1920x1080', '-i /dev/urandom'])
+          .outputOptions(['-ac 2', '-f s16le', '-i /dev/urandom', '-t 10'])
+          .outputOptions(['-maxrate 10M', '-bufsize 10M'])
+          .output(tempFixturePath)
+          .on('error', rej)
+          .on('end', res)
+          .run()
+      })
+
+      const bitrate = await getVideoFileBitrate(tempFixturePath)
+      expect(bitrate).to.be.above(getMaxBitrate(VideoResolution.H_1080P, 60, VIDEO_TRANSCODING_FPS))
+
+      const videoAttributes = {
+        name: 'high bitrate video',
+        description: 'high bitrate video',
+        fixture: basename(tempFixturePath)
+      }
+
+      await uploadVideo(servers[1].url, servers[1].accessToken, videoAttributes)
+
+      await waitJobs(servers)
+
+      for (const server of servers) {
+        const res = await getVideosList(server.url)
+
+        const video = res.body.data.find(v => v.name === videoAttributes.name)
+
+        for (const resolution of ['240', '360', '480', '720', '1080']) {
+          const path = join(root(), 'test2', 'videos', video.uuid + '-' + resolution + '.mp4')
+          const bitrate = await getVideoFileBitrate(path)
+          const fps = await getVideoFileFPS(path)
+          const resolution2 = await getVideoFileResolution(path)
+
+          expect(resolution2.videoFileResolution.toString()).to.equal(resolution)
+          expect(bitrate).to.be.below(getMaxBitrate(resolution2.videoFileResolution, fps, VIDEO_TRANSCODING_FPS))
+        }
+      }
+    }
+  })
+
   after(async function () {
+    remove(tempFixturePath)
     killallServers(servers)
   })
 })
diff --git a/shared/models/videos/index.ts b/shared/models/videos/index.ts
index 90a0e3053..056ae06da 100644
--- a/shared/models/videos/index.ts
+++ b/shared/models/videos/index.ts
@@ -21,6 +21,7 @@ export * from './video-update.model'
 export * from './video.model'
 export * from './video-query.type'
 export * from './video-state.enum'
+export * from './video-transcoding-fps.model'
 export * from './caption/video-caption.model'
 export * from './caption/video-caption-update.model'
 export * from './import/video-import-create.model'
diff --git a/shared/models/videos/video-resolution.enum.ts b/shared/models/videos/video-resolution.enum.ts
index 100fc0e6e..3c52bbf98 100644
--- a/shared/models/videos/video-resolution.enum.ts
+++ b/shared/models/videos/video-resolution.enum.ts
@@ -1,3 +1,5 @@
+import { VideoTranscodingFPS } from './video-transcoding-fps.model'
+
 export enum VideoResolution {
   H_240P = 240,
   H_360P = 360,
@@ -5,3 +7,56 @@ export enum VideoResolution {
   H_720P = 720,
   H_1080P = 1080
 }
+
+/**
+ * Bitrate targets for different resolutions and frame rates, in bits per second.
+ * Sources for individual quality levels:
+ * Google Live Encoder: https://support.google.com/youtube/answer/2853702?hl=en
+ * YouTube Video Info (tested with random music video): https://www.h3xed.com/blogmedia/youtube-info.php
+ */
+export function getTargetBitrate (resolution: VideoResolution, fps: number,
+  fpsTranscodingConstants: VideoTranscodingFPS) {
+  switch (resolution) {
+    case VideoResolution.H_240P:
+      // quality according to Google Live Encoder: 300 - 700 Kbps
+      // Quality according to YouTube Video Info: 186 Kbps
+      return 250 * 1000
+    case VideoResolution.H_360P:
+      // quality according to Google Live Encoder: 400 - 1,000 Kbps
+      // Quality according to YouTube Video Info: 480 Kbps
+      return 500 * 1000
+    case VideoResolution.H_480P:
+      // quality according to Google Live Encoder: 500 - 2,000 Kbps
+      // Quality according to YouTube Video Info: 879 Kbps
+      return 900 * 1000
+    case VideoResolution.H_720P:
+      if (fps === fpsTranscodingConstants.MAX) {
+        // quality according to Google Live Encoder: 2,250 - 6,000 Kbps
+        // Quality according to YouTube Video Info: 2634 Kbps
+        return 2600 * 1000
+      } else {
+        // quality according to Google Live Encoder: 1,500 - 4,000 Kbps
+        // Quality according to YouTube Video Info: 1752 Kbps
+        return 1750 * 1000
+      }
+    case VideoResolution.H_1080P: // fallthrough
+    default:
+      if (fps === fpsTranscodingConstants.MAX) {
+        // quality according to Google Live Encoder: 3000 - 6000 Kbps
+        // Quality according to YouTube Video Info: 4387 Kbps
+        return 4400 * 1000
+      } else {
+        // quality according to Google Live Encoder: 3000 - 6000 Kbps
+        // Quality according to YouTube Video Info: 3277 Kbps
+        return 3300 * 1000
+      }
+  }
+}
+
+/**
+ * The maximum bitrate we expect to see on a transcoded video, in bits per second.
+ */
+export function getMaxBitrate (resolution: VideoResolution, fps: number,
+  fpsTranscodingConstants: VideoTranscodingFPS) {
+  return getTargetBitrate(resolution, fps, fpsTranscodingConstants) * 2
+}
diff --git a/shared/models/videos/video-transcoding-fps.model.ts b/shared/models/videos/video-transcoding-fps.model.ts
new file mode 100644
index 000000000..82022d2f1
--- /dev/null
+++ b/shared/models/videos/video-transcoding-fps.model.ts
@@ -0,0 +1,6 @@
+export type VideoTranscodingFPS = {
+  MIN: number,
+  AVERAGE: number,
+  MAX: number,
+  KEEP_ORIGIN_FPS_RESOLUTION_MIN: number
+}
diff --git a/support/doc/tools.md b/support/doc/tools.md
index 1db29edc0..8efb0c13d 100644
--- a/support/doc/tools.md
+++ b/support/doc/tools.md
@@ -187,6 +187,17 @@ To delete them (a confirmation will be demanded first):
 ```
 $ sudo -u peertube NODE_CONFIG_DIR=/var/www/peertube/config NODE_ENV=production npm run prune-storage
 ```
 
+### optimize-old-videos.js
+
+Before version v1.0.0-beta.16, PeerTube did not specify a bitrate for the transcoding of uploaded videos.
+As a result, some videos were encoded into files too large to stream over slow connections. This script
+re-transcodes these videos so that they can be watched properly, even on slow connections.
+
+```
+$ sudo -u peertube NODE_CONFIG_DIR=/var/www/peertube/config NODE_ENV=production npm run optimize-old-videos
+```
+
+
 ### update-host.js
 
 If you started PeerTube with a domain, and then changed it you will have invalid torrent files and invalid URLs in your database.
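As a quick sanity check of the bitrate table added in video-resolution.enum.ts, here is an illustrative sketch of the values the new helpers return (all in bits per second, the unit ffprobe reports and -b:v expects); import paths assume the scripts/ directory, as in optimize-old-videos.ts:

    import { VideoResolution, getTargetBitrate, getMaxBitrate } from '../shared/models/videos'
    import { VIDEO_TRANSCODING_FPS } from '../server/initializers/constants'

    // getTargetBitrate only consults the MAX field (60 fps) of the FPS constants
    getTargetBitrate(VideoResolution.H_720P, 30, VIDEO_TRANSCODING_FPS)  // 1750 * 1000 = 1.75 Mbit/s
    getTargetBitrate(VideoResolution.H_720P, 60, VIDEO_TRANSCODING_FPS)  // 2600 * 1000, a larger budget at 60 fps
    getMaxBitrate(VideoResolution.H_1080P, 60, VIDEO_TRANSCODING_FPS)    // 2 * 4400 * 1000 = 8.8 Mbit/s ceiling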