author    | Chocobozzz <me@florianbigard.com> | 2023-07-31 14:34:36 +0200
committer | Chocobozzz <me@florianbigard.com> | 2023-08-11 15:02:33 +0200
commit    | 3a4992633ee62d5edfbb484d9c6bcb3cf158489d (patch)
tree      | e4510b39bdac9c318fdb4b47018d08f15368b8f0 /server/lib/transcoding
parent    | 04d1da5621d25d59bd5fa1543b725c497bf5d9a8 (diff)
download  | PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.tar.gz
          | PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.tar.zst
          | PeerTube-3a4992633ee62d5edfbb484d9c6bcb3cf158489d.zip
Migrate server to ESM
Sorry for the very big commit that may lead to git log issues and merge
conflicts, but it's a major step forward:
* Server can start faster because import() is async and we can
  easily lazy-import big modules (see the sketches after this list)
* Angular doesn't seem to support ES imports (with the .js extension), so we
  had to properly organize peertube into a monorepo:
    * Use the yarn workspaces feature
    * Use TypeScript project references for dependencies
    * Shared projects have been moved into "packages", each one is now a
      node module (with a dedicated package.json/tsconfig.json)
    * server/tools has been moved into apps/ and is now a dedicated app,
      bundled and published on NPM so users don't have to build the peertube
      CLI tools manually
    * server/tests has been moved into packages/ so we don't compile
      the tests every time we want to run the server
* Use the isolatedModules option:
    * Had to move from const enum to plain const objects (sketched after
      this list; see also
      https://www.typescriptlang.org/docs/handbook/enums.html#objects-vs-enums)
    * Had to explicitly specify "type" imports when they are used in decorators
* Prefer tsx (which uses esbuild under the hood) over ts-node to
  load TypeScript files (tests with mocha, or scripts):
    * To reduce test complexity, and because esbuild doesn't support decorator
      metadata, we only test server files that do not import server
      models
    * We still build test files into JS files for a faster CI
* Remove the unmaintained peertube CLI import script
* Removed some barrels to speed up execution (fewer imports)
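
A minimal sketch of the lazy import() pattern mentioned above (the module path
and exported name are placeholders, not actual PeerTube code):

    // Load a heavy dependency only when it is first needed, which is
    // straightforward now that the server runs as ESM.
    async function getHeavyHelper () {
      const { HeavyHelper } = await import('./heavy-module.js') // placeholder path
      return HeavyHelper
    }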
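
A minimal sketch of the "const enum" to const object migration (the JobState
name and values are illustrative, not taken from the PeerTube codebase):

    // Before (problematic with isolatedModules):
    //   export const enum JobState { PENDING = 1, DONE = 2 }

    // After: a plain const object plus a type derived from its values
    export const JobState = {
      PENDING: 1,
      DONE: 2
    } as const

    export type JobStateType = typeof JobState[keyof typeof JobState]

    function isDone (state: JobStateType) {
      return state === JobState.DONE
    }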
Diffstat (limited to 'server/lib/transcoding')
-rw-r--r-- | server/lib/transcoding/create-transcoding-job.ts | 37
-rw-r--r-- | server/lib/transcoding/default-transcoding-profiles.ts | 143
-rw-r--r-- | server/lib/transcoding/ended-transcoding.ts | 18
-rw-r--r-- | server/lib/transcoding/hls-transcoding.ts | 180
-rw-r--r-- | server/lib/transcoding/shared/ffmpeg-builder.ts | 18
-rw-r--r-- | server/lib/transcoding/shared/index.ts | 2
-rw-r--r-- | server/lib/transcoding/shared/job-builders/abstract-job-builder.ts | 21
-rw-r--r-- | server/lib/transcoding/shared/job-builders/index.ts | 2
-rw-r--r-- | server/lib/transcoding/shared/job-builders/transcoding-job-queue-builder.ts | 322
-rw-r--r-- | server/lib/transcoding/shared/job-builders/transcoding-runner-job-builder.ts | 196
-rw-r--r-- | server/lib/transcoding/transcoding-priority.ts | 24
-rw-r--r-- | server/lib/transcoding/transcoding-quick-transcode.ts | 12
-rw-r--r-- | server/lib/transcoding/transcoding-resolutions.ts | 73
-rw-r--r-- | server/lib/transcoding/web-transcoding.ts | 263
14 files changed, 0 insertions, 1311 deletions
diff --git a/server/lib/transcoding/create-transcoding-job.ts b/server/lib/transcoding/create-transcoding-job.ts
deleted file mode 100644
index d78e68b87..000000000
--- a/server/lib/transcoding/create-transcoding-job.ts
+++ /dev/null
@@ -1,37 +0,0 @@
1 | import { CONFIG } from '@server/initializers/config' | ||
2 | import { MUserId, MVideoFile, MVideoFullLight } from '@server/types/models' | ||
3 | import { TranscodingJobQueueBuilder, TranscodingRunnerJobBuilder } from './shared' | ||
4 | |||
5 | export function createOptimizeOrMergeAudioJobs (options: { | ||
6 | video: MVideoFullLight | ||
7 | videoFile: MVideoFile | ||
8 | isNewVideo: boolean | ||
9 | user: MUserId | ||
10 | videoFileAlreadyLocked: boolean | ||
11 | }) { | ||
12 | return getJobBuilder().createOptimizeOrMergeAudioJobs(options) | ||
13 | } | ||
14 | |||
15 | // --------------------------------------------------------------------------- | ||
16 | |||
17 | export function createTranscodingJobs (options: { | ||
18 | transcodingType: 'hls' | 'webtorrent' | 'web-video' // TODO: remove webtorrent in v7 | ||
19 | video: MVideoFullLight | ||
20 | resolutions: number[] | ||
21 | isNewVideo: boolean | ||
22 | user: MUserId | ||
23 | }) { | ||
24 | return getJobBuilder().createTranscodingJobs(options) | ||
25 | } | ||
26 | |||
27 | // --------------------------------------------------------------------------- | ||
28 | // Private | ||
29 | // --------------------------------------------------------------------------- | ||
30 | |||
31 | function getJobBuilder () { | ||
32 | if (CONFIG.TRANSCODING.REMOTE_RUNNERS.ENABLED === true) { | ||
33 | return new TranscodingRunnerJobBuilder() | ||
34 | } | ||
35 | |||
36 | return new TranscodingJobQueueBuilder() | ||
37 | } | ||
diff --git a/server/lib/transcoding/default-transcoding-profiles.ts b/server/lib/transcoding/default-transcoding-profiles.ts
deleted file mode 100644
index 8f8fdd026..000000000
--- a/server/lib/transcoding/default-transcoding-profiles.ts
+++ /dev/null
@@ -1,143 +0,0 @@
1 | |||
2 | import { logger } from '@server/helpers/logger' | ||
3 | import { FFmpegCommandWrapper, getDefaultAvailableEncoders } from '@shared/ffmpeg' | ||
4 | import { AvailableEncoders, EncoderOptionsBuilder } from '@shared/models' | ||
5 | |||
6 | // --------------------------------------------------------------------------- | ||
7 | // Profile manager to get and change default profiles | ||
8 | // --------------------------------------------------------------------------- | ||
9 | |||
10 | class VideoTranscodingProfilesManager { | ||
11 | private static instance: VideoTranscodingProfilesManager | ||
12 | |||
13 | // 1 === lower priority | ||
14 | private readonly encodersPriorities = { | ||
15 | vod: this.buildDefaultEncodersPriorities(), | ||
16 | live: this.buildDefaultEncodersPriorities() | ||
17 | } | ||
18 | |||
19 | private readonly availableEncoders = getDefaultAvailableEncoders() | ||
20 | |||
21 | private availableProfiles = { | ||
22 | vod: [] as string[], | ||
23 | live: [] as string[] | ||
24 | } | ||
25 | |||
26 | private constructor () { | ||
27 | this.buildAvailableProfiles() | ||
28 | } | ||
29 | |||
30 | getAvailableEncoders (): AvailableEncoders { | ||
31 | return { | ||
32 | available: this.availableEncoders, | ||
33 | encodersToTry: { | ||
34 | vod: { | ||
35 | video: this.getEncodersByPriority('vod', 'video'), | ||
36 | audio: this.getEncodersByPriority('vod', 'audio') | ||
37 | }, | ||
38 | live: { | ||
39 | video: this.getEncodersByPriority('live', 'video'), | ||
40 | audio: this.getEncodersByPriority('live', 'audio') | ||
41 | } | ||
42 | } | ||
43 | } | ||
44 | } | ||
45 | |||
46 | getAvailableProfiles (type: 'vod' | 'live') { | ||
47 | return this.availableProfiles[type] | ||
48 | } | ||
49 | |||
50 | addProfile (options: { | ||
51 | type: 'vod' | 'live' | ||
52 | encoder: string | ||
53 | profile: string | ||
54 | builder: EncoderOptionsBuilder | ||
55 | }) { | ||
56 | const { type, encoder, profile, builder } = options | ||
57 | |||
58 | const encoders = this.availableEncoders[type] | ||
59 | |||
60 | if (!encoders[encoder]) encoders[encoder] = {} | ||
61 | encoders[encoder][profile] = builder | ||
62 | |||
63 | this.buildAvailableProfiles() | ||
64 | } | ||
65 | |||
66 | removeProfile (options: { | ||
67 | type: 'vod' | 'live' | ||
68 | encoder: string | ||
69 | profile: string | ||
70 | }) { | ||
71 | const { type, encoder, profile } = options | ||
72 | |||
73 | delete this.availableEncoders[type][encoder][profile] | ||
74 | this.buildAvailableProfiles() | ||
75 | } | ||
76 | |||
77 | addEncoderPriority (type: 'vod' | 'live', streamType: 'audio' | 'video', encoder: string, priority: number) { | ||
78 | this.encodersPriorities[type][streamType].push({ name: encoder, priority }) | ||
79 | |||
80 | FFmpegCommandWrapper.resetSupportedEncoders() | ||
81 | } | ||
82 | |||
83 | removeEncoderPriority (type: 'vod' | 'live', streamType: 'audio' | 'video', encoder: string, priority: number) { | ||
84 | this.encodersPriorities[type][streamType] = this.encodersPriorities[type][streamType] | ||
85 | .filter(o => o.name !== encoder && o.priority !== priority) | ||
86 | |||
87 | FFmpegCommandWrapper.resetSupportedEncoders() | ||
88 | } | ||
89 | |||
90 | private getEncodersByPriority (type: 'vod' | 'live', streamType: 'audio' | 'video') { | ||
91 | return this.encodersPriorities[type][streamType] | ||
92 | .sort((e1, e2) => { | ||
93 | if (e1.priority > e2.priority) return -1 | ||
94 | else if (e1.priority === e2.priority) return 0 | ||
95 | |||
96 | return 1 | ||
97 | }) | ||
98 | .map(e => e.name) | ||
99 | } | ||
100 | |||
101 | private buildAvailableProfiles () { | ||
102 | for (const type of [ 'vod', 'live' ]) { | ||
103 | const result = new Set() | ||
104 | |||
105 | const encoders = this.availableEncoders[type] | ||
106 | |||
107 | for (const encoderName of Object.keys(encoders)) { | ||
108 | for (const profile of Object.keys(encoders[encoderName])) { | ||
109 | result.add(profile) | ||
110 | } | ||
111 | } | ||
112 | |||
113 | this.availableProfiles[type] = Array.from(result) | ||
114 | } | ||
115 | |||
116 | logger.debug('Available transcoding profiles built.', { availableProfiles: this.availableProfiles }) | ||
117 | } | ||
118 | |||
119 | private buildDefaultEncodersPriorities () { | ||
120 | return { | ||
121 | video: [ | ||
122 | { name: 'libx264', priority: 100 } | ||
123 | ], | ||
124 | |||
125 | // Try the first one; if it's not available, try the second one, etc. | ||
126 | audio: [ | ||
127 | // we favor VBR, if a good AAC encoder is available | ||
128 | { name: 'libfdk_aac', priority: 200 }, | ||
129 | { name: 'aac', priority: 100 } | ||
130 | ] | ||
131 | } | ||
132 | } | ||
133 | |||
134 | static get Instance () { | ||
135 | return this.instance || (this.instance = new this()) | ||
136 | } | ||
137 | } | ||
138 | |||
139 | // --------------------------------------------------------------------------- | ||
140 | |||
141 | export { | ||
142 | VideoTranscodingProfilesManager | ||
143 | } | ||
diff --git a/server/lib/transcoding/ended-transcoding.ts b/server/lib/transcoding/ended-transcoding.ts
deleted file mode 100644
index d31674ede..000000000
--- a/server/lib/transcoding/ended-transcoding.ts
+++ /dev/null
@@ -1,18 +0,0 @@
1 | import { retryTransactionWrapper } from '@server/helpers/database-utils' | ||
2 | import { VideoJobInfoModel } from '@server/models/video/video-job-info' | ||
3 | import { MVideo } from '@server/types/models' | ||
4 | import { moveToNextState } from '../video-state' | ||
5 | |||
6 | export async function onTranscodingEnded (options: { | ||
7 | video: MVideo | ||
8 | isNewVideo: boolean | ||
9 | moveVideoToNextState: boolean | ||
10 | }) { | ||
11 | const { video, isNewVideo, moveVideoToNextState } = options | ||
12 | |||
13 | await VideoJobInfoModel.decrease(video.uuid, 'pendingTranscode') | ||
14 | |||
15 | if (moveVideoToNextState) { | ||
16 | await retryTransactionWrapper(moveToNextState, { video, isNewVideo }) | ||
17 | } | ||
18 | } | ||
diff --git a/server/lib/transcoding/hls-transcoding.ts b/server/lib/transcoding/hls-transcoding.ts
deleted file mode 100644
index 2c325d9ee..000000000
--- a/server/lib/transcoding/hls-transcoding.ts
+++ /dev/null
@@ -1,180 +0,0 @@
1 | import { MutexInterface } from 'async-mutex' | ||
2 | import { Job } from 'bullmq' | ||
3 | import { ensureDir, move, stat } from 'fs-extra' | ||
4 | import { basename, extname as extnameUtil, join } from 'path' | ||
5 | import { retryTransactionWrapper } from '@server/helpers/database-utils' | ||
6 | import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' | ||
7 | import { sequelizeTypescript } from '@server/initializers/database' | ||
8 | import { MVideo, MVideoFile } from '@server/types/models' | ||
9 | import { pick } from '@shared/core-utils' | ||
10 | import { getVideoStreamDuration, getVideoStreamFPS } from '@shared/ffmpeg' | ||
11 | import { VideoResolution } from '@shared/models' | ||
12 | import { CONFIG } from '../../initializers/config' | ||
13 | import { VideoFileModel } from '../../models/video/video-file' | ||
14 | import { VideoStreamingPlaylistModel } from '../../models/video/video-streaming-playlist' | ||
15 | import { updatePlaylistAfterFileChange } from '../hls' | ||
16 | import { generateHLSVideoFilename, getHlsResolutionPlaylistFilename } from '../paths' | ||
17 | import { buildFileMetadata } from '../video-file' | ||
18 | import { VideoPathManager } from '../video-path-manager' | ||
19 | import { buildFFmpegVOD } from './shared' | ||
20 | |||
21 | // Concat TS segments from a live video to a fragmented mp4 HLS playlist | ||
22 | export async function generateHlsPlaylistResolutionFromTS (options: { | ||
23 | video: MVideo | ||
24 | concatenatedTsFilePath: string | ||
25 | resolution: VideoResolution | ||
26 | fps: number | ||
27 | isAAC: boolean | ||
28 | inputFileMutexReleaser: MutexInterface.Releaser | ||
29 | }) { | ||
30 | return generateHlsPlaylistCommon({ | ||
31 | type: 'hls-from-ts' as 'hls-from-ts', | ||
32 | inputPath: options.concatenatedTsFilePath, | ||
33 | |||
34 | ...pick(options, [ 'video', 'resolution', 'fps', 'inputFileMutexReleaser', 'isAAC' ]) | ||
35 | }) | ||
36 | } | ||
37 | |||
38 | // Generate an HLS playlist from an input file, and update the master playlist | ||
39 | export function generateHlsPlaylistResolution (options: { | ||
40 | video: MVideo | ||
41 | videoInputPath: string | ||
42 | resolution: VideoResolution | ||
43 | fps: number | ||
44 | copyCodecs: boolean | ||
45 | inputFileMutexReleaser: MutexInterface.Releaser | ||
46 | job?: Job | ||
47 | }) { | ||
48 | return generateHlsPlaylistCommon({ | ||
49 | type: 'hls' as 'hls', | ||
50 | inputPath: options.videoInputPath, | ||
51 | |||
52 | ...pick(options, [ 'video', 'resolution', 'fps', 'copyCodecs', 'inputFileMutexReleaser', 'job' ]) | ||
53 | }) | ||
54 | } | ||
55 | |||
56 | export async function onHLSVideoFileTranscoding (options: { | ||
57 | video: MVideo | ||
58 | videoFile: MVideoFile | ||
59 | videoOutputPath: string | ||
60 | m3u8OutputPath: string | ||
61 | }) { | ||
62 | const { video, videoFile, videoOutputPath, m3u8OutputPath } = options | ||
63 | |||
64 | // Create or update the playlist | ||
65 | const playlist = await retryTransactionWrapper(() => { | ||
66 | return sequelizeTypescript.transaction(async transaction => { | ||
67 | return VideoStreamingPlaylistModel.loadOrGenerate(video, transaction) | ||
68 | }) | ||
69 | }) | ||
70 | videoFile.videoStreamingPlaylistId = playlist.id | ||
71 | |||
72 | const mutexReleaser = await VideoPathManager.Instance.lockFiles(video.uuid) | ||
73 | |||
74 | try { | ||
75 | await video.reload() | ||
76 | |||
77 | const videoFilePath = VideoPathManager.Instance.getFSVideoFileOutputPath(playlist, videoFile) | ||
78 | await ensureDir(VideoPathManager.Instance.getFSHLSOutputPath(video)) | ||
79 | |||
80 | // Move playlist file | ||
81 | const resolutionPlaylistPath = VideoPathManager.Instance.getFSHLSOutputPath(video, basename(m3u8OutputPath)) | ||
82 | await move(m3u8OutputPath, resolutionPlaylistPath, { overwrite: true }) | ||
83 | // Move video file | ||
84 | await move(videoOutputPath, videoFilePath, { overwrite: true }) | ||
85 | |||
86 | // Update video duration if it was not set (in case of a live for example) | ||
87 | if (!video.duration) { | ||
88 | video.duration = await getVideoStreamDuration(videoFilePath) | ||
89 | await video.save() | ||
90 | } | ||
91 | |||
92 | const stats = await stat(videoFilePath) | ||
93 | |||
94 | videoFile.size = stats.size | ||
95 | videoFile.fps = await getVideoStreamFPS(videoFilePath) | ||
96 | videoFile.metadata = await buildFileMetadata(videoFilePath) | ||
97 | |||
98 | await createTorrentAndSetInfoHash(playlist, videoFile) | ||
99 | |||
100 | const oldFile = await VideoFileModel.loadHLSFile({ | ||
101 | playlistId: playlist.id, | ||
102 | fps: videoFile.fps, | ||
103 | resolution: videoFile.resolution | ||
104 | }) | ||
105 | |||
106 | if (oldFile) { | ||
107 | await video.removeStreamingPlaylistVideoFile(playlist, oldFile) | ||
108 | await oldFile.destroy() | ||
109 | } | ||
110 | |||
111 | const savedVideoFile = await VideoFileModel.customUpsert(videoFile, 'streaming-playlist', undefined) | ||
112 | |||
113 | await updatePlaylistAfterFileChange(video, playlist) | ||
114 | |||
115 | return { resolutionPlaylistPath, videoFile: savedVideoFile } | ||
116 | } finally { | ||
117 | mutexReleaser() | ||
118 | } | ||
119 | } | ||
120 | |||
121 | // --------------------------------------------------------------------------- | ||
122 | |||
123 | async function generateHlsPlaylistCommon (options: { | ||
124 | type: 'hls' | 'hls-from-ts' | ||
125 | video: MVideo | ||
126 | inputPath: string | ||
127 | |||
128 | resolution: VideoResolution | ||
129 | fps: number | ||
130 | |||
131 | inputFileMutexReleaser: MutexInterface.Releaser | ||
132 | |||
133 | copyCodecs?: boolean | ||
134 | isAAC?: boolean | ||
135 | |||
136 | job?: Job | ||
137 | }) { | ||
138 | const { type, video, inputPath, resolution, fps, copyCodecs, isAAC, job, inputFileMutexReleaser } = options | ||
139 | const transcodeDirectory = CONFIG.STORAGE.TMP_DIR | ||
140 | |||
141 | const videoTranscodedBasePath = join(transcodeDirectory, type) | ||
142 | await ensureDir(videoTranscodedBasePath) | ||
143 | |||
144 | const videoFilename = generateHLSVideoFilename(resolution) | ||
145 | const videoOutputPath = join(videoTranscodedBasePath, videoFilename) | ||
146 | |||
147 | const resolutionPlaylistFilename = getHlsResolutionPlaylistFilename(videoFilename) | ||
148 | const m3u8OutputPath = join(videoTranscodedBasePath, resolutionPlaylistFilename) | ||
149 | |||
150 | const transcodeOptions = { | ||
151 | type, | ||
152 | |||
153 | inputPath, | ||
154 | outputPath: m3u8OutputPath, | ||
155 | |||
156 | resolution, | ||
157 | fps, | ||
158 | copyCodecs, | ||
159 | |||
160 | isAAC, | ||
161 | |||
162 | inputFileMutexReleaser, | ||
163 | |||
164 | hlsPlaylist: { | ||
165 | videoFilename | ||
166 | } | ||
167 | } | ||
168 | |||
169 | await buildFFmpegVOD(job).transcode(transcodeOptions) | ||
170 | |||
171 | const newVideoFile = new VideoFileModel({ | ||
172 | resolution, | ||
173 | extname: extnameUtil(videoFilename), | ||
174 | size: 0, | ||
175 | filename: videoFilename, | ||
176 | fps: -1 | ||
177 | }) | ||
178 | |||
179 | await onHLSVideoFileTranscoding({ video, videoFile: newVideoFile, videoOutputPath, m3u8OutputPath }) | ||
180 | } | ||
diff --git a/server/lib/transcoding/shared/ffmpeg-builder.ts b/server/lib/transcoding/shared/ffmpeg-builder.ts
deleted file mode 100644
index 441445ec4..000000000
--- a/server/lib/transcoding/shared/ffmpeg-builder.ts
+++ /dev/null
@@ -1,18 +0,0 @@
1 | import { Job } from 'bullmq' | ||
2 | import { getFFmpegCommandWrapperOptions } from '@server/helpers/ffmpeg' | ||
3 | import { logger } from '@server/helpers/logger' | ||
4 | import { FFmpegVOD } from '@shared/ffmpeg' | ||
5 | import { VideoTranscodingProfilesManager } from '../default-transcoding-profiles' | ||
6 | |||
7 | export function buildFFmpegVOD (job?: Job) { | ||
8 | return new FFmpegVOD({ | ||
9 | ...getFFmpegCommandWrapperOptions('vod', VideoTranscodingProfilesManager.Instance.getAvailableEncoders()), | ||
10 | |||
11 | updateJobProgress: progress => { | ||
12 | if (!job) return | ||
13 | |||
14 | job.updateProgress(progress) | ||
15 | .catch(err => logger.error('Cannot update ffmpeg job progress', { err })) | ||
16 | } | ||
17 | }) | ||
18 | } | ||
diff --git a/server/lib/transcoding/shared/index.ts b/server/lib/transcoding/shared/index.ts
deleted file mode 100644
index f0b45bcbb..000000000
--- a/server/lib/transcoding/shared/index.ts
+++ /dev/null
@@ -1,2 +0,0 @@
1 | export * from './job-builders' | ||
2 | export * from './ffmpeg-builder' | ||
diff --git a/server/lib/transcoding/shared/job-builders/abstract-job-builder.ts b/server/lib/transcoding/shared/job-builders/abstract-job-builder.ts
deleted file mode 100644
index 15fc814ae..000000000
--- a/server/lib/transcoding/shared/job-builders/abstract-job-builder.ts
+++ /dev/null
@@ -1,21 +0,0 @@
1 | |||
2 | import { MUserId, MVideoFile, MVideoFullLight } from '@server/types/models' | ||
3 | |||
4 | export abstract class AbstractJobBuilder { | ||
5 | |||
6 | abstract createOptimizeOrMergeAudioJobs (options: { | ||
7 | video: MVideoFullLight | ||
8 | videoFile: MVideoFile | ||
9 | isNewVideo: boolean | ||
10 | user: MUserId | ||
11 | videoFileAlreadyLocked: boolean | ||
12 | }): Promise<any> | ||
13 | |||
14 | abstract createTranscodingJobs (options: { | ||
15 | transcodingType: 'hls' | 'webtorrent' | 'web-video' // TODO: remove webtorrent in v7 | ||
16 | video: MVideoFullLight | ||
17 | resolutions: number[] | ||
18 | isNewVideo: boolean | ||
19 | user: MUserId | null | ||
20 | }): Promise<any> | ||
21 | } | ||
diff --git a/server/lib/transcoding/shared/job-builders/index.ts b/server/lib/transcoding/shared/job-builders/index.ts
deleted file mode 100644
index 9b1c82adf..000000000
--- a/server/lib/transcoding/shared/job-builders/index.ts
+++ /dev/null
@@ -1,2 +0,0 @@
1 | export * from './transcoding-job-queue-builder' | ||
2 | export * from './transcoding-runner-job-builder' | ||
diff --git a/server/lib/transcoding/shared/job-builders/transcoding-job-queue-builder.ts b/server/lib/transcoding/shared/job-builders/transcoding-job-queue-builder.ts
deleted file mode 100644
index 0505c2b2f..000000000
--- a/server/lib/transcoding/shared/job-builders/transcoding-job-queue-builder.ts
+++ /dev/null
@@ -1,322 +0,0 @@
1 | import Bluebird from 'bluebird' | ||
2 | import { computeOutputFPS } from '@server/helpers/ffmpeg' | ||
3 | import { logger } from '@server/helpers/logger' | ||
4 | import { CONFIG } from '@server/initializers/config' | ||
5 | import { DEFAULT_AUDIO_RESOLUTION, VIDEO_TRANSCODING_FPS } from '@server/initializers/constants' | ||
6 | import { CreateJobArgument, JobQueue } from '@server/lib/job-queue' | ||
7 | import { Hooks } from '@server/lib/plugins/hooks' | ||
8 | import { VideoPathManager } from '@server/lib/video-path-manager' | ||
9 | import { VideoJobInfoModel } from '@server/models/video/video-job-info' | ||
10 | import { MUserId, MVideoFile, MVideoFullLight, MVideoWithFileThumbnail } from '@server/types/models' | ||
11 | import { ffprobePromise, getVideoStreamDimensionsInfo, getVideoStreamFPS, hasAudioStream, isAudioFile } from '@shared/ffmpeg' | ||
12 | import { | ||
13 | HLSTranscodingPayload, | ||
14 | MergeAudioTranscodingPayload, | ||
15 | NewWebVideoResolutionTranscodingPayload, | ||
16 | OptimizeTranscodingPayload, | ||
17 | VideoTranscodingPayload | ||
18 | } from '@shared/models' | ||
19 | import { getTranscodingJobPriority } from '../../transcoding-priority' | ||
20 | import { canDoQuickTranscode } from '../../transcoding-quick-transcode' | ||
21 | import { buildOriginalFileResolution, computeResolutionsToTranscode } from '../../transcoding-resolutions' | ||
22 | import { AbstractJobBuilder } from './abstract-job-builder' | ||
23 | |||
24 | export class TranscodingJobQueueBuilder extends AbstractJobBuilder { | ||
25 | |||
26 | async createOptimizeOrMergeAudioJobs (options: { | ||
27 | video: MVideoFullLight | ||
28 | videoFile: MVideoFile | ||
29 | isNewVideo: boolean | ||
30 | user: MUserId | ||
31 | videoFileAlreadyLocked: boolean | ||
32 | }) { | ||
33 | const { video, videoFile, isNewVideo, user, videoFileAlreadyLocked } = options | ||
34 | |||
35 | let mergeOrOptimizePayload: MergeAudioTranscodingPayload | OptimizeTranscodingPayload | ||
36 | let nextTranscodingSequentialJobPayloads: (NewWebVideoResolutionTranscodingPayload | HLSTranscodingPayload)[][] = [] | ||
37 | |||
38 | const mutexReleaser = videoFileAlreadyLocked | ||
39 | ? () => {} | ||
40 | : await VideoPathManager.Instance.lockFiles(video.uuid) | ||
41 | |||
42 | try { | ||
43 | await video.reload() | ||
44 | await videoFile.reload() | ||
45 | |||
46 | await VideoPathManager.Instance.makeAvailableVideoFile(videoFile.withVideoOrPlaylist(video), async videoFilePath => { | ||
47 | const probe = await ffprobePromise(videoFilePath) | ||
48 | |||
49 | const { resolution } = await getVideoStreamDimensionsInfo(videoFilePath, probe) | ||
50 | const hasAudio = await hasAudioStream(videoFilePath, probe) | ||
51 | const quickTranscode = await canDoQuickTranscode(videoFilePath, probe) | ||
52 | const inputFPS = videoFile.isAudio() | ||
53 | ? VIDEO_TRANSCODING_FPS.AUDIO_MERGE // The first transcoding job will transcode to this FPS value | ||
54 | : await getVideoStreamFPS(videoFilePath, probe) | ||
55 | |||
56 | const maxResolution = await isAudioFile(videoFilePath, probe) | ||
57 | ? DEFAULT_AUDIO_RESOLUTION | ||
58 | : buildOriginalFileResolution(resolution) | ||
59 | |||
60 | if (CONFIG.TRANSCODING.HLS.ENABLED === true) { | ||
61 | nextTranscodingSequentialJobPayloads.push([ | ||
62 | this.buildHLSJobPayload({ | ||
63 | deleteWebVideoFiles: CONFIG.TRANSCODING.WEB_VIDEOS.ENABLED === false, | ||
64 | |||
65 | // We had some issues when a web video was quick transcoded while producing an HLS version of it | ||
66 | copyCodecs: !quickTranscode, | ||
67 | |||
68 | resolution: maxResolution, | ||
69 | fps: computeOutputFPS({ inputFPS, resolution: maxResolution }), | ||
70 | videoUUID: video.uuid, | ||
71 | isNewVideo | ||
72 | }) | ||
73 | ]) | ||
74 | } | ||
75 | |||
76 | const lowerResolutionJobPayloads = await this.buildLowerResolutionJobPayloads({ | ||
77 | video, | ||
78 | inputVideoResolution: maxResolution, | ||
79 | inputVideoFPS: inputFPS, | ||
80 | hasAudio, | ||
81 | isNewVideo | ||
82 | }) | ||
83 | |||
84 | nextTranscodingSequentialJobPayloads = [ ...nextTranscodingSequentialJobPayloads, ...lowerResolutionJobPayloads ] | ||
85 | |||
86 | const hasChildren = nextTranscodingSequentialJobPayloads.length !== 0 | ||
87 | mergeOrOptimizePayload = videoFile.isAudio() | ||
88 | ? this.buildMergeAudioPayload({ videoUUID: video.uuid, isNewVideo, hasChildren }) | ||
89 | : this.buildOptimizePayload({ videoUUID: video.uuid, isNewVideo, quickTranscode, hasChildren }) | ||
90 | }) | ||
91 | } finally { | ||
92 | mutexReleaser() | ||
93 | } | ||
94 | |||
95 | const nextTranscodingSequentialJobs = await Bluebird.mapSeries(nextTranscodingSequentialJobPayloads, payloads => { | ||
96 | return Bluebird.mapSeries(payloads, payload => { | ||
97 | return this.buildTranscodingJob({ payload, user }) | ||
98 | }) | ||
99 | }) | ||
100 | |||
101 | const transcodingJobBuilderJob: CreateJobArgument = { | ||
102 | type: 'transcoding-job-builder', | ||
103 | payload: { | ||
104 | videoUUID: video.uuid, | ||
105 | sequentialJobs: nextTranscodingSequentialJobs | ||
106 | } | ||
107 | } | ||
108 | |||
109 | const mergeOrOptimizeJob = await this.buildTranscodingJob({ payload: mergeOrOptimizePayload, user }) | ||
110 | |||
111 | await JobQueue.Instance.createSequentialJobFlow(...[ mergeOrOptimizeJob, transcodingJobBuilderJob ]) | ||
112 | |||
113 | await VideoJobInfoModel.increaseOrCreate(video.uuid, 'pendingTranscode') | ||
114 | } | ||
115 | |||
116 | // --------------------------------------------------------------------------- | ||
117 | |||
118 | async createTranscodingJobs (options: { | ||
119 | transcodingType: 'hls' | 'webtorrent' | 'web-video' // TODO: remove webtorrent in v7 | ||
120 | video: MVideoFullLight | ||
121 | resolutions: number[] | ||
122 | isNewVideo: boolean | ||
123 | user: MUserId | null | ||
124 | }) { | ||
125 | const { video, transcodingType, resolutions, isNewVideo } = options | ||
126 | |||
127 | const maxResolution = Math.max(...resolutions) | ||
128 | const childrenResolutions = resolutions.filter(r => r !== maxResolution) | ||
129 | |||
130 | logger.info('Manually creating transcoding jobs for %s.', transcodingType, { childrenResolutions, maxResolution }) | ||
131 | |||
132 | const { fps: inputFPS } = await video.probeMaxQualityFile() | ||
133 | |||
134 | const children = childrenResolutions.map(resolution => { | ||
135 | const fps = computeOutputFPS({ inputFPS, resolution }) | ||
136 | |||
137 | if (transcodingType === 'hls') { | ||
138 | return this.buildHLSJobPayload({ videoUUID: video.uuid, resolution, fps, isNewVideo }) | ||
139 | } | ||
140 | |||
141 | if (transcodingType === 'webtorrent' || transcodingType === 'web-video') { | ||
142 | return this.buildWebVideoJobPayload({ videoUUID: video.uuid, resolution, fps, isNewVideo }) | ||
143 | } | ||
144 | |||
145 | throw new Error('Unknown transcoding type') | ||
146 | }) | ||
147 | |||
148 | const fps = computeOutputFPS({ inputFPS, resolution: maxResolution }) | ||
149 | |||
150 | const parent = transcodingType === 'hls' | ||
151 | ? this.buildHLSJobPayload({ videoUUID: video.uuid, resolution: maxResolution, fps, isNewVideo }) | ||
152 | : this.buildWebVideoJobPayload({ videoUUID: video.uuid, resolution: maxResolution, fps, isNewVideo }) | ||
153 | |||
154 | // Process the last resolution after the other ones to prevent concurrency issue | ||
155 | // Because low resolutions use the biggest one as ffmpeg input | ||
156 | await this.createTranscodingJobsWithChildren({ videoUUID: video.uuid, parent, children, user: null }) | ||
157 | } | ||
158 | |||
159 | // --------------------------------------------------------------------------- | ||
160 | |||
161 | private async createTranscodingJobsWithChildren (options: { | ||
162 | videoUUID: string | ||
163 | parent: (HLSTranscodingPayload | NewWebVideoResolutionTranscodingPayload) | ||
164 | children: (HLSTranscodingPayload | NewWebVideoResolutionTranscodingPayload)[] | ||
165 | user: MUserId | null | ||
166 | }) { | ||
167 | const { videoUUID, parent, children, user } = options | ||
168 | |||
169 | const parentJob = await this.buildTranscodingJob({ payload: parent, user }) | ||
170 | const childrenJobs = await Bluebird.mapSeries(children, c => this.buildTranscodingJob({ payload: c, user })) | ||
171 | |||
172 | await JobQueue.Instance.createJobWithChildren(parentJob, childrenJobs) | ||
173 | |||
174 | await VideoJobInfoModel.increaseOrCreate(videoUUID, 'pendingTranscode', 1 + children.length) | ||
175 | } | ||
176 | |||
177 | private async buildTranscodingJob (options: { | ||
178 | payload: VideoTranscodingPayload | ||
179 | user: MUserId | null // null means we don't want priority | ||
180 | }) { | ||
181 | const { user, payload } = options | ||
182 | |||
183 | return { | ||
184 | type: 'video-transcoding' as 'video-transcoding', | ||
185 | priority: await getTranscodingJobPriority({ user, type: 'vod', fallback: undefined }), | ||
186 | payload | ||
187 | } | ||
188 | } | ||
189 | |||
190 | private async buildLowerResolutionJobPayloads (options: { | ||
191 | video: MVideoWithFileThumbnail | ||
192 | inputVideoResolution: number | ||
193 | inputVideoFPS: number | ||
194 | hasAudio: boolean | ||
195 | isNewVideo: boolean | ||
196 | }) { | ||
197 | const { video, inputVideoResolution, inputVideoFPS, isNewVideo, hasAudio } = options | ||
198 | |||
199 | // Create transcoding jobs if there are enabled resolutions | ||
200 | const resolutionsEnabled = await Hooks.wrapObject( | ||
201 | computeResolutionsToTranscode({ input: inputVideoResolution, type: 'vod', includeInput: false, strictLower: true, hasAudio }), | ||
202 | 'filter:transcoding.auto.resolutions-to-transcode.result', | ||
203 | options | ||
204 | ) | ||
205 | |||
206 | const sequentialPayloads: (NewWebVideoResolutionTranscodingPayload | HLSTranscodingPayload)[][] = [] | ||
207 | |||
208 | for (const resolution of resolutionsEnabled) { | ||
209 | const fps = computeOutputFPS({ inputFPS: inputVideoFPS, resolution }) | ||
210 | |||
211 | if (CONFIG.TRANSCODING.WEB_VIDEOS.ENABLED) { | ||
212 | const payloads: (NewWebVideoResolutionTranscodingPayload | HLSTranscodingPayload)[] = [ | ||
213 | this.buildWebVideoJobPayload({ | ||
214 | videoUUID: video.uuid, | ||
215 | resolution, | ||
216 | fps, | ||
217 | isNewVideo | ||
218 | }) | ||
219 | ] | ||
220 | |||
221 | // Create a subsequent job to create HLS resolution that will just copy web video codecs | ||
222 | if (CONFIG.TRANSCODING.HLS.ENABLED) { | ||
223 | payloads.push( | ||
224 | this.buildHLSJobPayload({ | ||
225 | videoUUID: video.uuid, | ||
226 | resolution, | ||
227 | fps, | ||
228 | isNewVideo, | ||
229 | copyCodecs: true | ||
230 | }) | ||
231 | ) | ||
232 | } | ||
233 | |||
234 | sequentialPayloads.push(payloads) | ||
235 | } else if (CONFIG.TRANSCODING.HLS.ENABLED) { | ||
236 | sequentialPayloads.push([ | ||
237 | this.buildHLSJobPayload({ | ||
238 | videoUUID: video.uuid, | ||
239 | resolution, | ||
240 | fps, | ||
241 | copyCodecs: false, | ||
242 | isNewVideo | ||
243 | }) | ||
244 | ]) | ||
245 | } | ||
246 | } | ||
247 | |||
248 | return sequentialPayloads | ||
249 | } | ||
250 | |||
251 | private buildHLSJobPayload (options: { | ||
252 | videoUUID: string | ||
253 | resolution: number | ||
254 | fps: number | ||
255 | isNewVideo: boolean | ||
256 | deleteWebVideoFiles?: boolean // default false | ||
257 | copyCodecs?: boolean // default false | ||
258 | }): HLSTranscodingPayload { | ||
259 | const { videoUUID, resolution, fps, isNewVideo, deleteWebVideoFiles = false, copyCodecs = false } = options | ||
260 | |||
261 | return { | ||
262 | type: 'new-resolution-to-hls', | ||
263 | videoUUID, | ||
264 | resolution, | ||
265 | fps, | ||
266 | copyCodecs, | ||
267 | isNewVideo, | ||
268 | deleteWebVideoFiles | ||
269 | } | ||
270 | } | ||
271 | |||
272 | private buildWebVideoJobPayload (options: { | ||
273 | videoUUID: string | ||
274 | resolution: number | ||
275 | fps: number | ||
276 | isNewVideo: boolean | ||
277 | }): NewWebVideoResolutionTranscodingPayload { | ||
278 | const { videoUUID, resolution, fps, isNewVideo } = options | ||
279 | |||
280 | return { | ||
281 | type: 'new-resolution-to-web-video', | ||
282 | videoUUID, | ||
283 | isNewVideo, | ||
284 | resolution, | ||
285 | fps | ||
286 | } | ||
287 | } | ||
288 | |||
289 | private buildMergeAudioPayload (options: { | ||
290 | videoUUID: string | ||
291 | isNewVideo: boolean | ||
292 | hasChildren: boolean | ||
293 | }): MergeAudioTranscodingPayload { | ||
294 | const { videoUUID, isNewVideo, hasChildren } = options | ||
295 | |||
296 | return { | ||
297 | type: 'merge-audio-to-web-video', | ||
298 | resolution: DEFAULT_AUDIO_RESOLUTION, | ||
299 | fps: VIDEO_TRANSCODING_FPS.AUDIO_MERGE, | ||
300 | videoUUID, | ||
301 | isNewVideo, | ||
302 | hasChildren | ||
303 | } | ||
304 | } | ||
305 | |||
306 | private buildOptimizePayload (options: { | ||
307 | videoUUID: string | ||
308 | quickTranscode: boolean | ||
309 | isNewVideo: boolean | ||
310 | hasChildren: boolean | ||
311 | }): OptimizeTranscodingPayload { | ||
312 | const { videoUUID, quickTranscode, isNewVideo, hasChildren } = options | ||
313 | |||
314 | return { | ||
315 | type: 'optimize-to-web-video', | ||
316 | videoUUID, | ||
317 | isNewVideo, | ||
318 | hasChildren, | ||
319 | quickTranscode | ||
320 | } | ||
321 | } | ||
322 | } | ||
diff --git a/server/lib/transcoding/shared/job-builders/transcoding-runner-job-builder.ts b/server/lib/transcoding/shared/job-builders/transcoding-runner-job-builder.ts
deleted file mode 100644
index f0671bd7a..000000000
--- a/server/lib/transcoding/shared/job-builders/transcoding-runner-job-builder.ts
+++ /dev/null
@@ -1,196 +0,0 @@
1 | import { computeOutputFPS } from '@server/helpers/ffmpeg' | ||
2 | import { logger, loggerTagsFactory } from '@server/helpers/logger' | ||
3 | import { CONFIG } from '@server/initializers/config' | ||
4 | import { DEFAULT_AUDIO_RESOLUTION, VIDEO_TRANSCODING_FPS } from '@server/initializers/constants' | ||
5 | import { Hooks } from '@server/lib/plugins/hooks' | ||
6 | import { VODAudioMergeTranscodingJobHandler, VODHLSTranscodingJobHandler, VODWebVideoTranscodingJobHandler } from '@server/lib/runners' | ||
7 | import { VideoPathManager } from '@server/lib/video-path-manager' | ||
8 | import { MUserId, MVideoFile, MVideoFullLight, MVideoWithFileThumbnail } from '@server/types/models' | ||
9 | import { MRunnerJob } from '@server/types/models/runners' | ||
10 | import { ffprobePromise, getVideoStreamDimensionsInfo, getVideoStreamFPS, hasAudioStream, isAudioFile } from '@shared/ffmpeg' | ||
11 | import { getTranscodingJobPriority } from '../../transcoding-priority' | ||
12 | import { computeResolutionsToTranscode } from '../../transcoding-resolutions' | ||
13 | import { AbstractJobBuilder } from './abstract-job-builder' | ||
14 | |||
15 | /** | ||
16 | * | ||
17 | * Class to build transcoding jobs processed by remote runners | ||
18 | * | ||
19 | */ | ||
20 | |||
21 | const lTags = loggerTagsFactory('transcoding') | ||
22 | |||
23 | export class TranscodingRunnerJobBuilder extends AbstractJobBuilder { | ||
24 | |||
25 | async createOptimizeOrMergeAudioJobs (options: { | ||
26 | video: MVideoFullLight | ||
27 | videoFile: MVideoFile | ||
28 | isNewVideo: boolean | ||
29 | user: MUserId | ||
30 | videoFileAlreadyLocked: boolean | ||
31 | }) { | ||
32 | const { video, videoFile, isNewVideo, user, videoFileAlreadyLocked } = options | ||
33 | |||
34 | const mutexReleaser = videoFileAlreadyLocked | ||
35 | ? () => {} | ||
36 | : await VideoPathManager.Instance.lockFiles(video.uuid) | ||
37 | |||
38 | try { | ||
39 | await video.reload() | ||
40 | await videoFile.reload() | ||
41 | |||
42 | await VideoPathManager.Instance.makeAvailableVideoFile(videoFile.withVideoOrPlaylist(video), async videoFilePath => { | ||
43 | const probe = await ffprobePromise(videoFilePath) | ||
44 | |||
45 | const { resolution } = await getVideoStreamDimensionsInfo(videoFilePath, probe) | ||
46 | const hasAudio = await hasAudioStream(videoFilePath, probe) | ||
47 | const inputFPS = videoFile.isAudio() | ||
48 | ? VIDEO_TRANSCODING_FPS.AUDIO_MERGE // The first transcoding job will transcode to this FPS value | ||
49 | : await getVideoStreamFPS(videoFilePath, probe) | ||
50 | |||
51 | const maxResolution = await isAudioFile(videoFilePath, probe) | ||
52 | ? DEFAULT_AUDIO_RESOLUTION | ||
53 | : resolution | ||
54 | |||
55 | const fps = computeOutputFPS({ inputFPS, resolution: maxResolution }) | ||
56 | const priority = await getTranscodingJobPriority({ user, type: 'vod', fallback: 0 }) | ||
57 | |||
58 | const mainRunnerJob = videoFile.isAudio() | ||
59 | ? await new VODAudioMergeTranscodingJobHandler().create({ video, resolution: maxResolution, fps, isNewVideo, priority }) | ||
60 | : await new VODWebVideoTranscodingJobHandler().create({ video, resolution: maxResolution, fps, isNewVideo, priority }) | ||
61 | |||
62 | if (CONFIG.TRANSCODING.HLS.ENABLED === true) { | ||
63 | await new VODHLSTranscodingJobHandler().create({ | ||
64 | video, | ||
65 | deleteWebVideoFiles: CONFIG.TRANSCODING.WEB_VIDEOS.ENABLED === false, | ||
66 | resolution: maxResolution, | ||
67 | fps, | ||
68 | isNewVideo, | ||
69 | dependsOnRunnerJob: mainRunnerJob, | ||
70 | priority: await getTranscodingJobPriority({ user, type: 'vod', fallback: 0 }) | ||
71 | }) | ||
72 | } | ||
73 | |||
74 | await this.buildLowerResolutionJobPayloads({ | ||
75 | video, | ||
76 | inputVideoResolution: maxResolution, | ||
77 | inputVideoFPS: inputFPS, | ||
78 | hasAudio, | ||
79 | isNewVideo, | ||
80 | mainRunnerJob, | ||
81 | user | ||
82 | }) | ||
83 | }) | ||
84 | } finally { | ||
85 | mutexReleaser() | ||
86 | } | ||
87 | } | ||
88 | |||
89 | // --------------------------------------------------------------------------- | ||
90 | |||
91 | async createTranscodingJobs (options: { | ||
92 | transcodingType: 'hls' | 'webtorrent' | 'web-video' // TODO: remove webtorrent in v7 | ||
93 | video: MVideoFullLight | ||
94 | resolutions: number[] | ||
95 | isNewVideo: boolean | ||
96 | user: MUserId | null | ||
97 | }) { | ||
98 | const { video, transcodingType, resolutions, isNewVideo, user } = options | ||
99 | |||
100 | const maxResolution = Math.max(...resolutions) | ||
101 | const { fps: inputFPS } = await video.probeMaxQualityFile() | ||
102 | const maxFPS = computeOutputFPS({ inputFPS, resolution: maxResolution }) | ||
103 | const priority = await getTranscodingJobPriority({ user, type: 'vod', fallback: 0 }) | ||
104 | |||
105 | const childrenResolutions = resolutions.filter(r => r !== maxResolution) | ||
106 | |||
107 | logger.info('Manually creating transcoding jobs for %s.', transcodingType, { childrenResolutions, maxResolution }) | ||
108 | |||
109 | // Process the last resolution before the other ones to prevent concurrency issue | ||
110 | // Because low resolutions use the biggest one as ffmpeg input | ||
111 | const mainJob = transcodingType === 'hls' | ||
112 | // eslint-disable-next-line max-len | ||
113 | ? await new VODHLSTranscodingJobHandler().create({ video, resolution: maxResolution, fps: maxFPS, isNewVideo, deleteWebVideoFiles: false, priority }) | ||
114 | : await new VODWebVideoTranscodingJobHandler().create({ video, resolution: maxResolution, fps: maxFPS, isNewVideo, priority }) | ||
115 | |||
116 | for (const resolution of childrenResolutions) { | ||
117 | const dependsOnRunnerJob = mainJob | ||
118 | const fps = computeOutputFPS({ inputFPS, resolution }) | ||
119 | |||
120 | if (transcodingType === 'hls') { | ||
121 | await new VODHLSTranscodingJobHandler().create({ | ||
122 | video, | ||
123 | resolution, | ||
124 | fps, | ||
125 | isNewVideo, | ||
126 | deleteWebVideoFiles: false, | ||
127 | dependsOnRunnerJob, | ||
128 | priority: await getTranscodingJobPriority({ user, type: 'vod', fallback: 0 }) | ||
129 | }) | ||
130 | continue | ||
131 | } | ||
132 | |||
133 | if (transcodingType === 'webtorrent' || transcodingType === 'web-video') { | ||
134 | await new VODWebVideoTranscodingJobHandler().create({ | ||
135 | video, | ||
136 | resolution, | ||
137 | fps, | ||
138 | isNewVideo, | ||
139 | dependsOnRunnerJob, | ||
140 | priority: await getTranscodingJobPriority({ user, type: 'vod', fallback: 0 }) | ||
141 | }) | ||
142 | continue | ||
143 | } | ||
144 | |||
145 | throw new Error('Unknown transcoding type') | ||
146 | } | ||
147 | } | ||
148 | |||
149 | private async buildLowerResolutionJobPayloads (options: { | ||
150 | mainRunnerJob: MRunnerJob | ||
151 | video: MVideoWithFileThumbnail | ||
152 | inputVideoResolution: number | ||
153 | inputVideoFPS: number | ||
154 | hasAudio: boolean | ||
155 | isNewVideo: boolean | ||
156 | user: MUserId | ||
157 | }) { | ||
158 | const { video, inputVideoResolution, inputVideoFPS, isNewVideo, hasAudio, mainRunnerJob, user } = options | ||
159 | |||
160 | // Create transcoding jobs if there are enabled resolutions | ||
161 | const resolutionsEnabled = await Hooks.wrapObject( | ||
162 | computeResolutionsToTranscode({ input: inputVideoResolution, type: 'vod', includeInput: false, strictLower: true, hasAudio }), | ||
163 | 'filter:transcoding.auto.resolutions-to-transcode.result', | ||
164 | options | ||
165 | ) | ||
166 | |||
167 | logger.debug('Lower resolutions build for %s.', video.uuid, { resolutionsEnabled, ...lTags(video.uuid) }) | ||
168 | |||
169 | for (const resolution of resolutionsEnabled) { | ||
170 | const fps = computeOutputFPS({ inputFPS: inputVideoFPS, resolution }) | ||
171 | |||
172 | if (CONFIG.TRANSCODING.WEB_VIDEOS.ENABLED) { | ||
173 | await new VODWebVideoTranscodingJobHandler().create({ | ||
174 | video, | ||
175 | resolution, | ||
176 | fps, | ||
177 | isNewVideo, | ||
178 | dependsOnRunnerJob: mainRunnerJob, | ||
179 | priority: await getTranscodingJobPriority({ user, type: 'vod', fallback: 0 }) | ||
180 | }) | ||
181 | } | ||
182 | |||
183 | if (CONFIG.TRANSCODING.HLS.ENABLED) { | ||
184 | await new VODHLSTranscodingJobHandler().create({ | ||
185 | video, | ||
186 | resolution, | ||
187 | fps, | ||
188 | isNewVideo, | ||
189 | deleteWebVideoFiles: false, | ||
190 | dependsOnRunnerJob: mainRunnerJob, | ||
191 | priority: await getTranscodingJobPriority({ user, type: 'vod', fallback: 0 }) | ||
192 | }) | ||
193 | } | ||
194 | } | ||
195 | } | ||
196 | } | ||
diff --git a/server/lib/transcoding/transcoding-priority.ts b/server/lib/transcoding/transcoding-priority.ts
deleted file mode 100644
index 82ab6f2f1..000000000
--- a/server/lib/transcoding/transcoding-priority.ts
+++ /dev/null
@@ -1,24 +0,0 @@
1 | import { JOB_PRIORITY } from '@server/initializers/constants' | ||
2 | import { VideoModel } from '@server/models/video/video' | ||
3 | import { MUserId } from '@server/types/models' | ||
4 | |||
5 | export async function getTranscodingJobPriority (options: { | ||
6 | user: MUserId | ||
7 | fallback: number | ||
8 | type: 'vod' | 'studio' | ||
9 | }) { | ||
10 | const { user, fallback, type } = options | ||
11 | |||
12 | if (!user) return fallback | ||
13 | |||
14 | const now = new Date() | ||
15 | const lastWeek = new Date(now.getFullYear(), now.getMonth(), now.getDate() - 7) | ||
16 | |||
17 | const videoUploadedByUser = await VideoModel.countVideosUploadedByUserSince(user.id, lastWeek) | ||
18 | |||
19 | const base = type === 'vod' | ||
20 | ? JOB_PRIORITY.TRANSCODING | ||
21 | : JOB_PRIORITY.VIDEO_STUDIO | ||
22 | |||
23 | return base + videoUploadedByUser | ||
24 | } | ||
diff --git a/server/lib/transcoding/transcoding-quick-transcode.ts b/server/lib/transcoding/transcoding-quick-transcode.ts
deleted file mode 100644
index 53f12cd06..000000000
--- a/server/lib/transcoding/transcoding-quick-transcode.ts
+++ /dev/null
@@ -1,12 +0,0 @@
1 | import { FfprobeData } from 'fluent-ffmpeg' | ||
2 | import { CONFIG } from '@server/initializers/config' | ||
3 | import { canDoQuickAudioTranscode, canDoQuickVideoTranscode, ffprobePromise } from '@shared/ffmpeg' | ||
4 | |||
5 | export async function canDoQuickTranscode (path: string, existingProbe?: FfprobeData): Promise<boolean> { | ||
6 | if (CONFIG.TRANSCODING.PROFILE !== 'default') return false | ||
7 | |||
8 | const probe = existingProbe || await ffprobePromise(path) | ||
9 | |||
10 | return await canDoQuickVideoTranscode(path, probe) && | ||
11 | await canDoQuickAudioTranscode(path, probe) | ||
12 | } | ||
diff --git a/server/lib/transcoding/transcoding-resolutions.ts b/server/lib/transcoding/transcoding-resolutions.ts
deleted file mode 100644
index 9a6bf5722..000000000
--- a/server/lib/transcoding/transcoding-resolutions.ts
+++ /dev/null
@@ -1,73 +0,0 @@
1 | import { CONFIG } from '@server/initializers/config' | ||
2 | import { toEven } from '@shared/core-utils' | ||
3 | import { VideoResolution } from '@shared/models' | ||
4 | |||
5 | export function buildOriginalFileResolution (inputResolution: number) { | ||
6 | if (CONFIG.TRANSCODING.ALWAYS_TRANSCODE_ORIGINAL_RESOLUTION === true) { | ||
7 | return toEven(inputResolution) | ||
8 | } | ||
9 | |||
10 | const resolutions = computeResolutionsToTranscode({ | ||
11 | input: inputResolution, | ||
12 | type: 'vod', | ||
13 | includeInput: false, | ||
14 | strictLower: false, | ||
15 | // We don't really care about the audio resolution in this context | ||
16 | hasAudio: true | ||
17 | }) | ||
18 | |||
19 | if (resolutions.length === 0) { | ||
20 | return toEven(inputResolution) | ||
21 | } | ||
22 | |||
23 | return Math.max(...resolutions) | ||
24 | } | ||
25 | |||
26 | export function computeResolutionsToTranscode (options: { | ||
27 | input: number | ||
28 | type: 'vod' | 'live' | ||
29 | includeInput: boolean | ||
30 | strictLower: boolean | ||
31 | hasAudio: boolean | ||
32 | }) { | ||
33 | const { input, type, includeInput, strictLower, hasAudio } = options | ||
34 | |||
35 | const configResolutions = type === 'vod' | ||
36 | ? CONFIG.TRANSCODING.RESOLUTIONS | ||
37 | : CONFIG.LIVE.TRANSCODING.RESOLUTIONS | ||
38 | |||
39 | const resolutionsEnabled = new Set<number>() | ||
40 | |||
41 | // Put in the order in which we want to process jobs | ||
42 | const availableResolutions: VideoResolution[] = [ | ||
43 | VideoResolution.H_NOVIDEO, | ||
44 | VideoResolution.H_480P, | ||
45 | VideoResolution.H_360P, | ||
46 | VideoResolution.H_720P, | ||
47 | VideoResolution.H_240P, | ||
48 | VideoResolution.H_144P, | ||
49 | VideoResolution.H_1080P, | ||
50 | VideoResolution.H_1440P, | ||
51 | VideoResolution.H_4K | ||
52 | ] | ||
53 | |||
54 | for (const resolution of availableResolutions) { | ||
55 | // Resolution not enabled | ||
56 | if (configResolutions[resolution + 'p'] !== true) continue | ||
57 | // Too big resolution for input file | ||
58 | if (input < resolution) continue | ||
59 | // We only want lower resolutions than input file | ||
60 | if (strictLower && input === resolution) continue | ||
61 | // Audio resolution but no audio in the video | ||
62 | if (resolution === VideoResolution.H_NOVIDEO && !hasAudio) continue | ||
63 | |||
64 | resolutionsEnabled.add(resolution) | ||
65 | } | ||
66 | |||
67 | if (includeInput) { | ||
68 | // Always use an even resolution to avoid issues with ffmpeg | ||
69 | resolutionsEnabled.add(toEven(input)) | ||
70 | } | ||
71 | |||
72 | return Array.from(resolutionsEnabled) | ||
73 | } | ||
diff --git a/server/lib/transcoding/web-transcoding.ts b/server/lib/transcoding/web-transcoding.ts
deleted file mode 100644
index f92d457a0..000000000
--- a/server/lib/transcoding/web-transcoding.ts
+++ /dev/null
@@ -1,263 +0,0 @@
1 | import { Job } from 'bullmq' | ||
2 | import { copyFile, move, remove, stat } from 'fs-extra' | ||
3 | import { basename, join } from 'path' | ||
4 | import { computeOutputFPS } from '@server/helpers/ffmpeg' | ||
5 | import { createTorrentAndSetInfoHash } from '@server/helpers/webtorrent' | ||
6 | import { VideoModel } from '@server/models/video/video' | ||
7 | import { MVideoFile, MVideoFullLight } from '@server/types/models' | ||
8 | import { ffprobePromise, getVideoStreamDuration, getVideoStreamFPS, TranscodeVODOptionsType } from '@shared/ffmpeg' | ||
9 | import { VideoResolution, VideoStorage } from '@shared/models' | ||
10 | import { CONFIG } from '../../initializers/config' | ||
11 | import { VideoFileModel } from '../../models/video/video-file' | ||
12 | import { JobQueue } from '../job-queue' | ||
13 | import { generateWebVideoFilename } from '../paths' | ||
14 | import { buildFileMetadata } from '../video-file' | ||
15 | import { VideoPathManager } from '../video-path-manager' | ||
16 | import { buildFFmpegVOD } from './shared' | ||
17 | import { buildOriginalFileResolution } from './transcoding-resolutions' | ||
18 | |||
19 | // Optimize the original video file and replace it. The resolution is not changed. | ||
20 | export async function optimizeOriginalVideofile (options: { | ||
21 | video: MVideoFullLight | ||
22 | inputVideoFile: MVideoFile | ||
23 | quickTranscode: boolean | ||
24 | job: Job | ||
25 | }) { | ||
26 | const { video, inputVideoFile, quickTranscode, job } = options | ||
27 | |||
28 | const transcodeDirectory = CONFIG.STORAGE.TMP_DIR | ||
29 | const newExtname = '.mp4' | ||
30 | |||
31 | // Will be released by our transcodeVOD function once ffmpeg has run | ||
32 | const inputFileMutexReleaser = await VideoPathManager.Instance.lockFiles(video.uuid) | ||
33 | |||
34 | try { | ||
35 | await video.reload() | ||
36 | await inputVideoFile.reload() | ||
37 | |||
38 | const fileWithVideoOrPlaylist = inputVideoFile.withVideoOrPlaylist(video) | ||
39 | |||
40 | const result = await VideoPathManager.Instance.makeAvailableVideoFile(fileWithVideoOrPlaylist, async videoInputPath => { | ||
41 | const videoOutputPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname) | ||
42 | |||
43 | const transcodeType: TranscodeVODOptionsType = quickTranscode | ||
44 | ? 'quick-transcode' | ||
45 | : 'video' | ||
46 | |||
47 | const resolution = buildOriginalFileResolution(inputVideoFile.resolution) | ||
48 | const fps = computeOutputFPS({ inputFPS: inputVideoFile.fps, resolution }) | ||
49 | |||
50 | // Could be very long! | ||
51 | await buildFFmpegVOD(job).transcode({ | ||
52 | type: transcodeType, | ||
53 | |||
54 | inputPath: videoInputPath, | ||
55 | outputPath: videoOutputPath, | ||
56 | |||
57 | inputFileMutexReleaser, | ||
58 | |||
59 | resolution, | ||
60 | fps | ||
61 | }) | ||
62 | |||
63 | // Important to do this before getVideoFilename() to take into account the new filename | ||
64 | inputVideoFile.resolution = resolution | ||
65 | inputVideoFile.extname = newExtname | ||
66 | inputVideoFile.filename = generateWebVideoFilename(resolution, newExtname) | ||
67 | inputVideoFile.storage = VideoStorage.FILE_SYSTEM | ||
68 | |||
69 | const { videoFile } = await onWebVideoFileTranscoding({ | ||
70 | video, | ||
71 | videoFile: inputVideoFile, | ||
72 | videoOutputPath | ||
73 | }) | ||
74 | |||
75 | await remove(videoInputPath) | ||
76 | |||
77 | return { transcodeType, videoFile } | ||
78 | }) | ||
79 | |||
80 | return result | ||
81 | } finally { | ||
82 | inputFileMutexReleaser() | ||
83 | } | ||
84 | } | ||
85 | |||
86 | // Transcode the original video file to a lower resolution compatible with web browsers | ||
87 | export async function transcodeNewWebVideoResolution (options: { | ||
88 | video: MVideoFullLight | ||
89 | resolution: VideoResolution | ||
90 | fps: number | ||
91 | job: Job | ||
92 | }) { | ||
93 | const { video: videoArg, resolution, fps, job } = options | ||
94 | |||
95 | const transcodeDirectory = CONFIG.STORAGE.TMP_DIR | ||
96 | const newExtname = '.mp4' | ||
97 | |||
98 | const inputFileMutexReleaser = await VideoPathManager.Instance.lockFiles(videoArg.uuid) | ||
99 | |||
100 | try { | ||
101 | const video = await VideoModel.loadFull(videoArg.uuid) | ||
102 | const file = video.getMaxQualityFile().withVideoOrPlaylist(video) | ||
103 | |||
104 | const result = await VideoPathManager.Instance.makeAvailableVideoFile(file, async videoInputPath => { | ||
105 | const newVideoFile = new VideoFileModel({ | ||
106 | resolution, | ||
107 | extname: newExtname, | ||
108 | filename: generateWebVideoFilename(resolution, newExtname), | ||
109 | size: 0, | ||
110 | videoId: video.id | ||
111 | }) | ||
112 | |||
113 | const videoOutputPath = join(transcodeDirectory, newVideoFile.filename) | ||
114 | |||
115 | const transcodeOptions = { | ||
116 | type: 'video' as 'video', | ||
117 | |||
118 | inputPath: videoInputPath, | ||
119 | outputPath: videoOutputPath, | ||
120 | |||
121 | inputFileMutexReleaser, | ||
122 | |||
123 | resolution, | ||
124 | fps | ||
125 | } | ||
126 | |||
127 | await buildFFmpegVOD(job).transcode(transcodeOptions) | ||
128 | |||
129 | return onWebVideoFileTranscoding({ video, videoFile: newVideoFile, videoOutputPath }) | ||
130 | }) | ||
131 | |||
132 | return result | ||
133 | } finally { | ||
134 | inputFileMutexReleaser() | ||
135 | } | ||
136 | } | ||
137 | |||
138 | // Merge an image with an audio file to create a video | ||
139 | export async function mergeAudioVideofile (options: { | ||
140 | video: MVideoFullLight | ||
141 | resolution: VideoResolution | ||
142 | fps: number | ||
143 | job: Job | ||
144 | }) { | ||
145 | const { video: videoArg, resolution, fps, job } = options | ||
146 | |||
147 | const transcodeDirectory = CONFIG.STORAGE.TMP_DIR | ||
148 | const newExtname = '.mp4' | ||
149 | |||
150 | const inputFileMutexReleaser = await VideoPathManager.Instance.lockFiles(videoArg.uuid) | ||
151 | |||
152 | try { | ||
153 | const video = await VideoModel.loadFull(videoArg.uuid) | ||
154 | const inputVideoFile = video.getMinQualityFile() | ||
155 | |||
156 | const fileWithVideoOrPlaylist = inputVideoFile.withVideoOrPlaylist(video) | ||
157 | |||
158 | const result = await VideoPathManager.Instance.makeAvailableVideoFile(fileWithVideoOrPlaylist, async audioInputPath => { | ||
159 | const videoOutputPath = join(transcodeDirectory, video.id + '-transcoded' + newExtname) | ||
160 | |||
161 | // Copy the preview to a tmp path, in case the user updates the video preview during transcoding | ||
162 | const previewPath = video.getPreview().getPath() | ||
163 | const tmpPreviewPath = join(CONFIG.STORAGE.TMP_DIR, basename(previewPath)) | ||
164 | await copyFile(previewPath, tmpPreviewPath) | ||
165 | |||
166 | const transcodeOptions = { | ||
167 | type: 'merge-audio' as 'merge-audio', | ||
168 | |||
169 | inputPath: tmpPreviewPath, | ||
170 | outputPath: videoOutputPath, | ||
171 | |||
172 | inputFileMutexReleaser, | ||
173 | |||
174 | audioPath: audioInputPath, | ||
175 | resolution, | ||
176 | fps | ||
177 | } | ||
178 | |||
179 | try { | ||
180 | await buildFFmpegVOD(job).transcode(transcodeOptions) | ||
181 | |||
182 | await remove(audioInputPath) | ||
183 | await remove(tmpPreviewPath) | ||
184 | } catch (err) { | ||
185 | await remove(tmpPreviewPath) | ||
186 | throw err | ||
187 | } | ||
188 | |||
189 | // Important to do this before getVideoFilename() to take into account the new file extension | ||
190 | inputVideoFile.extname = newExtname | ||
191 | inputVideoFile.resolution = resolution | ||
192 | inputVideoFile.filename = generateWebVideoFilename(inputVideoFile.resolution, newExtname) | ||
193 | |||
194 | // ffmpeg generated a new video file, so update the video duration | ||
195 | // See https://trac.ffmpeg.org/ticket/5456 | ||
196 | video.duration = await getVideoStreamDuration(videoOutputPath) | ||
197 | await video.save() | ||
198 | |||
199 | return onWebVideoFileTranscoding({ | ||
200 | video, | ||
201 | videoFile: inputVideoFile, | ||
202 | videoOutputPath, | ||
203 | wasAudioFile: true | ||
204 | }) | ||
205 | }) | ||
206 | |||
207 | return result | ||
208 | } finally { | ||
209 | inputFileMutexReleaser() | ||
210 | } | ||
211 | } | ||
212 | |||
213 | export async function onWebVideoFileTranscoding (options: { | ||
214 | video: MVideoFullLight | ||
215 | videoFile: MVideoFile | ||
216 | videoOutputPath: string | ||
217 | wasAudioFile?: boolean // default false | ||
218 | }) { | ||
219 | const { video, videoFile, videoOutputPath, wasAudioFile } = options | ||
220 | |||
221 | const mutexReleaser = await VideoPathManager.Instance.lockFiles(video.uuid) | ||
222 | |||
223 | try { | ||
224 | await video.reload() | ||
225 | |||
226 | const outputPath = VideoPathManager.Instance.getFSVideoFileOutputPath(video, videoFile) | ||
227 | |||
228 | const stats = await stat(videoOutputPath) | ||
229 | |||
230 | const probe = await ffprobePromise(videoOutputPath) | ||
231 | const fps = await getVideoStreamFPS(videoOutputPath, probe) | ||
232 | const metadata = await buildFileMetadata(videoOutputPath, probe) | ||
233 | |||
234 | await move(videoOutputPath, outputPath, { overwrite: true }) | ||
235 | |||
236 | videoFile.size = stats.size | ||
237 | videoFile.fps = fps | ||
238 | videoFile.metadata = metadata | ||
239 | |||
240 | await createTorrentAndSetInfoHash(video, videoFile) | ||
241 | |||
242 | const oldFile = await VideoFileModel.loadWebVideoFile({ videoId: video.id, fps: videoFile.fps, resolution: videoFile.resolution }) | ||
243 | if (oldFile) await video.removeWebVideoFile(oldFile) | ||
244 | |||
245 | await VideoFileModel.customUpsert(videoFile, 'video', undefined) | ||
246 | video.VideoFiles = await video.$get('VideoFiles') | ||
247 | |||
248 | if (wasAudioFile) { | ||
249 | await JobQueue.Instance.createJob({ | ||
250 | type: 'generate-video-storyboard' as 'generate-video-storyboard', | ||
251 | payload: { | ||
252 | videoUUID: video.uuid, | ||
253 | // No need to federate, we process these jobs sequentially | ||
254 | federate: false | ||
255 | } | ||
256 | }) | ||
257 | } | ||
258 | |||
259 | return { video, videoFile } | ||
260 | } finally { | ||
261 | mutexReleaser() | ||
262 | } | ||
263 | } | ||