author    | Chocobozzz <me@florianbigard.com> | 2023-07-31 14:34:36 +0200
committer | Chocobozzz <me@florianbigard.com> | 2023-08-11 15:02:33 +0200
commit    | 3a4992633ee62d5edfbb484d9c6bcb3cf158489d (patch)
tree      | e4510b39bdac9c318fdb4b47018d08f15368b8f0 /shared/ffmpeg/ffmpeg-edition.ts
parent    | 04d1da5621d25d59bd5fa1543b725c497bf5d9a8 (diff)
Migrate server to ESM
Sorry for the very big commit that may lead to git log issues and merge conflicts, but it's a major step forward:
* Server can start faster because import() is async, so we can easily lazy import big modules
* Angular doesn't seem to support ES imports (with the .js extension), so we had to properly organize PeerTube into a monorepo:
  * Use the yarn workspaces feature
  * Use TypeScript project references for dependencies
  * Shared projects have been moved into "packages", and each one is now a node module (with a dedicated package.json/tsconfig.json)
  * server/tools has been moved into apps/ and is now a dedicated app, bundled and published on NPM so users don't have to build the PeerTube CLI tools manually
  * server/tests has been moved into packages/ so we don't compile the tests every time we want to run the server
* Use the isolatedModules option:
  * Had to move from const enum to plain const objects (https://www.typescriptlang.org/docs/handbook/enums.html#objects-vs-enums); see the sketch below this list
  * Had to explicitly specify "type" imports when they are used in decorators
* Prefer tsx (which uses esbuild under the hood) over ts-node to load TypeScript files (tests with mocha, scripts):
  * Since esbuild doesn't support decorator metadata, we only test server files that do not import server models, to reduce test complexity
  * We still build test files into JS files for a faster CI
* Removed the unmaintained PeerTube CLI import script
* Removed some barrels to speed up execution (fewer imports)
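To illustrate the isolatedModules-related changes above, here is a minimal TypeScript sketch; the JobState name and its members are invented for the example and are not taken from the PeerTube codebase:

// Before: a const enum. Its members are inlined at compile time, which cannot be
// done reliably when each file is transpiled in isolation (isolatedModules).
//
// export const enum JobState {
//   PENDING = 1,
//   COMPLETED = 2
// }

// After: a plain object with an "as const" assertion, plus a type derived from its values.
export const JobState = {
  PENDING: 1,
  COMPLETED: 2
} as const

export type JobStateType = typeof JobState[keyof typeof JobState]

// Type-only imports also have to be spelled out explicitly, for example:
// import type { JobStateType } from './job-state'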
Diffstat (limited to 'shared/ffmpeg/ffmpeg-edition.ts')
-rw-r--r-- | shared/ffmpeg/ffmpeg-edition.ts | 239
1 file changed, 0 insertions, 239 deletions
diff --git a/shared/ffmpeg/ffmpeg-edition.ts b/shared/ffmpeg/ffmpeg-edition.ts
deleted file mode 100644
index 724ca1ea9..000000000
--- a/shared/ffmpeg/ffmpeg-edition.ts
+++ /dev/null
@@ -1,239 +0,0 @@
import { FilterSpecification } from 'fluent-ffmpeg'
import { FFmpegCommandWrapper, FFmpegCommandWrapperOptions } from './ffmpeg-command-wrapper'
import { presetVOD } from './shared/presets'
import { ffprobePromise, getVideoStreamDimensionsInfo, getVideoStreamDuration, getVideoStreamFPS, hasAudioStream } from './ffprobe'

export class FFmpegEdition {
  private readonly commandWrapper: FFmpegCommandWrapper

  constructor (options: FFmpegCommandWrapperOptions) {
    this.commandWrapper = new FFmpegCommandWrapper(options)
  }

  async cutVideo (options: {
    inputPath: string
    outputPath: string
    start?: number
    end?: number
  }) {
    const { inputPath, outputPath } = options

    const mainProbe = await ffprobePromise(inputPath)
    const fps = await getVideoStreamFPS(inputPath, mainProbe)
    const { resolution } = await getVideoStreamDimensionsInfo(inputPath, mainProbe)

    const command = this.commandWrapper.buildCommand(inputPath)
      .output(outputPath)

    await presetVOD({
      commandWrapper: this.commandWrapper,
      input: inputPath,
      resolution,
      fps,
      canCopyAudio: false,
      canCopyVideo: false
    })

    if (options.start) {
      command.outputOption('-ss ' + options.start)
    }

    if (options.end) {
      command.outputOption('-to ' + options.end)
    }

    await this.commandWrapper.runCommand()
  }

  async addWatermark (options: {
    inputPath: string
    watermarkPath: string
    outputPath: string

    videoFilters: {
      watermarkSizeRatio: number
      horitonzalMarginRatio: number
      verticalMarginRatio: number
    }
  }) {
    const { watermarkPath, inputPath, outputPath, videoFilters } = options

    const videoProbe = await ffprobePromise(inputPath)
    const fps = await getVideoStreamFPS(inputPath, videoProbe)
    const { resolution } = await getVideoStreamDimensionsInfo(inputPath, videoProbe)

    const command = this.commandWrapper.buildCommand(inputPath)
      .output(outputPath)

    command.input(watermarkPath)

    await presetVOD({
      commandWrapper: this.commandWrapper,
      input: inputPath,
      resolution,
      fps,
      canCopyAudio: true,
      canCopyVideo: false
    })

    const complexFilter: FilterSpecification[] = [
      // Scale watermark
      {
        inputs: [ '[1]', '[0]' ],
        filter: 'scale2ref',
        options: {
          w: 'oh*mdar',
          h: `ih*${videoFilters.watermarkSizeRatio}`
        },
        outputs: [ '[watermark]', '[video]' ]
      },

      {
        inputs: [ '[video]', '[watermark]' ],
        filter: 'overlay',
        options: {
          x: `main_w - overlay_w - (main_h * ${videoFilters.horitonzalMarginRatio})`,
          y: `main_h * ${videoFilters.verticalMarginRatio}`
        }
      }
    ]

    command.complexFilter(complexFilter)

    await this.commandWrapper.runCommand()
  }

  async addIntroOutro (options: {
    inputPath: string
    introOutroPath: string
    outputPath: string
    type: 'intro' | 'outro'
  }) {
    const { introOutroPath, inputPath, outputPath, type } = options

    const mainProbe = await ffprobePromise(inputPath)
    const fps = await getVideoStreamFPS(inputPath, mainProbe)
    const { resolution } = await getVideoStreamDimensionsInfo(inputPath, mainProbe)
    const mainHasAudio = await hasAudioStream(inputPath, mainProbe)

    const introOutroProbe = await ffprobePromise(introOutroPath)
    const introOutroHasAudio = await hasAudioStream(introOutroPath, introOutroProbe)

    const command = this.commandWrapper.buildCommand(inputPath)
      .output(outputPath)

    command.input(introOutroPath)

    if (!introOutroHasAudio && mainHasAudio) {
      const duration = await getVideoStreamDuration(introOutroPath, introOutroProbe)

      command.input('anullsrc')
      command.withInputFormat('lavfi')
      command.withInputOption('-t ' + duration)
    }

    await presetVOD({
      commandWrapper: this.commandWrapper,
      input: inputPath,
      resolution,
      fps,
      canCopyAudio: false,
      canCopyVideo: false
    })

    // Add black background to correctly scale intro/outro with padding
    const complexFilter: FilterSpecification[] = [
      {
        inputs: [ '1', '0' ],
        filter: 'scale2ref',
        options: {
          w: 'iw',
          h: `ih`
        },
        outputs: [ 'intro-outro', 'main' ]
      },
      {
        inputs: [ 'intro-outro', 'main' ],
        filter: 'scale2ref',
        options: {
          w: 'iw',
          h: `ih`
        },
        outputs: [ 'to-scale', 'main' ]
      },
      {
        inputs: 'to-scale',
        filter: 'drawbox',
        options: {
          t: 'fill'
        },
        outputs: [ 'to-scale-bg' ]
      },
      {
        inputs: [ '1', 'to-scale-bg' ],
        filter: 'scale2ref',
        options: {
          w: 'iw',
          h: 'ih',
          force_original_aspect_ratio: 'decrease',
          flags: 'spline'
        },
        outputs: [ 'to-scale', 'to-scale-bg' ]
      },
      {
        inputs: [ 'to-scale-bg', 'to-scale' ],
        filter: 'overlay',
        options: {
          x: '(main_w - overlay_w)/2',
          y: '(main_h - overlay_h)/2'
        },
        outputs: 'intro-outro-resized'
      }
    ]

    const concatFilter = {
      inputs: [],
      filter: 'concat',
      options: {
        n: 2,
        v: 1,
        unsafe: 1
      },
      outputs: [ 'v' ]
    }

    const introOutroFilterInputs = [ 'intro-outro-resized' ]
    const mainFilterInputs = [ 'main' ]

    if (mainHasAudio) {
      mainFilterInputs.push('0:a')

      if (introOutroHasAudio) {
        introOutroFilterInputs.push('1:a')
      } else {
        // Silent input
        introOutroFilterInputs.push('2:a')
      }
    }

    if (type === 'intro') {
      concatFilter.inputs = [ ...introOutroFilterInputs, ...mainFilterInputs ]
    } else {
      concatFilter.inputs = [ ...mainFilterInputs, ...introOutroFilterInputs ]
    }

    if (mainHasAudio) {
      concatFilter.options['a'] = 1
      concatFilter.outputs.push('a')

      command.outputOption('-map [a]')
    }

    command.outputOption('-map [v]')

    complexFilter.push(concatFilter)
    command.complexFilter(complexFilter)

    await this.commandWrapper.runCommand()
  }
}
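For readers who just want to see how the removed class was called, here is a minimal usage sketch based only on the method signatures visible in this diff; commandWrapperOptions and the file paths are placeholders, since the shape of FFmpegCommandWrapperOptions is defined in ffmpeg-command-wrapper.ts and is not part of this diff:

import { FFmpegEdition } from './ffmpeg-edition'
import { FFmpegCommandWrapperOptions } from './ffmpeg-command-wrapper'

// Placeholder: the real options object comes from ffmpeg-command-wrapper.ts,
// whose fields are not shown in this diff.
declare const commandWrapperOptions: FFmpegCommandWrapperOptions

const edition = new FFmpegEdition(commandWrapperOptions)

// Keep only the segment between 5 and 30 seconds of the input.
await edition.cutVideo({
  inputPath: 'input.mp4',
  outputPath: 'cut.mp4',
  start: 5,
  end: 30
})

// Overlay a watermark near the top-right corner, sized relative to the video height.
await edition.addWatermark({
  inputPath: 'input.mp4',
  watermarkPath: 'logo.png',
  outputPath: 'watermarked.mp4',
  videoFilters: {
    watermarkSizeRatio: 0.2,
    horitonzalMarginRatio: 0.05,
    verticalMarginRatio: 0.05
  }
})

// Prepend an intro clip, scaled and padded to the main video's dimensions.
await edition.addIntroOutro({
  inputPath: 'input.mp4',
  introOutroPath: 'intro.mp4',
  outputPath: 'with-intro.mp4',
  type: 'intro'
})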