Diffstat (limited to 'server')
-rw-r--r--  server/controllers/api/videos/import.ts | 10
-rw-r--r--  server/helpers/requests.ts | 9
-rw-r--r--  server/helpers/youtube-dl.ts | 394
-rw-r--r--  server/helpers/youtube-dl/index.ts | 3
-rw-r--r--  server/helpers/youtube-dl/youtube-dl-cli.ts | 198
-rw-r--r--  server/helpers/youtube-dl/youtube-dl-info-builder.ts | 154
-rw-r--r--  server/helpers/youtube-dl/youtube-dl-wrapper.ts | 135
-rw-r--r--  server/initializers/config.ts | 13
-rw-r--r--  server/initializers/constants.ts | 7
-rw-r--r--  server/lib/job-queue/handlers/video-import.ts | 6
-rw-r--r--  server/lib/schedulers/youtube-dl-update-scheduler.ts | 4
-rw-r--r--  server/tests/api/server/proxy.ts | 107
-rw-r--r--  server/tests/api/videos/video-imports.ts | 632
-rw-r--r--  server/tests/fixtures/video_import_preview_yt_dlp.jpg | bin 0 -> 15844 bytes
-rw-r--r--  server/tests/fixtures/video_import_thumbnail_yt_dlp.jpg | bin 0 -> 10163 bytes
-rw-r--r--  server/tools/peertube-import-videos.ts | 179
16 files changed, 1010 insertions, 841 deletions
diff --git a/server/controllers/api/videos/import.ts b/server/controllers/api/videos/import.ts
index 4265f3217..eddb9b32d 100644
--- a/server/controllers/api/videos/import.ts
+++ b/server/controllers/api/videos/import.ts
@@ -26,7 +26,7 @@ import { isArray } from '../../../helpers/custom-validators/misc'
26 | import { cleanUpReqFiles, createReqFiles } from '../../../helpers/express-utils' | 26 | import { cleanUpReqFiles, createReqFiles } from '../../../helpers/express-utils' |
27 | import { logger } from '../../../helpers/logger' | 27 | import { logger } from '../../../helpers/logger' |
28 | import { getSecureTorrentName } from '../../../helpers/utils' | 28 | import { getSecureTorrentName } from '../../../helpers/utils' |
29 | import { YoutubeDL, YoutubeDLInfo } from '../../../helpers/youtube-dl' | 29 | import { YoutubeDLWrapper, YoutubeDLInfo } from '../../../helpers/youtube-dl' |
30 | import { CONFIG } from '../../../initializers/config' | 30 | import { CONFIG } from '../../../initializers/config' |
31 | import { MIMETYPES } from '../../../initializers/constants' | 31 | import { MIMETYPES } from '../../../initializers/constants' |
32 | import { sequelizeTypescript } from '../../../initializers/database' | 32 | import { sequelizeTypescript } from '../../../initializers/database' |
@@ -134,12 +134,12 @@ async function addYoutubeDLImport (req: express.Request, res: express.Response)
134 | const targetUrl = body.targetUrl | 134 | const targetUrl = body.targetUrl |
135 | const user = res.locals.oauth.token.User | 135 | const user = res.locals.oauth.token.User |
136 | 136 | ||
137 | const youtubeDL = new YoutubeDL(targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod')) | 137 | const youtubeDL = new YoutubeDLWrapper(targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod')) |
138 | 138 | ||
139 | // Get video infos | 139 | // Get video infos |
140 | let youtubeDLInfo: YoutubeDLInfo | 140 | let youtubeDLInfo: YoutubeDLInfo |
141 | try { | 141 | try { |
142 | youtubeDLInfo = await youtubeDL.getYoutubeDLInfo() | 142 | youtubeDLInfo = await youtubeDL.getInfoForDownload() |
143 | } catch (err) { | 143 | } catch (err) { |
144 | logger.info('Cannot fetch information from import for URL %s.', targetUrl, { err }) | 144 | logger.info('Cannot fetch information from import for URL %s.', targetUrl, { err }) |
145 | 145 | ||
@@ -373,9 +373,9 @@ function extractNameFromArray (name: string | string[]) {
373 | return isArray(name) ? name[0] : name | 373 | return isArray(name) ? name[0] : name |
374 | } | 374 | } |
375 | 375 | ||
376 | async function processYoutubeSubtitles (youtubeDL: YoutubeDL, targetUrl: string, videoId: number) { | 376 | async function processYoutubeSubtitles (youtubeDL: YoutubeDLWrapper, targetUrl: string, videoId: number) { |
377 | try { | 377 | try { |
378 | const subtitles = await youtubeDL.getYoutubeDLSubs() | 378 | const subtitles = await youtubeDL.getSubtitles() |
379 | 379 | ||
380 | logger.info('Will create %s subtitles from youtube import %s.', subtitles.length, targetUrl) | 380 | logger.info('Will create %s subtitles from youtube import %s.', subtitles.length, targetUrl) |
381 | 381 | ||
diff --git a/server/helpers/requests.ts b/server/helpers/requests.ts
index 991270952..d93f55776 100644
--- a/server/helpers/requests.ts
+++ b/server/helpers/requests.ts
@@ -1,9 +1,9 @@
1 | import { createWriteStream, remove } from 'fs-extra' | 1 | import { createWriteStream, remove } from 'fs-extra' |
2 | import got, { CancelableRequest, Options as GotOptions, RequestError } from 'got' | 2 | import got, { CancelableRequest, Options as GotOptions, RequestError, Response } from 'got' |
3 | import { HttpProxyAgent, HttpsProxyAgent } from 'hpagent' | 3 | import { HttpProxyAgent, HttpsProxyAgent } from 'hpagent' |
4 | import { join } from 'path' | 4 | import { join } from 'path' |
5 | import { CONFIG } from '../initializers/config' | 5 | import { CONFIG } from '../initializers/config' |
6 | import { ACTIVITY_PUB, PEERTUBE_VERSION, REQUEST_TIMEOUT, WEBSERVER } from '../initializers/constants' | 6 | import { ACTIVITY_PUB, BINARY_CONTENT_TYPES, PEERTUBE_VERSION, REQUEST_TIMEOUT, WEBSERVER } from '../initializers/constants' |
7 | import { pipelinePromise } from './core-utils' | 7 | import { pipelinePromise } from './core-utils' |
8 | import { processImage } from './image-utils' | 8 | import { processImage } from './image-utils' |
9 | import { logger } from './logger' | 9 | import { logger } from './logger' |
@@ -180,12 +180,17 @@ function getUserAgent () {
180 | return `PeerTube/${PEERTUBE_VERSION} (+${WEBSERVER.URL})` | 180 | return `PeerTube/${PEERTUBE_VERSION} (+${WEBSERVER.URL})` |
181 | } | 181 | } |
182 | 182 | ||
183 | function isBinaryResponse (result: Response<any>) { | ||
184 | return BINARY_CONTENT_TYPES.has(result.headers['content-type']) | ||
185 | } | ||
186 | |||
183 | // --------------------------------------------------------------------------- | 187 | // --------------------------------------------------------------------------- |
184 | 188 | ||
185 | export { | 189 | export { |
186 | doRequest, | 190 | doRequest, |
187 | doJSONRequest, | 191 | doJSONRequest, |
188 | doRequestAndSaveToFile, | 192 | doRequestAndSaveToFile, |
193 | isBinaryResponse, | ||
189 | downloadImage, | 194 | downloadImage, |
190 | peertubeGot | 195 | peertubeGot |
191 | } | 196 | } |
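For reference, a minimal sketch of how the new isBinaryResponse() helper pairs with peertubeGot() (not part of this commit; the function name and URL handling are illustrative, while the got options mirror those used by the youtube-dl updater below):

  import { isBinaryResponse, peertubeGot } from '@server/helpers/requests'

  async function fetchReleaseBody (url: string) {
    // Buffer the body so a binary payload survives untouched
    const res = await peertubeGot(url, { responseType: 'buffer' as 'buffer', context: { bodyKBLimit: 20_000 } })

    // Content-Type is listed in BINARY_CONTENT_TYPES: the server answered with the binary itself
    if (isBinaryResponse(res)) return { binary: res.body }

    // Otherwise assume a JSON payload (for example a GitHub releases API response)
    return { json: JSON.parse(res.body.toString()) }
  }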
diff --git a/server/helpers/youtube-dl.ts b/server/helpers/youtube-dl.ts
deleted file mode 100644
index 0392ec4c7..000000000
--- a/server/helpers/youtube-dl.ts
+++ /dev/null
@@ -1,394 +0,0 @@
1 | import { createWriteStream } from 'fs' | ||
2 | import { ensureDir, move, pathExists, remove, writeFile } from 'fs-extra' | ||
3 | import { join } from 'path' | ||
4 | import { CONFIG } from '@server/initializers/config' | ||
5 | import { HttpStatusCode } from '../../shared/models/http/http-error-codes' | ||
6 | import { VideoResolution } from '../../shared/models/videos' | ||
7 | import { CONSTRAINTS_FIELDS, VIDEO_CATEGORIES, VIDEO_LANGUAGES, VIDEO_LICENCES } from '../initializers/constants' | ||
8 | import { peertubeTruncate, pipelinePromise, root } from './core-utils' | ||
9 | import { isVideoFileExtnameValid } from './custom-validators/videos' | ||
10 | import { logger } from './logger' | ||
11 | import { peertubeGot } from './requests' | ||
12 | import { generateVideoImportTmpPath } from './utils' | ||
13 | |||
14 | export type YoutubeDLInfo = { | ||
15 | name?: string | ||
16 | description?: string | ||
17 | category?: number | ||
18 | language?: string | ||
19 | licence?: number | ||
20 | nsfw?: boolean | ||
21 | tags?: string[] | ||
22 | thumbnailUrl?: string | ||
23 | ext?: string | ||
24 | originallyPublishedAt?: Date | ||
25 | } | ||
26 | |||
27 | export type YoutubeDLSubs = { | ||
28 | language: string | ||
29 | filename: string | ||
30 | path: string | ||
31 | }[] | ||
32 | |||
33 | const processOptions = { | ||
34 | maxBuffer: 1024 * 1024 * 10 // 10MB | ||
35 | } | ||
36 | |||
37 | class YoutubeDL { | ||
38 | |||
39 | constructor (private readonly url: string = '', private readonly enabledResolutions: number[] = []) { | ||
40 | |||
41 | } | ||
42 | |||
43 | getYoutubeDLInfo (opts?: string[]): Promise<YoutubeDLInfo> { | ||
44 | return new Promise<YoutubeDLInfo>((res, rej) => { | ||
45 | let args = opts || [] | ||
46 | |||
47 | if (CONFIG.IMPORT.VIDEOS.HTTP.FORCE_IPV4) { | ||
48 | args.push('--force-ipv4') | ||
49 | } | ||
50 | |||
51 | args = this.wrapWithProxyOptions(args) | ||
52 | args = [ '-f', this.getYoutubeDLVideoFormat() ].concat(args) | ||
53 | |||
54 | YoutubeDL.safeGetYoutubeDL() | ||
55 | .then(youtubeDL => { | ||
56 | youtubeDL.getInfo(this.url, args, processOptions, (err, info) => { | ||
57 | if (err) return rej(err) | ||
58 | if (info.is_live === true) return rej(new Error('Cannot download a live streaming.')) | ||
59 | |||
60 | const obj = this.buildVideoInfo(this.normalizeObject(info)) | ||
61 | if (obj.name && obj.name.length < CONSTRAINTS_FIELDS.VIDEOS.NAME.min) obj.name += ' video' | ||
62 | |||
63 | return res(obj) | ||
64 | }) | ||
65 | }) | ||
66 | .catch(err => rej(err)) | ||
67 | }) | ||
68 | } | ||
69 | |||
70 | getYoutubeDLSubs (opts?: object): Promise<YoutubeDLSubs> { | ||
71 | return new Promise<YoutubeDLSubs>((res, rej) => { | ||
72 | const cwd = CONFIG.STORAGE.TMP_DIR | ||
73 | const options = opts || { all: true, format: 'vtt', cwd } | ||
74 | |||
75 | YoutubeDL.safeGetYoutubeDL() | ||
76 | .then(youtubeDL => { | ||
77 | youtubeDL.getSubs(this.url, options, (err, files) => { | ||
78 | if (err) return rej(err) | ||
79 | if (!files) return [] | ||
80 | |||
81 | logger.debug('Get subtitles from youtube dl.', { url: this.url, files }) | ||
82 | |||
83 | const subtitles = files.reduce((acc, filename) => { | ||
84 | const matched = filename.match(/\.([a-z]{2})(-[a-z]+)?\.(vtt|ttml)/i) | ||
85 | if (!matched || !matched[1]) return acc | ||
86 | |||
87 | return [ | ||
88 | ...acc, | ||
89 | { | ||
90 | language: matched[1], | ||
91 | path: join(cwd, filename), | ||
92 | filename | ||
93 | } | ||
94 | ] | ||
95 | }, []) | ||
96 | |||
97 | return res(subtitles) | ||
98 | }) | ||
99 | }) | ||
100 | .catch(err => rej(err)) | ||
101 | }) | ||
102 | } | ||
103 | |||
104 | getYoutubeDLVideoFormat () { | ||
105 | /** | ||
106 | * list of format selectors in order or preference | ||
107 | * see https://github.com/ytdl-org/youtube-dl#format-selection | ||
108 | * | ||
109 | * case #1 asks for a mp4 using h264 (avc1) and the exact resolution in the hope | ||
110 | * of being able to do a "quick-transcode" | ||
111 | * case #2 is the first fallback. No "quick-transcode" means we can get anything else (like vp9) | ||
112 | * case #3 is the resolution-degraded equivalent of #1, and already a pretty safe fallback | ||
113 | * | ||
114 | * in any case we avoid AV1, see https://github.com/Chocobozzz/PeerTube/issues/3499 | ||
115 | **/ | ||
116 | const resolution = this.enabledResolutions.length === 0 | ||
117 | ? VideoResolution.H_720P | ||
118 | : Math.max(...this.enabledResolutions) | ||
119 | |||
120 | return [ | ||
121 | `bestvideo[vcodec^=avc1][height=${resolution}]+bestaudio[ext=m4a]`, // case #1 | ||
122 | `bestvideo[vcodec!*=av01][vcodec!*=vp9.2][height=${resolution}]+bestaudio`, // case #2 | ||
123 | `bestvideo[vcodec^=avc1][height<=${resolution}]+bestaudio[ext=m4a]`, // case #3 | ||
124 | `bestvideo[vcodec!*=av01][vcodec!*=vp9.2]+bestaudio`, | ||
125 | 'best[vcodec!*=av01][vcodec!*=vp9.2]', // case fallback for known formats | ||
126 | 'best' // Ultimate fallback | ||
127 | ].join('/') | ||
128 | } | ||
129 | |||
130 | downloadYoutubeDLVideo (fileExt: string, timeout: number) { | ||
131 | // Leave empty the extension, youtube-dl will add it | ||
132 | const pathWithoutExtension = generateVideoImportTmpPath(this.url, '') | ||
133 | |||
134 | let timer | ||
135 | |||
136 | logger.info('Importing youtubeDL video %s to %s', this.url, pathWithoutExtension) | ||
137 | |||
138 | let options = [ '-f', this.getYoutubeDLVideoFormat(), '-o', pathWithoutExtension ] | ||
139 | options = this.wrapWithProxyOptions(options) | ||
140 | |||
141 | if (process.env.FFMPEG_PATH) { | ||
142 | options = options.concat([ '--ffmpeg-location', process.env.FFMPEG_PATH ]) | ||
143 | } | ||
144 | |||
145 | logger.debug('YoutubeDL options for %s.', this.url, { options }) | ||
146 | |||
147 | return new Promise<string>((res, rej) => { | ||
148 | YoutubeDL.safeGetYoutubeDL() | ||
149 | .then(youtubeDL => { | ||
150 | youtubeDL.exec(this.url, options, processOptions, async err => { | ||
151 | clearTimeout(timer) | ||
152 | |||
153 | try { | ||
154 | // If youtube-dl did not guess an extension for our file, just use .mp4 as default | ||
155 | if (await pathExists(pathWithoutExtension)) { | ||
156 | await move(pathWithoutExtension, pathWithoutExtension + '.mp4') | ||
157 | } | ||
158 | |||
159 | const path = await this.guessVideoPathWithExtension(pathWithoutExtension, fileExt) | ||
160 | |||
161 | if (err) { | ||
162 | remove(path) | ||
163 | .catch(err => logger.error('Cannot delete path on YoutubeDL error.', { err })) | ||
164 | |||
165 | return rej(err) | ||
166 | } | ||
167 | |||
168 | return res(path) | ||
169 | } catch (err) { | ||
170 | return rej(err) | ||
171 | } | ||
172 | }) | ||
173 | |||
174 | timer = setTimeout(() => { | ||
175 | const err = new Error('YoutubeDL download timeout.') | ||
176 | |||
177 | this.guessVideoPathWithExtension(pathWithoutExtension, fileExt) | ||
178 | .then(path => remove(path)) | ||
179 | .finally(() => rej(err)) | ||
180 | .catch(err => { | ||
181 | logger.error('Cannot remove file in youtubeDL timeout.', { err }) | ||
182 | return rej(err) | ||
183 | }) | ||
184 | }, timeout) | ||
185 | }) | ||
186 | .catch(err => rej(err)) | ||
187 | }) | ||
188 | } | ||
189 | |||
190 | buildOriginallyPublishedAt (obj: any) { | ||
191 | let originallyPublishedAt: Date = null | ||
192 | |||
193 | const uploadDateMatcher = /^(\d{4})(\d{2})(\d{2})$/.exec(obj.upload_date) | ||
194 | if (uploadDateMatcher) { | ||
195 | originallyPublishedAt = new Date() | ||
196 | originallyPublishedAt.setHours(0, 0, 0, 0) | ||
197 | |||
198 | const year = parseInt(uploadDateMatcher[1], 10) | ||
199 | // Month starts from 0 | ||
200 | const month = parseInt(uploadDateMatcher[2], 10) - 1 | ||
201 | const day = parseInt(uploadDateMatcher[3], 10) | ||
202 | |||
203 | originallyPublishedAt.setFullYear(year, month, day) | ||
204 | } | ||
205 | |||
206 | return originallyPublishedAt | ||
207 | } | ||
208 | |||
209 | private async guessVideoPathWithExtension (tmpPath: string, sourceExt: string) { | ||
210 | if (!isVideoFileExtnameValid(sourceExt)) { | ||
211 | throw new Error('Invalid video extension ' + sourceExt) | ||
212 | } | ||
213 | |||
214 | const extensions = [ sourceExt, '.mp4', '.mkv', '.webm' ] | ||
215 | |||
216 | for (const extension of extensions) { | ||
217 | const path = tmpPath + extension | ||
218 | |||
219 | if (await pathExists(path)) return path | ||
220 | } | ||
221 | |||
222 | throw new Error('Cannot guess path of ' + tmpPath) | ||
223 | } | ||
224 | |||
225 | private normalizeObject (obj: any) { | ||
226 | const newObj: any = {} | ||
227 | |||
228 | for (const key of Object.keys(obj)) { | ||
229 | // Deprecated key | ||
230 | if (key === 'resolution') continue | ||
231 | |||
232 | const value = obj[key] | ||
233 | |||
234 | if (typeof value === 'string') { | ||
235 | newObj[key] = value.normalize() | ||
236 | } else { | ||
237 | newObj[key] = value | ||
238 | } | ||
239 | } | ||
240 | |||
241 | return newObj | ||
242 | } | ||
243 | |||
244 | private buildVideoInfo (obj: any): YoutubeDLInfo { | ||
245 | return { | ||
246 | name: this.titleTruncation(obj.title), | ||
247 | description: this.descriptionTruncation(obj.description), | ||
248 | category: this.getCategory(obj.categories), | ||
249 | licence: this.getLicence(obj.license), | ||
250 | language: this.getLanguage(obj.language), | ||
251 | nsfw: this.isNSFW(obj), | ||
252 | tags: this.getTags(obj.tags), | ||
253 | thumbnailUrl: obj.thumbnail || undefined, | ||
254 | originallyPublishedAt: this.buildOriginallyPublishedAt(obj), | ||
255 | ext: obj.ext | ||
256 | } | ||
257 | } | ||
258 | |||
259 | private titleTruncation (title: string) { | ||
260 | return peertubeTruncate(title, { | ||
261 | length: CONSTRAINTS_FIELDS.VIDEOS.NAME.max, | ||
262 | separator: /,? +/, | ||
263 | omission: ' […]' | ||
264 | }) | ||
265 | } | ||
266 | |||
267 | private descriptionTruncation (description: string) { | ||
268 | if (!description || description.length < CONSTRAINTS_FIELDS.VIDEOS.DESCRIPTION.min) return undefined | ||
269 | |||
270 | return peertubeTruncate(description, { | ||
271 | length: CONSTRAINTS_FIELDS.VIDEOS.DESCRIPTION.max, | ||
272 | separator: /,? +/, | ||
273 | omission: ' […]' | ||
274 | }) | ||
275 | } | ||
276 | |||
277 | private isNSFW (info: any) { | ||
278 | return info.age_limit && info.age_limit >= 16 | ||
279 | } | ||
280 | |||
281 | private getTags (tags: any) { | ||
282 | if (Array.isArray(tags) === false) return [] | ||
283 | |||
284 | return tags | ||
285 | .filter(t => t.length < CONSTRAINTS_FIELDS.VIDEOS.TAG.max && t.length > CONSTRAINTS_FIELDS.VIDEOS.TAG.min) | ||
286 | .map(t => t.normalize()) | ||
287 | .slice(0, 5) | ||
288 | } | ||
289 | |||
290 | private getLicence (licence: string) { | ||
291 | if (!licence) return undefined | ||
292 | |||
293 | if (licence.includes('Creative Commons Attribution')) return 1 | ||
294 | |||
295 | for (const key of Object.keys(VIDEO_LICENCES)) { | ||
296 | const peertubeLicence = VIDEO_LICENCES[key] | ||
297 | if (peertubeLicence.toLowerCase() === licence.toLowerCase()) return parseInt(key, 10) | ||
298 | } | ||
299 | |||
300 | return undefined | ||
301 | } | ||
302 | |||
303 | private getCategory (categories: string[]) { | ||
304 | if (!categories) return undefined | ||
305 | |||
306 | const categoryString = categories[0] | ||
307 | if (!categoryString || typeof categoryString !== 'string') return undefined | ||
308 | |||
309 | if (categoryString === 'News & Politics') return 11 | ||
310 | |||
311 | for (const key of Object.keys(VIDEO_CATEGORIES)) { | ||
312 | const category = VIDEO_CATEGORIES[key] | ||
313 | if (categoryString.toLowerCase() === category.toLowerCase()) return parseInt(key, 10) | ||
314 | } | ||
315 | |||
316 | return undefined | ||
317 | } | ||
318 | |||
319 | private getLanguage (language: string) { | ||
320 | return VIDEO_LANGUAGES[language] ? language : undefined | ||
321 | } | ||
322 | |||
323 | private wrapWithProxyOptions (options: string[]) { | ||
324 | if (CONFIG.IMPORT.VIDEOS.HTTP.PROXY.ENABLED) { | ||
325 | logger.debug('Using proxy for YoutubeDL') | ||
326 | |||
327 | return [ '--proxy', CONFIG.IMPORT.VIDEOS.HTTP.PROXY.URL ].concat(options) | ||
328 | } | ||
329 | |||
330 | return options | ||
331 | } | ||
332 | |||
333 | // Thanks: https://github.com/przemyslawpluta/node-youtube-dl/blob/master/lib/downloader.js | ||
334 | // We rewrote it to avoid sync calls | ||
335 | static async updateYoutubeDLBinary () { | ||
336 | logger.info('Updating youtubeDL binary.') | ||
337 | |||
338 | const binDirectory = join(root(), 'node_modules', 'youtube-dl', 'bin') | ||
339 | const bin = join(binDirectory, 'youtube-dl') | ||
340 | const detailsPath = join(binDirectory, 'details') | ||
341 | const url = process.env.YOUTUBE_DL_DOWNLOAD_HOST || 'https://yt-dl.org/downloads/latest/youtube-dl' | ||
342 | |||
343 | await ensureDir(binDirectory) | ||
344 | |||
345 | try { | ||
346 | const gotContext = { bodyKBLimit: 20_000 } | ||
347 | |||
348 | const result = await peertubeGot(url, { followRedirect: false, context: gotContext }) | ||
349 | |||
350 | if (result.statusCode !== HttpStatusCode.FOUND_302) { | ||
351 | logger.error('youtube-dl update error: did not get redirect for the latest version link. Status %d', result.statusCode) | ||
352 | return | ||
353 | } | ||
354 | |||
355 | const newUrl = result.headers.location | ||
356 | const newVersion = /\/(\d{4}\.\d\d\.\d\d(\.\d)?)\/youtube-dl$/.exec(newUrl)[1] | ||
357 | |||
358 | const downloadFileStream = peertubeGot.stream(newUrl, { context: gotContext }) | ||
359 | const writeStream = createWriteStream(bin, { mode: 493 }) | ||
360 | |||
361 | await pipelinePromise( | ||
362 | downloadFileStream, | ||
363 | writeStream | ||
364 | ) | ||
365 | |||
366 | const details = JSON.stringify({ version: newVersion, path: bin, exec: 'youtube-dl' }) | ||
367 | await writeFile(detailsPath, details, { encoding: 'utf8' }) | ||
368 | |||
369 | logger.info('youtube-dl updated to version %s.', newVersion) | ||
370 | } catch (err) { | ||
371 | logger.error('Cannot update youtube-dl.', { err }) | ||
372 | } | ||
373 | } | ||
374 | |||
375 | static async safeGetYoutubeDL () { | ||
376 | let youtubeDL | ||
377 | |||
378 | try { | ||
379 | youtubeDL = require('youtube-dl') | ||
380 | } catch (e) { | ||
381 | // Download binary | ||
382 | await this.updateYoutubeDLBinary() | ||
383 | youtubeDL = require('youtube-dl') | ||
384 | } | ||
385 | |||
386 | return youtubeDL | ||
387 | } | ||
388 | } | ||
389 | |||
390 | // --------------------------------------------------------------------------- | ||
391 | |||
392 | export { | ||
393 | YoutubeDL | ||
394 | } | ||
diff --git a/server/helpers/youtube-dl/index.ts b/server/helpers/youtube-dl/index.ts
new file mode 100644
index 000000000..6afc77dcf
--- /dev/null
+++ b/server/helpers/youtube-dl/index.ts
@@ -0,0 +1,3 @@
1 | export * from './youtube-dl-cli' | ||
2 | export * from './youtube-dl-info-builder' | ||
3 | export * from './youtube-dl-wrapper' | ||
diff --git a/server/helpers/youtube-dl/youtube-dl-cli.ts b/server/helpers/youtube-dl/youtube-dl-cli.ts
new file mode 100644
index 000000000..440869205
--- /dev/null
+++ b/server/helpers/youtube-dl/youtube-dl-cli.ts
@@ -0,0 +1,198 @@
1 | import execa from 'execa' | ||
2 | import { pathExists, writeFile } from 'fs-extra' | ||
3 | import { join } from 'path' | ||
4 | import { CONFIG } from '@server/initializers/config' | ||
5 | import { VideoResolution } from '@shared/models' | ||
6 | import { logger, loggerTagsFactory } from '../logger' | ||
7 | import { getProxy, isProxyEnabled } from '../proxy' | ||
8 | import { isBinaryResponse, peertubeGot } from '../requests' | ||
9 | |||
10 | const lTags = loggerTagsFactory('youtube-dl') | ||
11 | |||
12 | const youtubeDLBinaryPath = join(CONFIG.STORAGE.BIN_DIR, CONFIG.IMPORT.VIDEOS.HTTP.YOUTUBE_DL_RELEASE.NAME) | ||
13 | |||
14 | export class YoutubeDLCLI { | ||
15 | |||
16 | static async safeGet () { | ||
17 | if (!await pathExists(youtubeDLBinaryPath)) { | ||
18 | await this.updateYoutubeDLBinary() | ||
19 | } | ||
20 | |||
21 | return new YoutubeDLCLI() | ||
22 | } | ||
23 | |||
24 | static async updateYoutubeDLBinary () { | ||
25 | const url = CONFIG.IMPORT.VIDEOS.HTTP.YOUTUBE_DL_RELEASE.URL | ||
26 | |||
27 | logger.info('Updating youtubeDL binary from %s.', url, lTags()) | ||
28 | |||
29 | const gotOptions = { context: { bodyKBLimit: 20_000 }, responseType: 'buffer' as 'buffer' } | ||
30 | |||
31 | try { | ||
32 | let gotResult = await peertubeGot(url, gotOptions) | ||
33 | |||
34 | if (!isBinaryResponse(gotResult)) { | ||
35 | const json = JSON.parse(gotResult.body.toString()) | ||
36 | const latest = json.filter(release => release.prerelease === false)[0] | ||
37 | if (!latest) throw new Error('Cannot find latest release') | ||
38 | |||
39 | const releaseName = CONFIG.IMPORT.VIDEOS.HTTP.YOUTUBE_DL_RELEASE.NAME | ||
40 | const releaseAsset = latest.assets.find(a => a.name === releaseName) | ||
41 | if (!releaseAsset) throw new Error(`Cannot find appropriate release with name ${releaseName} in release assets`) | ||
42 | |||
43 | gotResult = await peertubeGot(releaseAsset.browser_download_url, gotOptions) | ||
44 | } | ||
45 | |||
46 | if (!isBinaryResponse(gotResult)) { | ||
47 | throw new Error('Not a binary response') | ||
48 | } | ||
49 | |||
50 | await writeFile(youtubeDLBinaryPath, gotResult.body) | ||
51 | |||
52 | logger.info('youtube-dl updated %s.', youtubeDLBinaryPath, lTags()) | ||
53 | } catch (err) { | ||
54 | logger.error('Cannot update youtube-dl from %s.', url, { err, ...lTags() }) | ||
55 | } | ||
56 | } | ||
57 | |||
58 | static getYoutubeDLVideoFormat (enabledResolutions: VideoResolution[]) { | ||
59 | /** | ||
60 | * list of format selectors in order of preference | ||
61 | * see https://github.com/ytdl-org/youtube-dl#format-selection | ||
62 | * | ||
63 | * case #1 asks for a mp4 using h264 (avc1) and the exact resolution in the hope | ||
64 | * of being able to do a "quick-transcode" | ||
65 | * case #2 is the first fallback. No "quick-transcode" means we can get anything else (like vp9) | ||
66 | * case #3 is the resolution-degraded equivalent of #1, and already a pretty safe fallback | ||
67 | * | ||
68 | * in any case we avoid AV1, see https://github.com/Chocobozzz/PeerTube/issues/3499 | ||
69 | **/ | ||
70 | const resolution = enabledResolutions.length === 0 | ||
71 | ? VideoResolution.H_720P | ||
72 | : Math.max(...enabledResolutions) | ||
73 | |||
74 | return [ | ||
75 | `bestvideo[vcodec^=avc1][height=${resolution}]+bestaudio[ext=m4a]`, // case #1 | ||
76 | `bestvideo[vcodec!*=av01][vcodec!*=vp9.2][height=${resolution}]+bestaudio`, // case #2 | ||
77 | `bestvideo[vcodec^=avc1][height<=${resolution}]+bestaudio[ext=m4a]`, // case #3 | ||
78 | `bestvideo[vcodec!*=av01][vcodec!*=vp9.2]+bestaudio`, | ||
79 | 'best[vcodec!*=av01][vcodec!*=vp9.2]', // case fallback for known formats | ||
80 | 'best' // Ultimate fallback | ||
81 | ].join('/') | ||
82 | } | ||
83 | |||
84 | private constructor () { | ||
85 | |||
86 | } | ||
87 | |||
88 | download (options: { | ||
89 | url: string | ||
90 | format: string | ||
91 | output: string | ||
92 | processOptions: execa.NodeOptions | ||
93 | additionalYoutubeDLArgs?: string[] | ||
94 | }) { | ||
95 | return this.run({ | ||
96 | url: options.url, | ||
97 | processOptions: options.processOptions, | ||
98 | args: (options.additionalYoutubeDLArgs || []).concat([ '-f', options.format, '-o', options.output ]) | ||
99 | }) | ||
100 | } | ||
101 | |||
102 | async getInfo (options: { | ||
103 | url: string | ||
104 | format: string | ||
105 | processOptions: execa.NodeOptions | ||
106 | additionalYoutubeDLArgs?: string[] | ||
107 | }) { | ||
108 | const { url, format, additionalYoutubeDLArgs = [], processOptions } = options | ||
109 | |||
110 | const completeArgs = additionalYoutubeDLArgs.concat([ '--dump-json', '-f', format ]) | ||
111 | |||
112 | const data = await this.run({ url, args: completeArgs, processOptions }) | ||
113 | const info = data.map(this.parseInfo) | ||
114 | |||
115 | return info.length === 1 | ||
116 | ? info[0] | ||
117 | : info | ||
118 | } | ||
119 | |||
120 | async getSubs (options: { | ||
121 | url: string | ||
122 | format: 'vtt' | ||
123 | processOptions: execa.NodeOptions | ||
124 | }) { | ||
125 | const { url, format, processOptions } = options | ||
126 | |||
127 | const args = [ '--skip-download', '--all-subs', `--sub-format=${format}` ] | ||
128 | |||
129 | const data = await this.run({ url, args, processOptions }) | ||
130 | const files: string[] = [] | ||
131 | |||
132 | const skipString = '[info] Writing video subtitles to: ' | ||
133 | |||
134 | for (let i = 0, len = data.length; i < len; i++) { | ||
135 | const line = data[i] | ||
136 | |||
137 | if (line.indexOf(skipString) === 0) { | ||
138 | files.push(line.slice(skipString.length)) | ||
139 | } | ||
140 | } | ||
141 | |||
142 | return files | ||
143 | } | ||
144 | |||
145 | private async run (options: { | ||
146 | url: string | ||
147 | args: string[] | ||
148 | processOptions: execa.NodeOptions | ||
149 | }) { | ||
150 | const { url, args, processOptions } = options | ||
151 | |||
152 | let completeArgs = this.wrapWithProxyOptions(args) | ||
153 | completeArgs = this.wrapWithIPOptions(completeArgs) | ||
154 | completeArgs = this.wrapWithFFmpegOptions(completeArgs) | ||
155 | |||
156 | const output = await execa('python', [ youtubeDLBinaryPath, ...completeArgs, url ], processOptions) | ||
157 | |||
158 | logger.debug('Ran youtube-dl command.', { command: output.command, stdout: output.stdout, ...lTags() }) | ||
159 | |||
160 | return output.stdout | ||
161 | ? output.stdout.trim().split(/\r?\n/) | ||
162 | : undefined | ||
163 | } | ||
164 | |||
165 | private wrapWithProxyOptions (args: string[]) { | ||
166 | if (isProxyEnabled()) { | ||
167 | logger.debug('Using proxy %s for YoutubeDL', getProxy(), lTags()) | ||
168 | |||
169 | return [ '--proxy', getProxy() ].concat(args) | ||
170 | } | ||
171 | |||
172 | return args | ||
173 | } | ||
174 | |||
175 | private wrapWithIPOptions (args: string[]) { | ||
176 | if (CONFIG.IMPORT.VIDEOS.HTTP.FORCE_IPV4) { | ||
177 | logger.debug('Force ipv4 for YoutubeDL') | ||
178 | |||
179 | return [ '--force-ipv4' ].concat(args) | ||
180 | } | ||
181 | |||
182 | return args | ||
183 | } | ||
184 | |||
185 | private wrapWithFFmpegOptions (args: string[]) { | ||
186 | if (process.env.FFMPEG_PATH) { | ||
187 | logger.debug('Using ffmpeg location %s for YoutubeDL', process.env.FFMPEG_PATH, lTags()) | ||
188 | |||
189 | return [ '--ffmpeg-location', process.env.FFMPEG_PATH ].concat(args) | ||
190 | } | ||
191 | |||
192 | return args | ||
193 | } | ||
194 | |||
195 | private parseInfo (data: string) { | ||
196 | return JSON.parse(data) | ||
197 | } | ||
198 | } | ||
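For context, a minimal usage sketch of the new YoutubeDLCLI class (not part of this commit; the function name is illustrative and the maxBuffer value is copied from the wrapper's processOptions):

  import { YoutubeDLCLI } from '@server/helpers/youtube-dl'
  import { VideoResolution } from '@shared/models'

  async function printVideoTitle (url: string) {
    // Downloads the configured youtube-dl/yt-dlp release into STORAGE.BIN_DIR on first use
    const cli = await YoutubeDLCLI.safeGet()

    const info = await cli.getInfo({
      url,
      format: YoutubeDLCLI.getYoutubeDLVideoFormat([ VideoResolution.H_720P ]),
      processOptions: { maxBuffer: 1024 * 1024 * 10 }
    })

    // getInfo() parses the --dump-json output; playlists yield an array
    console.log(Array.isArray(info) ? info.map(i => i.title) : info.title)
  }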
diff --git a/server/helpers/youtube-dl/youtube-dl-info-builder.ts b/server/helpers/youtube-dl/youtube-dl-info-builder.ts
new file mode 100644
index 000000000..9746a7067
--- /dev/null
+++ b/server/helpers/youtube-dl/youtube-dl-info-builder.ts
@@ -0,0 +1,154 @@
1 | import { CONSTRAINTS_FIELDS, VIDEO_CATEGORIES, VIDEO_LANGUAGES, VIDEO_LICENCES } from '../../initializers/constants' | ||
2 | import { peertubeTruncate } from '../core-utils' | ||
3 | |||
4 | type YoutubeDLInfo = { | ||
5 | name?: string | ||
6 | description?: string | ||
7 | category?: number | ||
8 | language?: string | ||
9 | licence?: number | ||
10 | nsfw?: boolean | ||
11 | tags?: string[] | ||
12 | thumbnailUrl?: string | ||
13 | ext?: string | ||
14 | originallyPublishedAt?: Date | ||
15 | } | ||
16 | |||
17 | class YoutubeDLInfoBuilder { | ||
18 | private readonly info: any | ||
19 | |||
20 | constructor (info: any) { | ||
21 | this.info = { ...info } | ||
22 | } | ||
23 | |||
24 | getInfo () { | ||
25 | const obj = this.buildVideoInfo(this.normalizeObject(this.info)) | ||
26 | if (obj.name && obj.name.length < CONSTRAINTS_FIELDS.VIDEOS.NAME.min) obj.name += ' video' | ||
27 | |||
28 | return obj | ||
29 | } | ||
30 | |||
31 | private normalizeObject (obj: any) { | ||
32 | const newObj: any = {} | ||
33 | |||
34 | for (const key of Object.keys(obj)) { | ||
35 | // Deprecated key | ||
36 | if (key === 'resolution') continue | ||
37 | |||
38 | const value = obj[key] | ||
39 | |||
40 | if (typeof value === 'string') { | ||
41 | newObj[key] = value.normalize() | ||
42 | } else { | ||
43 | newObj[key] = value | ||
44 | } | ||
45 | } | ||
46 | |||
47 | return newObj | ||
48 | } | ||
49 | |||
50 | private buildOriginallyPublishedAt (obj: any) { | ||
51 | let originallyPublishedAt: Date = null | ||
52 | |||
53 | const uploadDateMatcher = /^(\d{4})(\d{2})(\d{2})$/.exec(obj.upload_date) | ||
54 | if (uploadDateMatcher) { | ||
55 | originallyPublishedAt = new Date() | ||
56 | originallyPublishedAt.setHours(0, 0, 0, 0) | ||
57 | |||
58 | const year = parseInt(uploadDateMatcher[1], 10) | ||
59 | // Month starts from 0 | ||
60 | const month = parseInt(uploadDateMatcher[2], 10) - 1 | ||
61 | const day = parseInt(uploadDateMatcher[3], 10) | ||
62 | |||
63 | originallyPublishedAt.setFullYear(year, month, day) | ||
64 | } | ||
65 | |||
66 | return originallyPublishedAt | ||
67 | } | ||
68 | |||
69 | private buildVideoInfo (obj: any): YoutubeDLInfo { | ||
70 | return { | ||
71 | name: this.titleTruncation(obj.title), | ||
72 | description: this.descriptionTruncation(obj.description), | ||
73 | category: this.getCategory(obj.categories), | ||
74 | licence: this.getLicence(obj.license), | ||
75 | language: this.getLanguage(obj.language), | ||
76 | nsfw: this.isNSFW(obj), | ||
77 | tags: this.getTags(obj.tags), | ||
78 | thumbnailUrl: obj.thumbnail || undefined, | ||
79 | originallyPublishedAt: this.buildOriginallyPublishedAt(obj), | ||
80 | ext: obj.ext | ||
81 | } | ||
82 | } | ||
83 | |||
84 | private titleTruncation (title: string) { | ||
85 | return peertubeTruncate(title, { | ||
86 | length: CONSTRAINTS_FIELDS.VIDEOS.NAME.max, | ||
87 | separator: /,? +/, | ||
88 | omission: ' […]' | ||
89 | }) | ||
90 | } | ||
91 | |||
92 | private descriptionTruncation (description: string) { | ||
93 | if (!description || description.length < CONSTRAINTS_FIELDS.VIDEOS.DESCRIPTION.min) return undefined | ||
94 | |||
95 | return peertubeTruncate(description, { | ||
96 | length: CONSTRAINTS_FIELDS.VIDEOS.DESCRIPTION.max, | ||
97 | separator: /,? +/, | ||
98 | omission: ' […]' | ||
99 | }) | ||
100 | } | ||
101 | |||
102 | private isNSFW (info: any) { | ||
103 | return info?.age_limit >= 16 | ||
104 | } | ||
105 | |||
106 | private getTags (tags: string[]) { | ||
107 | if (Array.isArray(tags) === false) return [] | ||
108 | |||
109 | return tags | ||
110 | .filter(t => t.length < CONSTRAINTS_FIELDS.VIDEOS.TAG.max && t.length > CONSTRAINTS_FIELDS.VIDEOS.TAG.min) | ||
111 | .map(t => t.normalize()) | ||
112 | .slice(0, 5) | ||
113 | } | ||
114 | |||
115 | private getLicence (licence: string) { | ||
116 | if (!licence) return undefined | ||
117 | |||
118 | if (licence.includes('Creative Commons Attribution')) return 1 | ||
119 | |||
120 | for (const key of Object.keys(VIDEO_LICENCES)) { | ||
121 | const peertubeLicence = VIDEO_LICENCES[key] | ||
122 | if (peertubeLicence.toLowerCase() === licence.toLowerCase()) return parseInt(key, 10) | ||
123 | } | ||
124 | |||
125 | return undefined | ||
126 | } | ||
127 | |||
128 | private getCategory (categories: string[]) { | ||
129 | if (!categories) return undefined | ||
130 | |||
131 | const categoryString = categories[0] | ||
132 | if (!categoryString || typeof categoryString !== 'string') return undefined | ||
133 | |||
134 | if (categoryString === 'News & Politics') return 11 | ||
135 | |||
136 | for (const key of Object.keys(VIDEO_CATEGORIES)) { | ||
137 | const category = VIDEO_CATEGORIES[key] | ||
138 | if (categoryString.toLowerCase() === category.toLowerCase()) return parseInt(key, 10) | ||
139 | } | ||
140 | |||
141 | return undefined | ||
142 | } | ||
143 | |||
144 | private getLanguage (language: string) { | ||
145 | return VIDEO_LANGUAGES[language] ? language : undefined | ||
146 | } | ||
147 | } | ||
148 | |||
149 | // --------------------------------------------------------------------------- | ||
150 | |||
151 | export { | ||
152 | YoutubeDLInfo, | ||
153 | YoutubeDLInfoBuilder | ||
154 | } | ||
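A short sketch of what the builder does with raw youtube-dl JSON (not part of this commit; the input object is a hypothetical example limited to fields the builder actually reads):

  import { YoutubeDLInfoBuilder } from '@server/helpers/youtube-dl'

  const raw = {
    title: 'small video - youtube',
    description: 'this is a super description',
    license: 'Creative Commons Attribution license (reuse allowed)',
    categories: [ 'News & Politics' ],
    upload_date: '20190114',
    age_limit: 0,
    tags: [ 'tag1', 'tag2' ],
    ext: 'mp4'
  }

  const info = new YoutubeDLInfoBuilder(raw).getInfo()
  // info.licence === 1 (Attribution), info.category === 11 (News & Politics),
  // info.nsfw === false and info.originallyPublishedAt is a Date set to 2019-01-14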
diff --git a/server/helpers/youtube-dl/youtube-dl-wrapper.ts b/server/helpers/youtube-dl/youtube-dl-wrapper.ts
new file mode 100644
index 000000000..6960fbae4
--- /dev/null
+++ b/server/helpers/youtube-dl/youtube-dl-wrapper.ts
@@ -0,0 +1,135 @@
1 | import { move, pathExists, readdir, remove } from 'fs-extra' | ||
2 | import { dirname, join } from 'path' | ||
3 | import { CONFIG } from '@server/initializers/config' | ||
4 | import { isVideoFileExtnameValid } from '../custom-validators/videos' | ||
5 | import { logger, loggerTagsFactory } from '../logger' | ||
6 | import { generateVideoImportTmpPath } from '../utils' | ||
7 | import { YoutubeDLCLI } from './youtube-dl-cli' | ||
8 | import { YoutubeDLInfo, YoutubeDLInfoBuilder } from './youtube-dl-info-builder' | ||
9 | |||
10 | const lTags = loggerTagsFactory('youtube-dl') | ||
11 | |||
12 | export type YoutubeDLSubs = { | ||
13 | language: string | ||
14 | filename: string | ||
15 | path: string | ||
16 | }[] | ||
17 | |||
18 | const processOptions = { | ||
19 | maxBuffer: 1024 * 1024 * 10 // 10MB | ||
20 | } | ||
21 | |||
22 | class YoutubeDLWrapper { | ||
23 | |||
24 | constructor (private readonly url: string = '', private readonly enabledResolutions: number[] = []) { | ||
25 | |||
26 | } | ||
27 | |||
28 | async getInfoForDownload (youtubeDLArgs: string[] = []): Promise<YoutubeDLInfo> { | ||
29 | const youtubeDL = await YoutubeDLCLI.safeGet() | ||
30 | |||
31 | const info = await youtubeDL.getInfo({ | ||
32 | url: this.url, | ||
33 | format: YoutubeDLCLI.getYoutubeDLVideoFormat(this.enabledResolutions), | ||
34 | additionalYoutubeDLArgs: youtubeDLArgs, | ||
35 | processOptions | ||
36 | }) | ||
37 | |||
38 | if (info.is_live === true) throw new Error('Cannot download a live streaming.') | ||
39 | |||
40 | const infoBuilder = new YoutubeDLInfoBuilder(info) | ||
41 | |||
42 | return infoBuilder.getInfo() | ||
43 | } | ||
44 | |||
45 | async getSubtitles (): Promise<YoutubeDLSubs> { | ||
46 | const cwd = CONFIG.STORAGE.TMP_DIR | ||
47 | |||
48 | const youtubeDL = await YoutubeDLCLI.safeGet() | ||
49 | |||
50 | const files = await youtubeDL.getSubs({ url: this.url, format: 'vtt', processOptions: { cwd } }) | ||
51 | if (!files) return [] | ||
52 | |||
53 | logger.debug('Get subtitles from youtube dl.', { url: this.url, files, ...lTags() }) | ||
54 | |||
55 | const subtitles = files.reduce((acc, filename) => { | ||
56 | const matched = filename.match(/\.([a-z]{2})(-[a-z]+)?\.(vtt|ttml)/i) | ||
57 | if (!matched || !matched[1]) return acc | ||
58 | |||
59 | return [ | ||
60 | ...acc, | ||
61 | { | ||
62 | language: matched[1], | ||
63 | path: join(cwd, filename), | ||
64 | filename | ||
65 | } | ||
66 | ] | ||
67 | }, []) | ||
68 | |||
69 | return subtitles | ||
70 | } | ||
71 | |||
72 | async downloadVideo (fileExt: string, timeout: number): Promise<string> { | ||
73 | // Leave the extension empty, youtube-dl will add it | ||
74 | const pathWithoutExtension = generateVideoImportTmpPath(this.url, '') | ||
75 | |||
76 | let timer: NodeJS.Timeout | ||
77 | |||
78 | logger.info('Importing youtubeDL video %s to %s', this.url, pathWithoutExtension, lTags()) | ||
79 | |||
80 | const youtubeDL = await YoutubeDLCLI.safeGet() | ||
81 | |||
82 | const timeoutPromise = new Promise<string>((_, rej) => { | ||
83 | timer = setTimeout(() => rej(new Error('YoutubeDL download timeout.')), timeout) | ||
84 | }) | ||
85 | |||
86 | const downloadPromise = youtubeDL.download({ | ||
87 | url: this.url, | ||
88 | format: YoutubeDLCLI.getYoutubeDLVideoFormat(this.enabledResolutions), | ||
89 | output: pathWithoutExtension, | ||
90 | processOptions | ||
91 | }).then(() => clearTimeout(timer)) | ||
92 | .then(async () => { | ||
93 | // If youtube-dl did not guess an extension for our file, just use .mp4 as default | ||
94 | if (await pathExists(pathWithoutExtension)) { | ||
95 | await move(pathWithoutExtension, pathWithoutExtension + '.mp4') | ||
96 | } | ||
97 | |||
98 | return this.guessVideoPathWithExtension(pathWithoutExtension, fileExt) | ||
99 | }) | ||
100 | |||
101 | return Promise.race([ downloadPromise, timeoutPromise ]) | ||
102 | .catch(async err => { | ||
103 | const path = await this.guessVideoPathWithExtension(pathWithoutExtension, fileExt) | ||
104 | |||
105 | remove(path) | ||
106 | .catch(err => logger.error('Cannot remove file in youtubeDL timeout.', { err, ...lTags() })) | ||
107 | |||
108 | throw err | ||
109 | }) | ||
110 | } | ||
111 | |||
112 | private async guessVideoPathWithExtension (tmpPath: string, sourceExt: string) { | ||
113 | if (!isVideoFileExtnameValid(sourceExt)) { | ||
114 | throw new Error('Invalid video extension ' + sourceExt) | ||
115 | } | ||
116 | |||
117 | const extensions = [ sourceExt, '.mp4', '.mkv', '.webm' ] | ||
118 | |||
119 | for (const extension of extensions) { | ||
120 | const path = tmpPath + extension | ||
121 | |||
122 | if (await pathExists(path)) return path | ||
123 | } | ||
124 | |||
125 | const directoryContent = await readdir(dirname(tmpPath)) | ||
126 | |||
127 | throw new Error(`Cannot guess path of ${tmpPath}. Directory content: ${directoryContent.join(', ')}`) | ||
128 | } | ||
129 | } | ||
130 | |||
131 | // --------------------------------------------------------------------------- | ||
132 | |||
133 | export { | ||
134 | YoutubeDLWrapper | ||
135 | } | ||
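And a sketch of the wrapper's call sequence, mirroring how the import controller and the video-import job handler use it elsewhere in this commit (the extension formatting and timeout value below are illustrative assumptions):

  import { YoutubeDLWrapper } from '@server/helpers/youtube-dl'

  async function importVideo (targetUrl: string, enabledResolutions: number[]) {
    const youtubeDL = new YoutubeDLWrapper(targetUrl, enabledResolutions)

    const info = await youtubeDL.getInfoForDownload() // throws for live streams
    const subtitles = await youtubeDL.getSubtitles()  // [] when the target has no captions

    // The real job handler passes payload.fileExt and VIDEO_IMPORT_TIMEOUT here
    const videoPath = await youtubeDL.downloadVideo('.' + (info.ext || 'mp4'), 1000 * 60 * 60)

    return { info, subtitles, videoPath }
  }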
diff --git a/server/initializers/config.ts b/server/initializers/config.ts
index 3a7c72a1c..e20efe02c 100644
--- a/server/initializers/config.ts
+++ b/server/initializers/config.ts
@@ -69,6 +69,7 @@ const CONFIG = {
69 | 69 | ||
70 | STORAGE: { | 70 | STORAGE: { |
71 | TMP_DIR: buildPath(config.get<string>('storage.tmp')), | 71 | TMP_DIR: buildPath(config.get<string>('storage.tmp')), |
72 | BIN_DIR: buildPath(config.get<string>('storage.bin')), | ||
72 | ACTOR_IMAGES: buildPath(config.get<string>('storage.avatars')), | 73 | ACTOR_IMAGES: buildPath(config.get<string>('storage.avatars')), |
73 | LOG_DIR: buildPath(config.get<string>('storage.logs')), | 74 | LOG_DIR: buildPath(config.get<string>('storage.logs')), |
74 | VIDEOS_DIR: buildPath(config.get<string>('storage.videos')), | 75 | VIDEOS_DIR: buildPath(config.get<string>('storage.videos')), |
@@ -292,11 +293,13 @@ const CONFIG = {
292 | 293 | ||
293 | HTTP: { | 294 | HTTP: { |
294 | get ENABLED () { return config.get<boolean>('import.videos.http.enabled') }, | 295 | get ENABLED () { return config.get<boolean>('import.videos.http.enabled') }, |
295 | get FORCE_IPV4 () { return config.get<boolean>('import.videos.http.force_ipv4') }, | 296 | |
296 | PROXY: { | 297 | YOUTUBE_DL_RELEASE: { |
297 | get ENABLED () { return config.get<boolean>('import.videos.http.proxy.enabled') }, | 298 | get URL () { return config.get<string>('import.videos.http.youtube_dl_release.url') }, |
298 | get URL () { return config.get<string>('import.videos.http.proxy.url') } | 299 | get NAME () { return config.get<string>('import.videos.http.youtube_dl_release.name') } |
299 | } | 300 | }, |
301 | |||
302 | get FORCE_IPV4 () { return config.get<boolean>('import.videos.http.force_ipv4') } | ||
300 | }, | 303 | }, |
301 | TORRENT: { | 304 | TORRENT: { |
302 | get ENABLED () { return config.get<boolean>('import.videos.torrent.enabled') } | 305 | get ENABLED () { return config.get<boolean>('import.videos.torrent.enabled') } |
diff --git a/server/initializers/constants.ts b/server/initializers/constants.ts
index dcbad9264..1d434d5ab 100644
--- a/server/initializers/constants.ts
+++ b/server/initializers/constants.ts
@@ -497,6 +497,12 @@ const MIMETYPES = {
497 | MIMETYPES.AUDIO.EXT_MIMETYPE = invert(MIMETYPES.AUDIO.MIMETYPE_EXT) | 497 | MIMETYPES.AUDIO.EXT_MIMETYPE = invert(MIMETYPES.AUDIO.MIMETYPE_EXT) |
498 | MIMETYPES.IMAGE.EXT_MIMETYPE = invert(MIMETYPES.IMAGE.MIMETYPE_EXT) | 498 | MIMETYPES.IMAGE.EXT_MIMETYPE = invert(MIMETYPES.IMAGE.MIMETYPE_EXT) |
499 | 499 | ||
500 | const BINARY_CONTENT_TYPES = new Set([ | ||
501 | 'binary/octet-stream', | ||
502 | 'application/octet-stream', | ||
503 | 'application/x-binary' | ||
504 | ]) | ||
505 | |||
500 | // --------------------------------------------------------------------------- | 506 | // --------------------------------------------------------------------------- |
501 | 507 | ||
502 | const OVERVIEWS = { | 508 | const OVERVIEWS = { |
@@ -903,6 +909,7 @@ export {
903 | MIMETYPES, | 909 | MIMETYPES, |
904 | CRAWL_REQUEST_CONCURRENCY, | 910 | CRAWL_REQUEST_CONCURRENCY, |
905 | DEFAULT_AUDIO_RESOLUTION, | 911 | DEFAULT_AUDIO_RESOLUTION, |
912 | BINARY_CONTENT_TYPES, | ||
906 | JOB_COMPLETED_LIFETIME, | 913 | JOB_COMPLETED_LIFETIME, |
907 | HTTP_SIGNATURE, | 914 | HTTP_SIGNATURE, |
908 | VIDEO_IMPORT_STATES, | 915 | VIDEO_IMPORT_STATES, |
diff --git a/server/lib/job-queue/handlers/video-import.ts b/server/lib/job-queue/handlers/video-import.ts
index 8313c2561..4ce1a6c30 100644
--- a/server/lib/job-queue/handlers/video-import.ts
+++ b/server/lib/job-queue/handlers/video-import.ts
@@ -2,7 +2,7 @@ import { Job } from 'bull'
2 | import { move, remove, stat } from 'fs-extra' | 2 | import { move, remove, stat } from 'fs-extra' |
3 | import { getLowercaseExtension } from '@server/helpers/core-utils' | 3 | import { getLowercaseExtension } from '@server/helpers/core-utils' |
4 | import { retryTransactionWrapper } from '@server/helpers/database-utils' | 4 | import { retryTransactionWrapper } from '@server/helpers/database-utils' |
5 | import { YoutubeDL } from '@server/helpers/youtube-dl' | 5 | import { YoutubeDLWrapper } from '@server/helpers/youtube-dl' |
6 | import { isPostImportVideoAccepted } from '@server/lib/moderation' | 6 | import { isPostImportVideoAccepted } from '@server/lib/moderation' |
7 | import { generateWebTorrentVideoFilename } from '@server/lib/paths' | 7 | import { generateWebTorrentVideoFilename } from '@server/lib/paths' |
8 | import { Hooks } from '@server/lib/plugins/hooks' | 8 | import { Hooks } from '@server/lib/plugins/hooks' |
@@ -77,10 +77,10 @@ async function processYoutubeDLImport (job: Job, payload: VideoImportYoutubeDLPa
77 | videoImportId: videoImport.id | 77 | videoImportId: videoImport.id |
78 | } | 78 | } |
79 | 79 | ||
80 | const youtubeDL = new YoutubeDL(videoImport.targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod')) | 80 | const youtubeDL = new YoutubeDLWrapper(videoImport.targetUrl, ServerConfigManager.Instance.getEnabledResolutions('vod')) |
81 | 81 | ||
82 | return processFile( | 82 | return processFile( |
83 | () => youtubeDL.downloadYoutubeDLVideo(payload.fileExt, VIDEO_IMPORT_TIMEOUT), | 83 | () => youtubeDL.downloadVideo(payload.fileExt, VIDEO_IMPORT_TIMEOUT), |
84 | videoImport, | 84 | videoImport, |
85 | options | 85 | options |
86 | ) | 86 | ) |
diff --git a/server/lib/schedulers/youtube-dl-update-scheduler.ts b/server/lib/schedulers/youtube-dl-update-scheduler.ts
index 898691c13..93d02f8a9 100644
--- a/server/lib/schedulers/youtube-dl-update-scheduler.ts
+++ b/server/lib/schedulers/youtube-dl-update-scheduler.ts
@@ -1,4 +1,4 @@
1 | import { YoutubeDL } from '@server/helpers/youtube-dl' | 1 | import { YoutubeDLCLI } from '@server/helpers/youtube-dl' |
2 | import { SCHEDULER_INTERVALS_MS } from '../../initializers/constants' | 2 | import { SCHEDULER_INTERVALS_MS } from '../../initializers/constants' |
3 | import { AbstractScheduler } from './abstract-scheduler' | 3 | import { AbstractScheduler } from './abstract-scheduler' |
4 | 4 | ||
@@ -13,7 +13,7 @@ export class YoutubeDlUpdateScheduler extends AbstractScheduler {
13 | } | 13 | } |
14 | 14 | ||
15 | protected internalExecute () { | 15 | protected internalExecute () { |
16 | return YoutubeDL.updateYoutubeDLBinary() | 16 | return YoutubeDLCLI.updateYoutubeDLBinary() |
17 | } | 17 | } |
18 | 18 | ||
19 | static get Instance () { | 19 | static get Instance () { |
diff --git a/server/tests/api/server/proxy.ts b/server/tests/api/server/proxy.ts
index 72bd49078..29f3e10d8 100644
--- a/server/tests/api/server/proxy.ts
+++ b/server/tests/api/server/proxy.ts
@@ -2,8 +2,18 @@
2 | 2 | ||
3 | import 'mocha' | 3 | import 'mocha' |
4 | import * as chai from 'chai' | 4 | import * as chai from 'chai' |
5 | import { cleanupTests, createMultipleServers, doubleFollow, PeerTubeServer, setAccessTokensToServers, waitJobs } from '@shared/extra-utils' | 5 | import { |
6 | cleanupTests, | ||
7 | createMultipleServers, | ||
8 | doubleFollow, | ||
9 | FIXTURE_URLS, | ||
10 | PeerTubeServer, | ||
11 | setAccessTokensToServers, | ||
12 | setDefaultVideoChannel, | ||
13 | waitJobs | ||
14 | } from '@shared/extra-utils' | ||
6 | import { MockProxy } from '@shared/extra-utils/mock-servers/mock-proxy' | 15 | import { MockProxy } from '@shared/extra-utils/mock-servers/mock-proxy' |
16 | import { HttpStatusCode, VideoPrivacy } from '@shared/models' | ||
7 | 17 | ||
8 | const expect = chai.expect | 18 | const expect = chai.expect |
9 | 19 | ||
@@ -25,43 +35,90 @@ describe('Test proxy', function () {
25 | goodEnv.HTTP_PROXY = 'http://localhost:' + proxyPort | 35 | goodEnv.HTTP_PROXY = 'http://localhost:' + proxyPort |
26 | 36 | ||
27 | await setAccessTokensToServers(servers) | 37 | await setAccessTokensToServers(servers) |
38 | await setDefaultVideoChannel(servers) | ||
28 | await doubleFollow(servers[0], servers[1]) | 39 | await doubleFollow(servers[0], servers[1]) |
29 | }) | 40 | }) |
30 | 41 | ||
31 | it('Should succeed federation with the appropriate proxy config', async function () { | 42 | describe('Federation', function () { |
32 | await servers[0].kill() | ||
33 | await servers[0].run({}, { env: goodEnv }) | ||
34 | 43 | ||
35 | await servers[0].videos.quickUpload({ name: 'video 1' }) | 44 | it('Should succeed federation with the appropriate proxy config', async function () { |
45 | this.timeout(40000) | ||
36 | 46 | ||
37 | await waitJobs(servers) | 47 | await servers[0].kill() |
48 | await servers[0].run({}, { env: goodEnv }) | ||
38 | 49 | ||
39 | for (const server of servers) { | 50 | await servers[0].videos.quickUpload({ name: 'video 1' }) |
40 | const { total, data } = await server.videos.list() | 51 | |
41 | expect(total).to.equal(1) | 52 | await waitJobs(servers) |
42 | expect(data).to.have.lengthOf(1) | 53 | |
43 | } | 54 | for (const server of servers) { |
55 | const { total, data } = await server.videos.list() | ||
56 | expect(total).to.equal(1) | ||
57 | expect(data).to.have.lengthOf(1) | ||
58 | } | ||
59 | }) | ||
60 | |||
61 | it('Should fail federation with a wrong proxy config', async function () { | ||
62 | this.timeout(40000) | ||
63 | |||
64 | await servers[0].kill() | ||
65 | await servers[0].run({}, { env: badEnv }) | ||
66 | |||
67 | await servers[0].videos.quickUpload({ name: 'video 2' }) | ||
68 | |||
69 | await waitJobs(servers) | ||
70 | |||
71 | { | ||
72 | const { total, data } = await servers[0].videos.list() | ||
73 | expect(total).to.equal(2) | ||
74 | expect(data).to.have.lengthOf(2) | ||
75 | } | ||
76 | |||
77 | { | ||
78 | const { total, data } = await servers[1].videos.list() | ||
79 | expect(total).to.equal(1) | ||
80 | expect(data).to.have.lengthOf(1) | ||
81 | } | ||
82 | }) | ||
44 | }) | 83 | }) |
45 | 84 | ||
46 | it('Should fail federation with a wrong proxy config', async function () { | 85 | describe('Videos import', async function () { |
47 | await servers[0].kill() | 86 | |
48 | await servers[0].run({}, { env: badEnv }) | 87 | function quickImport (expectedStatus: HttpStatusCode = HttpStatusCode.OK_200) { |
88 | return servers[0].imports.importVideo({ | ||
89 | attributes: { | ||
90 | name: 'video import', | ||
91 | channelId: servers[0].store.channel.id, | ||
92 | privacy: VideoPrivacy.PUBLIC, | ||
93 | targetUrl: FIXTURE_URLS.peertube_long | ||
94 | }, | ||
95 | expectedStatus | ||
96 | }) | ||
97 | } | ||
98 | |||
99 | it('Should succeed import with the appropriate proxy config', async function () { | ||
100 | this.timeout(40000) | ||
101 | |||
102 | await servers[0].kill() | ||
103 | await servers[0].run({}, { env: goodEnv }) | ||
49 | 104 | ||
50 | await servers[0].videos.quickUpload({ name: 'video 2' }) | 105 | await quickImport() |
51 | 106 | ||
52 | await waitJobs(servers) | 107 | await waitJobs(servers) |
53 | 108 | ||
54 | { | ||
55 | const { total, data } = await servers[0].videos.list() | 109 | const { total, data } = await servers[0].videos.list() |
56 | expect(total).to.equal(2) | 110 | expect(total).to.equal(3) |
57 | expect(data).to.have.lengthOf(2) | 111 | expect(data).to.have.lengthOf(3) |
58 | } | 112 | }) |
59 | 113 | ||
60 | { | 114 | it('Should fail import with a wrong proxy config', async function () { |
61 | const { total, data } = await servers[1].videos.list() | 115 | this.timeout(40000) |
62 | expect(total).to.equal(1) | 116 | |
63 | expect(data).to.have.lengthOf(1) | 117 | await servers[0].kill() |
64 | } | 118 | await servers[0].run({}, { env: badEnv }) |
119 | |||
120 | await quickImport(HttpStatusCode.BAD_REQUEST_400) | ||
121 | }) | ||
65 | }) | 122 | }) |
66 | 123 | ||
67 | after(async function () { | 124 | after(async function () { |
diff --git a/server/tests/api/videos/video-imports.ts b/server/tests/api/videos/video-imports.ts
index 948c779e8..cfb188060 100644
--- a/server/tests/api/videos/video-imports.ts
+++ b/server/tests/api/videos/video-imports.ts
@@ -1,368 +1,444 @@
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import 'mocha' | 3 | import 'mocha' |
4 | import * as chai from 'chai' | 4 | import { expect } from 'chai' |
5 | import { pathExists, remove } from 'fs-extra' | ||
6 | import { join } from 'path' | ||
5 | import { | 7 | import { |
6 | areHttpImportTestsDisabled, | 8 | areHttpImportTestsDisabled, |
7 | cleanupTests, | 9 | cleanupTests, |
8 | createMultipleServers, | 10 | createMultipleServers, |
11 | createSingleServer, | ||
9 | doubleFollow, | 12 | doubleFollow, |
10 | FIXTURE_URLS, | 13 | FIXTURE_URLS, |
11 | PeerTubeServer, | 14 | PeerTubeServer, |
12 | setAccessTokensToServers, | 15 | setAccessTokensToServers, |
16 | setDefaultVideoChannel, | ||
13 | testCaptionFile, | 17 | testCaptionFile, |
14 | testImage, | 18 | testImage, |
15 | waitJobs | 19 | waitJobs |
16 | } from '@shared/extra-utils' | 20 | } from '@shared/extra-utils' |
17 | import { VideoPrivacy, VideoResolution } from '@shared/models' | 21 | import { VideoPrivacy, VideoResolution } from '@shared/models' |
18 | 22 | ||
19 | const expect = chai.expect | 23 | async function checkVideosServer1 (server: PeerTubeServer, idHttp: string, idMagnet: string, idTorrent: string) { |
24 | const videoHttp = await server.videos.get({ id: idHttp }) | ||
25 | |||
26 | expect(videoHttp.name).to.equal('small video - youtube') | ||
27 | // FIXME: youtube-dl seems broken | ||
28 | // expect(videoHttp.category.label).to.equal('News & Politics') | ||
29 | // expect(videoHttp.licence.label).to.equal('Attribution') | ||
30 | expect(videoHttp.language.label).to.equal('Unknown') | ||
31 | expect(videoHttp.nsfw).to.be.false | ||
32 | expect(videoHttp.description).to.equal('this is a super description') | ||
33 | expect(videoHttp.tags).to.deep.equal([ 'tag1', 'tag2' ]) | ||
34 | expect(videoHttp.files).to.have.lengthOf(1) | ||
35 | |||
36 | const originallyPublishedAt = new Date(videoHttp.originallyPublishedAt) | ||
37 | expect(originallyPublishedAt.getDate()).to.equal(14) | ||
38 | expect(originallyPublishedAt.getMonth()).to.equal(0) | ||
39 | expect(originallyPublishedAt.getFullYear()).to.equal(2019) | ||
40 | |||
41 | const videoMagnet = await server.videos.get({ id: idMagnet }) | ||
42 | const videoTorrent = await server.videos.get({ id: idTorrent }) | ||
43 | |||
44 | for (const video of [ videoMagnet, videoTorrent ]) { | ||
45 | expect(video.category.label).to.equal('Misc') | ||
46 | expect(video.licence.label).to.equal('Unknown') | ||
47 | expect(video.language.label).to.equal('Unknown') | ||
48 | expect(video.nsfw).to.be.false | ||
49 | expect(video.description).to.equal('this is a super torrent description') | ||
50 | expect(video.tags).to.deep.equal([ 'tag_torrent1', 'tag_torrent2' ]) | ||
51 | expect(video.files).to.have.lengthOf(1) | ||
52 | } | ||
53 | |||
54 | expect(videoTorrent.name).to.contain('你好 世界 720p.mp4') | ||
55 | expect(videoMagnet.name).to.contain('super peertube2 video') | ||
56 | |||
57 | const bodyCaptions = await server.captions.list({ videoId: idHttp }) | ||
58 | expect(bodyCaptions.total).to.equal(2) | ||
59 | } | ||
60 | |||
61 | async function checkVideoServer2 (server: PeerTubeServer, id: number | string) { | ||
62 | const video = await server.videos.get({ id }) | ||
63 | |||
64 | expect(video.name).to.equal('my super name') | ||
65 | expect(video.category.label).to.equal('Entertainment') | ||
66 | expect(video.licence.label).to.equal('Public Domain Dedication') | ||
67 | expect(video.language.label).to.equal('English') | ||
68 | expect(video.nsfw).to.be.false | ||
69 | expect(video.description).to.equal('my super description') | ||
70 | expect(video.tags).to.deep.equal([ 'supertag1', 'supertag2' ]) | ||
71 | |||
72 | expect(video.files).to.have.lengthOf(1) | ||
73 | |||
74 | const bodyCaptions = await server.captions.list({ videoId: id }) | ||
75 | expect(bodyCaptions.total).to.equal(2) | ||
76 | } | ||
20 | 77 | ||
21 | describe('Test video imports', function () { | 78 | describe('Test video imports', function () { |
22 | let servers: PeerTubeServer[] = [] | ||
23 | let channelIdServer1: number | ||
24 | let channelIdServer2: number | ||
25 | 79 | ||
26 | if (areHttpImportTestsDisabled()) return | 80 | if (areHttpImportTestsDisabled()) return |
27 | 81 | ||
28 | async function checkVideosServer1 (server: PeerTubeServer, idHttp: string, idMagnet: string, idTorrent: string) { | 82 | function runSuite (mode: 'youtube-dl' | 'yt-dlp') { |
29 | const videoHttp = await server.videos.get({ id: idHttp }) | ||
30 | |||
31 | expect(videoHttp.name).to.equal('small video - youtube') | ||
32 | // FIXME: youtube-dl seems broken | ||
33 | // expect(videoHttp.category.label).to.equal('News & Politics') | ||
34 | // expect(videoHttp.licence.label).to.equal('Attribution') | ||
35 | expect(videoHttp.language.label).to.equal('Unknown') | ||
36 | expect(videoHttp.nsfw).to.be.false | ||
37 | expect(videoHttp.description).to.equal('this is a super description') | ||
38 | expect(videoHttp.tags).to.deep.equal([ 'tag1', 'tag2' ]) | ||
39 | expect(videoHttp.files).to.have.lengthOf(1) | ||
40 | |||
41 | const originallyPublishedAt = new Date(videoHttp.originallyPublishedAt) | ||
42 | expect(originallyPublishedAt.getDate()).to.equal(14) | ||
43 | expect(originallyPublishedAt.getMonth()).to.equal(0) | ||
44 | expect(originallyPublishedAt.getFullYear()).to.equal(2019) | ||
45 | |||
46 | const videoMagnet = await server.videos.get({ id: idMagnet }) | ||
47 | const videoTorrent = await server.videos.get({ id: idTorrent }) | ||
48 | |||
49 | for (const video of [ videoMagnet, videoTorrent ]) { | ||
50 | expect(video.category.label).to.equal('Misc') | ||
51 | expect(video.licence.label).to.equal('Unknown') | ||
52 | expect(video.language.label).to.equal('Unknown') | ||
53 | expect(video.nsfw).to.be.false | ||
54 | expect(video.description).to.equal('this is a super torrent description') | ||
55 | expect(video.tags).to.deep.equal([ 'tag_torrent1', 'tag_torrent2' ]) | ||
56 | expect(video.files).to.have.lengthOf(1) | ||
57 | } | ||
58 | 83 | ||
59 | expect(videoTorrent.name).to.contain('你好 世界 720p.mp4') | 84 | describe('Import ' + mode, function () { |
60 | expect(videoMagnet.name).to.contain('super peertube2 video') | 85 | let servers: PeerTubeServer[] = [] |
61 | 86 | ||
62 | const bodyCaptions = await server.captions.list({ videoId: idHttp }) | 87 | before(async function () { |
63 | expect(bodyCaptions.total).to.equal(2) | 88 | this.timeout(30_000) |
64 | } | ||
65 | 89 | ||
66 | async function checkVideoServer2 (server: PeerTubeServer, id: number | string) { | 90 | // Run servers |
67 | const video = await server.videos.get({ id }) | 91 | servers = await createMultipleServers(2, { |
92 | import: { | ||
93 | videos: { | ||
94 | http: { | ||
95 | youtube_dl_release: { | ||
96 | url: mode === 'youtube-dl' | ||
97 | ? 'https://yt-dl.org/downloads/latest/youtube-dl' | ||
98 | : 'https://api.github.com/repos/yt-dlp/yt-dlp/releases', | ||
68 | 99 | ||
69 | expect(video.name).to.equal('my super name') | 100 | name: mode |
70 | expect(video.category.label).to.equal('Entertainment') | 101 | } |
71 | expect(video.licence.label).to.equal('Public Domain Dedication') | 102 | } |
72 | expect(video.language.label).to.equal('English') | 103 | } |
73 | expect(video.nsfw).to.be.false | 104 | } |
74 | expect(video.description).to.equal('my super description') | 105 | }) |
75 | expect(video.tags).to.deep.equal([ 'supertag1', 'supertag2' ]) | ||
76 | 106 | ||
77 | expect(video.files).to.have.lengthOf(1) | 107 | await setAccessTokensToServers(servers) |
108 | await setDefaultVideoChannel(servers) | ||
78 | 109 | ||
79 | const bodyCaptions = await server.captions.list({ videoId: id }) | 110 | await doubleFollow(servers[0], servers[1]) |
80 | expect(bodyCaptions.total).to.equal(2) | 111 | }) |
81 | } | ||
82 | 112 | ||
83 | before(async function () { | 113 | it('Should import videos on server 1', async function () { |
84 | this.timeout(30_000) | 114 | this.timeout(60_000) |
85 | 115 | ||
86 | // Run servers | 116 | const baseAttributes = { |
87 | servers = await createMultipleServers(2) | 117 | channelId: servers[0].store.channel.id, |
118 | privacy: VideoPrivacy.PUBLIC | ||
119 | } | ||
88 | 120 | ||
89 | await setAccessTokensToServers(servers) | 121 | { |
122 | const attributes = { ...baseAttributes, targetUrl: FIXTURE_URLS.youtube } | ||
123 | const { video } = await servers[0].imports.importVideo({ attributes }) | ||
124 | expect(video.name).to.equal('small video - youtube') | ||
90 | 125 | ||
91 | { | 126 | { |
92 | const { videoChannels } = await servers[0].users.getMyInfo() | 127 | expect(video.thumbnailPath).to.match(new RegExp(`^/static/thumbnails/.+.jpg$`)) |
93 | channelIdServer1 = videoChannels[0].id | 128 | expect(video.previewPath).to.match(new RegExp(`^/lazy-static/previews/.+.jpg$`)) |
94 | } | ||
95 | 129 | ||
96 | { | 130 | const suffix = mode === 'yt-dlp' |
97 | const { videoChannels } = await servers[1].users.getMyInfo() | 131 | ? '_yt_dlp' |
98 | channelIdServer2 = videoChannels[0].id | 132 | : '' |
99 | } | ||
100 | 133 | ||
101 | await doubleFollow(servers[0], servers[1]) | 134 | await testImage(servers[0].url, 'video_import_thumbnail' + suffix, video.thumbnailPath) |
102 | }) | 135 | await testImage(servers[0].url, 'video_import_preview' + suffix, video.previewPath) |
136 | } | ||
103 | 137 | ||
104 | it('Should import videos on server 1', async function () { | 138 | const bodyCaptions = await servers[0].captions.list({ videoId: video.id }) |
105 | this.timeout(60_000) | 139 | const videoCaptions = bodyCaptions.data |
140 | expect(videoCaptions).to.have.lengthOf(2) | ||
106 | 141 | ||
107 | const baseAttributes = { | 142 | { |
108 | channelId: channelIdServer1, | 143 | const enCaption = videoCaptions.find(caption => caption.language.id === 'en') |
109 | privacy: VideoPrivacy.PUBLIC | 144 | expect(enCaption).to.exist |
110 | } | 145 | expect(enCaption.language.label).to.equal('English') |
146 | expect(enCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-en.vtt$`)) | ||
111 | 147 | ||
112 | { | 148 | const regex = `WEBVTT[ \n]+Kind: captions[ \n]+Language: en[ \n]+00:00:01.600 --> 00:00:04.200[ \n]+English \\(US\\)[ \n]+` + |
113 | const attributes = { ...baseAttributes, targetUrl: FIXTURE_URLS.youtube } | 149 | `00:00:05.900 --> 00:00:07.999[ \n]+This is a subtitle in American English[ \n]+` + |
114 | const { video } = await servers[0].imports.importVideo({ attributes }) | 150 | `00:00:10.000 --> 00:00:14.000[ \n]+Adding subtitles is very easy to do` |
115 | expect(video.name).to.equal('small video - youtube') | 151 | await testCaptionFile(servers[0].url, enCaption.captionPath, new RegExp(regex)) |
152 | } | ||
116 | 153 | ||
117 | expect(video.thumbnailPath).to.match(new RegExp(`^/static/thumbnails/.+.jpg$`)) | 154 | { |
118 | expect(video.previewPath).to.match(new RegExp(`^/lazy-static/previews/.+.jpg$`)) | 155 | const frCaption = videoCaptions.find(caption => caption.language.id === 'fr') |
156 | expect(frCaption).to.exist | ||
157 | expect(frCaption.language.label).to.equal('French') | ||
158 | expect(frCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-fr.vtt`)) | ||
119 | 159 | ||
120 | await testImage(servers[0].url, 'video_import_thumbnail', video.thumbnailPath) | 160 | const regex = `WEBVTT[ \n]+Kind: captions[ \n]+Language: fr[ \n]+00:00:01.600 --> 00:00:04.200[ \n]+` + |
121 | await testImage(servers[0].url, 'video_import_preview', video.previewPath) | 161 | `Français \\(FR\\)[ \n]+00:00:05.900 --> 00:00:07.999[ \n]+C'est un sous-titre français[ \n]+` + |
162 | `00:00:10.000 --> 00:00:14.000[ \n]+Ajouter un sous-titre est vraiment facile` | ||
122 | 163 | ||
123 | const bodyCaptions = await servers[0].captions.list({ videoId: video.id }) | 164 | await testCaptionFile(servers[0].url, frCaption.captionPath, new RegExp(regex)) |
124 | const videoCaptions = bodyCaptions.data | 165 | } |
125 | expect(videoCaptions).to.have.lengthOf(2) | 166 | } |
126 | 167 | ||
127 | const enCaption = videoCaptions.find(caption => caption.language.id === 'en') | 168 | { |
128 | expect(enCaption).to.exist | 169 | const attributes = { |
129 | expect(enCaption.language.label).to.equal('English') | 170 | ...baseAttributes, |
130 | expect(enCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-en.vtt$`)) | 171 | magnetUri: FIXTURE_URLS.magnet, |
131 | await testCaptionFile(servers[0].url, enCaption.captionPath, `WEBVTT | 172 | description: 'this is a super torrent description', |
132 | Kind: captions | 173 | tags: [ 'tag_torrent1', 'tag_torrent2' ] |
133 | Language: en | 174 | } |
175 | const { video } = await servers[0].imports.importVideo({ attributes }) | ||
176 | expect(video.name).to.equal('super peertube2 video') | ||
177 | } | ||
134 | 178 | ||
135 | 00:00:01.600 --> 00:00:04.200 | 179 | { |
136 | English (US) | 180 | const attributes = { |
181 | ...baseAttributes, | ||
182 | torrentfile: 'video-720p.torrent' as any, | ||
183 | description: 'this is a super torrent description', | ||
184 | tags: [ 'tag_torrent1', 'tag_torrent2' ] | ||
185 | } | ||
186 | const { video } = await servers[0].imports.importVideo({ attributes }) | ||
187 | expect(video.name).to.equal('你好 世界 720p.mp4') | ||
188 | } | ||
189 | }) | ||
137 | 190 | ||
138 | 00:00:05.900 --> 00:00:07.999 | 191 | it('Should list the videos to import in my videos on server 1', async function () { |
139 | This is a subtitle in American English | 192 | const { total, data } = await servers[0].videos.listMyVideos({ sort: 'createdAt' }) |
140 | 193 | ||
141 | 00:00:10.000 --> 00:00:14.000 | 194 | expect(total).to.equal(3) |
142 | Adding subtitles is very easy to do`) | ||
143 | 195 | ||
144 | const frCaption = videoCaptions.find(caption => caption.language.id === 'fr') | 196 | expect(data).to.have.lengthOf(3) |
145 | expect(frCaption).to.exist | 197 | expect(data[0].name).to.equal('small video - youtube') |
146 | expect(frCaption.language.label).to.equal('French') | 198 | expect(data[1].name).to.equal('super peertube2 video') |
147 | expect(frCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-fr.vtt`)) | 199 | expect(data[2].name).to.equal('你好 世界 720p.mp4') |
148 | await testCaptionFile(servers[0].url, frCaption.captionPath, `WEBVTT | 200 | }) |
149 | Kind: captions | ||
150 | Language: fr | ||
151 | 201 | ||
152 | 00:00:01.600 --> 00:00:04.200 | 202 | it('Should list the videos to import in my imports on server 1', async function () { |
153 | Français (FR) | 203 | const { total, data: videoImports } = await servers[0].imports.getMyVideoImports({ sort: '-createdAt' }) |
204 | expect(total).to.equal(3) | ||
154 | 205 | ||
155 | 00:00:05.900 --> 00:00:07.999 | 206 | expect(videoImports).to.have.lengthOf(3) |
156 | C'est un sous-titre français | ||
157 | 207 | ||
158 | 00:00:10.000 --> 00:00:14.000 | 208 | expect(videoImports[2].targetUrl).to.equal(FIXTURE_URLS.youtube) |
159 | Ajouter un sous-titre est vraiment facile`) | 209 | expect(videoImports[2].magnetUri).to.be.null |
160 | } | 210 | expect(videoImports[2].torrentName).to.be.null |
211 | expect(videoImports[2].video.name).to.equal('small video - youtube') | ||
161 | 212 | ||
162 | { | 213 | expect(videoImports[1].targetUrl).to.be.null |
163 | const attributes = { | 214 | expect(videoImports[1].magnetUri).to.equal(FIXTURE_URLS.magnet) |
164 | ...baseAttributes, | 215 | expect(videoImports[1].torrentName).to.be.null |
165 | magnetUri: FIXTURE_URLS.magnet, | 216 | expect(videoImports[1].video.name).to.equal('super peertube2 video') |
166 | description: 'this is a super torrent description', | ||
167 | tags: [ 'tag_torrent1', 'tag_torrent2' ] | ||
168 | } | ||
169 | const { video } = await servers[0].imports.importVideo({ attributes }) | ||
170 | expect(video.name).to.equal('super peertube2 video') | ||
171 | } | ||
172 | 217 | ||
173 | { | 218 | expect(videoImports[0].targetUrl).to.be.null |
174 | const attributes = { | 219 | expect(videoImports[0].magnetUri).to.be.null |
175 | ...baseAttributes, | 220 | expect(videoImports[0].torrentName).to.equal('video-720p.torrent') |
176 | torrentfile: 'video-720p.torrent' as any, | 221 | expect(videoImports[0].video.name).to.equal('你好 世界 720p.mp4') |
177 | description: 'this is a super torrent description', | 222 | }) |
178 | tags: [ 'tag_torrent1', 'tag_torrent2' ] | ||
179 | } | ||
180 | const { video } = await servers[0].imports.importVideo({ attributes }) | ||
181 | expect(video.name).to.equal('你好 世界 720p.mp4') | ||
182 | } | ||
183 | }) | ||
184 | 223 | ||
185 | it('Should list the videos to import in my videos on server 1', async function () { | 224 | it('Should have the video listed on the two instances', async function () { |
186 | const { total, data } = await servers[0].videos.listMyVideos({ sort: 'createdAt' }) | 225 | this.timeout(120_000) |
187 | 226 | ||
188 | expect(total).to.equal(3) | 227 | await waitJobs(servers) |
189 | 228 | ||
190 | expect(data).to.have.lengthOf(3) | 229 | for (const server of servers) { |
191 | expect(data[0].name).to.equal('small video - youtube') | 230 | const { total, data } = await server.videos.list() |
192 | expect(data[1].name).to.equal('super peertube2 video') | 231 | expect(total).to.equal(3) |
193 | expect(data[2].name).to.equal('你好 世界 720p.mp4') | 232 | expect(data).to.have.lengthOf(3) |
194 | }) | ||
195 | 233 | ||
196 | it('Should list the videos to import in my imports on server 1', async function () { | 234 | const [ videoHttp, videoMagnet, videoTorrent ] = data |
197 | const { total, data: videoImports } = await servers[0].imports.getMyVideoImports({ sort: '-createdAt' }) | 235 | await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid) |
198 | expect(total).to.equal(3) | 236 | } |
237 | }) | ||
238 | |||
239 | it('Should import a video on server 2 with some fields', async function () { | ||
240 | this.timeout(60_000) | ||
241 | |||
242 | const attributes = { | ||
243 | targetUrl: FIXTURE_URLS.youtube, | ||
244 | channelId: servers[1].store.channel.id, | ||
245 | privacy: VideoPrivacy.PUBLIC, | ||
246 | category: 10, | ||
247 | licence: 7, | ||
248 | language: 'en', | ||
249 | name: 'my super name', | ||
250 | description: 'my super description', | ||
251 | tags: [ 'supertag1', 'supertag2' ] | ||
252 | } | ||
253 | const { video } = await servers[1].imports.importVideo({ attributes }) | ||
254 | expect(video.name).to.equal('my super name') | ||
255 | }) | ||
199 | 256 | ||
200 | expect(videoImports).to.have.lengthOf(3) | 257 | it('Should have the videos listed on the two instances', async function () { |
258 | this.timeout(120_000) | ||
201 | 259 | ||
202 | expect(videoImports[2].targetUrl).to.equal(FIXTURE_URLS.youtube) | 260 | await waitJobs(servers) |
203 | expect(videoImports[2].magnetUri).to.be.null | ||
204 | expect(videoImports[2].torrentName).to.be.null | ||
205 | expect(videoImports[2].video.name).to.equal('small video - youtube') | ||
206 | 261 | ||
207 | expect(videoImports[1].targetUrl).to.be.null | 262 | for (const server of servers) { |
208 | expect(videoImports[1].magnetUri).to.equal(FIXTURE_URLS.magnet) | 263 | const { total, data } = await server.videos.list() |
209 | expect(videoImports[1].torrentName).to.be.null | 264 | expect(total).to.equal(4) |
210 | expect(videoImports[1].video.name).to.equal('super peertube2 video') | 265 | expect(data).to.have.lengthOf(4) |
211 | 266 | ||
212 | expect(videoImports[0].targetUrl).to.be.null | 267 | await checkVideoServer2(server, data[0].uuid) |
213 | expect(videoImports[0].magnetUri).to.be.null | ||
214 | expect(videoImports[0].torrentName).to.equal('video-720p.torrent') | ||
215 | expect(videoImports[0].video.name).to.equal('你好 世界 720p.mp4') | ||
216 | }) | ||
217 | 268 | ||
218 | it('Should have the video listed on the two instances', async function () { | 269 | const [ , videoHttp, videoMagnet, videoTorrent ] = data |
219 | this.timeout(120_000) | 270 | await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid) |
271 | } | ||
272 | }) | ||
220 | 273 | ||
221 | await waitJobs(servers) | 274 | it('Should import a video that will be transcoded', async function () { |
275 | this.timeout(240_000) | ||
222 | 276 | ||
223 | for (const server of servers) { | 277 | const attributes = { |
224 | const { total, data } = await server.videos.list() | 278 | name: 'transcoded video', |
225 | expect(total).to.equal(3) | 279 | magnetUri: FIXTURE_URLS.magnet, |
226 | expect(data).to.have.lengthOf(3) | 280 | channelId: servers[1].store.channel.id, |
281 | privacy: VideoPrivacy.PUBLIC | ||
282 | } | ||
283 | const { video } = await servers[1].imports.importVideo({ attributes }) | ||
284 | const videoUUID = video.uuid | ||
227 | 285 | ||
228 | const [ videoHttp, videoMagnet, videoTorrent ] = data | 286 | await waitJobs(servers) |
229 | await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid) | ||
230 | } | ||
231 | }) | ||
232 | 287 | ||
233 | it('Should import a video on server 2 with some fields', async function () { | 288 | for (const server of servers) { |
234 | this.timeout(60_000) | 289 | const video = await server.videos.get({ id: videoUUID }) |
235 | |||
236 | const attributes = { | ||
237 | targetUrl: FIXTURE_URLS.youtube, | ||
238 | channelId: channelIdServer2, | ||
239 | privacy: VideoPrivacy.PUBLIC, | ||
240 | category: 10, | ||
241 | licence: 7, | ||
242 | language: 'en', | ||
243 | name: 'my super name', | ||
244 | description: 'my super description', | ||
245 | tags: [ 'supertag1', 'supertag2' ] | ||
246 | } | ||
247 | const { video } = await servers[1].imports.importVideo({ attributes }) | ||
248 | expect(video.name).to.equal('my super name') | ||
249 | }) | ||
250 | 290 | ||
251 | it('Should have the videos listed on the two instances', async function () { | 291 | expect(video.name).to.equal('transcoded video') |
252 | this.timeout(120_000) | 292 | expect(video.files).to.have.lengthOf(4) |
293 | } | ||
294 | }) | ||
295 | |||
296 | it('Should import no HDR version on a HDR video', async function () { | ||
297 | this.timeout(300_000) | ||
298 | |||
299 | const config = { | ||
300 | transcoding: { | ||
301 | enabled: true, | ||
302 | resolutions: { | ||
303 | '240p': true, | ||
304 | '360p': false, | ||
305 | '480p': false, | ||
306 | '720p': false, | ||
307 | '1080p': false, // the resulting resolution shouldn't be higher than this, and not vp9.2/av01 | ||
308 | '1440p': false, | ||
309 | '2160p': false | ||
310 | }, | ||
311 | webtorrent: { enabled: true }, | ||
312 | hls: { enabled: false } | ||
313 | }, | ||
314 | import: { | ||
315 | videos: { | ||
316 | http: { | ||
317 | enabled: true | ||
318 | }, | ||
319 | torrent: { | ||
320 | enabled: true | ||
321 | } | ||
322 | } | ||
323 | } | ||
324 | } | ||
325 | await servers[0].config.updateCustomSubConfig({ newConfig: config }) | ||
253 | 326 | ||
254 | await waitJobs(servers) | 327 | const attributes = { |
328 | name: 'hdr video', | ||
329 | targetUrl: FIXTURE_URLS.youtubeHDR, | ||
330 | channelId: servers[0].store.channel.id, | ||
331 | privacy: VideoPrivacy.PUBLIC | ||
332 | } | ||
333 | const { video: videoImported } = await servers[0].imports.importVideo({ attributes }) | ||
334 | const videoUUID = videoImported.uuid | ||
335 | |||
336 | await waitJobs(servers) | ||
337 | |||
338 | // test resolution | ||
339 | const video = await servers[0].videos.get({ id: videoUUID }) | ||
340 | expect(video.name).to.equal('hdr video') | ||
341 | const maxResolution = Math.max.apply(Math, video.files.map(function (o) { return o.resolution.id })) | ||
342 | expect(maxResolution, 'expected max resolution not met').to.equals(VideoResolution.H_240P) | ||
343 | }) | ||
344 | |||
345 | it('Should import a peertube video', async function () { | ||
346 | this.timeout(120_000) | ||
347 | |||
348 | // TODO: include peertube_short when https://github.com/ytdl-org/youtube-dl/pull/29475 is merged | ||
349 | for (const targetUrl of [ FIXTURE_URLS.peertube_long ]) { | ||
350 | // for (const targetUrl of [ FIXTURE_URLS.peertube_long, FIXTURE_URLS.peertube_short ]) { | ||
351 | await servers[0].config.disableTranscoding() | ||
352 | |||
353 | const attributes = { | ||
354 | targetUrl, | ||
355 | channelId: servers[0].store.channel.id, | ||
356 | privacy: VideoPrivacy.PUBLIC | ||
357 | } | ||
358 | const { video } = await servers[0].imports.importVideo({ attributes }) | ||
359 | const videoUUID = video.uuid | ||
255 | 360 | ||
256 | for (const server of servers) { | 361 | await waitJobs(servers) |
257 | const { total, data } = await server.videos.list() | ||
258 | expect(total).to.equal(4) | ||
259 | expect(data).to.have.lengthOf(4) | ||
260 | 362 | ||
261 | await checkVideoServer2(server, data[0].uuid) | 363 | for (const server of servers) { |
364 | const video = await server.videos.get({ id: videoUUID }) | ||
262 | 365 | ||
263 | const [ , videoHttp, videoMagnet, videoTorrent ] = data | 366 | expect(video.name).to.equal('E2E tests') |
264 | await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid) | 367 | } |
265 | } | 368 | } |
266 | }) | 369 | }) |
267 | 370 | ||
268 | it('Should import a video that will be transcoded', async function () { | 371 | after(async function () { |
269 | this.timeout(240_000) | 372 | await cleanupTests(servers) |
373 | }) | ||
374 | }) | ||
375 | } | ||
270 | 376 | ||
271 | const attributes = { | 377 | runSuite('youtube-dl') |
272 | name: 'transcoded video', | ||
273 | magnetUri: FIXTURE_URLS.magnet, | ||
274 | channelId: channelIdServer2, | ||
275 | privacy: VideoPrivacy.PUBLIC | ||
276 | } | ||
277 | const { video } = await servers[1].imports.importVideo({ attributes }) | ||
278 | const videoUUID = video.uuid | ||
279 | 378 | ||
280 | await waitJobs(servers) | 379 | runSuite('yt-dlp') |
281 | 380 | ||
282 | for (const server of servers) { | 381 | describe('Auto update', function () { |
283 | const video = await server.videos.get({ id: videoUUID }) | 382 | let server: PeerTubeServer |
284 | 383 | ||
285 | expect(video.name).to.equal('transcoded video') | 384 | function quickPeerTubeImport () { |
286 | expect(video.files).to.have.lengthOf(4) | 385 | const attributes = { |
386 | targetUrl: FIXTURE_URLS.peertube_long, | ||
387 | channelId: server.store.channel.id, | ||
388 | privacy: VideoPrivacy.PUBLIC | ||
389 | } | ||
390 | |||
391 | return server.imports.importVideo({ attributes }) | ||
287 | } | 392 | } |
288 | }) | ||
289 | 393 | ||
290 | it('Should import no HDR version on a HDR video', async function () { | 394 | async function testBinaryUpdate (releaseUrl: string, releaseName: string) { |
291 | this.timeout(300_000) | 395 | await remove(join(server.servers.buildDirectory('bin'), releaseName)) |
292 | 396 | ||
293 | const config = { | 397 | await server.kill() |
294 | transcoding: { | 398 | await server.run({ |
295 | enabled: true, | 399 | import: { |
296 | resolutions: { | 400 | videos: { |
297 | '240p': true, | 401 | http: { |
298 | '360p': false, | 402 | youtube_dl_release: { |
299 | '480p': false, | 403 | url: releaseUrl, |
300 | '720p': false, | 404 | name: releaseName |
301 | '1080p': false, // the resulting resolution shouldn't be higher than this, and not vp9.2/av01 | 405 | } |
302 | '1440p': false, | 406 | } |
303 | '2160p': false | ||
304 | }, | ||
305 | webtorrent: { enabled: true }, | ||
306 | hls: { enabled: false } | ||
307 | }, | ||
308 | import: { | ||
309 | videos: { | ||
310 | http: { | ||
311 | enabled: true | ||
312 | }, | ||
313 | torrent: { | ||
314 | enabled: true | ||
315 | } | 407 | } |
316 | } | 408 | } |
317 | } | 409 | }) |
318 | } | 410 | |
319 | await servers[0].config.updateCustomSubConfig({ newConfig: config }) | 411 | await quickPeerTubeImport() |
320 | 412 | ||
321 | const attributes = { | 413 | expect(await pathExists(join(server.servers.buildDirectory('bin'), releaseName))).to.be.true |
322 | name: 'hdr video', | ||
323 | targetUrl: FIXTURE_URLS.youtubeHDR, | ||
324 | channelId: channelIdServer1, | ||
325 | privacy: VideoPrivacy.PUBLIC | ||
326 | } | 414 | } |
327 | const { video: videoImported } = await servers[0].imports.importVideo({ attributes }) | ||
328 | const videoUUID = videoImported.uuid | ||
329 | 415 | ||
330 | await waitJobs(servers) | 416 | before(async function () { |
417 | this.timeout(30_000) | ||
331 | 418 | ||
332 | // test resolution | 419 | // Run servers |
333 | const video = await servers[0].videos.get({ id: videoUUID }) | 420 | server = await createSingleServer(1) |
334 | expect(video.name).to.equal('hdr video') | ||
335 | const maxResolution = Math.max.apply(Math, video.files.map(function (o) { return o.resolution.id })) | ||
336 | expect(maxResolution, 'expected max resolution not met').to.equals(VideoResolution.H_240P) | ||
337 | }) | ||
338 | 421 | ||
339 | it('Should import a peertube video', async function () { | 422 | await setAccessTokensToServers([ server ]) |
340 | this.timeout(120_000) | 423 | await setDefaultVideoChannel([ server ]) |
424 | }) | ||
341 | 425 | ||
342 | // TODO: include peertube_short when https://github.com/ytdl-org/youtube-dl/pull/29475 is merged | 426 | it('Should update youtube-dl from github URL', async function () { |
343 | for (const targetUrl of [ FIXTURE_URLS.peertube_long ]) { | 427 | this.timeout(120_000) |
344 | // for (const targetUrl of [ FIXTURE_URLS.peertube_long, FIXTURE_URLS.peertube_short ]) { | ||
345 | await servers[0].config.disableTranscoding() | ||
346 | 428 | ||
347 | const attributes = { | 429 | await testBinaryUpdate('https://api.github.com/repos/ytdl-org/youtube-dl/releases', 'youtube-dl') |
348 | targetUrl, | 430 | }) |
349 | channelId: channelIdServer1, | ||
350 | privacy: VideoPrivacy.PUBLIC | ||
351 | } | ||
352 | const { video } = await servers[0].imports.importVideo({ attributes }) | ||
353 | const videoUUID = video.uuid | ||
354 | 431 | ||
355 | await waitJobs(servers) | 432 | it('Should update youtube-dl from raw URL', async function () { |
433 | this.timeout(120_000) | ||
356 | 434 | ||
357 | for (const server of servers) { | 435 | await testBinaryUpdate('https://yt-dl.org/downloads/latest/youtube-dl', 'youtube-dl') |
358 | const video = await server.videos.get({ id: videoUUID }) | 436 | }) |
359 | 437 | ||
360 | expect(video.name).to.equal('E2E tests') | 438 | it('Should update youtube-dl from youtube-dl fork', async function () { |
361 | } | 439 | this.timeout(120_000) |
362 | } | ||
363 | }) | ||
364 | 440 | ||
365 | after(async function () { | 441 | await testBinaryUpdate('https://api.github.com/repos/yt-dlp/yt-dlp/releases', 'yt-dlp') |
366 | await cleanupTests(servers) | 442 | }) |
367 | }) | 443 | }) |
368 | }) | 444 | }) |
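The test file above now wraps the whole import scenario in runSuite(mode) and runs it once per binary, so youtube-dl and yt-dlp are exercised against the same expectations; the only per-mode difference is the import.videos.http.youtube_dl_release override passed to createMultipleServers (and, in the Auto update block, to server.run). A rough sketch of that override, not part of the commit, using only the keys visible in the diff; the exact config typing accepted by the @shared/extra-utils helpers is assumed:

// Sketch only: the per-mode server override built by the suite.
function buildImportConfig (mode: 'youtube-dl' | 'yt-dlp') {
  return {
    import: {
      videos: {
        http: {
          youtube_dl_release: {
            // youtube-dl comes from its raw "latest" URL, yt-dlp from the GitHub releases API
            url: mode === 'youtube-dl'
              ? 'https://yt-dl.org/downloads/latest/youtube-dl'
              : 'https://api.github.com/repos/yt-dlp/yt-dlp/releases',

            // binary name; the Auto update tests check that a file with this name
            // appears under the server's bin build directory after an import
            name: mode
          }
        }
      }
    }
  }
}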
diff --git a/server/tests/fixtures/video_import_preview_yt_dlp.jpg b/server/tests/fixtures/video_import_preview_yt_dlp.jpg new file mode 100644 index 000000000..9e8833bf9 --- /dev/null +++ b/server/tests/fixtures/video_import_preview_yt_dlp.jpg | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/video_import_thumbnail_yt_dlp.jpg b/server/tests/fixtures/video_import_thumbnail_yt_dlp.jpg new file mode 100644 index 000000000..f672a785a --- /dev/null +++ b/server/tests/fixtures/video_import_thumbnail_yt_dlp.jpg | |||
Binary files differ | |||
diff --git a/server/tools/peertube-import-videos.ts b/server/tools/peertube-import-videos.ts index 758b561e1..54ac910e6 100644 --- a/server/tools/peertube-import-videos.ts +++ b/server/tools/peertube-import-videos.ts | |||
@@ -4,13 +4,9 @@ registerTSPaths() | |||
4 | import { program } from 'commander' | 4 | import { program } from 'commander' |
5 | import { accessSync, constants } from 'fs' | 5 | import { accessSync, constants } from 'fs' |
6 | import { remove } from 'fs-extra' | 6 | import { remove } from 'fs-extra' |
7 | import { truncate } from 'lodash' | ||
8 | import { join } from 'path' | 7 | import { join } from 'path' |
9 | import { promisify } from 'util' | ||
10 | import { YoutubeDL } from '@server/helpers/youtube-dl' | ||
11 | import { sha256 } from '../helpers/core-utils' | 8 | import { sha256 } from '../helpers/core-utils' |
12 | import { doRequestAndSaveToFile } from '../helpers/requests' | 9 | import { doRequestAndSaveToFile } from '../helpers/requests' |
13 | import { CONSTRAINTS_FIELDS } from '../initializers/constants' | ||
14 | import { | 10 | import { |
15 | assignToken, | 11 | assignToken, |
16 | buildCommonVideoOptions, | 12 | buildCommonVideoOptions, |
@@ -19,8 +15,8 @@ import { | |||
19 | getLogger, | 15 | getLogger, |
20 | getServerCredentials | 16 | getServerCredentials |
21 | } from './cli' | 17 | } from './cli' |
22 | import { PeerTubeServer } from '@shared/extra-utils' | 18 | import { wait } from '@shared/extra-utils' |
23 | 19 | import { YoutubeDLCLI, YoutubeDLInfo, YoutubeDLInfoBuilder } from '@server/helpers/youtube-dl' | |
24 | import prompt = require('prompt') | 20 | import prompt = require('prompt') |
25 | 21 | ||
26 | const processOptions = { | 22 | const processOptions = { |
@@ -73,7 +69,7 @@ getServerCredentials(command) | |||
73 | async function run (url: string, username: string, password: string) { | 69 | async function run (url: string, username: string, password: string) { |
74 | if (!password) password = await promptPassword() | 70 | if (!password) password = await promptPassword() |
75 | 71 | ||
76 | const youtubeDLBinary = await YoutubeDL.safeGetYoutubeDL() | 72 | const youtubeDLBinary = await YoutubeDLCLI.safeGet() |
77 | 73 | ||
78 | let info = await getYoutubeDLInfo(youtubeDLBinary, options.targetUrl, command.args) | 74 | let info = await getYoutubeDLInfo(youtubeDLBinary, options.targetUrl, command.args) |
79 | 75 | ||
@@ -96,8 +92,6 @@ async function run (url: string, username: string, password: string) { | |||
96 | } else if (options.last) { | 92 | } else if (options.last) { |
97 | infoArray = infoArray.slice(-options.last) | 93 | infoArray = infoArray.slice(-options.last) |
98 | } | 94 | } |
99 | // Normalize utf8 fields | ||
100 | infoArray = infoArray.map(i => normalizeObject(i)) | ||
101 | 95 | ||
102 | log.info('Will download and upload %d videos.\n', infoArray.length) | 96 | log.info('Will download and upload %d videos.\n', infoArray.length) |
103 | 97 | ||
@@ -105,8 +99,9 @@ async function run (url: string, username: string, password: string) { | |||
105 | try { | 99 | try { |
106 | if (index > 0 && options.waitInterval) { | 100 | if (index > 0 && options.waitInterval) { |
107 | log.info("Wait for %d seconds before continuing.", options.waitInterval / 1000) | 101 | log.info("Wait for %d seconds before continuing.", options.waitInterval / 1000) |
108 | await new Promise(res => setTimeout(res, options.waitInterval)) | 102 | await wait(options.waitInterval) |
109 | } | 103 | } |
104 | |||
110 | await processVideo({ | 105 | await processVideo({ |
111 | cwd: options.tmpdir, | 106 | cwd: options.tmpdir, |
112 | url, | 107 | url, |
@@ -131,29 +126,26 @@ async function processVideo (parameters: { | |||
131 | youtubeInfo: any | 126 | youtubeInfo: any |
132 | }) { | 127 | }) { |
133 | const { youtubeInfo, cwd, url, username, password } = parameters | 128 | const { youtubeInfo, cwd, url, username, password } = parameters |
134 | const youtubeDL = new YoutubeDL('', []) | ||
135 | 129 | ||
136 | log.debug('Fetching object.', youtubeInfo) | 130 | log.debug('Fetching object.', youtubeInfo) |
137 | 131 | ||
138 | const videoInfo = await fetchObject(youtubeInfo) | 132 | const videoInfo = await fetchObject(youtubeInfo) |
139 | log.debug('Fetched object.', videoInfo) | 133 | log.debug('Fetched object.', videoInfo) |
140 | 134 | ||
141 | const originallyPublishedAt = youtubeDL.buildOriginallyPublishedAt(videoInfo) | 135 | if (options.since && videoInfo.originallyPublishedAt && videoInfo.originallyPublishedAt.getTime() < options.since.getTime()) { |
142 | 136 | log.info('Video "%s" has been published before "%s", don\'t upload it.\n', videoInfo.name, formatDate(options.since)) | |
143 | if (options.since && originallyPublishedAt && originallyPublishedAt.getTime() < options.since.getTime()) { | ||
144 | log.info('Video "%s" has been published before "%s", don\'t upload it.\n', videoInfo.title, formatDate(options.since)) | ||
145 | return | 137 | return |
146 | } | 138 | } |
147 | 139 | ||
148 | if (options.until && originallyPublishedAt && originallyPublishedAt.getTime() > options.until.getTime()) { | 140 | if (options.until && videoInfo.originallyPublishedAt && videoInfo.originallyPublishedAt.getTime() > options.until.getTime()) { |
149 | log.info('Video "%s" has been published after "%s", don\'t upload it.\n', videoInfo.title, formatDate(options.until)) | 141 | log.info('Video "%s" has been published after "%s", don\'t upload it.\n', videoInfo.name, formatDate(options.until)) |
150 | return | 142 | return |
151 | } | 143 | } |
152 | 144 | ||
153 | const server = buildServer(url) | 145 | const server = buildServer(url) |
154 | const { data } = await server.search.advancedVideoSearch({ | 146 | const { data } = await server.search.advancedVideoSearch({ |
155 | search: { | 147 | search: { |
156 | search: videoInfo.title, | 148 | search: videoInfo.name, |
157 | sort: '-match', | 149 | sort: '-match', |
158 | searchTarget: 'local' | 150 | searchTarget: 'local' |
159 | } | 151 | } |
@@ -161,28 +153,32 @@ async function processVideo (parameters: { | |||
161 | 153 | ||
162 | log.info('############################################################\n') | 154 | log.info('############################################################\n') |
163 | 155 | ||
164 | if (data.find(v => v.name === videoInfo.title)) { | 156 | if (data.find(v => v.name === videoInfo.name)) { |
165 | log.info('Video "%s" already exists, don\'t reupload it.\n', videoInfo.title) | 157 | log.info('Video "%s" already exists, don\'t reupload it.\n', videoInfo.name) |
166 | return | 158 | return |
167 | } | 159 | } |
168 | 160 | ||
169 | const path = join(cwd, sha256(videoInfo.url) + '.mp4') | 161 | const path = join(cwd, sha256(videoInfo.url) + '.mp4') |
170 | 162 | ||
171 | log.info('Downloading video "%s"...', videoInfo.title) | 163 | log.info('Downloading video "%s"...', videoInfo.name) |
172 | 164 | ||
173 | const youtubeDLOptions = [ '-f', youtubeDL.getYoutubeDLVideoFormat(), ...command.args, '-o', path ] | ||
174 | try { | 165 | try { |
175 | const youtubeDLBinary = await YoutubeDL.safeGetYoutubeDL() | 166 | const youtubeDLBinary = await YoutubeDLCLI.safeGet() |
176 | const youtubeDLExec = promisify(youtubeDLBinary.exec).bind(youtubeDLBinary) | 167 | const output = await youtubeDLBinary.download({ |
177 | const output = await youtubeDLExec(videoInfo.url, youtubeDLOptions, processOptions) | 168 | url: videoInfo.url, |
169 | format: YoutubeDLCLI.getYoutubeDLVideoFormat([]), | ||
170 | output: path, | ||
171 | additionalYoutubeDLArgs: command.args, | ||
172 | processOptions | ||
173 | }) | ||
174 | |||
178 | log.info(output.join('\n')) | 175 | log.info(output.join('\n')) |
179 | await uploadVideoOnPeerTube({ | 176 | await uploadVideoOnPeerTube({ |
180 | youtubeDL, | ||
181 | cwd, | 177 | cwd, |
182 | url, | 178 | url, |
183 | username, | 179 | username, |
184 | password, | 180 | password, |
185 | videoInfo: normalizeObject(videoInfo), | 181 | videoInfo, |
186 | videoPath: path | 182 | videoPath: path |
187 | }) | 183 | }) |
188 | } catch (err) { | 184 | } catch (err) { |
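The hunk above replaces the promisified youtubeDLBinary.exec call with a structured options object on the new CLI wrapper. A minimal standalone sketch of that call shape, using only the option names and return type (an array of output lines) visible in the diff; the output path, extra args and processOptions value here are placeholders:

import { join } from 'path'
import { YoutubeDLCLI } from '@server/helpers/youtube-dl'

// Sketch only: download one video through the new CLI wrapper.
async function downloadOnce (url: string, cwd: string) {
  const youtubeDLBinary = await YoutubeDLCLI.safeGet()

  const output = await youtubeDLBinary.download({
    url,
    format: YoutubeDLCLI.getYoutubeDLVideoFormat([]), // the CLI tool passes an empty enabled-resolutions list, as in the diff
    output: join(cwd, 'import.mp4'), // placeholder; the tool uses sha256(videoInfo.url) + '.mp4'
    additionalYoutubeDLArgs: [],
    processOptions: { maxBuffer: 1024 * 1024 * 10 } // assumed value; the tool reuses its module-level processOptions
  })

  return output.join('\n')
}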
@@ -191,57 +187,34 @@ async function processVideo (parameters: { | |||
191 | } | 187 | } |
192 | 188 | ||
193 | async function uploadVideoOnPeerTube (parameters: { | 189 | async function uploadVideoOnPeerTube (parameters: { |
194 | youtubeDL: YoutubeDL | 190 | videoInfo: YoutubeDLInfo |
195 | videoInfo: any | ||
196 | videoPath: string | 191 | videoPath: string |
197 | cwd: string | 192 | cwd: string |
198 | url: string | 193 | url: string |
199 | username: string | 194 | username: string |
200 | password: string | 195 | password: string |
201 | }) { | 196 | }) { |
202 | const { youtubeDL, videoInfo, videoPath, cwd, url, username, password } = parameters | 197 | const { videoInfo, videoPath, cwd, url, username, password } = parameters |
203 | 198 | ||
204 | const server = buildServer(url) | 199 | const server = buildServer(url) |
205 | await assignToken(server, username, password) | 200 | await assignToken(server, username, password) |
206 | 201 | ||
207 | const category = await getCategory(server, videoInfo.categories) | 202 | let thumbnailfile: string |
208 | const licence = getLicence(videoInfo.license) | 203 | if (videoInfo.thumbnailUrl) { |
209 | let tags = [] | 204 | thumbnailfile = join(cwd, sha256(videoInfo.thumbnailUrl) + '.jpg') |
210 | if (Array.isArray(videoInfo.tags)) { | ||
211 | tags = videoInfo.tags | ||
212 | .filter(t => t.length < CONSTRAINTS_FIELDS.VIDEOS.TAG.max && t.length > CONSTRAINTS_FIELDS.VIDEOS.TAG.min) | ||
213 | .map(t => t.normalize()) | ||
214 | .slice(0, 5) | ||
215 | } | ||
216 | |||
217 | let thumbnailfile | ||
218 | if (videoInfo.thumbnail) { | ||
219 | thumbnailfile = join(cwd, sha256(videoInfo.thumbnail) + '.jpg') | ||
220 | 205 | ||
221 | await doRequestAndSaveToFile(videoInfo.thumbnail, thumbnailfile) | 206 | await doRequestAndSaveToFile(videoInfo.thumbnailUrl, thumbnailfile) |
222 | } | 207 | } |
223 | 208 | ||
224 | const originallyPublishedAt = youtubeDL.buildOriginallyPublishedAt(videoInfo) | 209 | const baseAttributes = await buildVideoAttributesFromCommander(server, program, videoInfo) |
225 | |||
226 | const defaultAttributes = { | ||
227 | name: truncate(videoInfo.title, { | ||
228 | length: CONSTRAINTS_FIELDS.VIDEOS.NAME.max, | ||
229 | separator: /,? +/, | ||
230 | omission: ' […]' | ||
231 | }), | ||
232 | category, | ||
233 | licence, | ||
234 | nsfw: isNSFW(videoInfo), | ||
235 | description: videoInfo.description, | ||
236 | tags | ||
237 | } | ||
238 | |||
239 | const baseAttributes = await buildVideoAttributesFromCommander(server, program, defaultAttributes) | ||
240 | 210 | ||
241 | const attributes = { | 211 | const attributes = { |
242 | ...baseAttributes, | 212 | ...baseAttributes, |
243 | 213 | ||
244 | originallyPublishedAt: originallyPublishedAt ? originallyPublishedAt.toISOString() : null, | 214 | originallyPublishedAt: videoInfo.originallyPublishedAt |
215 | ? videoInfo.originallyPublishedAt.toISOString() | ||
216 | : null, | ||
217 | |||
245 | thumbnailfile, | 218 | thumbnailfile, |
246 | previewfile: thumbnailfile, | 219 | previewfile: thumbnailfile, |
247 | fixture: videoPath | 220 | fixture: videoPath |
@@ -266,67 +239,26 @@ async function uploadVideoOnPeerTube (parameters: { | |||
266 | await remove(videoPath) | 239 | await remove(videoPath) |
267 | if (thumbnailfile) await remove(thumbnailfile) | 240 | if (thumbnailfile) await remove(thumbnailfile) |
268 | 241 | ||
269 | log.warn('Uploaded video "%s"!\n', attributes.name) | 242 | log.info('Uploaded video "%s"!\n', attributes.name) |
270 | } | 243 | } |
271 | 244 | ||
272 | /* ---------------------------------------------------------- */ | 245 | /* ---------------------------------------------------------- */ |
273 | 246 | ||
274 | async function getCategory (server: PeerTubeServer, categories: string[]) { | 247 | async function fetchObject (info: any) { |
275 | if (!categories) return undefined | 248 | const url = buildUrl(info) |
276 | |||
277 | const categoryString = categories[0] | ||
278 | |||
279 | if (categoryString === 'News & Politics') return 11 | ||
280 | |||
281 | const categoriesServer = await server.videos.getCategories() | ||
282 | |||
283 | for (const key of Object.keys(categoriesServer)) { | ||
284 | const categoryServer = categoriesServer[key] | ||
285 | if (categoryString.toLowerCase() === categoryServer.toLowerCase()) return parseInt(key, 10) | ||
286 | } | ||
287 | |||
288 | return undefined | ||
289 | } | ||
290 | |||
291 | function getLicence (licence: string) { | ||
292 | if (!licence) return undefined | ||
293 | |||
294 | if (licence.includes('Creative Commons Attribution licence')) return 1 | ||
295 | |||
296 | return undefined | ||
297 | } | ||
298 | |||
299 | function normalizeObject (obj: any) { | ||
300 | const newObj: any = {} | ||
301 | |||
302 | for (const key of Object.keys(obj)) { | ||
303 | // Deprecated key | ||
304 | if (key === 'resolution') continue | ||
305 | |||
306 | const value = obj[key] | ||
307 | |||
308 | if (typeof value === 'string') { | ||
309 | newObj[key] = value.normalize() | ||
310 | } else { | ||
311 | newObj[key] = value | ||
312 | } | ||
313 | } | ||
314 | 249 | ||
315 | return newObj | 250 | const youtubeDLCLI = await YoutubeDLCLI.safeGet() |
316 | } | 251 | const result = await youtubeDLCLI.getInfo({ |
252 | url, | ||
253 | format: YoutubeDLCLI.getYoutubeDLVideoFormat([]), | ||
254 | processOptions | ||
255 | }) | ||
317 | 256 | ||
318 | function fetchObject (info: any) { | 257 | const builder = new YoutubeDLInfoBuilder(result) |
319 | const url = buildUrl(info) | ||
320 | 258 | ||
321 | return new Promise<any>(async (res, rej) => { | 259 | const videoInfo = builder.getInfo() |
322 | const youtubeDL = await YoutubeDL.safeGetYoutubeDL() | ||
323 | youtubeDL.getInfo(url, undefined, processOptions, (err, videoInfo) => { | ||
324 | if (err) return rej(err) | ||
325 | 260 | ||
326 | const videoInfoWithUrl = Object.assign(videoInfo, { url }) | 261 | return { ...videoInfo, url } |
327 | return res(normalizeObject(videoInfoWithUrl)) | ||
328 | }) | ||
329 | }) | ||
330 | } | 262 | } |
331 | 263 | ||
332 | function buildUrl (info: any) { | 264 | function buildUrl (info: any) { |
@@ -340,10 +272,6 @@ function buildUrl (info: any) { | |||
340 | return 'https://www.youtube.com/watch?v=' + info.id | 272 | return 'https://www.youtube.com/watch?v=' + info.id |
341 | } | 273 | } |
342 | 274 | ||
343 | function isNSFW (info: any) { | ||
344 | return info.age_limit && info.age_limit >= 16 | ||
345 | } | ||
346 | |||
347 | function normalizeTargetUrl (url: string) { | 275 | function normalizeTargetUrl (url: string) { |
348 | let normalizedUrl = url.replace(/\/+$/, '') | 276 | let normalizedUrl = url.replace(/\/+$/, '') |
349 | 277 | ||
@@ -404,14 +332,11 @@ function exitError (message: string, ...meta: any[]) { | |||
404 | process.exit(-1) | 332 | process.exit(-1) |
405 | } | 333 | } |
406 | 334 | ||
407 | function getYoutubeDLInfo (youtubeDL: any, url: string, args: string[]) { | 335 | function getYoutubeDLInfo (youtubeDLCLI: YoutubeDLCLI, url: string, args: string[]) { |
408 | return new Promise<any>((res, rej) => { | 336 | return youtubeDLCLI.getInfo({ |
409 | const options = [ '-j', '--flat-playlist', '--playlist-reverse', ...args ] | 337 | url, |
410 | 338 | format: YoutubeDLCLI.getYoutubeDLVideoFormat([]), | |
411 | youtubeDL.getInfo(url, options, processOptions, (err, info) => { | 339 | additionalYoutubeDLArgs: [ '-j', '--flat-playlist', '--playlist-reverse', ...args ], |
412 | if (err) return rej(err) | 340 | processOptions |
413 | |||
414 | return res(info) | ||
415 | }) | ||
416 | }) | 341 | }) |
417 | } | 342 | } |
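End to end, peertube-import-videos.ts no longer massages raw youtube-dl JSON itself (normalizeObject, getCategory, getLicence and isNSFW are removed); it asks YoutubeDLCLI for the raw info and lets YoutubeDLInfoBuilder map it to the fields the tool reads back (name, originallyPublishedAt, thumbnailUrl). A condensed sketch of that flow, built only from the calls shown in the diff; the processOptions value is an assumption:

import { YoutubeDLCLI, YoutubeDLInfoBuilder } from '@server/helpers/youtube-dl'

// Sketch only: fetch and normalize the metadata for a single target URL.
async function fetchNormalizedInfo (url: string) {
  const youtubeDLCLI = await YoutubeDLCLI.safeGet() // same role as the old YoutubeDL.safeGetYoutubeDL

  const raw = await youtubeDLCLI.getInfo({
    url,
    format: YoutubeDLCLI.getYoutubeDLVideoFormat([]),
    processOptions: { maxBuffer: 1024 * 1024 * 10 } // assumed; the tool passes its module-level processOptions
  })

  // The builder replaces the removed ad-hoc helpers and yields PeerTube-ready attributes
  const videoInfo = new YoutubeDLInfoBuilder(raw).getInfo()

  // Keep the original URL alongside the normalized info, as fetchObject does above
  return { ...videoInfo, url }
}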