/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import { expect } from 'chai'
import { pathExists, readdir, remove } from 'fs-extra'
import { join } from 'path'
import { FIXTURE_URLS, testCaptionFile, testImage } from '@server/tests/shared'
import { areHttpImportTestsDisabled } from '@shared/core-utils'
import { HttpStatusCode, Video, VideoImportState, VideoPrivacy, VideoResolution, VideoState } from '@shared/models'
import {
  cleanupTests,
  createMultipleServers,
  createSingleServer,
  doubleFollow,
  PeerTubeServer,
  setAccessTokensToServers,
  setDefaultVideoChannel,
  waitJobs
} from '@shared/server-commands'
20 | ||
21 | async function checkVideosServer1 (server: PeerTubeServer, idHttp: string, idMagnet: string, idTorrent: string) { | |
22 | const videoHttp = await server.videos.get({ id: idHttp }) | |
23 | ||
24 | expect(videoHttp.name).to.equal('small video - youtube') | |
25 | // FIXME: youtube-dl seems broken | |
26 | // expect(videoHttp.category.label).to.equal('News & Politics') | |
27 | // expect(videoHttp.licence.label).to.equal('Attribution') | |
28 | expect(videoHttp.language.label).to.equal('Unknown') | |
29 | expect(videoHttp.nsfw).to.be.false | |
30 | expect(videoHttp.description).to.equal('this is a super description') | |
31 | expect(videoHttp.tags).to.deep.equal([ 'tag1', 'tag2' ]) | |
32 | expect(videoHttp.files).to.have.lengthOf(1) | |
33 | ||
34 | const originallyPublishedAt = new Date(videoHttp.originallyPublishedAt) | |
35 | expect(originallyPublishedAt.getDate()).to.equal(14) | |
36 | expect(originallyPublishedAt.getMonth()).to.equal(0) | |
37 | expect(originallyPublishedAt.getFullYear()).to.equal(2019) | |
38 | ||
39 | const videoMagnet = await server.videos.get({ id: idMagnet }) | |
40 | const videoTorrent = await server.videos.get({ id: idTorrent }) | |
41 | ||
42 | for (const video of [ videoMagnet, videoTorrent ]) { | |
43 | expect(video.category.label).to.equal('Misc') | |
44 | expect(video.licence.label).to.equal('Unknown') | |
45 | expect(video.language.label).to.equal('Unknown') | |
46 | expect(video.nsfw).to.be.false | |
47 | expect(video.description).to.equal('this is a super torrent description') | |
48 | expect(video.tags).to.deep.equal([ 'tag_torrent1', 'tag_torrent2' ]) | |
49 | expect(video.files).to.have.lengthOf(1) | |
50 | } | |
51 | ||
52 | expect(videoTorrent.name).to.contain('你好 世界 720p.mp4') | |
53 | expect(videoMagnet.name).to.contain('super peertube2 video') | |
54 | ||
55 | const bodyCaptions = await server.captions.list({ videoId: idHttp }) | |
56 | expect(bodyCaptions.total).to.equal(2) | |
57 | } | |
58 | ||
59 | async function checkVideoServer2 (server: PeerTubeServer, id: number | string) { | |
60 | const video = await server.videos.get({ id }) | |
61 | ||
62 | expect(video.name).to.equal('my super name') | |
63 | expect(video.category.label).to.equal('Entertainment') | |
64 | expect(video.licence.label).to.equal('Public Domain Dedication') | |
65 | expect(video.language.label).to.equal('English') | |
66 | expect(video.nsfw).to.be.false | |
67 | expect(video.description).to.equal('my super description') | |
68 | expect(video.tags).to.deep.equal([ 'supertag1', 'supertag2' ]) | |
69 | ||
70 | expect(video.files).to.have.lengthOf(1) | |
71 | ||
72 | const bodyCaptions = await server.captions.list({ videoId: id }) | |
73 | expect(bodyCaptions.total).to.equal(2) | |
74 | } | |
75 | ||
76 | describe('Test video imports', function () { | |
77 | ||
78 | if (areHttpImportTestsDisabled()) return | |
79 | ||
80 | function runSuite (mode: 'youtube-dl' | 'yt-dlp') { | |
81 | ||
82 | describe('Import ' + mode, function () { | |
83 | let servers: PeerTubeServer[] = [] | |
84 | ||
85 | before(async function () { | |
86 | this.timeout(30_000) | |
87 | ||
88 | // Run servers | |
89 | servers = await createMultipleServers(2, { | |
90 | import: { | |
91 | videos: { | |
92 | http: { | |
93 | youtube_dl_release: { | |
94 | url: mode === 'youtube-dl' | |
95 | ? 'https://yt-dl.org/downloads/latest/youtube-dl' | |
96 | : 'https://api.github.com/repos/yt-dlp/yt-dlp/releases', | |
97 | ||
98 | name: mode | |
99 | } | |
100 | } | |
101 | } | |
102 | } | |
103 | }) | |
104 | ||
105 | await setAccessTokensToServers(servers) | |
106 | await setDefaultVideoChannel(servers) | |
107 | ||
108 | await doubleFollow(servers[0], servers[1]) | |
109 | }) | |
110 | ||
111 | it('Should import videos on server 1', async function () { | |
112 | this.timeout(60_000) | |
113 | ||
114 | const baseAttributes = { | |
115 | channelId: servers[0].store.channel.id, | |
116 | privacy: VideoPrivacy.PUBLIC | |
117 | } | |
118 | ||
119 | { | |
120 | const attributes = { ...baseAttributes, targetUrl: FIXTURE_URLS.youtube } | |
121 | const { video } = await servers[0].imports.importVideo({ attributes }) | |
122 | expect(video.name).to.equal('small video - youtube') | |
123 | ||
124 | { | |
125 | expect(video.thumbnailPath).to.match(new RegExp(`^/static/thumbnails/.+.jpg$`)) | |
126 | expect(video.previewPath).to.match(new RegExp(`^/lazy-static/previews/.+.jpg$`)) | |
127 | ||
128 | const suffix = mode === 'yt-dlp' | |
129 | ? '_yt_dlp' | |
130 | : '' | |
131 | ||
132 | await testImage(servers[0].url, 'video_import_thumbnail' + suffix, video.thumbnailPath) | |
133 | await testImage(servers[0].url, 'video_import_preview' + suffix, video.previewPath) | |
134 | } | |
135 | ||
136 | const bodyCaptions = await servers[0].captions.list({ videoId: video.id }) | |
137 | const videoCaptions = bodyCaptions.data | |
138 | expect(videoCaptions).to.have.lengthOf(2) | |
139 | ||
140 | { | |
141 | const enCaption = videoCaptions.find(caption => caption.language.id === 'en') | |
142 | expect(enCaption).to.exist | |
143 | expect(enCaption.language.label).to.equal('English') | |
144 | expect(enCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-en.vtt$`)) | |
145 | ||
146 | const regex = `WEBVTT[ \n]+Kind: captions[ \n]+` + | |
147 | `(Language: en[ \n]+)?` + | |
148 | `00:00:01.600 --> 00:00:04.200( position:\\d+% line:\\d+%)?[ \n]+English \\(US\\)[ \n]+` + | |
149 | `00:00:05.900 --> 00:00:07.999( position:\\d+% line:\\d+%)?[ \n]+This is a subtitle in American English[ \n]+` + | |
150 | `00:00:10.000 --> 00:00:14.000( position:\\d+% line:\\d+%)?[ \n]+Adding subtitles is very easy to do` | |
151 | await testCaptionFile(servers[0].url, enCaption.captionPath, new RegExp(regex)) | |
152 | } | |
153 | ||
154 | { | |
155 | const frCaption = videoCaptions.find(caption => caption.language.id === 'fr') | |
156 | expect(frCaption).to.exist | |
157 | expect(frCaption.language.label).to.equal('French') | |
158 | expect(frCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-fr.vtt`)) | |
159 | ||
160 | const regex = `WEBVTT[ \n]+Kind: captions[ \n]+` + | |
161 | `(Language: fr[ \n]+)?` + | |
162 | `00:00:01.600 --> 00:00:04.200( position:\\d+% line:\\d+%)?[ \n]+Français \\(FR\\)[ \n]+` + | |
163 | `00:00:05.900 --> 00:00:07.999( position:\\d+% line:\\d+%)?[ \n]+C'est un sous-titre français[ \n]+` + | |
164 | `00:00:10.000 --> 00:00:14.000( position:\\d+% line:\\d+%)?[ \n]+Ajouter un sous-titre est vraiment facile` | |
165 | ||
166 | await testCaptionFile(servers[0].url, frCaption.captionPath, new RegExp(regex)) | |
167 | } | |
168 | } | |
169 | ||
170 | { | |
171 | const attributes = { | |
172 | ...baseAttributes, | |
173 | magnetUri: FIXTURE_URLS.magnet, | |
174 | description: 'this is a super torrent description', | |
175 | tags: [ 'tag_torrent1', 'tag_torrent2' ] | |
176 | } | |
177 | const { video } = await servers[0].imports.importVideo({ attributes }) | |
178 | expect(video.name).to.equal('super peertube2 video') | |
179 | } | |
180 | ||
181 | { | |
182 | const attributes = { | |
183 | ...baseAttributes, | |
184 | torrentfile: 'video-720p.torrent' as any, | |
185 | description: 'this is a super torrent description', | |
186 | tags: [ 'tag_torrent1', 'tag_torrent2' ] | |
187 | } | |
188 | const { video } = await servers[0].imports.importVideo({ attributes }) | |
189 | expect(video.name).to.equal('你好 世界 720p.mp4') | |
190 | } | |
191 | }) | |
192 | ||
193 | it('Should list the videos to import in my videos on server 1', async function () { | |
194 | const { total, data } = await servers[0].videos.listMyVideos({ sort: 'createdAt' }) | |
195 | ||
196 | expect(total).to.equal(3) | |
197 | ||
198 | expect(data).to.have.lengthOf(3) | |
199 | expect(data[0].name).to.equal('small video - youtube') | |
200 | expect(data[1].name).to.equal('super peertube2 video') | |
201 | expect(data[2].name).to.equal('你好 世界 720p.mp4') | |
202 | }) | |
203 | ||
204 | it('Should list the videos to import in my imports on server 1', async function () { | |
205 | const { total, data: videoImports } = await servers[0].imports.getMyVideoImports({ sort: '-createdAt' }) | |
206 | expect(total).to.equal(3) | |
207 | ||
208 | expect(videoImports).to.have.lengthOf(3) | |
209 | ||
210 | expect(videoImports[2].targetUrl).to.equal(FIXTURE_URLS.youtube) | |
211 | expect(videoImports[2].magnetUri).to.be.null | |
212 | expect(videoImports[2].torrentName).to.be.null | |
213 | expect(videoImports[2].video.name).to.equal('small video - youtube') | |
214 | ||
215 | expect(videoImports[1].targetUrl).to.be.null | |
216 | expect(videoImports[1].magnetUri).to.equal(FIXTURE_URLS.magnet) | |
217 | expect(videoImports[1].torrentName).to.be.null | |
218 | expect(videoImports[1].video.name).to.equal('super peertube2 video') | |
219 | ||
220 | expect(videoImports[0].targetUrl).to.be.null | |
221 | expect(videoImports[0].magnetUri).to.be.null | |
222 | expect(videoImports[0].torrentName).to.equal('video-720p.torrent') | |
223 | expect(videoImports[0].video.name).to.equal('你好 世界 720p.mp4') | |
224 | }) | |
225 | ||
226 | it('Should filter my imports on target URL', async function () { | |
227 | const { total, data: videoImports } = await servers[0].imports.getMyVideoImports({ targetUrl: FIXTURE_URLS.youtube }) | |
228 | expect(total).to.equal(1) | |
229 | expect(videoImports).to.have.lengthOf(1) | |
230 | ||
231 | expect(videoImports[0].targetUrl).to.equal(FIXTURE_URLS.youtube) | |
232 | }) | |
233 | ||
234 | it('Should have the video listed on the two instances', async function () { | |
235 | this.timeout(120_000) | |
236 | ||
237 | await waitJobs(servers) | |
238 | ||
239 | for (const server of servers) { | |
240 | const { total, data } = await server.videos.list() | |
241 | expect(total).to.equal(3) | |
242 | expect(data).to.have.lengthOf(3) | |
243 | ||
244 | const [ videoHttp, videoMagnet, videoTorrent ] = data | |
245 | await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid) | |
246 | } | |
247 | }) | |
248 | ||
249 | it('Should import a video on server 2 with some fields', async function () { | |
250 | this.timeout(60_000) | |
251 | ||
252 | const attributes = { | |
253 | targetUrl: FIXTURE_URLS.youtube, | |
254 | channelId: servers[1].store.channel.id, | |
255 | privacy: VideoPrivacy.PUBLIC, | |
256 | category: 10, | |
257 | licence: 7, | |
258 | language: 'en', | |
259 | name: 'my super name', | |
260 | description: 'my super description', | |
261 | tags: [ 'supertag1', 'supertag2' ] | |
262 | } | |
263 | const { video } = await servers[1].imports.importVideo({ attributes }) | |
264 | expect(video.name).to.equal('my super name') | |
265 | }) | |
266 | ||
267 | it('Should have the videos listed on the two instances', async function () { | |
268 | this.timeout(120_000) | |
269 | ||
270 | await waitJobs(servers) | |
271 | ||
272 | for (const server of servers) { | |
273 | const { total, data } = await server.videos.list() | |
274 | expect(total).to.equal(4) | |
275 | expect(data).to.have.lengthOf(4) | |
276 | ||
277 | await checkVideoServer2(server, data[0].uuid) | |
278 | ||
279 | const [ , videoHttp, videoMagnet, videoTorrent ] = data | |
280 | await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid) | |
281 | } | |
282 | }) | |
283 | ||
284 | it('Should import a video that will be transcoded', async function () { | |
285 | this.timeout(240_000) | |
286 | ||
287 | const attributes = { | |
288 | name: 'transcoded video', | |
289 | magnetUri: FIXTURE_URLS.magnet, | |
290 | channelId: servers[1].store.channel.id, | |
291 | privacy: VideoPrivacy.PUBLIC | |
292 | } | |
293 | const { video } = await servers[1].imports.importVideo({ attributes }) | |
294 | const videoUUID = video.uuid | |
295 | ||
296 | await waitJobs(servers) | |
297 | ||
298 | for (const server of servers) { | |
299 | const video = await server.videos.get({ id: videoUUID }) | |
300 | ||
301 | expect(video.name).to.equal('transcoded video') | |
302 | expect(video.files).to.have.lengthOf(4) | |
303 | } | |
304 | }) | |
305 | ||
306 | it('Should import no HDR version on a HDR video', async function () { | |
307 | this.timeout(300_000) | |
308 | ||
309 | const config = { | |
310 | transcoding: { | |
311 | enabled: true, | |
312 | resolutions: { | |
313 | '144p': true, | |
314 | '240p': true, | |
315 | '360p': false, | |
316 | '480p': false, | |
317 | '720p': false, | |
318 | '1080p': false, // the resulting resolution shouldn't be higher than this, and not vp9.2/av01 | |
319 | '1440p': false, | |
320 | '2160p': false | |
321 | }, | |
322 | webtorrent: { enabled: true }, | |
323 | hls: { enabled: false } | |
324 | }, | |
325 | import: { | |
326 | videos: { | |
327 | http: { | |
328 | enabled: true | |
329 | }, | |
330 | torrent: { | |
331 | enabled: true | |
332 | } | |
333 | } | |
334 | } | |
335 | } | |
336 | await servers[0].config.updateCustomSubConfig({ newConfig: config }) | |
337 | ||
338 | const attributes = { | |
339 | name: 'hdr video', | |
340 | targetUrl: FIXTURE_URLS.youtubeHDR, | |
341 | channelId: servers[0].store.channel.id, | |
342 | privacy: VideoPrivacy.PUBLIC | |
343 | } | |
344 | const { video: videoImported } = await servers[0].imports.importVideo({ attributes }) | |
345 | const videoUUID = videoImported.uuid | |
346 | ||
347 | await waitJobs(servers) | |
348 | ||
349 | // test resolution | |
350 | const video = await servers[0].videos.get({ id: videoUUID }) | |
351 | expect(video.name).to.equal('hdr video') | |
352 | const maxResolution = Math.max.apply(Math, video.files.map(function (o) { return o.resolution.id })) | |
353 | expect(maxResolution, 'expected max resolution not met').to.equals(VideoResolution.H_240P) | |
354 | }) | |
355 | ||
356 | it('Should import a peertube video', async function () { | |
357 | this.timeout(120_000) | |
358 | ||
359 | const toTest = [ FIXTURE_URLS.peertube_long ] | |
360 | ||
361 | // TODO: include peertube_short when https://github.com/ytdl-org/youtube-dl/pull/29475 is merged | |
362 | if (mode === 'yt-dlp') { | |
363 | toTest.push(FIXTURE_URLS.peertube_short) | |
364 | } | |
365 | ||
366 | for (const targetUrl of toTest) { | |
367 | await servers[0].config.disableTranscoding() | |
368 | ||
369 | const attributes = { | |
370 | targetUrl, | |
371 | channelId: servers[0].store.channel.id, | |
372 | privacy: VideoPrivacy.PUBLIC | |
373 | } | |
374 | const { video } = await servers[0].imports.importVideo({ attributes }) | |
375 | const videoUUID = video.uuid | |
376 | ||
377 | await waitJobs(servers) | |
378 | ||
379 | for (const server of servers) { | |
380 | const video = await server.videos.get({ id: videoUUID }) | |
381 | ||
382 | expect(video.name).to.equal('E2E tests') | |
383 | } | |
384 | } | |
385 | }) | |
386 | ||
387 | after(async function () { | |
388 | await cleanupTests(servers) | |
389 | }) | |
390 | }) | |
391 | } | |
392 | ||
393 | runSuite('youtube-dl') | |
394 | ||
395 | runSuite('yt-dlp') | |
396 | ||
397 | describe('Delete/cancel an import', function () { | |
398 | let server: PeerTubeServer | |
399 | ||
400 | let finishedImportId: number | |
401 | let finishedVideo: Video | |
402 | let pendingImportId: number | |
403 | ||
404 | async function importVideo (name: string) { | |
405 | const attributes = { name, channelId: server.store.channel.id, targetUrl: FIXTURE_URLS.goodVideo } | |
406 | const res = await server.imports.importVideo({ attributes }) | |
407 | ||
408 | return res.id | |
409 | } | |
410 | ||
411 | before(async function () { | |
412 | this.timeout(120_000) | |
413 | ||
414 | server = await createSingleServer(1) | |
415 | ||
416 | await setAccessTokensToServers([ server ]) | |
417 | await setDefaultVideoChannel([ server ]) | |
418 | ||
419 | finishedImportId = await importVideo('finished') | |
420 | await waitJobs([ server ]) | |
421 | ||
422 | await server.jobs.pauseJobQueue() | |
423 | pendingImportId = await importVideo('pending') | |
424 | ||
425 | const { data } = await server.imports.getMyVideoImports() | |
426 | expect(data).to.have.lengthOf(2) | |
427 | ||
428 | finishedVideo = data.find(i => i.id === finishedImportId).video | |
429 | }) | |
430 | ||
431 | it('Should delete a video import', async function () { | |
432 | await server.imports.delete({ importId: finishedImportId }) | |
433 | ||
434 | const { data } = await server.imports.getMyVideoImports() | |
435 | expect(data).to.have.lengthOf(1) | |
436 | expect(data[0].id).to.equal(pendingImportId) | |
437 | expect(data[0].state.id).to.equal(VideoImportState.PENDING) | |
438 | }) | |
439 | ||
440 | it('Should not have deleted the associated video', async function () { | |
441 | const video = await server.videos.get({ id: finishedVideo.id, token: server.accessToken, expectedStatus: HttpStatusCode.OK_200 }) | |
442 | expect(video.name).to.equal('finished') | |
443 | expect(video.state.id).to.equal(VideoState.PUBLISHED) | |
444 | }) | |
445 | ||
446 | it('Should cancel a video import', async function () { | |
447 | await server.imports.cancel({ importId: pendingImportId }) | |
448 | ||
449 | const { data } = await server.imports.getMyVideoImports() | |
450 | expect(data).to.have.lengthOf(1) | |
451 | expect(data[0].id).to.equal(pendingImportId) | |
452 | expect(data[0].state.id).to.equal(VideoImportState.CANCELLED) | |
453 | }) | |
454 | ||
455 | it('Should not have processed the cancelled video import', async function () { | |
456 | this.timeout(60_000) | |
457 | ||
458 | await server.jobs.resumeJobQueue() | |
459 | ||
460 | await waitJobs([ server ]) | |
461 | ||
462 | const { data } = await server.imports.getMyVideoImports() | |
463 | expect(data).to.have.lengthOf(1) | |
464 | expect(data[0].id).to.equal(pendingImportId) | |
465 | expect(data[0].state.id).to.equal(VideoImportState.CANCELLED) | |
466 | expect(data[0].video.state.id).to.equal(VideoState.TO_IMPORT) | |
467 | }) | |
468 | ||
469 | it('Should delete the cancelled video import', async function () { | |
470 | await server.imports.delete({ importId: pendingImportId }) | |
471 | const { data } = await server.imports.getMyVideoImports() | |
472 | expect(data).to.have.lengthOf(0) | |
473 | }) | |
474 | ||
475 | after(async function () { | |
476 | await cleanupTests([ server ]) | |
477 | }) | |
478 | }) | |
479 | ||
480 | describe('Auto update', function () { | |
481 | let server: PeerTubeServer | |
482 | ||
483 | function quickPeerTubeImport () { | |
484 | const attributes = { | |
485 | targetUrl: FIXTURE_URLS.peertube_long, | |
486 | channelId: server.store.channel.id, | |
487 | privacy: VideoPrivacy.PUBLIC | |
488 | } | |
489 | ||
490 | return server.imports.importVideo({ attributes }) | |
491 | } | |
492 | ||
493 | async function testBinaryUpdate (releaseUrl: string, releaseName: string) { | |
494 | await remove(join(server.servers.buildDirectory('bin'), releaseName)) | |
495 | ||
496 | await server.kill() | |
497 | await server.run({ | |
498 | import: { | |
499 | videos: { | |
500 | http: { | |
501 | youtube_dl_release: { | |
502 | url: releaseUrl, | |
503 | name: releaseName | |
504 | } | |
505 | } | |
506 | } | |
507 | } | |
508 | }) | |
509 | ||
510 | await quickPeerTubeImport() | |
511 | ||
512 | const base = server.servers.buildDirectory('bin') | |
513 | const content = await readdir(base) | |
514 | const binaryPath = join(base, releaseName) | |
515 | ||
516 | expect(await pathExists(binaryPath), `${binaryPath} does not exist in ${base} (${content.join(', ')})`).to.be.true | |
517 | } | |
518 | ||
519 | before(async function () { | |
520 | this.timeout(30_000) | |
521 | ||
522 | // Run servers | |
523 | server = await createSingleServer(1) | |
524 | ||
525 | await setAccessTokensToServers([ server ]) | |
526 | await setDefaultVideoChannel([ server ]) | |
527 | }) | |
528 | ||
529 | it('Should update youtube-dl from github URL', async function () { | |
530 | this.timeout(120_000) | |
531 | ||
532 | await testBinaryUpdate('https://api.github.com/repos/ytdl-org/youtube-dl/releases', 'youtube-dl') | |
533 | }) | |
534 | ||
535 | it('Should update youtube-dl from raw URL', async function () { | |
536 | this.timeout(120_000) | |
537 | ||
538 | await testBinaryUpdate('https://yt-dl.org/downloads/latest/youtube-dl', 'youtube-dl') | |
539 | }) | |
540 | ||
541 | it('Should update youtube-dl from youtube-dl fork', async function () { | |
542 | this.timeout(120_000) | |
543 | ||
544 | await testBinaryUpdate('https://api.github.com/repos/yt-dlp/yt-dlp/releases', 'yt-dlp') | |
545 | }) | |
546 | ||
547 | after(async function () { | |
548 | await cleanupTests([ server ]) | |
549 | }) | |
550 | }) | |
551 | }) |