/* NOTE(review): stray table-export artifacts ("]>", "Commit | Line | Data |" header row) removed — the file content starts with the eslint directive below */
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | |
2 | ||
3 | import 'mocha' | |
4 | import { expect } from 'chai' | |
5 | import { pathExists, readdir, remove } from 'fs-extra' | |
6 | import { join } from 'path' | |
7 | import { FIXTURE_URLS, testCaptionFile, testImage } from '@server/tests/shared' | |
8 | import { areHttpImportTestsDisabled } from '@shared/core-utils' | |
9 | import { HttpStatusCode, Video, VideoImportState, VideoPrivacy, VideoResolution, VideoState } from '@shared/models' | |
10 | import { | |
11 | cleanupTests, | |
12 | createMultipleServers, | |
13 | createSingleServer, | |
14 | doubleFollow, | |
15 | PeerTubeServer, | |
16 | setAccessTokensToServers, | |
17 | setDefaultVideoChannel, | |
18 | waitJobs | |
19 | } from '@shared/server-commands' | |
20 | ||
21 | async function checkVideosServer1 (server: PeerTubeServer, idHttp: string, idMagnet: string, idTorrent: string) { | |
22 | const videoHttp = await server.videos.get({ id: idHttp }) | |
23 | ||
24 | expect(videoHttp.name).to.equal('small video - youtube') | |
25 | // FIXME: youtube-dl seems broken | |
26 | // expect(videoHttp.category.label).to.equal('News & Politics') | |
27 | // expect(videoHttp.licence.label).to.equal('Attribution') | |
28 | expect(videoHttp.language.label).to.equal('Unknown') | |
29 | expect(videoHttp.nsfw).to.be.false | |
30 | expect(videoHttp.description).to.equal('this is a super description') | |
31 | expect(videoHttp.tags).to.deep.equal([ 'tag1', 'tag2' ]) | |
32 | expect(videoHttp.files).to.have.lengthOf(1) | |
33 | ||
34 | const originallyPublishedAt = new Date(videoHttp.originallyPublishedAt) | |
35 | expect(originallyPublishedAt.getDate()).to.equal(14) | |
36 | expect(originallyPublishedAt.getMonth()).to.equal(0) | |
37 | expect(originallyPublishedAt.getFullYear()).to.equal(2019) | |
38 | ||
39 | const videoMagnet = await server.videos.get({ id: idMagnet }) | |
40 | const videoTorrent = await server.videos.get({ id: idTorrent }) | |
41 | ||
42 | for (const video of [ videoMagnet, videoTorrent ]) { | |
43 | expect(video.category.label).to.equal('Misc') | |
44 | expect(video.licence.label).to.equal('Unknown') | |
45 | expect(video.language.label).to.equal('Unknown') | |
46 | expect(video.nsfw).to.be.false | |
47 | expect(video.description).to.equal('this is a super torrent description') | |
48 | expect(video.tags).to.deep.equal([ 'tag_torrent1', 'tag_torrent2' ]) | |
49 | expect(video.files).to.have.lengthOf(1) | |
50 | } | |
51 | ||
52 | expect(videoTorrent.name).to.contain('你好 世界 720p.mp4') | |
53 | expect(videoMagnet.name).to.contain('super peertube2 video') | |
54 | ||
55 | const bodyCaptions = await server.captions.list({ videoId: idHttp }) | |
56 | expect(bodyCaptions.total).to.equal(2) | |
57 | } | |
58 | ||
59 | async function checkVideoServer2 (server: PeerTubeServer, id: number | string) { | |
60 | const video = await server.videos.get({ id }) | |
61 | ||
62 | expect(video.name).to.equal('my super name') | |
63 | expect(video.category.label).to.equal('Entertainment') | |
64 | expect(video.licence.label).to.equal('Public Domain Dedication') | |
65 | expect(video.language.label).to.equal('English') | |
66 | expect(video.nsfw).to.be.false | |
67 | expect(video.description).to.equal('my super description') | |
68 | expect(video.tags).to.deep.equal([ 'supertag1', 'supertag2' ]) | |
69 | ||
70 | expect(video.files).to.have.lengthOf(1) | |
71 | ||
72 | const bodyCaptions = await server.captions.list({ videoId: id }) | |
73 | expect(bodyCaptions.total).to.equal(2) | |
74 | } | |
75 | ||
describe('Test video imports', function () {

  if (areHttpImportTestsDisabled()) return

  // The same import scenarios are checked with both supported download binaries
  // (legacy youtube-dl and the yt-dlp fork)
  function runSuite (mode: 'youtube-dl' | 'yt-dlp') {

    describe('Import ' + mode, function () {
      let servers: PeerTubeServer[] = []

      before(async function () {
        this.timeout(30_000)

        // Run servers
        servers = await createMultipleServers(2, {
          import: {
            videos: {
              http: {
                youtube_dl_release: {
                  url: mode === 'youtube-dl'
                    ? 'https://yt-dl.org/downloads/latest/youtube-dl'
                    : 'https://api.github.com/repos/yt-dlp/yt-dlp/releases',

                  name: mode
                }
              }
            }
          }
        })

        await setAccessTokensToServers(servers)
        await setDefaultVideoChannel(servers)

        // Federate the two instances so imports can be checked on both
        await doubleFollow(servers[0], servers[1])
      })

      it('Should import videos on server 1', async function () {
        this.timeout(60_000)

        const baseAttributes = {
          channelId: servers[0].store.channel.id,
          privacy: VideoPrivacy.PUBLIC
        }

        // HTTP (youtube) import
        {
          const attributes = { ...baseAttributes, targetUrl: FIXTURE_URLS.youtube }
          const { video } = await servers[0].imports.importVideo({ attributes })
          expect(video.name).to.equal('small video - youtube')

          // Thumbnail and preview should be generated from the remote video
          {
            expect(video.thumbnailPath).to.match(new RegExp(`^/static/thumbnails/.+.jpg$`))
            expect(video.previewPath).to.match(new RegExp(`^/lazy-static/previews/.+.jpg$`))

            // yt-dlp downloads a slightly different image, hence the dedicated fixture suffix
            const suffix = mode === 'yt-dlp'
              ? '_yt_dlp'
              : ''

            await testImage(servers[0].url, 'video_import_thumbnail' + suffix, video.thumbnailPath)
            await testImage(servers[0].url, 'video_import_preview' + suffix, video.previewPath)
          }

          const bodyCaptions = await servers[0].captions.list({ videoId: video.id })
          const videoCaptions = bodyCaptions.data
          expect(videoCaptions).to.have.lengthOf(2)

          // English caption content
          {
            const enCaption = videoCaptions.find(caption => caption.language.id === 'en')
            expect(enCaption).to.exist
            expect(enCaption.language.label).to.equal('English')
            expect(enCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-en.vtt$`))

            const regex = `WEBVTT[ \n]+Kind: captions[ \n]+Language: en[ \n]+00:00:01.600 --> 00:00:04.200[ \n]+English \\(US\\)[ \n]+` +
              `00:00:05.900 --> 00:00:07.999[ \n]+This is a subtitle in American English[ \n]+` +
              `00:00:10.000 --> 00:00:14.000[ \n]+Adding subtitles is very easy to do`
            await testCaptionFile(servers[0].url, enCaption.captionPath, new RegExp(regex))
          }

          // French caption content
          {
            const frCaption = videoCaptions.find(caption => caption.language.id === 'fr')
            expect(frCaption).to.exist
            expect(frCaption.language.label).to.equal('French')
            expect(frCaption.captionPath).to.match(new RegExp(`^/lazy-static/video-captions/.+-fr.vtt`))

            const regex = `WEBVTT[ \n]+Kind: captions[ \n]+Language: fr[ \n]+00:00:01.600 --> 00:00:04.200[ \n]+` +
              `Français \\(FR\\)[ \n]+00:00:05.900 --> 00:00:07.999[ \n]+C'est un sous-titre français[ \n]+` +
              `00:00:10.000 --> 00:00:14.000[ \n]+Ajouter un sous-titre est vraiment facile`

            await testCaptionFile(servers[0].url, frCaption.captionPath, new RegExp(regex))
          }
        }

        // Magnet URI import
        {
          const attributes = {
            ...baseAttributes,
            magnetUri: FIXTURE_URLS.magnet,
            description: 'this is a super torrent description',
            tags: [ 'tag_torrent1', 'tag_torrent2' ]
          }
          const { video } = await servers[0].imports.importVideo({ attributes })
          expect(video.name).to.equal('super peertube2 video')
        }

        // Torrent file import
        {
          const attributes = {
            ...baseAttributes,
            torrentfile: 'video-720p.torrent' as any,
            description: 'this is a super torrent description',
            tags: [ 'tag_torrent1', 'tag_torrent2' ]
          }
          const { video } = await servers[0].imports.importVideo({ attributes })
          expect(video.name).to.equal('你好 世界 720p.mp4')
        }
      })

      it('Should list the videos to import in my videos on server 1', async function () {
        const { total, data } = await servers[0].videos.listMyVideos({ sort: 'createdAt' })

        expect(total).to.equal(3)

        expect(data).to.have.lengthOf(3)
        expect(data[0].name).to.equal('small video - youtube')
        expect(data[1].name).to.equal('super peertube2 video')
        expect(data[2].name).to.equal('你好 世界 720p.mp4')
      })

      it('Should list the videos to import in my imports on server 1', async function () {
        // Sorted by most recent first, so indexes are reversed compared to creation order
        const { total, data: videoImports } = await servers[0].imports.getMyVideoImports({ sort: '-createdAt' })
        expect(total).to.equal(3)

        expect(videoImports).to.have.lengthOf(3)

        expect(videoImports[2].targetUrl).to.equal(FIXTURE_URLS.youtube)
        expect(videoImports[2].magnetUri).to.be.null
        expect(videoImports[2].torrentName).to.be.null
        expect(videoImports[2].video.name).to.equal('small video - youtube')

        expect(videoImports[1].targetUrl).to.be.null
        expect(videoImports[1].magnetUri).to.equal(FIXTURE_URLS.magnet)
        expect(videoImports[1].torrentName).to.be.null
        expect(videoImports[1].video.name).to.equal('super peertube2 video')

        expect(videoImports[0].targetUrl).to.be.null
        expect(videoImports[0].magnetUri).to.be.null
        expect(videoImports[0].torrentName).to.equal('video-720p.torrent')
        expect(videoImports[0].video.name).to.equal('你好 世界 720p.mp4')
      })

      it('Should filter my imports on target URL', async function () {
        const { total, data: videoImports } = await servers[0].imports.getMyVideoImports({ targetUrl: FIXTURE_URLS.youtube })
        expect(total).to.equal(1)
        expect(videoImports).to.have.lengthOf(1)

        expect(videoImports[0].targetUrl).to.equal(FIXTURE_URLS.youtube)
      })

      it('Should have the video listed on the two instances', async function () {
        this.timeout(120_000)

        await waitJobs(servers)

        for (const server of servers) {
          const { total, data } = await server.videos.list()
          expect(total).to.equal(3)
          expect(data).to.have.lengthOf(3)

          const [ videoHttp, videoMagnet, videoTorrent ] = data
          await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid)
        }
      })

      it('Should import a video on server 2 with some fields', async function () {
        this.timeout(60_000)

        // Override metadata at import time; checkVideoServer2 asserts these values later
        const attributes = {
          targetUrl: FIXTURE_URLS.youtube,
          channelId: servers[1].store.channel.id,
          privacy: VideoPrivacy.PUBLIC,
          category: 10,
          licence: 7,
          language: 'en',
          name: 'my super name',
          description: 'my super description',
          tags: [ 'supertag1', 'supertag2' ]
        }
        const { video } = await servers[1].imports.importVideo({ attributes })
        expect(video.name).to.equal('my super name')
      })

      it('Should have the videos listed on the two instances', async function () {
        this.timeout(120_000)

        await waitJobs(servers)

        for (const server of servers) {
          const { total, data } = await server.videos.list()
          expect(total).to.equal(4)
          expect(data).to.have.lengthOf(4)

          await checkVideoServer2(server, data[0].uuid)

          const [ , videoHttp, videoMagnet, videoTorrent ] = data
          await checkVideosServer1(server, videoHttp.uuid, videoMagnet.uuid, videoTorrent.uuid)
        }
      })

      it('Should import a video that will be transcoded', async function () {
        this.timeout(240_000)

        const attributes = {
          name: 'transcoded video',
          magnetUri: FIXTURE_URLS.magnet,
          channelId: servers[1].store.channel.id,
          privacy: VideoPrivacy.PUBLIC
        }
        const { video } = await servers[1].imports.importVideo({ attributes })
        const videoUUID = video.uuid

        await waitJobs(servers)

        for (const server of servers) {
          const video = await server.videos.get({ id: videoUUID })

          expect(video.name).to.equal('transcoded video')
          // Transcoding produces additional resolutions, hence 4 files
          expect(video.files).to.have.lengthOf(4)
        }
      })

      it('Should import no HDR version on a HDR video', async function () {
        this.timeout(300_000)

        const config = {
          transcoding: {
            enabled: true,
            resolutions: {
              '144p': true,
              '240p': true,
              '360p': false,
              '480p': false,
              '720p': false,
              '1080p': false, // the resulting resolution shouldn't be higher than this, and not vp9.2/av01
              '1440p': false,
              '2160p': false
            },
            webtorrent: { enabled: true },
            hls: { enabled: false }
          },
          import: {
            videos: {
              http: {
                enabled: true
              },
              torrent: {
                enabled: true
              }
            }
          }
        }
        await servers[0].config.updateCustomSubConfig({ newConfig: config })

        const attributes = {
          name: 'hdr video',
          targetUrl: FIXTURE_URLS.youtubeHDR,
          channelId: servers[0].store.channel.id,
          privacy: VideoPrivacy.PUBLIC
        }
        const { video: videoImported } = await servers[0].imports.importVideo({ attributes })
        const videoUUID = videoImported.uuid

        await waitJobs(servers)

        // test resolution
        const video = await servers[0].videos.get({ id: videoUUID })
        expect(video.name).to.equal('hdr video')
        const maxResolution = Math.max.apply(Math, video.files.map(function (o) { return o.resolution.id }))
        expect(maxResolution, 'expected max resolution not met').to.equals(VideoResolution.H_240P)
      })

      it('Should import a peertube video', async function () {
        this.timeout(120_000)

        const toTest = [ FIXTURE_URLS.peertube_long ]

        // TODO: include peertube_short when https://github.com/ytdl-org/youtube-dl/pull/29475 is merged
        if (mode === 'yt-dlp') {
          toTest.push(FIXTURE_URLS.peertube_short)
        }

        for (const targetUrl of toTest) {
          await servers[0].config.disableTranscoding()

          const attributes = {
            targetUrl,
            channelId: servers[0].store.channel.id,
            privacy: VideoPrivacy.PUBLIC
          }
          const { video } = await servers[0].imports.importVideo({ attributes })
          const videoUUID = video.uuid

          await waitJobs(servers)

          for (const server of servers) {
            const video = await server.videos.get({ id: videoUUID })

            expect(video.name).to.equal('E2E tests')
          }
        }
      })

      after(async function () {
        await cleanupTests(servers)
      })
    })
  }

  runSuite('youtube-dl')

  runSuite('yt-dlp')

  describe('Delete/cancel an import', function () {
    let server: PeerTubeServer

    let finishedImportId: number
    let finishedVideo: Video
    let pendingImportId: number

    // Start an import and return its import id (not the video id)
    async function importVideo (name: string) {
      const attributes = { name, channelId: server.store.channel.id, targetUrl: FIXTURE_URLS.goodVideo }
      const res = await server.imports.importVideo({ attributes })

      return res.id
    }

    before(async function () {
      this.timeout(120_000)

      server = await createSingleServer(1)

      await setAccessTokensToServers([ server ])
      await setDefaultVideoChannel([ server ])

      finishedImportId = await importVideo('finished')
      await waitJobs([ server ])

      // Pause the job queue so the second import stays in PENDING state
      await server.jobs.pauseJobQueue()
      pendingImportId = await importVideo('pending')

      const { data } = await server.imports.getMyVideoImports()
      expect(data).to.have.lengthOf(2)

      finishedVideo = data.find(i => i.id === finishedImportId).video
    })

    it('Should delete a video import', async function () {
      await server.imports.delete({ importId: finishedImportId })

      const { data } = await server.imports.getMyVideoImports()
      expect(data).to.have.lengthOf(1)
      expect(data[0].id).to.equal(pendingImportId)
      expect(data[0].state.id).to.equal(VideoImportState.PENDING)
    })

    it('Should not have deleted the associated video', async function () {
      const video = await server.videos.get({ id: finishedVideo.id, token: server.accessToken, expectedStatus: HttpStatusCode.OK_200 })
      expect(video.name).to.equal('finished')
      expect(video.state.id).to.equal(VideoState.PUBLISHED)
    })

    it('Should cancel a video import', async function () {
      await server.imports.cancel({ importId: pendingImportId })

      const { data } = await server.imports.getMyVideoImports()
      expect(data).to.have.lengthOf(1)
      expect(data[0].id).to.equal(pendingImportId)
      expect(data[0].state.id).to.equal(VideoImportState.CANCELLED)
    })

    it('Should not have processed the cancelled video import', async function () {
      this.timeout(60_000)

      await server.jobs.resumeJobQueue()

      await waitJobs([ server ])

      // The import stays cancelled and its video was never actually imported
      const { data } = await server.imports.getMyVideoImports()
      expect(data).to.have.lengthOf(1)
      expect(data[0].id).to.equal(pendingImportId)
      expect(data[0].state.id).to.equal(VideoImportState.CANCELLED)
      expect(data[0].video.state.id).to.equal(VideoState.TO_IMPORT)
    })

    it('Should delete the cancelled video import', async function () {
      await server.imports.delete({ importId: pendingImportId })
      const { data } = await server.imports.getMyVideoImports()
      expect(data).to.have.lengthOf(0)
    })

    after(async function () {
      await cleanupTests([ server ])
    })
  })

  describe('Auto update', function () {
    let server: PeerTubeServer

    // Trigger any import so the server downloads its youtube-dl-like binary
    function quickPeerTubeImport () {
      const attributes = {
        targetUrl: FIXTURE_URLS.peertube_long,
        channelId: server.store.channel.id,
        privacy: VideoPrivacy.PUBLIC
      }

      return server.imports.importVideo({ attributes })
    }

    // Remove the binary, restart the server with the given release config,
    // run an import and check the binary was (re-)downloaded
    async function testBinaryUpdate (releaseUrl: string, releaseName: string) {
      await remove(join(server.servers.buildDirectory('bin'), releaseName))

      await server.kill()
      await server.run({
        import: {
          videos: {
            http: {
              youtube_dl_release: {
                url: releaseUrl,
                name: releaseName
              }
            }
          }
        }
      })

      await quickPeerTubeImport()

      const base = server.servers.buildDirectory('bin')
      const content = await readdir(base)
      const binaryPath = join(base, releaseName)

      expect(await pathExists(binaryPath), `${binaryPath} does not exist in ${base} (${content.join(', ')})`).to.be.true
    }

    before(async function () {
      this.timeout(30_000)

      // Run servers
      server = await createSingleServer(1)

      await setAccessTokensToServers([ server ])
      await setDefaultVideoChannel([ server ])
    })

    it('Should update youtube-dl from github URL', async function () {
      this.timeout(120_000)

      await testBinaryUpdate('https://api.github.com/repos/ytdl-org/youtube-dl/releases', 'youtube-dl')
    })

    it('Should update youtube-dl from raw URL', async function () {
      this.timeout(120_000)

      await testBinaryUpdate('https://yt-dl.org/downloads/latest/youtube-dl', 'youtube-dl')
    })

    it('Should update youtube-dl from youtube-dl fork', async function () {
      this.timeout(120_000)

      await testBinaryUpdate('https://api.github.com/repos/yt-dlp/yt-dlp/releases', 'yt-dlp')
    })

    after(async function () {
      await cleanupTests([ server ])
    })
  })
})