server/tests/api/redundancy/redundancy.ts
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { expect } from 'chai'
import { readdir } from 'fs-extra'
import magnetUtil from 'magnet-uri'
import { basename, join } from 'path'
import { checkSegmentHash, checkVideoFilesWereRemoved, saveVideoInServers } from '@server/tests/shared'
import { wait } from '@shared/core-utils'
import {
  HttpStatusCode,
  VideoDetails,
  VideoFile,
  VideoPrivacy,
  VideoRedundancyStrategy,
  VideoRedundancyStrategyWithManual
} from '@shared/models'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  killallServers,
  makeRawRequest,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs
} from '@shared/server-commands'

let servers: PeerTubeServer[] = []
let video1Server2: VideoDetails

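// Check that the magnet URI of a video file lists exactly the expected webseed base URLs,
// and that every webseed URL actually responds with HTTP 200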
async function checkMagnetWebseeds (file: VideoFile, baseWebseeds: string[], server: PeerTubeServer) {
  const parsed = magnetUtil.decode(file.magnetUri)

  for (const ws of baseWebseeds) {
    const found = parsed.urlList.find(url => url === `${ws}${basename(file.fileUrl)}`)
    expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
  }

  expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)

  for (const url of parsed.urlList) {
    await makeRawRequest({ url, expectedStatus: HttpStatusCode.OK_200 })
  }
}

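// Spawn 3 servers configured with the given redundancy strategy (or none when strategy is null),
// upload a video on server 2 with one view, then make all servers follow each other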
async function createServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
  const strategies: any[] = []

  if (strategy !== null) {
    strategies.push(
      {
        min_lifetime: '1 hour',
        strategy,
        size: '400KB',

        ...additionalParams
      }
    )
  }

  const config = {
    transcoding: {
      webtorrent: {
        enabled: withWebtorrent
      },
      hls: {
        enabled: true
      }
    },
    redundancy: {
      videos: {
        check_interval: '5 seconds',
        strategies
      }
    }
  }

  servers = await createMultipleServers(3, config)

  // Get the access tokens
  await setAccessTokensToServers(servers)

  {
    const { id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } })
    video1Server2 = await servers[1].videos.get({ id })

    await servers[1].views.simulateView({ id })
  }

  await waitJobs(servers)

  // Server 1 and server 2 follow each other
  await doubleFollow(servers[0], servers[1])
  // Server 1 and server 3 follow each other
  await doubleFollow(servers[0], servers[2])
  // Server 2 and server 3 follow each other
  await doubleFollow(servers[1], servers[2])

  await waitJobs(servers)
}

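// Check that every server exposes the same WebTorrent and HLS filenames for this video, and return them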
async function ensureSameFilenames (videoUUID: string) {
  let webtorrentFilenames: string[]
  let hlsFilenames: string[]

  for (const server of servers) {
    const video = await server.videos.getWithToken({ id: videoUUID })

    // Ensure we use the same filenames as the origin

    const localWebtorrentFilenames = video.files.map(f => basename(f.fileUrl)).sort()
    const localHLSFilenames = video.streamingPlaylists[0].files.map(f => basename(f.fileUrl)).sort()

    if (webtorrentFilenames) expect(webtorrentFilenames).to.deep.equal(localWebtorrentFilenames)
    else webtorrentFilenames = localWebtorrentFilenames

    if (hlsFilenames) expect(hlsFilenames).to.deep.equal(localHLSFilenames)
    else hlsFilenames = localHLSFilenames
  }

  return { webtorrentFilenames, hlsFilenames }
}

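// Each file of the video should have a single webseed: the origin server (server 2)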
async function check1WebSeed (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `${servers[1].url}/static/webseed/`
  ]

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    for (const f of video.files) {
      await checkMagnetWebseeds(f, webseeds, server)
    }
  }

  await ensureSameFilenames(videoUUID)
}

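// The video should be seeded by its origin (server 2) and by the redundancy of server 1,
// and the duplicated files should exist on server 1 disk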
async function check2Webseeds (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `${servers[0].url}/static/redundancy/`,
    `${servers[1].url}/static/webseed/`
  ]

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    for (const file of video.files) {
      await checkMagnetWebseeds(file, webseeds, server)
    }
  }

  const { webtorrentFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    servers[0].getDirectoryPath('redundancy'),
    servers[1].getDirectoryPath('videos')
  ]

  for (const directory of directories) {
    const files = await readdir(directory)
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => webtorrentFilenames.includes(f))).to.exist
  }
}

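// The HLS playlist of the video should not have any redundancy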
async function check0PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    expect(video.streamingPlaylists).to.be.an('array')
    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
  }

  await ensureSameFilenames(videoUUID)
}

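// The HLS playlist should have exactly one redundancy, served by server 1,
// with valid segment hashes and the duplicated files present on disk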
async function check1PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)

    const redundancy = video.streamingPlaylists[0].redundancies[0]

    expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
  }

  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID
  const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID

  const video = await servers[0].videos.get({ id: videoUUID })
  const hlsPlaylist = video.streamingPlaylists[0]

  for (const resolution of [ 240, 360, 480, 720 ]) {
    await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist })
  }

  const { hlsFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    servers[0].getDirectoryPath('redundancy/hls'),
    servers[1].getDirectoryPath('streaming-playlists/hls')
  ]

  for (const directory of directories) {
    const files = await readdir(join(directory, videoUUID))
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => hlsFilenames.includes(f))).to.exist
  }
}

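// Check the redundancy stats reported by server 1 for the given strategy and return the matching stat entry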
async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
  let totalSize: number = null
  let statsLength = 1

  if (strategy !== 'manual') {
    totalSize = 409600
    statsLength = 2
  }

  const data = await servers[0].stats.get()
  expect(data.videosRedundancy).to.have.lengthOf(statsLength)

  const stat = data.videosRedundancy[0]
  expect(stat.strategy).to.equal(strategy)
  expect(stat.totalSize).to.equal(totalSize)

  return stat
}

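// Stats should reflect one duplicated video (4 HLS files, plus 4 WebTorrent files unless onlyHls is set)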
async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
  expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8)
  expect(stat.totalVideos).to.equal(1)
}

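// Stats should reflect that nothing has been duplicated yet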
async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.equal(0)
  expect(stat.totalVideoFiles).to.equal(0)
  expect(stat.totalVideos).to.equal(0)
}

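// Return the follows of server 1 towards server 2 and server 3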
async function findServerFollows () {
  const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
  const follows = body.data
  const server2 = follows.find(f => f.following.host === `${servers[1].host}`)
  const server3 = follows.find(f => f.following.host === `${servers[2].host}`)

  return { server2, server3 }
}

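// Allow server 1 to duplicate videos of server 2 (but not of server 3)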
async function enableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.true
}

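// Disallow server 1 from duplicating videos of server 2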
async function disableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.false
}

describe('Test videos redundancy', function () {

  describe('With most-views strategy', function () {
    const strategy = 'most-views'

    before(function () {
      this.timeout(240000)

      return createServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await disableRedundancyOnServer1()

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      return cleanupTests(servers)
    })
  })

  describe('With trending strategy', function () {
    const strategy = 'trending'

    before(function () {
      this.timeout(240000)

      return createServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 3 and keep duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].follows.unfollow({ target: servers[2] })

      await waitJobs(servers)
      await wait(5000)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 2 and remove duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].follows.unfollow({ target: servers[1] })

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

    before(function () {
      this.timeout(240000)

      return createServers(strategy, { min_views: 3 })
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 1 webseed on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should view 2 times the first video to have > min_views config', async function () {
      this.timeout(80000)

      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await wait(10000)
      await waitJobs(servers)
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_views: 3 }, false)
    })

    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 0 redundancy on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should have 1 redundancy on the first video', async function () {
      this.timeout(160000)

      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await wait(10000)
      await waitJobs(servers)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy, true)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With manual strategy', function () {
    before(function () {
      this.timeout(240000)

      return createServers(null)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')
    })

    it('Should create a redundancy on first video', async function () {
      await servers[0].redundancy.addVideo({ videoId: video1Server2.id })
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')
    })

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })

      const videos = body.data
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]

      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await servers[0].redundancy.removeVideo({ redundancyId: r.id })
      }

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('Test expiration', function () {
    const strategy = 'recently-added'

    async function checkContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)
        }
      }
    }

    async function checkNotContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)
        }
      }
    }

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
    })

    it('Should still have 2 webseeds after 10 seconds', async function () {
      this.timeout(80000)

      await wait(10000)

      try {
        await checkContains(servers, 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
      } catch {
        // Maybe a server deleted a redundancy in the scheduler
        await wait(2000)

        await checkContains(servers, 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
      }
    })

    it('Should stop server 1 and expire video redundancy', async function () {
      this.timeout(80000)

      await killallServers([ servers[0] ])

      await wait(15000)

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

      const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
      video2Server2UUID = uuid

      // Wait for transcoding to finish before federating the video
      await waitJobs(servers)

      await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })
    })

    it('Should cache video 2 webseeds on the first video', async function () {
      this.timeout(240000)

      await waitJobs(servers)

      let checked = false

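      // Poll until the redundancy has moved from video 1 to video 2:
      // video 1 should be back to its single origin webseed while video 2 gets duplicated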
      while (checked === false) {
        await wait(1000)

        try {
          await check1WebSeed()
          await check0PlaylistRedundancies()

          await check2Webseeds(video2Server2UUID)
          await check1PlaylistRedundancies(video2Server2UUID)

          checked = true
        } catch {
          checked = false
        }
      }
    })

    it('Should disable strategy and remove redundancies', async function () {
      this.timeout(80000)

      await waitJobs(servers)

      await killallServers([ servers[0] ])
      await servers[0].run({
        redundancy: {
          videos: {
            check_interval: '1 second',
            strategies: []
          }
        }
      })

      await waitJobs(servers)

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
})