// server/tests/api/redundancy/redundancy.ts

/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import * as chai from 'chai'
import { readdir } from 'fs-extra'
import magnetUtil from 'magnet-uri'
import { basename, join } from 'path'
import { checkSegmentHash, checkVideoFilesWereRemoved, saveVideoInServers } from '@server/tests/shared'
import { root, wait } from '@shared/core-utils'
import {
  HttpStatusCode,
  VideoDetails,
  VideoFile,
  VideoPrivacy,
  VideoRedundancyStrategy,
  VideoRedundancyStrategyWithManual
} from '@shared/models'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  killallServers,
  makeRawRequest,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs
} from '@shared/server-commands'

const expect = chai.expect

let servers: PeerTubeServer[] = []
let video1Server2: VideoDetails

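// Check that the magnet URI of a file advertises exactly the expected webseed base URLs,
// and that each advertised URL actually responds with HTTP 200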
async function checkMagnetWebseeds (file: VideoFile, baseWebseeds: string[], server: PeerTubeServer) {
  const parsed = magnetUtil.decode(file.magnetUri)

  for (const ws of baseWebseeds) {
    const found = parsed.urlList.find(url => url === `${ws}${basename(file.fileUrl)}`)
    expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
  }

  expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)

  for (const url of parsed.urlList) {
    await makeRawRequest(url, HttpStatusCode.OK_200)
  }
}

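// Spawn 3 servers with HLS enabled (and webtorrent enabled unless withWebtorrent is false),
// using a single redundancy strategy entry shaped like:
//   { strategy, min_lifetime: '1 hour', size: '400KB', ...additionalParams }
// Then upload "video 1 server 2" on server 2, give it one view, and make all servers follow each other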
async function createServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
  const strategies: any[] = []

  if (strategy !== null) {
    strategies.push(
      {
        min_lifetime: '1 hour',
        strategy,
        size: '400KB',

        ...additionalParams
      }
    )
  }

  const config = {
    transcoding: {
      webtorrent: {
        enabled: withWebtorrent
      },
      hls: {
        enabled: true
      }
    },
    redundancy: {
      videos: {
        check_interval: '5 seconds',
        strategies
      }
    }
  }

  servers = await createMultipleServers(3, config)

  // Get the access tokens
  await setAccessTokensToServers(servers)

  {
    const { id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } })
    video1Server2 = await servers[1].videos.get({ id })

    await servers[1].views.simulateView({ id })
  }

  await waitJobs(servers)

  // Server 1 and server 2 follow each other
  await doubleFollow(servers[0], servers[1])
  // Server 1 and server 3 follow each other
  await doubleFollow(servers[0], servers[2])
  // Server 2 and server 3 follow each other
  await doubleFollow(servers[1], servers[2])

  await waitJobs(servers)
}

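// Verify that every server exposes the same webtorrent and HLS filenames,
// i.e. the filenames generated by the origin server are kept everywhere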
async function ensureSameFilenames (videoUUID: string) {
  let webtorrentFilenames: string[]
  let hlsFilenames: string[]

  for (const server of servers) {
    const video = await server.videos.getWithToken({ id: videoUUID })

    // Ensure we use the same filenames as the origin

    const localWebtorrentFilenames = video.files.map(f => basename(f.fileUrl)).sort()
    const localHLSFilenames = video.streamingPlaylists[0].files.map(f => basename(f.fileUrl)).sort()

    if (webtorrentFilenames) expect(webtorrentFilenames).to.deep.equal(localWebtorrentFilenames)
    else webtorrentFilenames = localWebtorrentFilenames

    if (hlsFilenames) expect(hlsFilenames).to.deep.equal(localHLSFilenames)
    else hlsFilenames = localHLSFilenames
  }

  return { webtorrentFilenames, hlsFilenames }
}

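// Expect a single webseed per file: the origin server 2 ("/static/webseed/"), i.e. no redundancy yet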
async function check1WebSeed (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `http://localhost:${servers[1].port}/static/webseed/`
  ]

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    for (const f of video.files) {
      await checkMagnetWebseeds(f, webseeds, server)
    }
  }

  await ensureSameFilenames(videoUUID)
}

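// Expect two webseeds per file: the origin server 2 plus the server 1 mirror ("/static/redundancy/"),
// and check that the duplicated files are actually present on disk on both servers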
async function check2Webseeds (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `http://localhost:${servers[0].port}/static/redundancy/`,
    `http://localhost:${servers[1].port}/static/webseed/`
  ]

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    for (const file of video.files) {
      await checkMagnetWebseeds(file, webseeds, server)
    }
  }

  const { webtorrentFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy',
    'test' + servers[1].internalServerNumber + '/videos'
  ]

  for (const directory of directories) {
    const files = await readdir(join(root(), directory))
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => webtorrentFilenames.includes(f))).to.exist
  }
}

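// Expect the HLS playlist to exist but to have no redundancy yet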
async function check0PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    expect(video.streamingPlaylists).to.be.an('array')
    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
  }

  await ensureSameFilenames(videoUUID)
}

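// Expect exactly one redundancy on the HLS playlist, served by server 1 under "/static/redundancy/hls/",
// with matching segment hashes for each resolution and the duplicated files present on disk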
async function check1PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)

    const redundancy = video.streamingPlaylists[0].redundancies[0]

    expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
  }

  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID
  const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID

  const video = await servers[0].videos.get({ id: videoUUID })
  const hlsPlaylist = video.streamingPlaylists[0]

  for (const resolution of [ 240, 360, 480, 720 ]) {
    await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist })
  }

  const { hlsFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy/hls',
    'test' + servers[1].internalServerNumber + '/streaming-playlists/hls'
  ]

  for (const directory of directories) {
    const files = await readdir(join(root(), directory, videoUUID))
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => hlsFilenames.includes(f))).to.exist
  }
}

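// Check the redundancy stats reported by server 1: with an automatic strategy the configured
// '400KB' limit shows up as totalSize = 409600 bytes and two stats entries are expected,
// while the manual strategy expects a single entry with a null totalSize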
async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
  let totalSize: number = null
  let statsLength = 1

  if (strategy !== 'manual') {
    totalSize = 409600
    statsLength = 2
  }

  const data = await servers[0].stats.get()
  expect(data.videosRedundancy).to.have.lengthOf(statsLength)

  const stat = data.videosRedundancy[0]
  expect(stat.strategy).to.equal(strategy)
  expect(stat.totalSize).to.equal(totalSize)

  return stat
}

async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
  expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8)
  expect(stat.totalVideos).to.equal(1)
}

async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.equal(0)
  expect(stat.totalVideoFiles).to.equal(0)
  expect(stat.totalVideos).to.equal(0)
}

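// Fetch server 1 followings and return the entries targeting server 2 and server 3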
async function findServerFollows () {
  const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
  const follows = body.data
  const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`)
  const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`)

  return { server2, server3 }
}

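// Toggle redundancy towards server 2 from server 1 and assert the hostRedundancyAllowed flag
// on the corresponding following (server 3 must stay disallowed in both cases)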
async function enableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.true
}

async function disableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.false
}

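// Each sub-suite below creates 3 fresh servers with the strategy under test (see createServers above)
// and tears them down in its after() hook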
describe('Test videos redundancy', function () {

  describe('With most-views strategy', function () {
    const strategy = 'most-views'

    before(function () {
      this.timeout(240000)

      return createServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await disableRedundancyOnServer1()

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      return cleanupTests(servers)
    })
  })

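  // Same scenario as the most-views strategy, plus checks that unfollowing the third server keeps
  // the duplicated files while unfollowing the origin (server 2) removes them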
  describe('With trending strategy', function () {
    const strategy = 'trending'

    before(function () {
      this.timeout(240000)

      return createServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 3 and keep duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].follows.unfollow({ target: servers[2] })

      await waitJobs(servers)
      await wait(5000)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 2 and remove duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].follows.unfollow({ target: servers[1] })

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

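  // With min_views: 3, the redundancy is only expected once the video has received enough views
  // (see the simulateView test below)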
  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

    before(function () {
      this.timeout(240000)

      return createServers(strategy, { min_views: 3 })
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 1 webseed on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should view 2 times the first video to have > min_views config', async function () {
      this.timeout(80000)

      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await wait(10000)
      await waitJobs(servers)
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

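  // Same min_views scenario, but with webtorrent transcoding disabled so only the HLS playlist
  // can be duplicated (checkStatsWith1Redundancy is called with onlyHls = true)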
  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_views: 3 }, false)
    })

    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 0 redundancy on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should have 1 redundancy on the first video', async function () {
      this.timeout(160000)

      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await wait(10000)
      await waitJobs(servers)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy, true)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

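  // No automatic strategy is configured (createServers(null)): redundancies are created and
  // removed explicitly through the redundancy API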
  describe('With manual strategy', function () {
    before(function () {
      this.timeout(240000)

      return createServers(null)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')
    })

    it('Should create a redundancy on first video', async function () {
      await servers[0].redundancy.addVideo({ videoId: video1Server2.id })
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')
    })

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })

      const videos = body.data
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]

      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await servers[0].redundancy.removeVideo({ redundancyId: r.id })
      }

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

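  // With a min_lifetime of '7 seconds': the redundancy is expected to survive while server 1
  // is running, and to expire on the other servers once server 1 is stopped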
  describe('Test expiration', function () {
    const strategy = 'recently-added'

    async function checkContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)
        }
      }
    }

    async function checkNotContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)
        }
      }
    }

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
    })

    it('Should still have 2 webseeds after 10 seconds', async function () {
      this.timeout(80000)

      await wait(10000)

      try {
        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      } catch {
        // Maybe a server deleted a redundancy in the scheduler
        await wait(2000)

        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      }
    })

    it('Should stop server 1 and expire video redundancy', async function () {
      this.timeout(80000)

      await killallServers([ servers[0] ])

      await wait(15000)

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

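  // Upload a second video and check that the redundancy switches to it (the first video goes back
  // to a single webseed), then disable the strategy and check that the duplicated files are removed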
  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

      const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
      video2Server2UUID = uuid

      // Wait for transcoding to finish before federation
      await waitJobs(servers)

      await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })
    })

    it('Should cache video 2 webseeds on the first video', async function () {
      this.timeout(240000)

      await waitJobs(servers)

      let checked = false

      while (checked === false) {
        await wait(1000)

        try {
          await check1WebSeed()
          await check0PlaylistRedundancies()

          await check2Webseeds(video2Server2UUID)
          await check1PlaylistRedundancies(video2Server2UUID)

          checked = true
        } catch {
          checked = false
        }
      }
    })

    it('Should disable strategy and remove redundancies', async function () {
      this.timeout(80000)

      await waitJobs(servers)

      await killallServers([ servers[0] ])
      await servers[0].run({
        redundancy: {
          videos: {
            check_interval: '1 second',
            strategies: []
          }
        }
      })

      await waitJobs(servers)

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
})