Diffstat (limited to 'packages/tests/src/api/redundancy/redundancy.ts')
-rw-r--r--  packages/tests/src/api/redundancy/redundancy.ts  743
1 file changed, 743 insertions, 0 deletions
diff --git a/packages/tests/src/api/redundancy/redundancy.ts b/packages/tests/src/api/redundancy/redundancy.ts
new file mode 100644
index 000000000..69afae037
--- /dev/null
+++ b/packages/tests/src/api/redundancy/redundancy.ts
@@ -0,0 +1,743 @@
/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { expect } from 'chai'
import { readdir } from 'fs/promises'
import { decode as magnetUriDecode } from 'magnet-uri'
import { basename, join } from 'path'
import { wait } from '@peertube/peertube-core-utils'
import {
  HttpStatusCode,
  VideoDetails,
  VideoFile,
  VideoPrivacy,
  VideoRedundancyStrategy,
  VideoRedundancyStrategyWithManual
} from '@peertube/peertube-models'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  killallServers,
  makeRawRequest,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs
} from '@peertube/peertube-server-commands'
import { checkSegmentHash } from '@tests/shared/streaming-playlists.js'
import { checkVideoFilesWereRemoved, saveVideoInServers } from '@tests/shared/videos.js'

let servers: PeerTubeServer[] = []
let video1Server2: VideoDetails

async function checkMagnetWebseeds (file: VideoFile, baseWebseeds: string[], server: PeerTubeServer) {
  const parsed = magnetUriDecode(file.magnetUri)

  for (const ws of baseWebseeds) {
    const found = parsed.urlList.find(url => url === `${ws}${basename(file.fileUrl)}`)
    expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
  }

  expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)

  for (const url of parsed.urlList) {
    await makeRawRequest({ url, expectedStatus: HttpStatusCode.OK_200 })
  }
}

async function createServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebVideo = true) {
  const strategies: any[] = []

  if (strategy !== null) {
    strategies.push(
      {
        min_lifetime: '1 hour',
        strategy,
        size: '400KB',

        ...additionalParams
      }
    )
  }

  const config = {
    transcoding: {
      web_videos: {
        enabled: withWebVideo
      },
      hls: {
        enabled: true
      }
    },
    redundancy: {
      videos: {
        check_interval: '5 seconds',
        strategies
      }
    }
  }

  servers = await createMultipleServers(3, config)

  // Get the access tokens
  await setAccessTokensToServers(servers)

  {
    const { id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } })
    video1Server2 = await servers[1].videos.get({ id })

    await servers[1].views.simulateView({ id })
  }

  await waitJobs(servers)

  // Server 1 and server 2 follow each other
  await doubleFollow(servers[0], servers[1])
  // Server 1 and server 3 follow each other
  await doubleFollow(servers[0], servers[2])
  // Server 2 and server 3 follow each other
  await doubleFollow(servers[1], servers[2])

  await waitJobs(servers)
}

async function ensureSameFilenames (videoUUID: string) {
  let webVideoFilenames: string[]
  let hlsFilenames: string[]

  for (const server of servers) {
    const video = await server.videos.getWithToken({ id: videoUUID })

    // Ensure we use the same filenames as the origin

    const localWebVideoFilenames = video.files.map(f => basename(f.fileUrl)).sort()
    const localHLSFilenames = video.streamingPlaylists[0].files.map(f => basename(f.fileUrl)).sort()

    if (webVideoFilenames) expect(webVideoFilenames).to.deep.equal(localWebVideoFilenames)
    else webVideoFilenames = localWebVideoFilenames

    if (hlsFilenames) expect(hlsFilenames).to.deep.equal(localHLSFilenames)
    else hlsFilenames = localHLSFilenames
  }

  return { webVideoFilenames, hlsFilenames }
}

async function check1WebSeed (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `${servers[1].url}/static/web-videos/`
  ]

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    for (const f of video.files) {
      await checkMagnetWebseeds(f, webseeds, server)
    }
  }

  await ensureSameFilenames(videoUUID)
}

async function check2Webseeds (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `${servers[0].url}/static/redundancy/`,
    `${servers[1].url}/static/web-videos/`
  ]

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    for (const file of video.files) {
      await checkMagnetWebseeds(file, webseeds, server)
    }
  }

  const { webVideoFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    servers[0].getDirectoryPath('redundancy'),
    servers[1].getDirectoryPath('web-videos')
  ]

  for (const directory of directories) {
    const files = await readdir(directory)
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => webVideoFilenames.includes(f))).to.exist
  }
}

async function check0PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    expect(video.streamingPlaylists).to.be.an('array')
    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
  }

  await ensureSameFilenames(videoUUID)
}

async function check1PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)

    const redundancy = video.streamingPlaylists[0].redundancies[0]

    expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
  }

  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID
  const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID

  const video = await servers[0].videos.get({ id: videoUUID })
  const hlsPlaylist = video.streamingPlaylists[0]

  for (const resolution of [ 240, 360, 480, 720 ]) {
    await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist })
  }

  const { hlsFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    servers[0].getDirectoryPath('redundancy/hls'),
    servers[1].getDirectoryPath('streaming-playlists/hls')
  ]

  for (const directory of directories) {
    const files = await readdir(join(directory, videoUUID))
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => hlsFilenames.includes(f))).to.exist
  }
}

async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
  let totalSize: number = null
  let statsLength = 1

  if (strategy !== 'manual') {
    // 400KB strategy size configured in createServers
    totalSize = 409600
    statsLength = 2
  }

  const data = await servers[0].stats.get()
  expect(data.videosRedundancy).to.have.lengthOf(statsLength)

  const stat = data.videosRedundancy[0]
  expect(stat.strategy).to.equal(strategy)
  expect(stat.totalSize).to.equal(totalSize)

  return stat
}

async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
  expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8)
  expect(stat.totalVideos).to.equal(1)
}

async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.equal(0)
  expect(stat.totalVideoFiles).to.equal(0)
  expect(stat.totalVideos).to.equal(0)
}

async function findServerFollows () {
  const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
  const follows = body.data
  const server2 = follows.find(f => f.following.host === `${servers[1].host}`)
  const server3 = follows.find(f => f.following.host === `${servers[2].host}`)

  return { server2, server3 }
}

async function enableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.true
}

async function disableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.false
}

describe('Test videos redundancy', function () {

  describe('With most-views strategy', function () {
    const strategy = 'most-views'

    before(function () {
      this.timeout(240000)

      return createServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await disableRedundancyOnServer1()

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      return cleanupTests(servers)
    })
  })

  describe('With trending strategy', function () {
    const strategy = 'trending'

    before(function () {
      this.timeout(240000)

      return createServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 3 and keep duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].follows.unfollow({ target: servers[2] })

      await waitJobs(servers)
      await wait(5000)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 2 and remove duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].follows.unfollow({ target: servers[1] })

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

    before(function () {
      this.timeout(240000)

      return createServers(strategy, { min_views: 3 })
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 1 webseed on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should view 2 times the first video to have > min_views config', async function () {
      this.timeout(80000)

      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await wait(10000)
      await waitJobs(servers)
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_views: 3 }, false)
    })

    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 0 redundancy on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should have 1 redundancy on the first video', async function () {
      this.timeout(160000)

      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await wait(10000)
      await waitJobs(servers)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy, true)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With manual strategy', function () {
    before(function () {
      this.timeout(240000)

      return createServers(null)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')
    })

    it('Should create a redundancy on first video', async function () {
      await servers[0].redundancy.addVideo({ videoId: video1Server2.id })
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')
    })

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })

      const videos = body.data
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]

      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await servers[0].redundancy.removeVideo({ redundancyId: r.id })
      }

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('Test expiration', function () {
    const strategy = 'recently-added'

    async function checkContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)
        }
      }
    }

    async function checkNotContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)
        }
      }
    }

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
    })

    it('Should still have 2 webseeds after 10 seconds', async function () {
      this.timeout(80000)

      await wait(10000)

      try {
        await checkContains(servers, 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
      } catch {
        // Maybe a server deleted a redundancy in the scheduler
        await wait(2000)

        await checkContains(servers, 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
      }
    })

    it('Should stop server 1 and expire video redundancy', async function () {
      this.timeout(80000)

      await killallServers([ servers[0] ])

      await wait(15000)

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

      const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
      video2Server2UUID = uuid

      // Wait for transcoding before federation
      await waitJobs(servers)

      await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })
    })

    it('Should cache video 2 webseeds on the first video', async function () {
      this.timeout(240000)

      await waitJobs(servers)

      let checked = false

      while (checked === false) {
        await wait(1000)

        try {
          await check1WebSeed()
          await check0PlaylistRedundancies()

          await check2Webseeds(video2Server2UUID)
          await check1PlaylistRedundancies(video2Server2UUID)

          checked = true
        } catch {
          checked = false
        }
      }
    })

    it('Should disable strategy and remove redundancies', async function () {
      this.timeout(80000)

      await waitJobs(servers)

      await killallServers([ servers[0] ])
      await servers[0].run({
        redundancy: {
          videos: {
            check_interval: '1 second',
            strategies: []
          }
        }
      })

      await waitJobs(servers)

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
})