/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import 'mocha'
import * as chai from 'chai'
import { readdir } from 'fs-extra'
import magnetUtil from 'magnet-uri'
import { basename, join } from 'path'
import { checkSegmentHash, checkVideoFilesWereRemoved, saveVideoInServers } from '@server/tests/shared'
import { root, wait } from '@shared/core-utils'
import {
  HttpStatusCode,
  VideoDetails,
  VideoFile,
  VideoPrivacy,
  VideoRedundancyStrategy,
  VideoRedundancyStrategyWithManual
} from '@shared/models'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  killallServers,
  makeRawRequest,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs
} from '@shared/server-commands'

const expect = chai.expect

let servers: PeerTubeServer[] = []
let video1Server2: VideoDetails

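// Check that the magnet URI of a video file lists exactly the expected webseed URLs, and that each URL is reachable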
async function checkMagnetWebseeds (file: VideoFile, baseWebseeds: string[], server: PeerTubeServer) {
  const parsed = magnetUtil.decode(file.magnetUri)

  for (const ws of baseWebseeds) {
    const found = parsed.urlList.find(url => url === `${ws}${basename(file.fileUrl)}`)
    expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
  }

  expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)

  for (const url of parsed.urlList) {
    await makeRawRequest(url, HttpStatusCode.OK_200)
  }
}

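// Spawn 3 servers with the given redundancy strategy (or none), a 400KB cache size and a 5 second check interval,
// upload a video on server 2 and make every server follow the others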
async function createServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
  const strategies: any[] = []

  if (strategy !== null) {
    strategies.push(
      {
        min_lifetime: '1 hour',
        strategy,
        size: '400KB',

        ...additionalParams
      }
    )
  }

  const config = {
    transcoding: {
      webtorrent: {
        enabled: withWebtorrent
      },
      hls: {
        enabled: true
      }
    },
    redundancy: {
      videos: {
        check_interval: '5 seconds',
        strategies
      }
    }
  }

  servers = await createMultipleServers(3, config)

  // Get the access tokens
  await setAccessTokensToServers(servers)

  {
    const { id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } })
    video1Server2 = await servers[1].videos.get({ id })

    await servers[1].views.simulateView({ id })
  }

  await waitJobs(servers)

  // Server 1 and server 2 follow each other
  await doubleFollow(servers[0], servers[1])
  // Server 1 and server 3 follow each other
  await doubleFollow(servers[0], servers[2])
  // Server 2 and server 3 follow each other
  await doubleFollow(servers[1], servers[2])

  await waitJobs(servers)
}

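// Check that all servers expose the same WebTorrent and HLS filenames, and return them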
async function ensureSameFilenames (videoUUID: string) {
  let webtorrentFilenames: string[]
  let hlsFilenames: string[]

  for (const server of servers) {
    const video = await server.videos.getWithToken({ id: videoUUID })

    // Ensure we use the same filenames as the origin

    const localWebtorrentFilenames = video.files.map(f => basename(f.fileUrl)).sort()
    const localHLSFilenames = video.streamingPlaylists[0].files.map(f => basename(f.fileUrl)).sort()

    if (webtorrentFilenames) expect(webtorrentFilenames).to.deep.equal(localWebtorrentFilenames)
    else webtorrentFilenames = localWebtorrentFilenames

    if (hlsFilenames) expect(hlsFilenames).to.deep.equal(localHLSFilenames)
    else hlsFilenames = localHLSFilenames
  }

  return { webtorrentFilenames, hlsFilenames }
}

async function check1WebSeed (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `http://localhost:${servers[1].port}/static/webseed/`
  ]

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    for (const f of video.files) {
      await checkMagnetWebseeds(f, webseeds, server)
    }
  }

  await ensureSameFilenames(videoUUID)
}

async function check2Webseeds (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `http://localhost:${servers[0].port}/static/redundancy/`,
    `http://localhost:${servers[1].port}/static/webseed/`
  ]

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    for (const file of video.files) {
      await checkMagnetWebseeds(file, webseeds, server)
    }
  }

  const { webtorrentFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy',
    'test' + servers[1].internalServerNumber + '/videos'
  ]

  for (const directory of directories) {
    const files = await readdir(join(root(), directory))
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => webtorrentFilenames.includes(f))).to.exist
  }
}

async function check0PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    expect(video.streamingPlaylists).to.be.an('array')
    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
  }

  await ensureSameFilenames(videoUUID)
}

async function check1PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)

    const redundancy = video.streamingPlaylists[0].redundancies[0]

    expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
  }

  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID
  const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID

  const video = await servers[0].videos.get({ id: videoUUID })
  const hlsPlaylist = video.streamingPlaylists[0]

  for (const resolution of [ 240, 360, 480, 720 ]) {
    await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist })
  }

  const { hlsFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy/hls',
    'test' + servers[1].internalServerNumber + '/streaming-playlists/hls'
  ]

  for (const directory of directories) {
    const files = await readdir(join(root(), directory, videoUUID))
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => hlsFilenames.includes(f))).to.exist
  }
}

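// Check the first redundancy stats entry of server 1: automatic strategies are expected to report the 400KB
// (409600 bytes) size configured in createServers, while the manual strategy reports no configured size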
async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
  let totalSize: number = null
  let statsLength = 1

  if (strategy !== 'manual') {
    totalSize = 409600
    statsLength = 2
  }

  const data = await servers[0].stats.get()
  expect(data.videosRedundancy).to.have.lengthOf(statsLength)

  const stat = data.videosRedundancy[0]
  expect(stat.strategy).to.equal(strategy)
  expect(stat.totalSize).to.equal(totalSize)

  return stat
}

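// The duplicated video has 4 resolutions, so 4 HLS files, plus 4 WebTorrent files when WebTorrent is enabled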
async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
  expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8)
  expect(stat.totalVideos).to.equal(1)
}

async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.equal(0)
  expect(stat.totalVideoFiles).to.equal(0)
  expect(stat.totalVideos).to.equal(0)
}

async function findServerFollows () {
  const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
  const follows = body.data
  const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`)
  const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`)

  return { server2, server3 }
}

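// Allow server 1 to mirror the videos of server 2 (server 3 stays disallowed) and check the resulting follow flags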
async function enableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.true
}

async function disableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.false
}

describe('Test videos redundancy', function () {

  describe('With most-views strategy', function () {
    const strategy = 'most-views'

    before(function () {
      this.timeout(240000)

      return createServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await disableRedundancyOnServer1()

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      return cleanupTests(servers)
    })
  })

  describe('With trending strategy', function () {
    const strategy = 'trending'

    before(function () {
      this.timeout(240000)

      return createServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 3 and keep duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].follows.unfollow({ target: servers[2] })

      await waitJobs(servers)
      await wait(5000)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 2 and remove duplicated videos', async function () {
      this.timeout(80000)

      await servers[0].follows.unfollow({ target: servers[1] })

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

    before(function () {
      this.timeout(240000)

      return createServers(strategy, { min_views: 3 })
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 1 webseed on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should view the first video 2 times to exceed the min_views config', async function () {
      this.timeout(80000)

      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await wait(10000)
      await waitJobs(servers)
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_views: 3 }, false)
    })

    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 0 redundancy on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should have 1 redundancy on the first video', async function () {
      this.timeout(160000)

      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await wait(10000)
      await waitJobs(servers)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy, true)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With manual strategy', function () {
    before(function () {
      this.timeout(240000)

      return createServers(null)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')
    })

    it('Should create a redundancy on the first video', async function () {
      await servers[0].redundancy.addVideo({ videoId: video1Server2.id })
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')
    })

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })

      const videos = body.data
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]

      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await servers[0].redundancy.removeVideo({ redundancyId: r.id })
      }

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('Test expiration', function () {
    const strategy = 'recently-added'

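    // Check whether the magnet URIs of the first video contain (or do not contain) the given string on the provided servers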
    async function checkContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)
        }
      }
    }

    async function checkNotContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)
        }
      }
    }

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
    })

    it('Should still have 2 webseeds after 10 seconds', async function () {
      this.timeout(80000)

      await wait(10000)

      try {
        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      } catch {
        // Maybe a server deleted a redundancy in the scheduler
        await wait(2000)

        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      }
    })

    it('Should stop server 1 and expire video redundancy', async function () {
      this.timeout(80000)

      await killallServers([ servers[0] ])

      await wait(15000)

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(240000)

      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

      const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
      video2Server2UUID = uuid

      // Wait for transcoding before federation
      await waitJobs(servers)

      await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })
    })

    it('Should cache video 2 webseeds on the first server', async function () {
      this.timeout(240000)

      await waitJobs(servers)

      let checked = false

      while (checked === false) {
        await wait(1000)

        try {
          await check1WebSeed()
          await check0PlaylistRedundancies()

          await check2Webseeds(video2Server2UUID)
          await check1PlaylistRedundancies(video2Server2UUID)

          checked = true
        } catch {
          checked = false
        }
      }
    })

    it('Should disable strategy and remove redundancies', async function () {
      this.timeout(80000)

      await waitJobs(servers)

      await killallServers([ servers[0] ])
      await servers[0].run({
        redundancy: {
          videos: {
            check_interval: '1 second',
            strategies: []
          }
        }
      })

      await waitJobs(servers)

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
})