server/tests/api/redundancy/redundancy.ts (github/Chocobozzz/PeerTube, commit "Fix redundancy with HLS only files")

/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import * as chai from 'chai'
import 'mocha'
import { VideoDetails } from '../../../../shared/models/videos'
import {
  checkSegmentHash,
  checkVideoFilesWereRemoved,
  cleanupTests,
  doubleFollow,
  flushAndRunMultipleServers,
  getFollowingListPaginationAndSort,
  getVideo,
  getVideoWithToken,
  immutableAssign,
  killallServers,
  makeGetRequest,
  removeVideo,
  reRunServer,
  root,
  ServerInfo,
  setAccessTokensToServers,
  unfollow,
  updateCustomConfig,
  updateCustomSubConfig,
  uploadVideo,
  viewVideo,
  wait,
  waitUntilLog
} from '../../../../shared/extra-utils'
import { waitJobs } from '../../../../shared/extra-utils/server/jobs'

import * as magnetUtil from 'magnet-uri'
import {
  addVideoRedundancy,
  listVideoRedundancies,
  removeVideoRedundancy,
  updateRedundancy
} from '../../../../shared/extra-utils/server/redundancy'
import { ActorFollow } from '../../../../shared/models/actors'
import { readdir } from 'fs-extra'
import { join } from 'path'
import { VideoRedundancy, VideoRedundancyStrategy, VideoRedundancyStrategyWithManual } from '../../../../shared/models/redundancy'
import { getStats } from '../../../../shared/extra-utils/server/stats'
import { ServerStats } from '../../../../shared/models/server/server-stats.model'
import { HttpStatusCode } from '../../../../shared/core-utils/miscs/http-error-codes'

const expect = chai.expect

let servers: ServerInfo[] = []
let video1Server2UUID: string
let video1Server2Id: number

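// Check that the magnet URI of a file contains exactly the expected webseed URLs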
function checkMagnetWebseeds (file: { magnetUri: string, resolution: { id: number } }, baseWebseeds: string[], server: ServerInfo) {
  const parsed = magnetUtil.decode(file.magnetUri)

  for (const ws of baseWebseeds) {
    const found = parsed.urlList.find(url => url === `${ws}-${file.resolution.id}.mp4`)
    expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
  }

  expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)
}

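// Spawn 3 servers with HLS (and optionally WebTorrent) transcoding enabled and the given redundancy
// strategy, upload "video 1 server 2" on server 2 and make every server follow the two others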
async function flushAndRunServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
  const strategies: any[] = []

  if (strategy !== null) {
    strategies.push(
      immutableAssign({
        min_lifetime: '1 hour',
        strategy: strategy,
        size: '400KB'
      }, additionalParams)
    )
  }

  const config = {
    transcoding: {
      webtorrent: {
        enabled: withWebtorrent
      },
      hls: {
        enabled: true
      }
    },
    redundancy: {
      videos: {
        check_interval: '5 seconds',
        strategies
      }
    }
  }

  servers = await flushAndRunMultipleServers(3, config)

  // Get the access tokens
  await setAccessTokensToServers(servers)

  {
    const res = await uploadVideo(servers[1].url, servers[1].accessToken, { name: 'video 1 server 2' })
    video1Server2UUID = res.body.video.uuid
    video1Server2Id = res.body.video.id

    await viewVideo(servers[1].url, video1Server2UUID)
  }

  await waitJobs(servers)

  // Server 1 and server 2 follow each other
  await doubleFollow(servers[0], servers[1])
  // Server 1 and server 3 follow each other
  await doubleFollow(servers[0], servers[2])
  // Server 2 and server 3 follow each other
  await doubleFollow(servers[1], servers[2])

  await waitJobs(servers)
}

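// The video should only be webseeded by its origin server (server 2)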
async function check1WebSeed (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2UUID

  const webseeds = [
    `http://localhost:${servers[1].port}/static/webseed/${videoUUID}`
  ]

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const res = await getVideoWithToken(server.url, server.accessToken, videoUUID)

    const video: VideoDetails = res.body
    for (const f of video.files) {
      checkMagnetWebseeds(f, webseeds, server)
    }
  }
}

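// The video should be webseeded by its origin server (server 2) and by the redundancy of server 1,
// and the corresponding MP4 files should exist on disk on both servers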
async function check2Webseeds (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2UUID

  const webseeds = [
    `http://localhost:${servers[0].port}/static/redundancy/${videoUUID}`,
    `http://localhost:${servers[1].port}/static/webseed/${videoUUID}`
  ]

  for (const server of servers) {
    const res = await getVideo(server.url, videoUUID)

    const video: VideoDetails = res.body

    for (const file of video.files) {
      checkMagnetWebseeds(file, webseeds, server)

      await makeGetRequest({
        url: servers[0].url,
        statusCodeExpected: HttpStatusCode.OK_200,
        path: '/static/redundancy/' + `${videoUUID}-${file.resolution.id}.mp4`,
        contentType: null
      })
      await makeGetRequest({
        url: servers[1].url,
        statusCodeExpected: HttpStatusCode.OK_200,
        path: `/static/webseed/${videoUUID}-${file.resolution.id}.mp4`,
        contentType: null
      })
    }
  }

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy',
    'test' + servers[1].internalServerNumber + '/videos'
  ]

  for (const directory of directories) {
    const files = await readdir(join(root(), directory))
    expect(files).to.have.length.at.least(4)

    for (const resolution of [ 240, 360, 480, 720 ]) {
      expect(files.find(f => f === `${videoUUID}-${resolution}.mp4`)).to.not.be.undefined
    }
  }
}

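// The HLS playlist of the video should not have any redundancy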
async function check0PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2UUID

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const res = await getVideoWithToken(server.url, server.accessToken, videoUUID)
    const video: VideoDetails = res.body

    expect(video.streamingPlaylists).to.be.an('array')
    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
  }
}

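// The HLS playlist of the video should have exactly one redundancy, hosted by server 1:
// check the API responses, the segment hashes and the files on disk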
async function check1PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2UUID

  for (const server of servers) {
    const res = await getVideo(server.url, videoUUID)
    const video: VideoDetails = res.body

    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)

    const redundancy = video.streamingPlaylists[0].redundancies[0]

    expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
  }

  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls'
  const baseUrlSegment = servers[0].url + '/static/redundancy/hls'

  const res = await getVideo(servers[0].url, videoUUID)
  const hlsPlaylist = (res.body as VideoDetails).streamingPlaylists[0]

  for (const resolution of [ 240, 360, 480, 720 ]) {
    await checkSegmentHash(baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist)
  }

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy/hls',
    'test' + servers[1].internalServerNumber + '/streaming-playlists/hls'
  ]

  for (const directory of directories) {
    const files = await readdir(join(root(), directory, videoUUID))
    expect(files).to.have.length.at.least(4)

    for (const resolution of [ 240, 360, 480, 720 ]) {
      const filename = `${videoUUID}-${resolution}-fragmented.mp4`

      expect(files.find(f => f === filename)).to.not.be.undefined
    }
  }
}

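// Common assertions on the videos redundancy stats of server 1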
async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
  let totalSize: number = null
  let statsLength = 1

  if (strategy !== 'manual') {
    totalSize = 409600
    statsLength = 2
  }

  const res = await getStats(servers[0].url)
  const data: ServerStats = res.body

  expect(data.videosRedundancy).to.have.lengthOf(statsLength)

  const stat = data.videosRedundancy[0]
  expect(stat.strategy).to.equal(strategy)
  expect(stat.totalSize).to.equal(totalSize)

  return stat
}

async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
  expect(stat.totalVideoFiles).to.equal(4)
  expect(stat.totalVideos).to.equal(1)
}

async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.equal(0)
  expect(stat.totalVideoFiles).to.equal(0)
  expect(stat.totalVideos).to.equal(0)
}

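// Allow server 1 to duplicate videos of server 2, then check its following list reflects the change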
async function enableRedundancyOnServer1 () {
  await updateRedundancy(servers[0].url, servers[0].accessToken, servers[1].host, true)

  const res = await getFollowingListPaginationAndSort({ url: servers[0].url, start: 0, count: 5, sort: '-createdAt' })
  const follows: ActorFollow[] = res.body.data
  const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`)
  const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`)

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.true
}

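// Disallow the redundancy of server 2 videos on server 1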
async function disableRedundancyOnServer1 () {
  await updateRedundancy(servers[0].url, servers[0].accessToken, servers[1].host, false)

  const res = await getFollowingListPaginationAndSort({ url: servers[0].url, start: 0, count: 5, sort: '-createdAt' })
  const follows: ActorFollow[] = res.body.data
  const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`)
  const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`)

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.false
}

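// Run the same scenario against each redundancy strategy, then test redundancy expiration and file replacement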
describe('Test videos redundancy', function () {

  describe('With most-views strategy', function () {
    const strategy = 'most-views'

    before(function () {
      this.timeout(120000)

      return flushAndRunServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await disableRedundancyOnServer1()

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ 'videos', join('playlists', 'hls') ])
    })

    after(async function () {
      return cleanupTests(servers)
    })
  })

  describe('With trending strategy', function () {
    const strategy = 'trending'

    before(function () {
      this.timeout(120000)

      return flushAndRunServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await unfollow(servers[0].url, servers[0].accessToken, servers[1])

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ 'videos' ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

    before(function () {
      this.timeout(120000)

      return flushAndRunServers(strategy, { min_views: 3 })
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 1 webseed on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should view 2 times the first video to have > min_views config', async function () {
      this.timeout(80000)

      await viewVideo(servers[0].url, video1Server2UUID)
      await viewVideo(servers[2].url, video1Server2UUID)

      await wait(10000)
      await waitJobs(servers)
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await removeVideo(servers[1].url, servers[1].accessToken, video1Server2UUID)

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved(video1Server2UUID, server.internalServerNumber)
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(120000)

      await flushAndRunServers(strategy, { min_views: 3 }, false)
    })

    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 0 redundancy on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should have 1 redundancy on the first video', async function () {
      this.timeout(160000)

      await viewVideo(servers[0].url, video1Server2UUID)
      await viewVideo(servers[2].url, video1Server2UUID)

      await wait(10000)
      await waitJobs(servers)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await removeVideo(servers[1].url, servers[1].accessToken, video1Server2UUID)

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved(video1Server2UUID, server.internalServerNumber)
      }
    })
  })

  describe('With manual strategy', function () {
    before(function () {
      this.timeout(120000)

      return flushAndRunServers(null)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')
    })

    it('Should create a redundancy on first video', async function () {
      await addVideoRedundancy({
        url: servers[0].url,
        accessToken: servers[0].accessToken,
        videoId: video1Server2Id
      })
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')
    })

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      const res = await listVideoRedundancies({
        url: servers[0].url,
        accessToken: servers[0].accessToken,
        target: 'remote-videos'
      })

      const videos = res.body.data as VideoRedundancy[]
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]
      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await removeVideoRedundancy({
          url: servers[0].url,
          accessToken: servers[0].accessToken,
          redundancyId: r.id
        })
      }

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ 'videos' ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('Test expiration', function () {
    const strategy = 'recently-added'

    async function checkContains (servers: ServerInfo[], str: string) {
      for (const server of servers) {
        const res = await getVideo(server.url, video1Server2UUID)
        const video: VideoDetails = res.body

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)
        }
      }
    }

    async function checkNotContains (servers: ServerInfo[], str: string) {
      for (const server of servers) {
        const res = await getVideo(server.url, video1Server2UUID)
        const video: VideoDetails = res.body

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)
        }
      }
    }

    before(async function () {
      this.timeout(120000)

      await flushAndRunServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
    })

    it('Should still have 2 webseeds after 10 seconds', async function () {
      this.timeout(80000)

      await wait(10000)

      try {
        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      } catch {
        // Maybe a server deleted a redundancy in the scheduler
        await wait(2000)

        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      }
    })

    it('Should stop server 1 and expire video redundancy', async function () {
      this.timeout(80000)

      killallServers([ servers[0] ])

      await wait(15000)

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(120000)

      await flushAndRunServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

      await waitJobs(servers)
      await waitUntilLog(servers[0], 'Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

      const res = await uploadVideo(servers[1].url, servers[1].accessToken, { name: 'video 2 server 2' })
      video2Server2UUID = res.body.video.uuid
    })

    it('Should cache video 2 webseeds on the first video', async function () {
      this.timeout(120000)

      await waitJobs(servers)

      let checked = false

      while (checked === false) {
        await wait(1000)

        try {
          await check1WebSeed(video1Server2UUID)
          await check0PlaylistRedundancies(video1Server2UUID)
          await check2Webseeds(video2Server2UUID)
          await check1PlaylistRedundancies(video2Server2UUID)

          checked = true
        } catch {
          checked = false
        }
      }
    })

    it('Should disable strategy and remove redundancies', async function () {
      this.timeout(80000)

      await waitJobs(servers)

      killallServers([ servers[0] ])
      await reRunServer(servers[0], {
        redundancy: {
          videos: {
            check_interval: '1 second',
            strategies: []
          }
        }
      })

      await waitJobs(servers)

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ join('redundancy', 'hls') ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
})