1 /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
4 import * as chai from 'chai'
5 import { readdir } from 'fs-extra'
6 import magnetUtil from 'magnet-uri'
7 import { basename, join } from 'path'
8 import { checkSegmentHash, checkVideoFilesWereRemoved, saveVideoInServers } from '@server/tests/shared'
9 import { root, wait } from '@shared/core-utils'
15 VideoRedundancyStrategy,
16 VideoRedundancyStrategyWithManual
17 } from '@shared/models'
20 createMultipleServers,
25 setAccessTokensToServers,
27 } from '@shared/server-commands'
29 const expect = chai.expect
// Shared mutable state used by every suite in this file.
// Populated by createServers() in each suite's before() hook.
31 let servers: PeerTubeServer[] = []
// The first video uploaded on server 2; most checks below default to it.
32 let video1Server2: VideoDetails
// Asserts the magnet URI of `file` contains exactly the expected webseed URLs
// (each base URL + the file's basename), and that every webseed URL in the
// magnet answers HTTP 200.
// NOTE(review): blank lines and closing braces of this function are elided in
// this listing; the visible span is not the complete body.
34 async function checkMagnetWebseeds (file: VideoFile, baseWebseeds: string[], server: PeerTubeServer) {
35 const parsed = magnetUtil.decode(file.magnetUri)
37 for (const ws of baseWebseeds) {
// Expected webseed entry is the base URL concatenated with the file's basename
38 const found = parsed.urlList.find(url => url === `${ws}${basename(file.fileUrl)}`)
39 expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
// No extra webseeds beyond the expected ones
42 expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)
// Every advertised webseed must actually be reachable
44 for (const url of parsed.urlList) {
45 await makeRawRequest(url, HttpStatusCode.OK_200)
// Spawns 3 PeerTube servers configured with the given redundancy strategy
// (null => no automatic strategy, used by the "manual" suite), uploads one
// video on server 2, simulates a view on it, then makes all three servers
// follow each other.
// NOTE(review): the strategy/transcoding/redundancy config object built here
// (referenced as `config` on line 81) is declared on lines elided from this
// listing — TODO confirm its exact shape against the original file.
49 async function createServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
50 const strategies: any[] = []
52 if (strategy !== null) {
55 min_lifetime: '1 hour',
67 enabled: withWebtorrent
75 check_interval: '5 seconds',
81 servers = await createMultipleServers(3, config)
83 // Get the access tokens
84 await setAccessTokensToServers(servers)
// Upload the reference video on server 2 and cache its details globally
87 const { id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } })
88 video1Server2 = await servers[1].videos.get({ id })
90 await servers[1].views.simulateView({ id })
93 await waitJobs(servers)
95 // Server 1 and server 2 follow each other
96 await doubleFollow(servers[0], servers[1])
97 // Server 1 and server 3 follow each other
98 await doubleFollow(servers[0], servers[2])
99 // Server 2 and server 3 follow each other
100 await doubleFollow(servers[1], servers[2])
102 await waitJobs(servers)
// Checks that every server exposes the same webtorrent and HLS file names for
// the given video (i.e. mirrors keep the origin's filenames), and returns the
// sorted filename lists.
// NOTE(review): some blank/closing lines of this function are elided here.
105 async function ensureSameFilenames (videoUUID: string) {
106 let webtorrentFilenames: string[]
107 let hlsFilenames: string[]
109 for (const server of servers) {
110 const video = await server.videos.getWithToken({ id: videoUUID })
112 // Ensure we use the same filenames that the origin
114 const localWebtorrentFilenames = video.files.map(f => basename(f.fileUrl)).sort()
115 const localHLSFilenames = video.streamingPlaylists[0].files.map(f => basename(f.fileUrl)).sort()
// First server seeds the reference lists; subsequent servers must match them
117 if (webtorrentFilenames) expect(webtorrentFilenames).to.deep.equal(localWebtorrentFilenames)
118 else webtorrentFilenames = localWebtorrentFilenames
120 if (hlsFilenames) expect(hlsFilenames).to.deep.equal(localHLSFilenames)
121 else hlsFilenames = localHLSFilenames
124 return { webtorrentFilenames, hlsFilenames }
// Asserts the video (default: video1Server2) is served by a single webseed —
// the origin server 2 — on every server, and that filenames are consistent.
// NOTE(review): the declaration of the `webseeds` array is partially elided in
// this listing; only the server-2 entry (line 131) is visible.
127 async function check1WebSeed (videoUUID?: string) {
128 if (!videoUUID) videoUUID = video1Server2.uuid
131 `http://localhost:${servers[1].port}/static/webseed/`
134 for (const server of servers) {
135 // With token to avoid issues with video follow constraints
136 const video = await server.videos.getWithToken({ id: videoUUID })
138 for (const f of video.files) {
139 await checkMagnetWebseeds(f, webseeds, server)
143 await ensureSameFilenames(videoUUID)
// Asserts the video has two webseeds on every server — the origin (server 2)
// plus server 1's redundancy mirror — and that the duplicated files actually
// exist on disk in both servers' storage directories.
// NOTE(review): parts of the `webseeds`/`directories` array declarations and
// closing braces are elided in this listing.
146 async function check2Webseeds (videoUUID?: string) {
147 if (!videoUUID) videoUUID = video1Server2.uuid
150 `http://localhost:${servers[0].port}/static/redundancy/`,
151 `http://localhost:${servers[1].port}/static/webseed/`
154 for (const server of servers) {
155 const video = await server.videos.get({ id: videoUUID })
157 for (const file of video.files) {
158 await checkMagnetWebseeds(file, webseeds, server)
162 const { webtorrentFilenames } = await ensureSameFilenames(videoUUID)
164 const directories = [
165 'test' + servers[0].internalServerNumber + '/redundancy',
166 'test' + servers[1].internalServerNumber + '/videos'
169 for (const directory of directories) {
170 const files = await readdir(join(root(), directory))
171 expect(files).to.have.length.at.least(4)
173 // Ensure we files exist on disk
174 expect(files.find(f => webtorrentFilenames.includes(f))).to.exist
// Asserts the video exposes exactly one HLS streaming playlist with zero
// redundancies on every server, and that filenames stay consistent.
178 async function check0PlaylistRedundancies (videoUUID?: string) {
179 if (!videoUUID) videoUUID = video1Server2.uuid
181 for (const server of servers) {
182 // With token to avoid issues with video follow constraints
183 const video = await server.videos.getWithToken({ id: videoUUID })
185 expect(video.streamingPlaylists).to.be.an('array')
186 expect(video.streamingPlaylists).to.have.lengthOf(1)
187 expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
190 await ensureSameFilenames(videoUUID)
// Asserts the video's HLS playlist has exactly one redundancy (hosted by
// server 1), that redundant segment hashes match the origin playlist for each
// resolution, and that the duplicated HLS files exist on disk on both servers.
// NOTE(review): the closing brace of the first for-loop (between lines 204 and
// 207) and other blank/closing lines are elided in this listing, which is why
// two separate `const video` declarations are visible.
193 async function check1PlaylistRedundancies (videoUUID?: string) {
194 if (!videoUUID) videoUUID = video1Server2.uuid
196 for (const server of servers) {
197 const video = await server.videos.get({ id: videoUUID })
199 expect(video.streamingPlaylists).to.have.lengthOf(1)
200 expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)
202 const redundancy = video.streamingPlaylists[0].redundancies[0]
204 expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
// Compare segment hashes between the origin playlist (server 2) and the
// redundancy mirror (server 1)
207 const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID
208 const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID
210 const video = await servers[0].videos.get({ id: videoUUID })
211 const hlsPlaylist = video.streamingPlaylists[0]
213 for (const resolution of [ 240, 360, 480, 720 ]) {
214 await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist })
217 const { hlsFilenames } = await ensureSameFilenames(videoUUID)
219 const directories = [
220 'test' + servers[0].internalServerNumber + '/redundancy/hls',
221 'test' + servers[1].internalServerNumber + '/streaming-playlists/hls'
224 for (const directory of directories) {
225 const files = await readdir(join(root(), directory, videoUUID))
226 expect(files).to.have.length.at.least(4)
228 // Ensure we files exist on disk
229 expect(files.find(f => hlsFilenames.includes(f))).to.exist
// Fetches server 1's stats and asserts the redundancy entry matches the given
// strategy and expected total size.
// NOTE(review): `statsLength` and the non-manual `totalSize` computation are
// declared on lines elided from this listing (inside the `strategy !== 'manual'`
// branch), and the trailing `return stat` that callers rely on is also elided —
// TODO confirm against the original file.
233 async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
// Manual strategy has no configured size quota, hence null by default
234 let totalSize: number = null
237 if (strategy !== 'manual') {
242 const data = await servers[0].stats.get()
243 expect(data.videosRedundancy).to.have.lengthOf(statsLength)
245 const stat = data.videosRedundancy[0]
246 expect(stat.strategy).to.equal(strategy)
247 expect(stat.totalSize).to.equal(totalSize)
// Asserts server 1's redundancy stats reflect exactly one duplicated video:
// 4 files when only HLS is enabled, 8 when webtorrent files are also mirrored.
252 async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) {
253 const stat = await checkStatsGlobal(strategy)
// Used space must be positive but under ~400 KiB (409601 bytes)
255 expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
256 expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8)
257 expect(stat.totalVideos).to.equal(1)
// Asserts server 1's redundancy stats show no duplicated videos at all.
260 async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
261 const stat = await checkStatsGlobal(strategy)
263 expect(stat.totalUsed).to.equal(0)
264 expect(stat.totalVideoFiles).to.equal(0)
265 expect(stat.totalVideos).to.equal(0)
// Returns server 1's follow entries toward server 2 and server 3 (looked up by
// host), so callers can inspect per-follow redundancy flags.
268 async function findServerFollows () {
269 const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
270 const follows = body.data
271 const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`)
272 const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`)
274 return { server2, server3 }
// Enables redundancy from server 1 toward server 2 only, then verifies the
// flag is set on the server-2 follow and NOT on the server-3 follow.
277 async function enableRedundancyOnServer1 () {
278 await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })
280 const { server2, server3 } = await findServerFollows()
282 expect(server3).to.not.be.undefined
283 expect(server3.following.hostRedundancyAllowed).to.be.false
285 expect(server2).to.not.be.undefined
286 expect(server2.following.hostRedundancyAllowed).to.be.true
// Disables redundancy from server 1 toward server 2 and verifies the flag is
// cleared on both follows.
289 async function disableRedundancyOnServer1 () {
290 await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })
292 const { server2, server3 } = await findServerFollows()
294 expect(server3).to.not.be.undefined
295 expect(server3.following.hostRedundancyAllowed).to.be.false
297 expect(server2).to.not.be.undefined
298 expect(server2.following.hostRedundancyAllowed).to.be.false
301 describe('Test videos redundancy', function () {
// Scenario: automatic redundancy driven by view counts. Enabling redundancy
// on server 1 should duplicate server 2's most-viewed video; disabling it
// should remove the duplicates again.
// NOTE(review): before() hooks, this.timeout(...) calls and closing braces of
// this suite are elided from this listing.
303 describe('With most-views strategy', function () {
304 const strategy = 'most-views'
309 return createServers(strategy)
312 it('Should have 1 webseed on the first video', async function () {
313 await check1WebSeed()
314 await check0PlaylistRedundancies()
315 await checkStatsWithoutRedundancy(strategy)
318 it('Should enable redundancy on server 1', function () {
319 return enableRedundancyOnServer1()
322 it('Should have 2 webseeds on the first video', async function () {
325 await waitJobs(servers)
// Wait until server 1 logged the duplication of all 5 files/playlists
326 await servers[0].servers.waitUntilLog('Duplicated ', 5)
327 await waitJobs(servers)
329 await check2Webseeds()
330 await check1PlaylistRedundancies()
331 await checkStatsWith1Redundancy(strategy)
334 it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
337 await disableRedundancyOnServer1()
339 await waitJobs(servers)
342 await check1WebSeed()
343 await check0PlaylistRedundancies()
345 await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
348 after(async function () {
349 return cleanupTests(servers)
// Scenario: trending strategy. Also verifies that unfollowing server 3 keeps
// the duplicates (server 2 is still followed) while unfollowing server 2 —
// the origin — removes them.
// NOTE(review): before() hooks, timeouts and closing braces are elided here.
353 describe('With trending strategy', function () {
354 const strategy = 'trending'
359 return createServers(strategy)
362 it('Should have 1 webseed on the first video', async function () {
363 await check1WebSeed()
364 await check0PlaylistRedundancies()
365 await checkStatsWithoutRedundancy(strategy)
368 it('Should enable redundancy on server 1', function () {
369 return enableRedundancyOnServer1()
372 it('Should have 2 webseeds on the first video', async function () {
375 await waitJobs(servers)
376 await servers[0].servers.waitUntilLog('Duplicated ', 5)
377 await waitJobs(servers)
379 await check2Webseeds()
380 await check1PlaylistRedundancies()
381 await checkStatsWith1Redundancy(strategy)
384 it('Should unfollow server 3 and keep duplicated videos', async function () {
387 await servers[0].follows.unfollow({ target: servers[2] })
389 await waitJobs(servers)
392 await check2Webseeds()
393 await check1PlaylistRedundancies()
394 await checkStatsWith1Redundancy(strategy)
397 it('Should unfollow server 2 and remove duplicated videos', async function () {
400 await servers[0].follows.unfollow({ target: servers[1] })
402 await waitJobs(servers)
405 await check1WebSeed()
406 await check0PlaylistRedundancies()
408 await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
411 after(async function () {
412 await cleanupTests(servers)
// Scenario: recently-added strategy gated by a min_views threshold of 3. No
// duplication happens until the video accumulates enough views; deleting the
// origin video must also purge the redundancy files everywhere.
// NOTE(review): before() hooks, timeouts and closing braces are elided here.
416 describe('With recently added strategy', function () {
417 const strategy = 'recently-added'
422 return createServers(strategy, { min_views: 3 })
425 it('Should have 1 webseed on the first video', async function () {
426 await check1WebSeed()
427 await check0PlaylistRedundancies()
428 await checkStatsWithoutRedundancy(strategy)
431 it('Should enable redundancy on server 1', function () {
432 return enableRedundancyOnServer1()
435 it('Should still have 1 webseed on the first video', async function () {
438 await waitJobs(servers)
440 await waitJobs(servers)
// Only 1 view so far (< min_views: 3) => no duplication expected yet
442 await check1WebSeed()
443 await check0PlaylistRedundancies()
444 await checkStatsWithoutRedundancy(strategy)
447 it('Should view 2 times the first video to have > min_views config', async function () {
450 await servers[0].views.simulateView({ id: video1Server2.uuid })
451 await servers[2].views.simulateView({ id: video1Server2.uuid })
454 await waitJobs(servers)
457 it('Should have 2 webseeds on the first video', async function () {
460 await waitJobs(servers)
461 await servers[0].servers.waitUntilLog('Duplicated ', 5)
462 await waitJobs(servers)
464 await check2Webseeds()
465 await check1PlaylistRedundancies()
466 await checkStatsWith1Redundancy(strategy)
469 it('Should remove the video and the redundancy files', async function () {
// Snapshot video details on every server before deletion so the removal
// check below can reference them
472 await saveVideoInServers(servers, video1Server2.uuid)
473 await servers[1].videos.remove({ id: video1Server2.uuid })
475 await waitJobs(servers)
477 for (const server of servers) {
478 await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
482 after(async function () {
483 await cleanupTests(servers)
// Scenario: webtorrent files disabled (createServers third arg = false), so
// only the HLS playlist can be duplicated — hence a single 'Duplicated' log
// entry and checkStatsWith1Redundancy(strategy, true) for 4 files.
// NOTE(review): timeouts and closing braces are elided from this listing.
487 describe('With only HLS files', function () {
488 const strategy = 'recently-added'
490 before(async function () {
493 await createServers(strategy, { min_views: 3 }, false)
496 it('Should have 0 playlist redundancy on the first video', async function () {
497 await check1WebSeed()
498 await check0PlaylistRedundancies()
501 it('Should enable redundancy on server 1', function () {
502 return enableRedundancyOnServer1()
505 it('Should still have 0 redundancy on the first video', async function () {
508 await waitJobs(servers)
510 await waitJobs(servers)
// Below the min_views threshold => nothing duplicated yet
512 await check0PlaylistRedundancies()
513 await checkStatsWithoutRedundancy(strategy)
516 it('Should have 1 redundancy on the first video', async function () {
// Push the view count over the min_views: 3 threshold
519 await servers[0].views.simulateView({ id: video1Server2.uuid })
520 await servers[2].views.simulateView({ id: video1Server2.uuid })
523 await waitJobs(servers)
525 await waitJobs(servers)
526 await servers[0].servers.waitUntilLog('Duplicated ', 1)
527 await waitJobs(servers)
529 await check1PlaylistRedundancies()
530 await checkStatsWith1Redundancy(strategy, true)
533 it('Should remove the video and the redundancy files', async function () {
536 await saveVideoInServers(servers, video1Server2.uuid)
537 await servers[1].videos.remove({ id: video1Server2.uuid })
539 await waitJobs(servers)
541 for (const server of servers) {
542 await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
546 after(async function () {
547 await cleanupTests(servers)
// Scenario: no automatic strategy (createServers(null)); redundancies are
// created and removed explicitly through the redundancy API.
// NOTE(review): before() hooks, timeouts and closing braces are elided here.
551 describe('With manual strategy', function () {
555 return createServers(null)
558 it('Should have 1 webseed on the first video', async function () {
559 await check1WebSeed()
560 await check0PlaylistRedundancies()
561 await checkStatsWithoutRedundancy('manual')
564 it('Should create a redundancy on first video', async function () {
565 await servers[0].redundancy.addVideo({ videoId: video1Server2.id })
568 it('Should have 2 webseeds on the first video', async function () {
571 await waitJobs(servers)
572 await servers[0].servers.waitUntilLog('Duplicated ', 5)
573 await waitJobs(servers)
575 await check2Webseeds()
576 await check1PlaylistRedundancies()
577 await checkStatsWith1Redundancy('manual')
580 it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
583 const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })
585 const videos = body.data
586 expect(videos).to.have.lengthOf(1)
588 const video = videos[0]
// Remove every redundancy entry: both webtorrent files and HLS playlists
590 for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
591 await servers[0].redundancy.removeVideo({ redundancyId: r.id })
594 await waitJobs(servers)
597 await check1WebSeed()
598 await check0PlaylistRedundancies()
600 await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
603 after(async function () {
604 await cleanupTests(servers)
// Scenario: redundancy expiration. With min_lifetime: '7 seconds', duplicates
// must survive the lifetime window while server 1 runs, then disappear from
// magnet URIs once server 1 is stopped and the redundancy expires.
// NOTE(review): timeouts, waits between checks and closing braces are elided
// from this listing.
608 describe('Test expiration', function () {
609 const strategy = 'recently-added'
// Asserts every magnet URI of the video contains `str` on all given servers
611 async function checkContains (servers: PeerTubeServer[], str: string) {
612 for (const server of servers) {
613 const video = await server.videos.get({ id: video1Server2.uuid })
615 for (const f of video.files) {
616 expect(f.magnetUri).to.contain(str)
// Asserts no magnet URI of the video contains `str` on the given servers
621 async function checkNotContains (servers: PeerTubeServer[], str: string) {
622 for (const server of servers) {
623 const video = await server.videos.get({ id: video1Server2.uuid })
625 for (const f of video.files) {
626 expect(f.magnetUri).to.not.contain(str)
631 before(async function () {
634 await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })
636 await enableRedundancyOnServer1()
639 it('Should still have 2 webseeds after 10 seconds', async function () {
// The URL-encoded server-1 origin must still appear as a webseed
645 await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
647 // Maybe a server deleted a redundancy in the scheduler
650 await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
654 it('Should stop server 1 and expire video redundancy', async function () {
657 await killallServers([ servers[0] ])
661 await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
664 after(async function () {
665 await cleanupTests(servers)
// Scenario: cache replacement. With a short min_lifetime, uploading a second
// video causes the scheduler to replace the first video's redundancy with the
// more recent one; disabling the strategy entirely removes all redundancies.
// NOTE(review): timeouts, the restarted-server config body (lines around
// 727-736), the `checked` polling loop body and closing braces are elided
// from this listing.
669 describe('Test file replacement', function () {
670 let video2Server2UUID: string
671 const strategy = 'recently-added'
673 before(async function () {
676 await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })
678 await enableRedundancyOnServer1()
680 await waitJobs(servers)
681 await servers[0].servers.waitUntilLog('Duplicated ', 5)
682 await waitJobs(servers)
684 await check2Webseeds()
685 await check1PlaylistRedundancies()
686 await checkStatsWith1Redundancy(strategy)
// Upload as PRIVATE first so federation only happens after transcoding
688 const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
689 video2Server2UUID = uuid
691 // Wait transcoding before federation
692 await waitJobs(servers)
694 await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })
697 it('Should cache video 2 webseeds on the first video', async function () {
700 await waitJobs(servers)
// Poll until the redundancy has moved from video 1 to video 2
704 while (checked === false) {
708 await check1WebSeed()
709 await check0PlaylistRedundancies()
711 await check2Webseeds(video2Server2UUID)
712 await check1PlaylistRedundancies(video2Server2UUID)
721 it('Should disable strategy and remove redundancies', async function () {
724 await waitJobs(servers)
// Restart server 1 without any redundancy strategy configured
726 await killallServers([ servers[0] ])
727 await servers[0].run({
730 check_interval: '1 second',
736 await waitJobs(servers)
738 await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
741 after(async function () {
742 await cleanupTests(servers)