1 /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
4 import * as chai from 'chai'
5 import { readdir } from 'fs-extra'
6 import * as magnetUtil from 'magnet-uri'
7 import { basename, join } from 'path'
10 checkVideoFilesWereRemoved,
12 createMultipleServers,
19 setAccessTokensToServers,
22 } from '@shared/extra-utils'
28 VideoRedundancyStrategy,
29 VideoRedundancyStrategyWithManual
30 } from '@shared/models'
// Chai's expect, aliased once for the whole spec file
const expect = chai.expect

// Shared state used by every describe block below:
// the three test servers, and the first video uploaded on server 2
// (the video that gets duplicated by the redundancy strategies under test)
let servers: PeerTubeServer[] = []
let video1Server2: VideoDetails
// Assert that a video file's magnet URI advertises exactly the expected
// webseed URLs (one per expected host, each suffixed with the file's
// basename) and that every advertised URL answers HTTP 200.
// NOTE(review): some interior lines are elided in this excerpt.
async function checkMagnetWebseeds (file: VideoFile, baseWebseeds: string[], server: PeerTubeServer) {
  const parsed = magnetUtil.decode(file.magnetUri)

  // Every expected webseed base URL must appear in the magnet's urlList
  for (const ws of baseWebseeds) {
    const found = parsed.urlList.find(url => url === `${ws}${basename(file.fileUrl)}`)
    expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
  // ...and no unexpected webseeds beyond those
  expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)

  // Each advertised webseed URL must actually be downloadable
  for (const url of parsed.urlList) {
    await makeRawRequest(url, HttpStatusCode.OK_200)
// Spin up the 3-server fixture used by every suite below.
// `strategy` configures server 1's redundancy strategy (null => no strategy,
// used by the 'manual' suite); `additionalParams` is merged into the strategy
// config (e.g. min_views, min_lifetime); `withWebtorrent` toggles webtorrent
// transcoding on the servers.
// Side effects: fills the module-level `servers` array and `video1Server2`.
// NOTE(review): the config object construction is partially elided in this
// excerpt — the fragments below belong to it.
async function createSingleServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
  const strategies: any[] = []

  if (strategy !== null) {
        min_lifetime: '1 hour',
      enabled: withWebtorrent
      check_interval: '5 seconds',
  servers = await createMultipleServers(3, config)

  // Get the access tokens
  await setAccessTokensToServers(servers)

  // Upload the video that the redundancy strategies will duplicate
  const { id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } })
  video1Server2 = await servers[1].videos.get({ id })

  // Give it one view (the 'most-views'/'trending' strategies are view-based)
  await servers[1].videos.view({ id })

  await waitJobs(servers)

  // Server 1 and server 2 follow each other
  await doubleFollow(servers[0], servers[1])
  // Server 1 and server 3 follow each other
  await doubleFollow(servers[0], servers[2])
  // Server 2 and server 3 follow each other
  await doubleFollow(servers[1], servers[2])

  await waitJobs(servers)
// Check that every server reports the same (sorted) webtorrent and HLS
// filenames for the video — i.e. redundancy mirrors keep the origin's
// filenames. Returns the filename lists for further on-disk checks.
async function ensureSameFilenames (videoUUID: string) {
  let webtorrentFilenames: string[]
  let hlsFilenames: string[]

  for (const server of servers) {
    const video = await server.videos.getWithToken({ id: videoUUID })

    // Ensure we use the same filenames that the origin
    const localWebtorrentFilenames = video.files.map(f => basename(f.fileUrl)).sort()
    const localHLSFilenames = video.streamingPlaylists[0].files.map(f => basename(f.fileUrl)).sort()

    // First server seeds the expected lists; later servers must match them
    if (webtorrentFilenames) expect(webtorrentFilenames).to.deep.equal(localWebtorrentFilenames)
    else webtorrentFilenames = localWebtorrentFilenames

    if (hlsFilenames) expect(hlsFilenames).to.deep.equal(localHLSFilenames)
    else hlsFilenames = localHLSFilenames

  return { webtorrentFilenames, hlsFilenames }
// Assert the video has exactly one webseed (server 2's own /static/webseed/),
// i.e. no redundancy mirror is present. Defaults to video1Server2.
// NOTE(review): the `webseeds` array declaration is partially elided in this
// excerpt — the URL fragment below is one of its elements.
async function check1WebSeed (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

    `http://localhost:${servers[1].port}/static/webseed/`

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    for (const f of video.files) {
      await checkMagnetWebseeds(f, webseeds, server)

  await ensureSameFilenames(videoUUID)
// Assert the video has two webseeds: the origin (server 2's /static/webseed/)
// plus server 1's redundancy mirror (/static/redundancy/), and that the
// mirrored files actually exist on disk on both servers.
// NOTE(review): the `webseeds` array declaration is partially elided here.
async function check2Webseeds (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

    `http://localhost:${servers[0].port}/static/redundancy/`,
    `http://localhost:${servers[1].port}/static/webseed/`

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    for (const file of video.files) {
      await checkMagnetWebseeds(file, webseeds, server)

  const { webtorrentFilenames } = await ensureSameFilenames(videoUUID)

  // Mirror directory on server 1, origin directory on server 2
  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy',
    'test' + servers[1].internalServerNumber + '/videos'

  for (const directory of directories) {
    const files = await readdir(join(root(), directory))
    expect(files).to.have.length.at.least(4)

    // Ensure we files exist on disk
    expect(files.find(f => webtorrentFilenames.includes(f))).to.exist
// Assert every server reports exactly one HLS streaming playlist for the
// video, with zero redundancies attached. Defaults to video1Server2.
async function check0PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    expect(video.streamingPlaylists).to.be.an('array')
    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)

  await ensureSameFilenames(videoUUID)
// Assert the HLS playlist has exactly one redundancy (server 1's mirror),
// that segment hashes match between the origin playlist and the mirrored
// segments for each resolution, and that the mirrored HLS files exist on
// disk on both servers.
async function check1PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)

    const redundancy = video.streamingPlaylists[0].redundancies[0]

    // The mirror base URL lives on server 1 under /static/redundancy/hls/
    expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)

  // Origin playlist on server 2, mirrored segments on server 1
  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls'
  const baseUrlSegment = servers[0].url + '/static/redundancy/hls'

  const video = await servers[0].videos.get({ id: videoUUID })
  const hlsPlaylist = video.streamingPlaylists[0]

  // One transcoded rendition per resolution — check each one's segment hashes
  for (const resolution of [ 240, 360, 480, 720 ]) {
    await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist })

  const { hlsFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    'test' + servers[0].internalServerNumber + '/redundancy/hls',
    'test' + servers[1].internalServerNumber + '/streaming-playlists/hls'

  for (const directory of directories) {
    const files = await readdir(join(root(), directory, videoUUID))
    expect(files).to.have.length.at.least(4)

    // Ensure we files exist on disk
    expect(files.find(f => hlsFilenames.includes(f))).to.exist
// Fetch server 1's stats and assert the redundancy entry matches the given
// strategy (totalSize is only expected to be set for non-manual strategies).
// Returns the stat entry so callers can make further assertions.
// NOTE(review): the `statsLength` / `totalSize` assignments inside the
// `if` branch are elided in this excerpt — presumably set there; confirm
// against the full file.
async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
  let totalSize: number = null

  if (strategy !== 'manual') {

  const data = await servers[0].stats.get()
  expect(data.videosRedundancy).to.have.lengthOf(statsLength)

  const stat = data.videosRedundancy[0]
  expect(stat.strategy).to.equal(strategy)
  expect(stat.totalSize).to.equal(totalSize)
// Assert stats reflect exactly one duplicated video: 8 mirrored files
// (4 webtorrent + 4 HLS renditions), or 4 when only HLS is enabled.
async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) {
  const stat = await checkStatsGlobal(strategy)

  // Used space is positive but bounded (videos are tiny test fixtures)
  expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
  expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8)
  expect(stat.totalVideos).to.equal(1)
// Assert stats show no duplicated videos/files at all for the strategy.
async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.equal(0)
  expect(stat.totalVideoFiles).to.equal(0)
  expect(stat.totalVideos).to.equal(0)
// Fetch server 1's followings and pick out its follow entries toward
// server 2 and server 3 (matched by host), for redundancy-flag assertions.
async function findServerFollows () {
  const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
  const follows = body.data
  const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`)
  const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`)

  return { server2, server3 }
// Allow server 1 to mirror server 2's videos, then verify the flag is set
// only on the server 2 follow (server 3 must stay unaffected).
async function enableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.true
// Revoke server 1's permission to mirror server 2's videos, then verify the
// redundancy flag is cleared on both follow entries.
async function disableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.false
// Top-level suite: exercises each redundancy strategy against the
// 3-server fixture created by createSingleServers().
// NOTE(review): before()/this.timeout() wrappers are elided in this excerpt.
describe('Test videos redundancy', function () {

  // 'most-views' duplicates the most viewed remote videos; video1Server2
  // already has one view, so it qualifies as soon as redundancy is allowed.
  describe('With most-views strategy', function () {
    const strategy = 'most-views'

      return createSingleServers(strategy)

    // Baseline: only the origin webseed, no playlist redundancy, empty stats
    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()

    it('Should have 2 webseeds on the first video', async function () {

      // Wait until server 1 has logged the duplication of all files
      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

    // Disabling redundancy must delete the mirrored files from server 1
    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {

      await disableRedundancyOnServer1()

      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })

    after(async function () {
      return cleanupTests(servers)
  // 'trending' is view-based like 'most-views'; here redundancy removal is
  // triggered by unfollowing rather than by toggling the redundancy flag.
  describe('With trending strategy', function () {
    const strategy = 'trending'

      return createSingleServers(strategy)

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()

    it('Should have 2 webseeds on the first video', async function () {

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

    // Unfollowing server 2 must also purge the mirrored files
    it('Should unfollow on server 1 and remove duplicated videos', async function () {

      await servers[0].follows.unfollow({ target: servers[1] })

      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })

    after(async function () {
      await cleanupTests(servers)
  // 'recently-added' only duplicates videos above min_views (3 here), so the
  // suite first verifies nothing is duplicated, then adds views to cross the
  // threshold and verifies duplication kicks in.
  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

      return createSingleServers(strategy, { min_views: 3 })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()

    // Below min_views: enabling redundancy alone must not duplicate anything
    it('Should still have 1 webseed on the first video', async function () {

      await waitJobs(servers)

      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)

    // Push the view count over min_views (1 existing view + 2 new ones)
    it('Should view 2 times the first video to have > min_views config', async function () {

      await servers[0].videos.view({ id: video1Server2.uuid })
      await servers[2].videos.view({ id: video1Server2.uuid })

      await waitJobs(servers)

    it('Should have 2 webseeds on the first video', async function () {

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

    // Deleting the origin video must cascade-delete the mirrored files too
    it('Should remove the video and the redundancy files', async function () {

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })

    after(async function () {
      await cleanupTests(servers)
  // Same 'recently-added' scenario but with webtorrent disabled on the
  // servers — only HLS playlist redundancies are expected (hence the
  // onlyHls flag passed to the stats check and the single 'Duplicated' log).
  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {

      // withWebtorrent = false: servers produce HLS files only
      await createSingleServers(strategy, { min_views: 3 }, false)

    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()

    // Still below min_views: no duplication expected yet
    it('Should still have 0 redundancy on the first video', async function () {

      await waitJobs(servers)

      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)

    it('Should have 1 redundancy on the first video', async function () {

      // Cross the min_views threshold, then wait for one HLS duplication
      await servers[0].videos.view({ id: video1Server2.uuid })
      await servers[2].videos.view({ id: video1Server2.uuid })

      await waitJobs(servers)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy, true)

    it('Should remove the video and the redundancy files', async function () {

      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })

    after(async function () {
      await cleanupTests(servers)
  // No automatic strategy (createSingleServers(null)): redundancies are
  // created and removed explicitly through the redundancy API.
  describe('With manual strategy', function () {

      return createSingleServers(null)

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')

    it('Should create a redundancy on first video', async function () {
      await servers[0].redundancy.addVideo({ videoId: video1Server2.id })

    it('Should have 2 webseeds on the first video', async function () {

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {

      const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })

      const videos = body.data
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]

      // Remove every redundancy entry, both webtorrent files and HLS playlists
      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await servers[0].redundancy.removeVideo({ redundancyId: r.id })

      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })

    after(async function () {
      await cleanupTests(servers)
  // Verifies that redundancies expire after min_lifetime: while server 1 is
  // alive its webseed stays in the magnet URIs; once it is killed the other
  // servers drop the expired redundancy.
  describe('Test expiration', function () {
    const strategy = 'recently-added'

    // Assert every file's magnet URI on every given server contains `str`
    // (used with the URL-encoded host of server 1)
    async function checkContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)

    // Negative counterpart of checkContains
    async function checkNotContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)

    before(async function () {

      // Short lifetime + min_views 0 so the redundancy is created immediately
      await createSingleServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

    // While server 1 runs, the redundancy is renewed and must persist
    it('Should still have 2 webseeds after 10 seconds', async function () {

      await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)

      // Maybe a server deleted a redundancy in the scheduler

      await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)

    it('Should stop server 1 and expire video redundancy', async function () {

      await killallServers([ servers[0] ])

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2Flocalhost%3A' + servers[0].port)

    after(async function () {
      await cleanupTests(servers)
  // Verifies the cache rotates: when a newer video (video 2) is uploaded,
  // the strategy replaces video 1's redundancy with video 2's, and disabling
  // the strategy removes all redundancies.
  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'

    before(async function () {

      await createSingleServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      // Video 1 is fully duplicated before video 2 enters the picture
      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

      // Upload privately first so federation only happens after transcoding
      const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
      video2Server2UUID = uuid

      // Wait transcoding before federation
      await waitJobs(servers)

      await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })

    it('Should cache video 2 webseeds on the first video', async function () {

      await waitJobs(servers)

      // Poll until the cache has rotated: video 1 back to 1 webseed,
      // video 2 now holding the redundancy
      while (checked === false) {

        await check1WebSeed()
        await check0PlaylistRedundancies()

        await check2Webseeds(video2Server2UUID)
        await check1PlaylistRedundancies(video2Server2UUID)

    it('Should disable strategy and remove redundancies', async function () {

      await waitJobs(servers)

      // Restart server 1 without redundancy strategies configured
      await killallServers([ servers[0] ])
      await servers[0].run({

          check_interval: '1 second',

      await waitJobs(servers)

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })

    after(async function () {
      await cleanupTests(servers)