/* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */

import { expect } from 'chai'
import { readdir } from 'fs-extra'
import { decode as magnetUriDecode } from 'magnet-uri'
import { basename, join } from 'path'
import { checkSegmentHash, checkVideoFilesWereRemoved, saveVideoInServers } from '@server/tests/shared'
import { wait } from '@shared/core-utils'
import {
  HttpStatusCode,
  VideoDetails,
  VideoFile,
  VideoPrivacy,
  VideoRedundancyStrategy,
  VideoRedundancyStrategyWithManual
} from '@shared/models'
import {
  cleanupTests,
  createMultipleServers,
  doubleFollow,
  killallServers,
  makeRawRequest,
  PeerTubeServer,
  setAccessTokensToServers,
  waitJobs
} from '@shared/server-commands'
let servers: PeerTubeServer[] = []
let video1Server2: VideoDetails
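
// Check that the magnet URI of a video file lists exactly the expected webseed base URLs,
// and that every webseed URL advertised in the magnet actually answers with a 200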
async function checkMagnetWebseeds (file: VideoFile, baseWebseeds: string[], server: PeerTubeServer) {
  const parsed = magnetUriDecode(file.magnetUri)

  for (const ws of baseWebseeds) {
    const found = parsed.urlList.find(url => url === `${ws}${basename(file.fileUrl)}`)
    expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
  }

  expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)

  for (const url of parsed.urlList) {
    await makeRawRequest({ url, expectedStatus: HttpStatusCode.OK_200 })
  }
}
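
// Spin up 3 servers with the given redundancy strategy (or none, for the manual strategy),
// upload "video 1 server 2" on server 2, give it a view and make every server follow the others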
async function createServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
  const strategies: any[] = []

  if (strategy !== null) {
    strategies.push({
      min_lifetime: '1 hour',
      strategy,
      size: '400KB', // Assumed quota, matching the 409600 byte bounds asserted in the stats helpers
      ...additionalParams
    })
  }

  // Assumed shape of the config override: webtorrent transcoding toggled by withWebtorrent,
  // HLS enabled, and the redundancy scheduler checking its strategies every 5 seconds
  const config = {
    transcoding: {
      webtorrent: { enabled: withWebtorrent },
      hls: { enabled: true }
    },
    redundancy: {
      videos: { check_interval: '5 seconds', strategies }
    }
  }
  servers = await createMultipleServers(3, config)

  // Get the access tokens
  await setAccessTokensToServers(servers)

  const { id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } })
  video1Server2 = await servers[1].videos.get({ id })
  await servers[1].views.simulateView({ id })

  await waitJobs(servers)

  // Server 1 and server 2 follow each other
  await doubleFollow(servers[0], servers[1])
  // Server 1 and server 3 follow each other
  await doubleFollow(servers[0], servers[2])
  // Server 2 and server 3 follow each other
  await doubleFollow(servers[1], servers[2])

  await waitJobs(servers)
}
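
// Every server should expose the same (origin) filenames for both webtorrent and HLS files;
// returns the sorted filename lists so callers can compare them with what is on disk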
async function ensureSameFilenames (videoUUID: string) {
  let webtorrentFilenames: string[]
  let hlsFilenames: string[]

  for (const server of servers) {
    const video = await server.videos.getWithToken({ id: videoUUID })

    // Ensure we use the same filenames as the origin
    const localWebtorrentFilenames = video.files.map(f => basename(f.fileUrl)).sort()
    const localHLSFilenames = video.streamingPlaylists[0].files.map(f => basename(f.fileUrl)).sort()

    if (webtorrentFilenames) expect(webtorrentFilenames).to.deep.equal(localWebtorrentFilenames)
    else webtorrentFilenames = localWebtorrentFilenames

    if (hlsFilenames) expect(hlsFilenames).to.deep.equal(localHLSFilenames)
    else hlsFilenames = localHLSFilenames
  }

  return { webtorrentFilenames, hlsFilenames }
}
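
// check1WebSeed: only the origin (server 2) is advertised as a webseed.
// check2Webseeds: server 1 also serves the files from its redundancy storage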
async function check1WebSeed (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `${servers[1].url}/static/webseed/`
  ]

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    for (const f of video.files) {
      await checkMagnetWebseeds(f, webseeds, server)
    }
  }

  await ensureSameFilenames(videoUUID)
}
async function check2Webseeds (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  const webseeds = [
    `${servers[0].url}/static/redundancy/`,
    `${servers[1].url}/static/webseed/`
  ]

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    for (const file of video.files) {
      await checkMagnetWebseeds(file, webseeds, server)
    }
  }

  const { webtorrentFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    servers[0].getDirectoryPath('redundancy'),
    servers[1].getDirectoryPath('videos')
  ]

  for (const directory of directories) {
    const files = await readdir(directory)
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => webtorrentFilenames.includes(f))).to.exist
  }
}
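
// check0PlaylistRedundancies / check1PlaylistRedundancies: assert how many redundancies the single
// HLS playlist advertises; the latter also verifies segment hashes and the files stored on disk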
async function check0PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    // With token to avoid issues with video follow constraints
    const video = await server.videos.getWithToken({ id: videoUUID })

    expect(video.streamingPlaylists).to.be.an('array')
    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
  }

  await ensureSameFilenames(videoUUID)
}
async function check1PlaylistRedundancies (videoUUID?: string) {
  if (!videoUUID) videoUUID = video1Server2.uuid

  for (const server of servers) {
    const video = await server.videos.get({ id: videoUUID })

    expect(video.streamingPlaylists).to.have.lengthOf(1)
    expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)

    const redundancy = video.streamingPlaylists[0].redundancies[0]

    expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
  }

  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID
  const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID

  const video = await servers[0].videos.get({ id: videoUUID })
  const hlsPlaylist = video.streamingPlaylists[0]

  for (const resolution of [ 240, 360, 480, 720 ]) {
    await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist })
  }

  const { hlsFilenames } = await ensureSameFilenames(videoUUID)

  const directories = [
    servers[0].getDirectoryPath('redundancy/hls'),
    servers[1].getDirectoryPath('streaming-playlists/hls')
  ]

  for (const directory of directories) {
    const files = await readdir(join(directory, videoUUID))
    expect(files).to.have.length.at.least(4)

    // Ensure the files exist on disk
    expect(files.find(f => hlsFilenames.includes(f))).to.exist
  }
}
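
// Stats helpers: the redundancy stats of server 1 should reflect the configured strategy,
// the number of duplicated files and the space they use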
async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
  let totalSize: number = null
  let statsLength = 1

  if (strategy !== 'manual') {
    // Assumed: automatic strategies report their 400KB quota and add a second stats entry
    totalSize = 409600
    statsLength = 2
  }

  const data = await servers[0].stats.get()
  expect(data.videosRedundancy).to.have.lengthOf(statsLength)

  const stat = data.videosRedundancy[0]
  expect(stat.strategy).to.equal(strategy)
  expect(stat.totalSize).to.equal(totalSize)

  return stat
}
async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.be.at.least(1).and.below(409601)
  expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8)
  expect(stat.totalVideos).to.equal(1)
}
async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) {
  const stat = await checkStatsGlobal(strategy)

  expect(stat.totalUsed).to.equal(0)
  expect(stat.totalVideoFiles).to.equal(0)
  expect(stat.totalVideos).to.equal(0)
}
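
// Redundancy is allowed per followed host: only server 2 gets hostRedundancyAllowed toggled,
// server 3 must stay untouched in both cases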
async function findServerFollows () {
  const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
  const follows = body.data
  const server2 = follows.find(f => f.following.host === `${servers[1].host}`)
  const server3 = follows.find(f => f.following.host === `${servers[2].host}`)

  return { server2, server3 }
}
async function enableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.true
}
async function disableRedundancyOnServer1 () {
  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })

  const { server2, server3 } = await findServerFollows()

  expect(server3).to.not.be.undefined
  expect(server3.following.hostRedundancyAllowed).to.be.false

  expect(server2).to.not.be.undefined
  expect(server2.following.hostRedundancyAllowed).to.be.false
}
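
// One suite per redundancy strategy: each suite recreates the 3 servers in its before hook
// and wipes them in its after hook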
describe('Test videos redundancy', function () {

  describe('With most-views strategy', function () {
    const strategy = 'most-views'

    before(async function () {
      return createServers(strategy)
    })
    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
      await disableRedundancyOnServer1()

      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      return cleanupTests(servers)
    })
  })
  describe('With trending strategy', function () {
    const strategy = 'trending'

    before(async function () {
      return createServers(strategy)
    })
    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 3 and keep duplicated videos', async function () {
      await servers[0].follows.unfollow({ target: servers[2] })

      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow server 2 and remove duplicated videos', async function () {
      await servers[0].follows.unfollow({ target: servers[1] })

      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

    before(async function () {
      return createServers(strategy, { min_views: 3 })
    })
    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 1 webseed on the first video', async function () {
      await waitJobs(servers)
      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should view 2 times the first video to have > min_views config', async function () {
      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await waitJobs(servers)
    })

    it('Should have 2 webseeds on the first video', async function () {
      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {
      await createServers(strategy, { min_views: 3 }, false)
    })
    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 0 redundancy on the first video', async function () {
      await waitJobs(servers)
      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should have 1 redundancy on the first video', async function () {
      await servers[0].views.simulateView({ id: video1Server2.uuid })
      await servers[2].views.simulateView({ id: video1Server2.uuid })

      await waitJobs(servers)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy, true)
    })

    it('Should remove the video and the redundancy files', async function () {
      await saveVideoInServers(servers, video1Server2.uuid)
      await servers[1].videos.remove({ id: video1Server2.uuid })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
  describe('With manual strategy', function () {

    before(async function () {
      return createServers(null)
    })
    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')
    })

    it('Should create a redundancy on first video', async function () {
      await servers[0].redundancy.addVideo({ videoId: video1Server2.id })
    })

    it('Should have 2 webseeds on the first video', async function () {
      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')
    })

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
      const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })

      const videos = body.data
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]

      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await servers[0].redundancy.removeVideo({ redundancyId: r.id })
      }

      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
  describe('Test expiration', function () {
    const strategy = 'recently-added'
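
    // The webseed URLs of server 1 appear URL-encoded inside the magnet URIs,
    // hence the 'http%3A%2F%2F<host>%3A<port>' search strings used below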
    async function checkContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)
        }
      }
    }

    async function checkNotContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2.uuid })

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)
        }
      }
    }
    before(async function () {
      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
    })
    it('Should still have 2 webseeds after 10 seconds', async function () {
      await wait(10000)

      try {
        await checkContains(servers, 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
      } catch {
        // Maybe a server deleted a redundancy in the scheduler
        await checkContains(servers, 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
      }
    })
    it('Should stop server 1 and expire video redundancy', async function () {
      await killallServers([ servers[0] ])

      // Wait for the 7 seconds min_lifetime to pass (exact delay assumed)
      await wait(15000)

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2F' + servers[0].hostname + '%3A' + servers[0].port)
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'
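
    // A second video uploaded on server 2 should get duplicated instead of the first one:
    // video 1 goes back to a single webseed while video 2 gains the redundancy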
    before(async function () {
      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)

      const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
      video2Server2UUID = uuid

      // Wait for transcoding before federation
      await waitJobs(servers)

      await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })
    })
    it('Should cache video 2 webseeds on the first video', async function () {
      await waitJobs(servers)

      // Poll until the redundancy has moved from video 1 to video 2 (retry loop and delay assumed)
      let checked = false
      while (checked === false) {
        await wait(1000)

        try {
          await check1WebSeed()
          await check0PlaylistRedundancies()

          await check2Webseeds(video2Server2UUID)
          await check1PlaylistRedundancies(video2Server2UUID)

          checked = true
        } catch {
          checked = false
        }
      }
    })
    it('Should disable strategy and remove redundancies', async function () {
      await waitJobs(servers)

      // Restart server 1 without any automatic strategy (config override shape assumed)
      await killallServers([ servers[0] ])
      await servers[0].run({
        redundancy: {
          videos: {
            check_interval: '1 second',
            strategies: []
          }
        }
      })

      await waitJobs(servers)

      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
})