X-Git-Url: https://git.immae.eu/?a=blobdiff_plain;f=server%2Ftests%2Fapi%2Fredundancy%2Fredundancy.ts;h=5abed358f99737ee3f885f25ea273f09a63a56f5;hb=8a6828b1664ce3fc535d23c54ed22bab35588d06;hp=d20cb80f10fd5e2bf5470fd1c8a258393f465a42;hpb=57f879a540551c3b958b0991c8e1e3657a4481d8;p=github%2FChocobozzz%2FPeerTube.git

diff --git a/server/tests/api/redundancy/redundancy.ts b/server/tests/api/redundancy/redundancy.ts
index d20cb80f1..5abed358f 100644
--- a/server/tests/api/redundancy/redundancy.ts
+++ b/server/tests/api/redundancy/redundancy.ts
@@ -1,63 +1,60 @@
 /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */
 
-import 'mocha'
-import * as chai from 'chai'
+import { expect } from 'chai'
 import { readdir } from 'fs-extra'
-import * as magnetUtil from 'magnet-uri'
-import { join } from 'path'
-import { HttpStatusCode } from '@shared/core-utils'
+import magnetUtil from 'magnet-uri'
+import { basename, join } from 'path'
+import { checkSegmentHash, checkVideoFilesWereRemoved, saveVideoInServers } from '@server/tests/shared'
+import { root, wait } from '@shared/core-utils'
+import {
+  HttpStatusCode,
+  VideoDetails,
+  VideoFile,
+  VideoPrivacy,
+  VideoRedundancyStrategy,
+  VideoRedundancyStrategyWithManual
+} from '@shared/models'
 import {
-  checkSegmentHash,
-  checkVideoFilesWereRemoved,
   cleanupTests,
+  createMultipleServers,
   doubleFollow,
-  flushAndRunMultipleServers,
-  getVideo,
-  getVideoWithToken,
-  immutableAssign,
   killallServers,
-  makeGetRequest,
-  removeVideo,
-  reRunServer,
-  root,
-  ServerInfo,
+  makeRawRequest,
+  PeerTubeServer,
   setAccessTokensToServers,
-  updateVideo,
-  uploadVideo,
-  viewVideo,
-  wait,
-  waitJobs,
-  waitUntilLog
-} from '@shared/extra-utils'
-import { VideoDetails, VideoPrivacy, VideoRedundancyStrategy, VideoRedundancyStrategyWithManual } from '@shared/models'
-
-const expect = chai.expect
-
-let servers: ServerInfo[] = []
-let video1Server2UUID: string
-let video1Server2Id: number
-
-function checkMagnetWebseeds (file: { magnetUri: string, resolution: { id: number } }, baseWebseeds: string[], server: ServerInfo) {
+  waitJobs
+} from '@shared/server-commands'
+
+let servers: PeerTubeServer[] = []
+let video1Server2: VideoDetails
+
+async function checkMagnetWebseeds (file: VideoFile, baseWebseeds: string[], server: PeerTubeServer) {
   const parsed = magnetUtil.decode(file.magnetUri)
 
   for (const ws of baseWebseeds) {
-    const found = parsed.urlList.find(url => url === `${ws}-${file.resolution.id}.mp4`)
+    const found = parsed.urlList.find(url => url === `${ws}${basename(file.fileUrl)}`)
     expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined
   }
 
   expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length)
+
+  for (const url of parsed.urlList) {
+    await makeRawRequest(url, HttpStatusCode.OK_200)
+  }
 }
 
-async function flushAndRunServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
+async function createServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) {
   const strategies: any[] = []
 
   if (strategy !== null) {
     strategies.push(
-      immutableAssign({
+      {
         min_lifetime: '1 hour',
-        strategy: strategy,
-        size: '400KB'
-      }, additionalParams)
+        strategy,
+        size: '400KB',
+
+        ...additionalParams
+      }
     )
   }
 
@@ -78,17 +75,16 @@ async function flushAndRunServers (strategy: VideoRedundancyStrategy | null, add
     }
   }
 
-  servers = await flushAndRunMultipleServers(3, config)
+  servers = await createMultipleServers(3, config)
 
   // Get the access tokens
   await setAccessTokensToServers(servers)
 
   {
-    const res = await uploadVideo(servers[1].url, servers[1].accessToken, { name: 'video 1 server 2' })
-    video1Server2UUID = res.body.video.uuid
-    video1Server2Id = res.body.video.id
+    const { id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } })
+    video1Server2 = await servers[1].videos.get({ id })
 
-    await viewVideo(servers[1].url, video1Server2UUID)
+    await servers[1].views.simulateView({ id })
   }
 
   await waitJobs(servers)
@@ -103,55 +99,65 @@ async function flushAndRunServers (strategy: VideoRedundancyStrategy | null, add
   await waitJobs(servers)
 }
 
+async function ensureSameFilenames (videoUUID: string) {
+  let webtorrentFilenames: string[]
+  let hlsFilenames: string[]
+
+  for (const server of servers) {
+    const video = await server.videos.getWithToken({ id: videoUUID })
+
+    // Ensure we use the same filenames as the origin
+
+    const localWebtorrentFilenames = video.files.map(f => basename(f.fileUrl)).sort()
+    const localHLSFilenames = video.streamingPlaylists[0].files.map(f => basename(f.fileUrl)).sort()
+
+    if (webtorrentFilenames) expect(webtorrentFilenames).to.deep.equal(localWebtorrentFilenames)
+    else webtorrentFilenames = localWebtorrentFilenames
+
+    if (hlsFilenames) expect(hlsFilenames).to.deep.equal(localHLSFilenames)
+    else hlsFilenames = localHLSFilenames
+  }
+
+  return { webtorrentFilenames, hlsFilenames }
+}
+
 async function check1WebSeed (videoUUID?: string) {
-  if (!videoUUID) videoUUID = video1Server2UUID
+  if (!videoUUID) videoUUID = video1Server2.uuid
 
   const webseeds = [
-    `http://localhost:${servers[1].port}/static/webseed/${videoUUID}`
+    `http://localhost:${servers[1].port}/static/webseed/`
   ]
 
   for (const server of servers) {
     // With token to avoid issues with video follow constraints
-    const res = await getVideoWithToken(server.url, server.accessToken, videoUUID)
+    const video = await server.videos.getWithToken({ id: videoUUID })
 
-    const video: VideoDetails = res.body
     for (const f of video.files) {
-      checkMagnetWebseeds(f, webseeds, server)
+      await checkMagnetWebseeds(f, webseeds, server)
    }
   }
+
+  await ensureSameFilenames(videoUUID)
 }
 
 async function check2Webseeds (videoUUID?: string) {
-  if (!videoUUID) videoUUID = video1Server2UUID
+  if (!videoUUID) videoUUID = video1Server2.uuid
 
   const webseeds = [
-    `http://localhost:${servers[0].port}/static/redundancy/${videoUUID}`,
-    `http://localhost:${servers[1].port}/static/webseed/${videoUUID}`
+    `http://localhost:${servers[0].port}/static/redundancy/`,
+    `http://localhost:${servers[1].port}/static/webseed/`
  ]
 
   for (const server of servers) {
-    const res = await getVideo(server.url, videoUUID)
-
-    const video: VideoDetails = res.body
+    const video = await server.videos.get({ id: videoUUID })
 
     for (const file of video.files) {
-      checkMagnetWebseeds(file, webseeds, server)
-
-      await makeGetRequest({
-        url: servers[0].url,
-        statusCodeExpected: HttpStatusCode.OK_200,
-        path: '/static/redundancy/' + `${videoUUID}-${file.resolution.id}.mp4`,
-        contentType: null
-      })
-      await makeGetRequest({
-        url: servers[1].url,
-        statusCodeExpected: HttpStatusCode.OK_200,
-        path: `/static/webseed/${videoUUID}-${file.resolution.id}.mp4`,
-        contentType: null
-      })
+      await checkMagnetWebseeds(file, webseeds, server)
     }
   }
 
+  const { webtorrentFilenames } = await ensureSameFilenames(videoUUID)
+
   const directories = [
     'test' + servers[0].internalServerNumber + '/redundancy',
     'test' + servers[1].internalServerNumber + '/videos'
@@ -161,32 +167,31 @@ async function check2Webseeds (videoUUID?: string) {
     const files = await readdir(join(root(), directory))
     expect(files).to.have.length.at.least(4)
 
-    for (const resolution of [ 240, 360, 480, 720 ]) {
-      expect(files.find(f => f === `${videoUUID}-${resolution}.mp4`)).to.not.be.undefined
-    }
+    // Ensure the files exist on disk
+    expect(files.find(f => webtorrentFilenames.includes(f))).to.exist
   }
 }
 
 async function check0PlaylistRedundancies (videoUUID?: string) {
-  if (!videoUUID) videoUUID = video1Server2UUID
+  if (!videoUUID) videoUUID = video1Server2.uuid
 
   for (const server of servers) {
     // With token to avoid issues with video follow constraints
-    const res = await getVideoWithToken(server.url, server.accessToken, videoUUID)
-    const video: VideoDetails = res.body
+    const video = await server.videos.getWithToken({ id: videoUUID })
 
     expect(video.streamingPlaylists).to.be.an('array')
     expect(video.streamingPlaylists).to.have.lengthOf(1)
     expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0)
   }
+
+  await ensureSameFilenames(videoUUID)
 }
 
 async function check1PlaylistRedundancies (videoUUID?: string) {
-  if (!videoUUID) videoUUID = video1Server2UUID
+  if (!videoUUID) videoUUID = video1Server2.uuid
 
   for (const server of servers) {
-    const res = await getVideo(server.url, videoUUID)
-    const video: VideoDetails = res.body
+    const video = await server.videos.get({ id: videoUUID })
 
     expect(video.streamingPlaylists).to.have.lengthOf(1)
     expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1)
@@ -196,16 +201,18 @@ async function check1PlaylistRedundancies (videoUUID?: string) {
     expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID)
   }
 
-  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls'
-  const baseUrlSegment = servers[0].url + '/static/redundancy/hls'
+  const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls/' + videoUUID
+  const baseUrlSegment = servers[0].url + '/static/redundancy/hls/' + videoUUID
 
-  const res = await getVideo(servers[0].url, videoUUID)
-  const hlsPlaylist = (res.body as VideoDetails).streamingPlaylists[0]
+  const video = await servers[0].videos.get({ id: videoUUID })
+  const hlsPlaylist = video.streamingPlaylists[0]
 
   for (const resolution of [ 240, 360, 480, 720 ]) {
-    await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist })
+    await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist })
  }
 
+  const { hlsFilenames } = await ensureSameFilenames(videoUUID)
+
   const directories = [
     'test' + servers[0].internalServerNumber + '/redundancy/hls',
     'test' + servers[1].internalServerNumber + '/streaming-playlists/hls'
@@ -215,11 +222,8 @@ async function check1PlaylistRedundancies (videoUUID?: string) {
     const files = await readdir(join(root(), directory, videoUUID))
     expect(files).to.have.length.at.least(4)
 
-    for (const resolution of [ 240, 360, 480, 720 ]) {
-      const filename = `${videoUUID}-${resolution}-fragmented.mp4`
-
-      expect(files.find(f => f === filename)).to.not.be.undefined
-    }
+    // Ensure the files exist on disk
+    expect(files.find(f => hlsFilenames.includes(f))).to.exist
   }
 }
 
@@ -232,7 +236,7 @@ async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) {
     statsLength = 2
   }
 
-  const data = await servers[0].statsCommand.get()
+  const data = await servers[0].stats.get()
   expect(data.videosRedundancy).to.have.lengthOf(statsLength)
 
   const stat = data.videosRedundancy[0]
@@ -259,7 +263,7 @@ async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWit
 }
 
 async function findServerFollows () {
-  const body = await servers[0].followsCommand.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
+  const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' })
   const follows = body.data
   const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`)
   const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`)
@@ -268,7 +272,7 @@ async function findServerFollows () {
 }
 
 async function enableRedundancyOnServer1 () {
-  await servers[0].redundancyCommand.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })
+  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true })
 
   const { server2, server3 } = await findServerFollows()
 
@@ -280,7 +284,7 @@ async function enableRedundancyOnServer1 () {
 }
 
 async function disableRedundancyOnServer1 () {
-  await servers[0].redundancyCommand.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })
+  await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false })
 
   const { server2, server3 } = await findServerFollows()
 
@@ -297,9 +301,9 @@ describe('Test videos redundancy', function () {
     const strategy = 'most-views'
 
     before(function () {
-      this.timeout(120000)
+      this.timeout(240000)
 
-      return flushAndRunServers(strategy)
+      return createServers(strategy)
     })
 
     it('Should have 1 webseed on the first video', async function () {
@@ -316,7 +320,7 @@ describe('Test videos redundancy', function () {
       this.timeout(80000)
 
       await waitJobs(servers)
-      await waitUntilLog(servers[0], 'Duplicated ', 5)
+      await servers[0].servers.waitUntilLog('Duplicated ', 5)
       await waitJobs(servers)
 
       await check2Webseeds()
@@ -335,7 +339,7 @@ describe('Test videos redundancy', function () {
       await check1WebSeed()
       await check0PlaylistRedundancies()
 
-      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ 'videos', join('playlists', 'hls') ])
+      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
     })
 
     after(async function () {
@@ -347,9 +351,9 @@ describe('Test videos redundancy', function () {
     const strategy = 'trending'
 
     before(function () {
-      this.timeout(120000)
+      this.timeout(240000)
 
-      return flushAndRunServers(strategy)
+      return createServers(strategy)
     })
 
     it('Should have 1 webseed on the first video', async function () {
@@ -366,18 +370,31 @@ describe('Test videos redundancy', function () {
       this.timeout(80000)
 
       await waitJobs(servers)
-      await waitUntilLog(servers[0], 'Duplicated ', 5)
+      await servers[0].servers.waitUntilLog('Duplicated ', 5)
+      await waitJobs(servers)
+
+      await check2Webseeds()
+      await check1PlaylistRedundancies()
+      await checkStatsWith1Redundancy(strategy)
+    })
+
+    it('Should unfollow server 3 and keep duplicated videos', async function () {
+      this.timeout(80000)
+
+      await servers[0].follows.unfollow({ target: servers[2] })
+
+      await waitJobs(servers)
+      await wait(5000)
+
       await check2Webseeds()
       await check1PlaylistRedundancies()
       await checkStatsWith1Redundancy(strategy)
     })
 
-    it('Should unfollow on server 1 and remove duplicated videos', async function () {
+    it('Should unfollow server 2 and remove duplicated videos', async function () {
       this.timeout(80000)
 
-      await servers[0].followsCommand.unfollow({ target: servers[1] })
+      await servers[0].follows.unfollow({ target: servers[1] })
 
       await waitJobs(servers)
       await wait(5000)
@@ -385,7 +402,7 @@ describe('Test videos redundancy', function () {
       await check1WebSeed()
       await check0PlaylistRedundancies()
 
-      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ 'videos' ])
+      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
     })
 
     after(async function () {
@@ -397,9 +414,9 @@ describe('Test videos redundancy', function () {
     const strategy = 'recently-added'
 
     before(function () {
-      this.timeout(120000)
+      this.timeout(240000)
 
-      return flushAndRunServers(strategy, { min_views: 3 })
+      return createServers(strategy, { min_views: 3 })
     })
 
     it('Should have 1 webseed on the first video', async function () {
@@ -427,8 +444,8 @@ describe('Test videos redundancy', function () {
     it('Should view 2 times the first video to have > min_views config', async function () {
       this.timeout(80000)
 
-      await viewVideo(servers[0].url, video1Server2UUID)
-      await viewVideo(servers[2].url, video1Server2UUID)
+      await servers[0].views.simulateView({ id: video1Server2.uuid })
+      await servers[2].views.simulateView({ id: video1Server2.uuid })
 
       await wait(10000)
       await waitJobs(servers)
@@ -438,7 +455,7 @@ describe('Test videos redundancy', function () {
       this.timeout(80000)
 
       await waitJobs(servers)
-      await waitUntilLog(servers[0], 'Duplicated ', 5)
+      await servers[0].servers.waitUntilLog('Duplicated ', 5)
       await waitJobs(servers)
 
       await check2Webseeds()
@@ -449,12 +466,13 @@ describe('Test videos redundancy', function () {
     it('Should remove the video and the redundancy files', async function () {
       this.timeout(20000)
 
-      await removeVideo(servers[1].url, servers[1].accessToken, video1Server2UUID)
+      await saveVideoInServers(servers, video1Server2.uuid)
+      await servers[1].videos.remove({ id: video1Server2.uuid })
 
       await waitJobs(servers)
 
       for (const server of servers) {
-        await checkVideoFilesWereRemoved(video1Server2UUID, server.internalServerNumber)
+        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
     })
 
@@ -467,9 +485,9 @@ describe('Test videos redundancy', function () {
     const strategy = 'recently-added'
 
     before(async function () {
-      this.timeout(120000)
+      this.timeout(240000)
 
-      await flushAndRunServers(strategy, { min_views: 3 }, false)
+      await createServers(strategy, { min_views: 3 }, false)
     })
 
     it('Should have 0 playlist redundancy on the first video', async function () {
@@ -495,14 +513,14 @@ describe('Test videos redundancy', function () {
     it('Should have 1 redundancy on the first video', async function () {
       this.timeout(160000)
 
-      await viewVideo(servers[0].url, video1Server2UUID)
-      await viewVideo(servers[2].url, video1Server2UUID)
+      await servers[0].views.simulateView({ id: video1Server2.uuid })
+      await servers[2].views.simulateView({ id: video1Server2.uuid })
 
       await wait(10000)
       await waitJobs(servers)
 
       await waitJobs(servers)
-      await waitUntilLog(servers[0], 'Duplicated ', 1)
+      await servers[0].servers.waitUntilLog('Duplicated ', 1)
       await waitJobs(servers)
 
       await check1PlaylistRedundancies()
@@ -512,12 +530,13 @@ describe('Test videos redundancy', function () {
     it('Should remove the video and the redundancy files', async function () {
       this.timeout(20000)
 
-      await removeVideo(servers[1].url, servers[1].accessToken, video1Server2UUID)
+      await saveVideoInServers(servers, video1Server2.uuid)
+      await servers[1].videos.remove({ id: video1Server2.uuid })
 
       await waitJobs(servers)
 
       for (const server of servers) {
-        await checkVideoFilesWereRemoved(video1Server2UUID, server.internalServerNumber)
+        await checkVideoFilesWereRemoved({ server, video: server.store.videoDetails })
      }
     })
 
@@ -528,9 +547,9 @@ describe('Test videos redundancy', function () {
 
   describe('With manual strategy', function () {
     before(function () {
-      this.timeout(120000)
+      this.timeout(240000)
 
-      return flushAndRunServers(null)
+      return createServers(null)
     })
 
     it('Should have 1 webseed on the first video', async function () {
@@ -540,14 +559,14 @@ describe('Test videos redundancy', function () {
     })
 
     it('Should create a redundancy on first video', async function () {
-      await servers[0].redundancyCommand.addVideo({ videoId: video1Server2Id })
+      await servers[0].redundancy.addVideo({ videoId: video1Server2.id })
     })
 
     it('Should have 2 webseeds on the first video', async function () {
       this.timeout(80000)
 
       await waitJobs(servers)
-      await waitUntilLog(servers[0], 'Duplicated ', 5)
+      await servers[0].servers.waitUntilLog('Duplicated ', 5)
       await waitJobs(servers)
 
       await check2Webseeds()
@@ -558,7 +577,7 @@ describe('Test videos redundancy', function () {
     it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
       this.timeout(80000)
 
-      const body = await servers[0].redundancyCommand.listVideos({ target: 'remote-videos' })
+      const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })
 
       const videos = body.data
       expect(videos).to.have.lengthOf(1)
@@ -566,7 +585,7 @@ describe('Test videos redundancy', function () {
       const video = videos[0]
 
       for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
-        await servers[0].redundancyCommand.removeVideo({ redundancyId: r.id })
+        await servers[0].redundancy.removeVideo({ redundancyId: r.id })
      }
 
       await waitJobs(servers)
@@ -575,7 +594,7 @@ describe('Test videos redundancy', function () {
       await check1WebSeed()
       await check0PlaylistRedundancies()
 
-      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].serverNumber, [ 'videos' ])
+      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
     })
 
     after(async function () {
@@ -586,10 +605,9 @@ describe('Test videos redundancy', function () {
   describe('Test expiration', function () {
     const strategy = 'recently-added'
 
-    async function checkContains (servers: ServerInfo[], str: string) {
+    async function checkContains (servers: PeerTubeServer[], str: string) {
       for (const server of servers) {
-        const res = await getVideo(server.url, video1Server2UUID)
-        const video: VideoDetails = res.body
+        const video = await server.videos.get({ id: video1Server2.uuid })
 
         for (const f of video.files) {
           expect(f.magnetUri).to.contain(str)
@@ -597,10 +615,9 @@ describe('Test videos redundancy', function () {
      }
     }
 
-    async function checkNotContains (servers: ServerInfo[], str: string) {
+    async function checkNotContains (servers: PeerTubeServer[], str: string) {
       for (const server of servers) {
-        const res = await getVideo(server.url, video1Server2UUID)
-        const video: VideoDetails = res.body
+        const video = await server.videos.get({ id: video1Server2.uuid })
 
         for (const f of video.files) {
           expect(f.magnetUri).to.not.contain(str)
@@ -609,9 +626,9 @@ describe('Test videos redundancy', function () {
     }
 
     before(async function () {
-      this.timeout(120000)
+      this.timeout(240000)
 
-      await flushAndRunServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })
+      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })
 
       await enableRedundancyOnServer1()
     })
@@ -634,7 +651,7 @@ describe('Test videos redundancy', function () {
     it('Should stop server 1 and expire video redundancy', async function () {
       this.timeout(80000)
 
-      killallServers([ servers[0] ])
+      await killallServers([ servers[0] ])
 
       await wait(15000)
 
@@ -651,31 +668,31 @@ describe('Test videos redundancy', function () {
     const strategy = 'recently-added'
 
     before(async function () {
-      this.timeout(120000)
+      this.timeout(240000)
 
-      await flushAndRunServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })
+      await createServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })
 
       await enableRedundancyOnServer1()
 
       await waitJobs(servers)
-      await waitUntilLog(servers[0], 'Duplicated ', 5)
+      await servers[0].servers.waitUntilLog('Duplicated ', 5)
       await waitJobs(servers)
 
-      await check2Webseeds(video1Server2UUID)
-      await check1PlaylistRedundancies(video1Server2UUID)
+      await check2Webseeds()
+      await check1PlaylistRedundancies()
       await checkStatsWith1Redundancy(strategy)
 
-      const res = await uploadVideo(servers[1].url, servers[1].accessToken, { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE })
-      video2Server2UUID = res.body.video.uuid
+      const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
+      video2Server2UUID = uuid
 
       // Wait transcoding before federation
       await waitJobs(servers)
 
-      await updateVideo(servers[1].url, servers[1].accessToken, video2Server2UUID, { privacy: VideoPrivacy.PUBLIC })
+      await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })
     })
 
     it('Should cache video 2 webseeds on the first video', async function () {
-      this.timeout(120000)
+      this.timeout(240000)
 
       await waitJobs(servers)
 
@@ -685,8 +702,8 @@ describe('Test videos redundancy', function () {
         await wait(1000)
 
         try {
-          await check1WebSeed(video1Server2UUID)
-          await check0PlaylistRedundancies(video1Server2UUID)
+          await check1WebSeed()
+          await check0PlaylistRedundancies()
 
           await check2Webseeds(video2Server2UUID)
           await check1PlaylistRedundancies(video2Server2UUID)
@@ -703,8 +720,8 @@ describe('Test videos redundancy', function () {
 
       await waitJobs(servers)
 
-      killallServers([ servers[0] ])
-      await reRunServer(servers[0], {
+      await killallServers([ servers[0] ])
+      await servers[0].run({
         redundancy: {
           videos: {
             check_interval: '1 second',
@@ -715,7 +732,7 @@ describe('Test videos redundancy', function () {
 
       await waitJobs(servers)
 
-      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].internalServerNumber, [ join('redundancy', 'hls') ])
+      await checkVideoFilesWereRemoved({ server: servers[0], video: video1Server2, onlyVideoFiles: true })
     })
 
     after(async function () {