+ // Suite for the 'manual' redundancy strategy: duplicates are only created
+ // and removed through explicit API calls (addVideoRedundancy /
+ // removeVideoRedundancy), never by a background scheduler.
+ // NOTE(review): relies on helpers and fixtures declared earlier in this file
+ // (servers, video1Server2Id, check*WebSeed*, flushAndRunServers, ...).
+ describe('With manual strategy', function () {
+ before(function () {
+ this.timeout(120000)
+
+ // Passing null presumably starts the servers with no automatic
+ // redundancy strategy, so only manual actions apply — TODO confirm
+ // against flushAndRunServers' definition.
+ return flushAndRunServers(null)
+ })
+
+ it('Should have 1 webseed on the first video', async function () {
+ // Baseline: no redundancy exists yet, so only the origin webseed
+ // is present and stats reflect a single copy.
+ await check1WebSeed()
+ await check0PlaylistRedundancies()
+ await checkStatsWith1Webseed('manual')
+ })
+
+ it('Should create a redundancy on first video', async function () {
+ // Explicitly ask server 1 to duplicate the first video of server 2.
+ await addVideoRedundancy({
+ url: servers[0].url,
+ accessToken: servers[0].accessToken,
+ videoId: video1Server2Id
+ })
+ })
+
+ it('Should have 2 webseeds on the first video', async function () {
+ this.timeout(80000)
+
+ // Wait for the duplication job: the log check blocks until the
+ // 'Duplicated ' message appears on server 1, then a second
+ // waitJobs lets federation settle before re-checking webseeds.
+ await waitJobs(servers)
+ await waitUntilLog(servers[0], 'Duplicated ', 5)
+ await waitJobs(servers)
+
+ await check2Webseeds()
+ await check1PlaylistRedundancies()
+ await checkStatsWith2Webseed('manual')
+ })
+
+ it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
+ this.timeout(80000)
+
+ // List the redundancies server 1 holds for remote videos; exactly
+ // one video (the one duplicated above) is expected.
+ const res = await listVideoRedundancies({
+ url: servers[0].url,
+ accessToken: servers[0].accessToken,
+ target: 'remote-videos'
+ })
+
+ const videos = res.body.data as VideoRedundancy[]
+ expect(videos).to.have.lengthOf(1)
+
+ // Remove every redundancy of that video: both plain file copies
+ // and streaming-playlist copies.
+ const video = videos[0]
+ for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
+ await removeVideoRedundancy({
+ url: servers[0].url,
+ accessToken: servers[0].accessToken,
+ redundancyId: r.id
+ })
+ }
+
+ // Let the removal jobs run (plus a grace period) before asserting
+ // the state rolled back to a single webseed.
+ await waitJobs(servers)
+ await wait(5000)
+
+ await check1WebSeed()
+ await check0PlaylistRedundancies()
+
+ // The duplicated files must be gone from server 1's 'videos' directory.
+ await checkVideoFilesWereRemoved(video1Server2UUID, servers[0].serverNumber, [ 'videos' ])
+ })
+
+ after(async function () {
+ await cleanupTests(servers)
+ })
+ })
+