1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | |
2 | ||
3 | import 'mocha' | |
4 | import * as chai from 'chai' | |
5 | import { readdir } from 'fs-extra' | |
6 | import * as magnetUtil from 'magnet-uri' | |
7 | import { join } from 'path' | |
8 | import { | |
9 | checkSegmentHash, | |
10 | checkVideoFilesWereRemoved, | |
11 | cleanupTests, | |
12 | createMultipleServers, | |
13 | doubleFollow, | |
14 | killallServers, | |
15 | makeGetRequest, | |
16 | PeerTubeServer, | |
17 | root, | |
18 | setAccessTokensToServers, | |
19 | wait, | |
20 | waitJobs | |
21 | } from '@shared/extra-utils' | |
22 | import { HttpStatusCode, VideoPrivacy, VideoRedundancyStrategy, VideoRedundancyStrategyWithManual } from '@shared/models' | |
23 | ||
24 | const expect = chai.expect | |
25 | ||
26 | let servers: PeerTubeServer[] = [] | |
27 | let video1Server2UUID: string | |
28 | let video1Server2Id: number | |
29 | ||
30 | function checkMagnetWebseeds (file: { magnetUri: string, resolution: { id: number } }, baseWebseeds: string[], server: PeerTubeServer) { | |
31 | const parsed = magnetUtil.decode(file.magnetUri) | |
32 | ||
33 | for (const ws of baseWebseeds) { | |
34 | const found = parsed.urlList.find(url => url === `${ws}-${file.resolution.id}.mp4`) | |
35 | expect(found, `Webseed ${ws} not found in ${file.magnetUri} on server ${server.url}`).to.not.be.undefined | |
36 | } | |
37 | ||
38 | expect(parsed.urlList).to.have.lengthOf(baseWebseeds.length) | |
39 | } | |
40 | ||
41 | async function createSingleServers (strategy: VideoRedundancyStrategy | null, additionalParams: any = {}, withWebtorrent = true) { | |
42 | const strategies: any[] = [] | |
43 | ||
44 | if (strategy !== null) { | |
45 | strategies.push( | |
46 | { | |
47 | min_lifetime: '1 hour', | |
48 | strategy: strategy, | |
49 | size: '400KB', | |
50 | ||
51 | ...additionalParams | |
52 | } | |
53 | ) | |
54 | } | |
55 | ||
56 | const config = { | |
57 | transcoding: { | |
58 | webtorrent: { | |
59 | enabled: withWebtorrent | |
60 | }, | |
61 | hls: { | |
62 | enabled: true | |
63 | } | |
64 | }, | |
65 | redundancy: { | |
66 | videos: { | |
67 | check_interval: '5 seconds', | |
68 | strategies | |
69 | } | |
70 | } | |
71 | } | |
72 | ||
73 | servers = await createMultipleServers(3, config) | |
74 | ||
75 | // Get the access tokens | |
76 | await setAccessTokensToServers(servers) | |
77 | ||
78 | { | |
79 | const { uuid, id } = await servers[1].videos.upload({ attributes: { name: 'video 1 server 2' } }) | |
80 | video1Server2UUID = uuid | |
81 | video1Server2Id = id | |
82 | ||
83 | await servers[1].videos.view({ id: video1Server2UUID }) | |
84 | } | |
85 | ||
86 | await waitJobs(servers) | |
87 | ||
88 | // Server 1 and server 2 follow each other | |
89 | await doubleFollow(servers[0], servers[1]) | |
90 | // Server 1 and server 3 follow each other | |
91 | await doubleFollow(servers[0], servers[2]) | |
92 | // Server 2 and server 3 follow each other | |
93 | await doubleFollow(servers[1], servers[2]) | |
94 | ||
95 | await waitJobs(servers) | |
96 | } | |
97 | ||
98 | async function check1WebSeed (videoUUID?: string) { | |
99 | if (!videoUUID) videoUUID = video1Server2UUID | |
100 | ||
101 | const webseeds = [ | |
102 | `http://localhost:${servers[1].port}/static/webseed/${videoUUID}` | |
103 | ] | |
104 | ||
105 | for (const server of servers) { | |
106 | // With token to avoid issues with video follow constraints | |
107 | const video = await server.videos.getWithToken({ id: videoUUID }) | |
108 | ||
109 | for (const f of video.files) { | |
110 | checkMagnetWebseeds(f, webseeds, server) | |
111 | } | |
112 | } | |
113 | } | |
114 | ||
115 | async function check2Webseeds (videoUUID?: string) { | |
116 | if (!videoUUID) videoUUID = video1Server2UUID | |
117 | ||
118 | const webseeds = [ | |
119 | `http://localhost:${servers[0].port}/static/redundancy/${videoUUID}`, | |
120 | `http://localhost:${servers[1].port}/static/webseed/${videoUUID}` | |
121 | ] | |
122 | ||
123 | for (const server of servers) { | |
124 | const video = await server.videos.get({ id: videoUUID }) | |
125 | ||
126 | for (const file of video.files) { | |
127 | checkMagnetWebseeds(file, webseeds, server) | |
128 | ||
129 | await makeGetRequest({ | |
130 | url: servers[0].url, | |
131 | expectedStatus: HttpStatusCode.OK_200, | |
132 | path: '/static/redundancy/' + `${videoUUID}-${file.resolution.id}.mp4`, | |
133 | contentType: null | |
134 | }) | |
135 | await makeGetRequest({ | |
136 | url: servers[1].url, | |
137 | expectedStatus: HttpStatusCode.OK_200, | |
138 | path: `/static/webseed/${videoUUID}-${file.resolution.id}.mp4`, | |
139 | contentType: null | |
140 | }) | |
141 | } | |
142 | } | |
143 | ||
144 | const directories = [ | |
145 | 'test' + servers[0].internalServerNumber + '/redundancy', | |
146 | 'test' + servers[1].internalServerNumber + '/videos' | |
147 | ] | |
148 | ||
149 | for (const directory of directories) { | |
150 | const files = await readdir(join(root(), directory)) | |
151 | expect(files).to.have.length.at.least(4) | |
152 | ||
153 | for (const resolution of [ 240, 360, 480, 720 ]) { | |
154 | expect(files.find(f => f === `${videoUUID}-${resolution}.mp4`)).to.not.be.undefined | |
155 | } | |
156 | } | |
157 | } | |
158 | ||
159 | async function check0PlaylistRedundancies (videoUUID?: string) { | |
160 | if (!videoUUID) videoUUID = video1Server2UUID | |
161 | ||
162 | for (const server of servers) { | |
163 | // With token to avoid issues with video follow constraints | |
164 | const video = await server.videos.getWithToken({ id: videoUUID }) | |
165 | ||
166 | expect(video.streamingPlaylists).to.be.an('array') | |
167 | expect(video.streamingPlaylists).to.have.lengthOf(1) | |
168 | expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(0) | |
169 | } | |
170 | } | |
171 | ||
172 | async function check1PlaylistRedundancies (videoUUID?: string) { | |
173 | if (!videoUUID) videoUUID = video1Server2UUID | |
174 | ||
175 | for (const server of servers) { | |
176 | const video = await server.videos.get({ id: videoUUID }) | |
177 | ||
178 | expect(video.streamingPlaylists).to.have.lengthOf(1) | |
179 | expect(video.streamingPlaylists[0].redundancies).to.have.lengthOf(1) | |
180 | ||
181 | const redundancy = video.streamingPlaylists[0].redundancies[0] | |
182 | ||
183 | expect(redundancy.baseUrl).to.equal(servers[0].url + '/static/redundancy/hls/' + videoUUID) | |
184 | } | |
185 | ||
186 | const baseUrlPlaylist = servers[1].url + '/static/streaming-playlists/hls' | |
187 | const baseUrlSegment = servers[0].url + '/static/redundancy/hls' | |
188 | ||
189 | const video = await servers[0].videos.get({ id: videoUUID }) | |
190 | const hlsPlaylist = video.streamingPlaylists[0] | |
191 | ||
192 | for (const resolution of [ 240, 360, 480, 720 ]) { | |
193 | await checkSegmentHash({ server: servers[1], baseUrlPlaylist, baseUrlSegment, videoUUID, resolution, hlsPlaylist }) | |
194 | } | |
195 | ||
196 | const directories = [ | |
197 | 'test' + servers[0].internalServerNumber + '/redundancy/hls', | |
198 | 'test' + servers[1].internalServerNumber + '/streaming-playlists/hls' | |
199 | ] | |
200 | ||
201 | for (const directory of directories) { | |
202 | const files = await readdir(join(root(), directory, videoUUID)) | |
203 | expect(files).to.have.length.at.least(4) | |
204 | ||
205 | for (const resolution of [ 240, 360, 480, 720 ]) { | |
206 | const filename = `${videoUUID}-${resolution}-fragmented.mp4` | |
207 | ||
208 | expect(files.find(f => f === filename)).to.not.be.undefined | |
209 | } | |
210 | } | |
211 | } | |
212 | ||
213 | async function checkStatsGlobal (strategy: VideoRedundancyStrategyWithManual) { | |
214 | let totalSize: number = null | |
215 | let statsLength = 1 | |
216 | ||
217 | if (strategy !== 'manual') { | |
218 | totalSize = 409600 | |
219 | statsLength = 2 | |
220 | } | |
221 | ||
222 | const data = await servers[0].stats.get() | |
223 | expect(data.videosRedundancy).to.have.lengthOf(statsLength) | |
224 | ||
225 | const stat = data.videosRedundancy[0] | |
226 | expect(stat.strategy).to.equal(strategy) | |
227 | expect(stat.totalSize).to.equal(totalSize) | |
228 | ||
229 | return stat | |
230 | } | |
231 | ||
232 | async function checkStatsWith1Redundancy (strategy: VideoRedundancyStrategyWithManual, onlyHls = false) { | |
233 | const stat = await checkStatsGlobal(strategy) | |
234 | ||
235 | expect(stat.totalUsed).to.be.at.least(1).and.below(409601) | |
236 | expect(stat.totalVideoFiles).to.equal(onlyHls ? 4 : 8) | |
237 | expect(stat.totalVideos).to.equal(1) | |
238 | } | |
239 | ||
240 | async function checkStatsWithoutRedundancy (strategy: VideoRedundancyStrategyWithManual) { | |
241 | const stat = await checkStatsGlobal(strategy) | |
242 | ||
243 | expect(stat.totalUsed).to.equal(0) | |
244 | expect(stat.totalVideoFiles).to.equal(0) | |
245 | expect(stat.totalVideos).to.equal(0) | |
246 | } | |
247 | ||
248 | async function findServerFollows () { | |
249 | const body = await servers[0].follows.getFollowings({ start: 0, count: 5, sort: '-createdAt' }) | |
250 | const follows = body.data | |
251 | const server2 = follows.find(f => f.following.host === `localhost:${servers[1].port}`) | |
252 | const server3 = follows.find(f => f.following.host === `localhost:${servers[2].port}`) | |
253 | ||
254 | return { server2, server3 } | |
255 | } | |
256 | ||
257 | async function enableRedundancyOnServer1 () { | |
258 | await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: true }) | |
259 | ||
260 | const { server2, server3 } = await findServerFollows() | |
261 | ||
262 | expect(server3).to.not.be.undefined | |
263 | expect(server3.following.hostRedundancyAllowed).to.be.false | |
264 | ||
265 | expect(server2).to.not.be.undefined | |
266 | expect(server2.following.hostRedundancyAllowed).to.be.true | |
267 | } | |
268 | ||
269 | async function disableRedundancyOnServer1 () { | |
270 | await servers[0].redundancy.updateRedundancy({ host: servers[1].host, redundancyAllowed: false }) | |
271 | ||
272 | const { server2, server3 } = await findServerFollows() | |
273 | ||
274 | expect(server3).to.not.be.undefined | |
275 | expect(server3.following.hostRedundancyAllowed).to.be.false | |
276 | ||
277 | expect(server2).to.not.be.undefined | |
278 | expect(server2.following.hostRedundancyAllowed).to.be.false | |
279 | } | |
280 | ||
describe('Test videos redundancy', function () {

  // Automatic duplication driven by view count ('most-views' strategy)
  describe('With most-views strategy', function () {
    const strategy = 'most-views'

    before(function () {
      this.timeout(120000)

      return createSingleServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      // Wait for the redundancy scheduler to duplicate the files
      // ('Duplicated ' appears once per webtorrent file + once for the HLS playlist)
      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should undo redundancy on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      await disableRedundancyOnServer1()

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0], [ 'videos', join('playlists', 'hls') ])
    })

    after(async function () {
      return cleanupTests(servers)
    })
  })

  // Automatic duplication of trending videos
  describe('With trending strategy', function () {
    const strategy = 'trending'

    before(function () {
      this.timeout(120000)

      return createSingleServers(strategy)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should unfollow on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      // Unfollowing the origin server must also drop its mirrored files
      await servers[0].follows.unfollow({ target: servers[1] })

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0], [ 'videos' ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  // Duplication of recent videos, gated by a minimum view count
  describe('With recently added strategy', function () {
    const strategy = 'recently-added'

    before(function () {
      this.timeout(120000)

      return createSingleServers(strategy, { min_views: 3 })
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 1 webseed on the first video', async function () {
      this.timeout(80000)

      // Below the min_views threshold, no duplication should happen
      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should view 2 times the first video to have > min_views config', async function () {
      this.timeout(80000)

      await servers[0].videos.view({ id: video1Server2UUID })
      await servers[2].videos.view({ id: video1Server2UUID })

      await wait(10000)
      await waitJobs(servers)
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      // Deleting the origin video must cascade to the redundancy copies
      await servers[1].videos.remove({ id: video1Server2UUID })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved(video1Server2UUID, server)
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  // Same strategy but webtorrent transcoding disabled: only the HLS playlist can be mirrored
  describe('With only HLS files', function () {
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(120000)

      await createSingleServers(strategy, { min_views: 3 }, false)
    })

    it('Should have 0 playlist redundancy on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
    })

    it('Should enable redundancy on server 1', function () {
      return enableRedundancyOnServer1()
    })

    it('Should still have 0 redundancy on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await wait(15000)
      await waitJobs(servers)

      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy(strategy)
    })

    it('Should have 1 redundancy on the first video', async function () {
      this.timeout(160000)

      await servers[0].videos.view({ id: video1Server2UUID })
      await servers[2].videos.view({ id: video1Server2UUID })

      await wait(10000)
      await waitJobs(servers)

      // Only 1 'Duplicated ' log entry: the single HLS playlist (no webtorrent files)
      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 1)
      await waitJobs(servers)

      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy(strategy, true)
    })

    it('Should remove the video and the redundancy files', async function () {
      this.timeout(20000)

      await servers[1].videos.remove({ id: video1Server2UUID })

      await waitJobs(servers)

      for (const server of servers) {
        await checkVideoFilesWereRemoved(video1Server2UUID, server)
      }
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  // No automatic strategy: redundancies are created/removed through the API
  describe('With manual strategy', function () {
    before(function () {
      this.timeout(120000)

      return createSingleServers(null)
    })

    it('Should have 1 webseed on the first video', async function () {
      await check1WebSeed()
      await check0PlaylistRedundancies()
      await checkStatsWithoutRedundancy('manual')
    })

    it('Should create a redundancy on first video', async function () {
      await servers[0].redundancy.addVideo({ videoId: video1Server2Id })
    })

    it('Should have 2 webseeds on the first video', async function () {
      this.timeout(80000)

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds()
      await check1PlaylistRedundancies()
      await checkStatsWith1Redundancy('manual')
    })

    it('Should manually remove redundancies on server 1 and remove duplicated videos', async function () {
      this.timeout(80000)

      const body = await servers[0].redundancy.listVideos({ target: 'remote-videos' })

      const videos = body.data
      expect(videos).to.have.lengthOf(1)

      const video = videos[0]

      // Remove both the webtorrent-file and streaming-playlist redundancies
      for (const r of video.redundancies.files.concat(video.redundancies.streamingPlaylists)) {
        await servers[0].redundancy.removeVideo({ redundancyId: r.id })
      }

      await waitJobs(servers)
      await wait(5000)

      await check1WebSeed()
      await check0PlaylistRedundancies()

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0], [ 'videos' ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  // Redundancies with a short min_lifetime must expire on their own
  describe('Test expiration', function () {
    const strategy = 'recently-added'

    // Assert every file's magnet URI on every server contains the given substring
    async function checkContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2UUID })

        for (const f of video.files) {
          expect(f.magnetUri).to.contain(str)
        }
      }
    }

    // Assert no file's magnet URI on any server contains the given substring
    async function checkNotContains (servers: PeerTubeServer[], str: string) {
      for (const server of servers) {
        const video = await server.videos.get({ id: video1Server2UUID })

        for (const f of video.files) {
          expect(f.magnetUri).to.not.contain(str)
        }
      }
    }

    before(async function () {
      this.timeout(120000)

      await createSingleServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()
    })

    it('Should still have 2 webseeds after 10 seconds', async function () {
      this.timeout(80000)

      await wait(10000)

      try {
        // URL-encoded 'http://localhost:<port>' as embedded in the magnet URI
        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      } catch {
        // Maybe a server deleted a redundancy in the scheduler
        await wait(2000)

        await checkContains(servers, 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
      }
    })

    it('Should stop server 1 and expire video redundancy', async function () {
      this.timeout(80000)

      await killallServers([ servers[0] ])

      // Longer than the 7 second min_lifetime, so the redundancy must be gone
      await wait(15000)

      await checkNotContains([ servers[1], servers[2] ], 'http%3A%2F%2Flocalhost%3A' + servers[0].port)
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })

  // When quota is tight, newer videos must replace older cached redundancies
  describe('Test file replacement', function () {
    let video2Server2UUID: string
    const strategy = 'recently-added'

    before(async function () {
      this.timeout(120000)

      await createSingleServers(strategy, { min_lifetime: '7 seconds', min_views: 0 })

      await enableRedundancyOnServer1()

      await waitJobs(servers)
      await servers[0].servers.waitUntilLog('Duplicated ', 5)
      await waitJobs(servers)

      await check2Webseeds(video1Server2UUID)
      await check1PlaylistRedundancies(video1Server2UUID)
      await checkStatsWith1Redundancy(strategy)

      const { uuid } = await servers[1].videos.upload({ attributes: { name: 'video 2 server 2', privacy: VideoPrivacy.PRIVATE } })
      video2Server2UUID = uuid

      // Wait transcoding before federation
      await waitJobs(servers)

      await servers[1].videos.update({ id: video2Server2UUID, attributes: { privacy: VideoPrivacy.PUBLIC } })
    })

    it('Should cache video 2 webseeds on the first video', async function () {
      this.timeout(120000)

      await waitJobs(servers)

      let checked = false

      // Poll until video 1's redundancy was evicted in favour of video 2
      while (checked === false) {
        await wait(1000)

        try {
          await check1WebSeed(video1Server2UUID)
          await check0PlaylistRedundancies(video1Server2UUID)

          await check2Webseeds(video2Server2UUID)
          await check1PlaylistRedundancies(video2Server2UUID)

          checked = true
        } catch {
          checked = false
        }
      }
    })

    it('Should disable strategy and remove redundancies', async function () {
      this.timeout(80000)

      await waitJobs(servers)

      // Restart server 1 with no strategies: existing redundancies must be purged
      await killallServers([ servers[0] ])
      await servers[0].run({
        redundancy: {
          videos: {
            check_interval: '1 second',
            strategies: []
          }
        }
      })

      await waitJobs(servers)

      await checkVideoFilesWereRemoved(video1Server2UUID, servers[0], [ join('redundancy', 'hls') ])
    })

    after(async function () {
      await cleanupTests(servers)
    })
  })
})