Diffstat (limited to 'server')
80 files changed, 3865 insertions, 604 deletions
diff --git a/server/tests/api/activitypub/cleaner.ts b/server/tests/api/activitypub/cleaner.ts
index 1c1495022..d67175e20 100644
--- a/server/tests/api/activitypub/cleaner.ts
+++ b/server/tests/api/activitypub/cleaner.ts
@@ -1,6 +1,7 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { SQLCommand } from '@server/tests/shared' | ||
4 | import { wait } from '@shared/core-utils' | 5 | import { wait } from '@shared/core-utils' |
5 | import { | 6 | import { |
6 | cleanupTests, | 7 | cleanupTests, |
@@ -13,6 +14,8 @@ import { | |||
13 | 14 | ||
14 | describe('Test AP cleaner', function () { | 15 | describe('Test AP cleaner', function () { |
15 | let servers: PeerTubeServer[] = [] | 16 | let servers: PeerTubeServer[] = [] |
17 | const sqlCommands: SQLCommand[] = [] | ||
18 | |||
16 | let videoUUID1: string | 19 | let videoUUID1: string |
17 | let videoUUID2: string | 20 | let videoUUID2: string |
18 | let videoUUID3: string | 21 | let videoUUID3: string |
@@ -56,6 +59,8 @@ describe('Test AP cleaner', function () { | |||
56 | await server.videos.rate({ id: uuid, rating: 'like' }) | 59 | await server.videos.rate({ id: uuid, rating: 'like' }) |
57 | await server.comments.createThread({ videoId: uuid, text: 'comment' }) | 60 | await server.comments.createThread({ videoId: uuid, text: 'comment' }) |
58 | } | 61 | } |
62 | |||
63 | sqlCommands.push(new SQLCommand(server)) | ||
59 | } | 64 | } |
60 | 65 | ||
61 | await waitJobs(servers) | 66 | await waitJobs(servers) |
@@ -75,9 +80,9 @@ describe('Test AP cleaner', function () { | |||
75 | it('Should destroy server 3 internal likes and correctly clean them', async function () { | 80 | it('Should destroy server 3 internal likes and correctly clean them', async function () { |
76 | this.timeout(20000) | 81 | this.timeout(20000) |
77 | 82 | ||
78 | await servers[2].sql.deleteAll('accountVideoRate') | 83 | await sqlCommands[2].deleteAll('accountVideoRate') |
79 | for (const uuid of videoUUIDs) { | 84 | for (const uuid of videoUUIDs) { |
80 | await servers[2].sql.setVideoField(uuid, 'likes', '0') | 85 | await sqlCommands[2].setVideoField(uuid, 'likes', '0') |
81 | } | 86 | } |
82 | 87 | ||
83 | await wait(5000) | 88 | await wait(5000) |
@@ -121,10 +126,10 @@ describe('Test AP cleaner', function () { | |||
121 | it('Should destroy server 3 internal dislikes and correctly clean them', async function () { | 126 | it('Should destroy server 3 internal dislikes and correctly clean them', async function () { |
122 | this.timeout(20000) | 127 | this.timeout(20000) |
123 | 128 | ||
124 | await servers[2].sql.deleteAll('accountVideoRate') | 129 | await sqlCommands[2].deleteAll('accountVideoRate') |
125 | 130 | ||
126 | for (const uuid of videoUUIDs) { | 131 | for (const uuid of videoUUIDs) { |
127 | await servers[2].sql.setVideoField(uuid, 'dislikes', '0') | 132 | await sqlCommands[2].setVideoField(uuid, 'dislikes', '0') |
128 | } | 133 | } |
129 | 134 | ||
130 | await wait(5000) | 135 | await wait(5000) |
@@ -148,15 +153,15 @@ describe('Test AP cleaner', function () { | |||
148 | it('Should destroy server 3 internal shares and correctly clean them', async function () { | 153 | it('Should destroy server 3 internal shares and correctly clean them', async function () { |
149 | this.timeout(20000) | 154 | this.timeout(20000) |
150 | 155 | ||
151 | const preCount = await servers[0].sql.getVideoShareCount() | 156 | const preCount = await sqlCommands[0].getVideoShareCount() |
152 | expect(preCount).to.equal(6) | 157 | expect(preCount).to.equal(6) |
153 | 158 | ||
154 | await servers[2].sql.deleteAll('videoShare') | 159 | await sqlCommands[2].deleteAll('videoShare') |
155 | await wait(5000) | 160 | await wait(5000) |
156 | await waitJobs(servers) | 161 | await waitJobs(servers) |
157 | 162 | ||
158 | // Still 6 because we don't have remote shares on local videos | 163 | // Still 6 because we don't have remote shares on local videos |
159 | const postCount = await servers[0].sql.getVideoShareCount() | 164 | const postCount = await sqlCommands[0].getVideoShareCount() |
160 | expect(postCount).to.equal(6) | 165 | expect(postCount).to.equal(6) |
161 | }) | 166 | }) |
162 | 167 | ||
@@ -168,7 +173,7 @@ describe('Test AP cleaner', function () { | |||
168 | expect(total).to.equal(3) | 173 | expect(total).to.equal(3) |
169 | } | 174 | } |
170 | 175 | ||
171 | await servers[2].sql.deleteAll('videoComment') | 176 | await sqlCommands[2].deleteAll('videoComment') |
172 | 177 | ||
173 | await wait(5000) | 178 | await wait(5000) |
174 | await waitJobs(servers) | 179 | await waitJobs(servers) |
@@ -185,7 +190,7 @@ describe('Test AP cleaner', function () { | |||
185 | async function check (like: string, ofServerUrl: string, urlSuffix: string, remote: 'true' | 'false') { | 190 | async function check (like: string, ofServerUrl: string, urlSuffix: string, remote: 'true' | 'false') { |
186 | const query = `SELECT "videoId", "accountVideoRate".url FROM "accountVideoRate" ` + | 191 | const query = `SELECT "videoId", "accountVideoRate".url FROM "accountVideoRate" ` + |
187 | `INNER JOIN video ON "accountVideoRate"."videoId" = video.id AND remote IS ${remote} WHERE "accountVideoRate"."url" LIKE '${like}'` | 192 | `INNER JOIN video ON "accountVideoRate"."videoId" = video.id AND remote IS ${remote} WHERE "accountVideoRate"."url" LIKE '${like}'` |
188 | const res = await servers[0].sql.selectQuery<{ url: string }>(query) | 193 | const res = await sqlCommands[0].selectQuery<{ url: string }>(query) |
189 | 194 | ||
190 | for (const rate of res) { | 195 | for (const rate of res) { |
191 | const matcher = new RegExp(`^${ofServerUrl}/accounts/root/dislikes/\\d+${urlSuffix}$`) | 196 | const matcher = new RegExp(`^${ofServerUrl}/accounts/root/dislikes/\\d+${urlSuffix}$`) |
@@ -214,7 +219,7 @@ describe('Test AP cleaner', function () { | |||
214 | 219 | ||
215 | { | 220 | { |
216 | const query = `UPDATE "accountVideoRate" SET url = url || 'stan'` | 221 | const query = `UPDATE "accountVideoRate" SET url = url || 'stan'` |
217 | await servers[1].sql.updateQuery(query) | 222 | await sqlCommands[1].updateQuery(query) |
218 | 223 | ||
219 | await wait(5000) | 224 | await wait(5000) |
220 | await waitJobs(servers) | 225 | await waitJobs(servers) |
@@ -231,7 +236,7 @@ describe('Test AP cleaner', function () { | |||
231 | const query = `SELECT "videoId", "videoComment".url, uuid as "videoUUID" FROM "videoComment" ` + | 236 | const query = `SELECT "videoId", "videoComment".url, uuid as "videoUUID" FROM "videoComment" ` + |
232 | `INNER JOIN video ON "videoComment"."videoId" = video.id AND remote IS ${remote} WHERE "videoComment"."url" LIKE '${like}'` | 237 | `INNER JOIN video ON "videoComment"."videoId" = video.id AND remote IS ${remote} WHERE "videoComment"."url" LIKE '${like}'` |
233 | 238 | ||
234 | const res = await servers[0].sql.selectQuery<{ url: string, videoUUID: string }>(query) | 239 | const res = await sqlCommands[0].selectQuery<{ url: string, videoUUID: string }>(query) |
235 | 240 | ||
236 | for (const comment of res) { | 241 | for (const comment of res) { |
237 | const matcher = new RegExp(`${ofServerUrl}/videos/watch/${comment.videoUUID}/comments/\\d+${urlSuffix}`) | 242 | const matcher = new RegExp(`${ofServerUrl}/videos/watch/${comment.videoUUID}/comments/\\d+${urlSuffix}`) |
@@ -257,7 +262,7 @@ describe('Test AP cleaner', function () { | |||
257 | 262 | ||
258 | { | 263 | { |
259 | const query = `UPDATE "videoComment" SET url = url || 'kyle'` | 264 | const query = `UPDATE "videoComment" SET url = url || 'kyle'` |
260 | await servers[1].sql.updateQuery(query) | 265 | await sqlCommands[1].updateQuery(query) |
261 | 266 | ||
262 | await wait(5000) | 267 | await wait(5000) |
263 | await waitJobs(servers) | 268 | await waitJobs(servers) |
@@ -328,6 +333,10 @@ describe('Test AP cleaner', function () { | |||
328 | }) | 333 | }) |
329 | 334 | ||
330 | after(async function () { | 335 | after(async function () { |
336 | for (const sql of sqlCommands) { | ||
337 | await sql.cleanup() | ||
338 | } | ||
339 | |||
331 | await cleanupTests(servers) | 340 | await cleanupTests(servers) |
332 | }) | 341 | }) |
333 | }) | 342 | }) |
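The pattern above repeats across the ActivityPub test suites touched by this commit: each test file now creates its own SQLCommand instances instead of using the removed servers[i].sql helper, and releases them in its after() hook. A minimal lifecycle sketch, assuming only the SQLCommand methods visible in this diff (constructor taking a PeerTubeServer, deleteAll, setVideoField, cleanup); the class internals are not part of the commit shown here:

import { SQLCommand } from '@server/tests/shared'
import { cleanupTests, createMultipleServers, PeerTubeServer } from '@shared/server-commands'

describe('SQLCommand lifecycle sketch', function () {
  let servers: PeerTubeServer[] = []
  const sqlCommands: SQLCommand[] = []

  before(async function () {
    servers = await createMultipleServers(2)

    // One SQL handle per server, replacing the old servers[i].sql accessor
    for (const server of servers) {
      sqlCommands.push(new SQLCommand(server))
    }
  })

  it('Should run raw queries against a server database', async function () {
    await sqlCommands[0].deleteAll('accountVideoRate')
    await sqlCommands[0].setVideoField('some-video-uuid', 'likes', '0') // placeholder UUID
  })

  after(async function () {
    // New responsibility of each test file: close every SQL handle it opened
    for (const sql of sqlCommands) {
      await sql.cleanup()
    }

    await cleanupTests(servers)
  })
})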
diff --git a/server/tests/api/activitypub/fetch.ts b/server/tests/api/activitypub/fetch.ts
index f0caea507..3899a6a49 100644
--- a/server/tests/api/activitypub/fetch.ts
+++ b/server/tests/api/activitypub/fetch.ts
@@ -1,6 +1,7 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { SQLCommand } from '@server/tests/shared' | ||
4 | import { | 5 | import { |
5 | cleanupTests, | 6 | cleanupTests, |
6 | createMultipleServers, | 7 | createMultipleServers, |
@@ -12,6 +13,7 @@ import { | |||
12 | 13 | ||
13 | describe('Test ActivityPub fetcher', function () { | 14 | describe('Test ActivityPub fetcher', function () { |
14 | let servers: PeerTubeServer[] | 15 | let servers: PeerTubeServer[] |
16 | let sqlCommandServer1: SQLCommand | ||
15 | 17 | ||
16 | // --------------------------------------------------------------- | 18 | // --------------------------------------------------------------- |
17 | 19 | ||
@@ -34,15 +36,17 @@ describe('Test ActivityPub fetcher', function () { | |||
34 | const { uuid } = await servers[0].videos.upload({ attributes: { name: 'bad video root' } }) | 36 | const { uuid } = await servers[0].videos.upload({ attributes: { name: 'bad video root' } }) |
35 | await servers[0].videos.upload({ token: userAccessToken, attributes: { name: 'video user' } }) | 37 | await servers[0].videos.upload({ token: userAccessToken, attributes: { name: 'video user' } }) |
36 | 38 | ||
39 | sqlCommandServer1 = new SQLCommand(servers[0]) | ||
40 | |||
37 | { | 41 | { |
38 | const to = servers[0].url + '/accounts/user1' | 42 | const to = servers[0].url + '/accounts/user1' |
39 | const value = servers[1].url + '/accounts/user1' | 43 | const value = servers[1].url + '/accounts/user1' |
40 | await servers[0].sql.setActorField(to, 'url', value) | 44 | await sqlCommandServer1.setActorField(to, 'url', value) |
41 | } | 45 | } |
42 | 46 | ||
43 | { | 47 | { |
44 | const value = servers[2].url + '/videos/watch/' + uuid | 48 | const value = servers[2].url + '/videos/watch/' + uuid |
45 | await servers[0].sql.setVideoField(uuid, 'url', value) | 49 | await sqlCommandServer1.setVideoField(uuid, 'url', value) |
46 | } | 50 | } |
47 | }) | 51 | }) |
48 | 52 | ||
@@ -72,6 +76,7 @@ describe('Test ActivityPub fetcher', function () { | |||
72 | after(async function () { | 76 | after(async function () { |
73 | this.timeout(20000) | 77 | this.timeout(20000) |
74 | 78 | ||
79 | await sqlCommandServer1.cleanup() | ||
75 | await cleanupTests(servers) | 80 | await cleanupTests(servers) |
76 | }) | 81 | }) |
77 | }) | 82 | }) |
diff --git a/server/tests/api/activitypub/refresher.ts b/server/tests/api/activitypub/refresher.ts
index 4fb22f512..6c48b7ac8 100644
--- a/server/tests/api/activitypub/refresher.ts
+++ b/server/tests/api/activitypub/refresher.ts
@@ -1,5 +1,6 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { SQLCommand } from '@server/tests/shared' | ||
3 | import { wait } from '@shared/core-utils' | 4 | import { wait } from '@shared/core-utils' |
4 | import { HttpStatusCode, VideoPlaylistPrivacy } from '@shared/models' | 5 | import { HttpStatusCode, VideoPlaylistPrivacy } from '@shared/models' |
5 | import { | 6 | import { |
@@ -15,6 +16,7 @@ import { | |||
15 | 16 | ||
16 | describe('Test AP refresher', function () { | 17 | describe('Test AP refresher', function () { |
17 | let servers: PeerTubeServer[] = [] | 18 | let servers: PeerTubeServer[] = [] |
19 | let sqlCommandServer2: SQLCommand | ||
18 | let videoUUID1: string | 20 | let videoUUID1: string |
19 | let videoUUID2: string | 21 | let videoUUID2: string |
20 | let videoUUID3: string | 22 | let videoUUID3: string |
@@ -61,6 +63,8 @@ describe('Test AP refresher', function () { | |||
61 | } | 63 | } |
62 | 64 | ||
63 | await doubleFollow(servers[0], servers[1]) | 65 | await doubleFollow(servers[0], servers[1]) |
66 | |||
67 | sqlCommandServer2 = new SQLCommand(servers[1]) | ||
64 | }) | 68 | }) |
65 | 69 | ||
66 | describe('Videos refresher', function () { | 70 | describe('Videos refresher', function () { |
@@ -71,7 +75,7 @@ describe('Test AP refresher', function () { | |||
71 | await wait(10000) | 75 | await wait(10000) |
72 | 76 | ||
73 | // Change UUID so the remote server returns a 404 | 77 | // Change UUID so the remote server returns a 404 |
74 | await servers[1].sql.setVideoField(videoUUID1, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b174f') | 78 | await sqlCommandServer2.setVideoField(videoUUID1, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b174f') |
75 | 79 | ||
76 | await servers[0].videos.get({ id: videoUUID1 }) | 80 | await servers[0].videos.get({ id: videoUUID1 }) |
77 | await servers[0].videos.get({ id: videoUUID2 }) | 81 | await servers[0].videos.get({ id: videoUUID2 }) |
@@ -87,7 +91,7 @@ describe('Test AP refresher', function () { | |||
87 | 91 | ||
88 | await killallServers([ servers[1] ]) | 92 | await killallServers([ servers[1] ]) |
89 | 93 | ||
90 | await servers[1].sql.setVideoField(videoUUID3, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b174e') | 94 | await sqlCommandServer2.setVideoField(videoUUID3, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b174e') |
91 | 95 | ||
92 | // Video will need a refresh | 96 | // Video will need a refresh |
93 | await wait(10000) | 97 | await wait(10000) |
@@ -113,7 +117,7 @@ describe('Test AP refresher', function () { | |||
113 | 117 | ||
114 | // Change actor name so the remote server returns a 404 | 118 | // Change actor name so the remote server returns a 404 |
115 | const to = servers[1].url + '/accounts/user2' | 119 | const to = servers[1].url + '/accounts/user2' |
116 | await servers[1].sql.setActorField(to, 'preferredUsername', 'toto') | 120 | await sqlCommandServer2.setActorField(to, 'preferredUsername', 'toto') |
117 | 121 | ||
118 | await command.get({ accountName: 'user1@' + servers[1].host }) | 122 | await command.get({ accountName: 'user1@' + servers[1].host }) |
119 | await command.get({ accountName: 'user2@' + servers[1].host }) | 123 | await command.get({ accountName: 'user2@' + servers[1].host }) |
@@ -133,7 +137,7 @@ describe('Test AP refresher', function () { | |||
133 | await wait(10000) | 137 | await wait(10000) |
134 | 138 | ||
135 | // Change UUID so the remote server returns a 404 | 139 | // Change UUID so the remote server returns a 404 |
136 | await servers[1].sql.setPlaylistField(playlistUUID2, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b178e') | 140 | await sqlCommandServer2.setPlaylistField(playlistUUID2, 'uuid', '304afe4f-39f9-4d49-8ed7-ac57b86b178e') |
137 | 141 | ||
138 | await servers[0].playlists.get({ playlistId: playlistUUID1 }) | 142 | await servers[0].playlists.get({ playlistId: playlistUUID1 }) |
139 | await servers[0].playlists.get({ playlistId: playlistUUID2 }) | 143 | await servers[0].playlists.get({ playlistId: playlistUUID2 }) |
@@ -148,6 +152,8 @@ describe('Test AP refresher', function () { | |||
148 | after(async function () { | 152 | after(async function () { |
149 | this.timeout(10000) | 153 | this.timeout(10000) |
150 | 154 | ||
155 | await sqlCommandServer2.cleanup() | ||
156 | |||
151 | await cleanupTests(servers) | 157 | await cleanupTests(servers) |
152 | }) | 158 | }) |
153 | }) | 159 | }) |
diff --git a/server/tests/api/activitypub/security.ts b/server/tests/api/activitypub/security.ts
index c6f171633..d6a07b87f 100644
--- a/server/tests/api/activitypub/security.ts
+++ b/server/tests/api/activitypub/security.ts
@@ -5,26 +5,26 @@ import { buildDigest } from '@server/helpers/peertube-crypto' | |||
5 | import { ACTIVITY_PUB, HTTP_SIGNATURE } from '@server/initializers/constants' | 5 | import { ACTIVITY_PUB, HTTP_SIGNATURE } from '@server/initializers/constants' |
6 | import { activityPubContextify } from '@server/lib/activitypub/context' | 6 | import { activityPubContextify } from '@server/lib/activitypub/context' |
7 | import { buildGlobalHeaders, signAndContextify } from '@server/lib/activitypub/send' | 7 | import { buildGlobalHeaders, signAndContextify } from '@server/lib/activitypub/send' |
8 | import { makePOSTAPRequest } from '@server/tests/shared' | 8 | import { makePOSTAPRequest, SQLCommand } from '@server/tests/shared' |
9 | import { buildAbsoluteFixturePath, wait } from '@shared/core-utils' | 9 | import { buildAbsoluteFixturePath, wait } from '@shared/core-utils' |
10 | import { HttpStatusCode } from '@shared/models' | 10 | import { HttpStatusCode } from '@shared/models' |
11 | import { cleanupTests, createMultipleServers, killallServers, PeerTubeServer } from '@shared/server-commands' | 11 | import { cleanupTests, createMultipleServers, killallServers, PeerTubeServer } from '@shared/server-commands' |
12 | 12 | ||
13 | function setKeysOfServer (onServer: PeerTubeServer, ofServer: PeerTubeServer, publicKey: string, privateKey: string) { | 13 | function setKeysOfServer (onServer: SQLCommand, ofServerUrl: string, publicKey: string, privateKey: string) { |
14 | const url = ofServer.url + '/accounts/peertube' | 14 | const url = ofServerUrl + '/accounts/peertube' |
15 | 15 | ||
16 | return Promise.all([ | 16 | return Promise.all([ |
17 | onServer.sql.setActorField(url, 'publicKey', publicKey), | 17 | onServer.setActorField(url, 'publicKey', publicKey), |
18 | onServer.sql.setActorField(url, 'privateKey', privateKey) | 18 | onServer.setActorField(url, 'privateKey', privateKey) |
19 | ]) | 19 | ]) |
20 | } | 20 | } |
21 | 21 | ||
22 | function setUpdatedAtOfServer (onServer: PeerTubeServer, ofServer: PeerTubeServer, updatedAt: string) { | 22 | function setUpdatedAtOfServer (onServer: SQLCommand, ofServerUrl: string, updatedAt: string) { |
23 | const url = ofServer.url + '/accounts/peertube' | 23 | const url = ofServerUrl + '/accounts/peertube' |
24 | 24 | ||
25 | return Promise.all([ | 25 | return Promise.all([ |
26 | onServer.sql.setActorField(url, 'createdAt', updatedAt), | 26 | onServer.setActorField(url, 'createdAt', updatedAt), |
27 | onServer.sql.setActorField(url, 'updatedAt', updatedAt) | 27 | onServer.setActorField(url, 'updatedAt', updatedAt) |
28 | ]) | 28 | ]) |
29 | } | 29 | } |
30 | 30 | ||
@@ -71,6 +71,8 @@ async function makeFollowRequest (to: { url: string }, by: { url: string, privat | |||
71 | 71 | ||
72 | describe('Test ActivityPub security', function () { | 72 | describe('Test ActivityPub security', function () { |
73 | let servers: PeerTubeServer[] | 73 | let servers: PeerTubeServer[] |
74 | let sqlCommands: SQLCommand[] | ||
75 | |||
74 | let url: string | 76 | let url: string |
75 | 77 | ||
76 | const keys = require(buildAbsoluteFixturePath('./ap-json/peertube/keys.json')) | 78 | const keys = require(buildAbsoluteFixturePath('./ap-json/peertube/keys.json')) |
@@ -90,10 +92,12 @@ describe('Test ActivityPub security', function () { | |||
90 | 92 | ||
91 | servers = await createMultipleServers(3) | 93 | servers = await createMultipleServers(3) |
92 | 94 | ||
95 | sqlCommands = servers.map(s => new SQLCommand(s)) | ||
96 | |||
93 | url = servers[0].url + '/inbox' | 97 | url = servers[0].url + '/inbox' |
94 | 98 | ||
95 | await setKeysOfServer(servers[0], servers[1], keys.publicKey, null) | 99 | await setKeysOfServer(sqlCommands[0], servers[1].url, keys.publicKey, null) |
96 | await setKeysOfServer(servers[1], servers[1], keys.publicKey, keys.privateKey) | 100 | await setKeysOfServer(sqlCommands[1], servers[1].url, keys.publicKey, keys.privateKey) |
97 | 101 | ||
98 | const to = { url: servers[0].url + '/accounts/peertube' } | 102 | const to = { url: servers[0].url + '/accounts/peertube' } |
99 | const by = { url: servers[1].url + '/accounts/peertube', privateKey: keys.privateKey } | 103 | const by = { url: servers[1].url + '/accounts/peertube', privateKey: keys.privateKey } |
@@ -130,8 +134,8 @@ describe('Test ActivityPub security', function () { | |||
130 | }) | 134 | }) |
131 | 135 | ||
132 | it('Should fail with bad keys', async function () { | 136 | it('Should fail with bad keys', async function () { |
133 | await setKeysOfServer(servers[0], servers[1], invalidKeys.publicKey, invalidKeys.privateKey) | 137 | await setKeysOfServer(sqlCommands[0], servers[1].url, invalidKeys.publicKey, invalidKeys.privateKey) |
134 | await setKeysOfServer(servers[1], servers[1], invalidKeys.publicKey, invalidKeys.privateKey) | 138 | await setKeysOfServer(sqlCommands[1], servers[1].url, invalidKeys.publicKey, invalidKeys.privateKey) |
135 | 139 | ||
136 | const body = await activityPubContextify(getAnnounceWithoutContext(servers[1]), 'Announce') | 140 | const body = await activityPubContextify(getAnnounceWithoutContext(servers[1]), 'Announce') |
137 | const headers = buildGlobalHeaders(body) | 141 | const headers = buildGlobalHeaders(body) |
@@ -145,8 +149,8 @@ describe('Test ActivityPub security', function () { | |||
145 | }) | 149 | }) |
146 | 150 | ||
147 | it('Should reject requests without appropriate signed headers', async function () { | 151 | it('Should reject requests without appropriate signed headers', async function () { |
148 | await setKeysOfServer(servers[0], servers[1], keys.publicKey, keys.privateKey) | 152 | await setKeysOfServer(sqlCommands[0], servers[1].url, keys.publicKey, keys.privateKey) |
149 | await setKeysOfServer(servers[1], servers[1], keys.publicKey, keys.privateKey) | 153 | await setKeysOfServer(sqlCommands[1], servers[1].url, keys.publicKey, keys.privateKey) |
150 | 154 | ||
151 | const body = await activityPubContextify(getAnnounceWithoutContext(servers[1]), 'Announce') | 155 | const body = await activityPubContextify(getAnnounceWithoutContext(servers[1]), 'Announce') |
152 | const headers = buildGlobalHeaders(body) | 156 | const headers = buildGlobalHeaders(body) |
@@ -194,8 +198,8 @@ describe('Test ActivityPub security', function () { | |||
194 | 198 | ||
195 | // Update keys of server 2 to invalid keys | 199 | // Update keys of server 2 to invalid keys |
196 | // Server 1 should refresh the actor and fail | 200 | // Server 1 should refresh the actor and fail |
197 | await setKeysOfServer(servers[1], servers[1], invalidKeys.publicKey, invalidKeys.privateKey) | 201 | await setKeysOfServer(sqlCommands[1], servers[1].url, invalidKeys.publicKey, invalidKeys.privateKey) |
198 | await setUpdatedAtOfServer(servers[0], servers[1], '2015-07-17 22:00:00+00') | 202 | await setUpdatedAtOfServer(sqlCommands[0], servers[1].url, '2015-07-17 22:00:00+00') |
199 | 203 | ||
200 | // Invalid peertube actor cache | 204 | // Invalid peertube actor cache |
201 | await killallServers([ servers[1] ]) | 205 | await killallServers([ servers[1] ]) |
@@ -218,9 +222,9 @@ describe('Test ActivityPub security', function () { | |||
218 | before(async function () { | 222 | before(async function () { |
219 | this.timeout(10000) | 223 | this.timeout(10000) |
220 | 224 | ||
221 | await setKeysOfServer(servers[0], servers[1], keys.publicKey, keys.privateKey) | 225 | await setKeysOfServer(sqlCommands[0], servers[1].url, keys.publicKey, keys.privateKey) |
222 | await setKeysOfServer(servers[1], servers[1], keys.publicKey, keys.privateKey) | 226 | await setKeysOfServer(sqlCommands[1], servers[1].url, keys.publicKey, keys.privateKey) |
223 | await setKeysOfServer(servers[2], servers[2], keys.publicKey, keys.privateKey) | 227 | await setKeysOfServer(sqlCommands[2], servers[2].url, keys.publicKey, keys.privateKey) |
224 | 228 | ||
225 | const to = { url: servers[0].url + '/accounts/peertube' } | 229 | const to = { url: servers[0].url + '/accounts/peertube' } |
226 | const by = { url: servers[2].url + '/accounts/peertube', privateKey: keys.privateKey } | 230 | const by = { url: servers[2].url + '/accounts/peertube', privateKey: keys.privateKey } |
@@ -230,8 +234,8 @@ describe('Test ActivityPub security', function () { | |||
230 | it('Should fail with bad keys', async function () { | 234 | it('Should fail with bad keys', async function () { |
231 | this.timeout(10000) | 235 | this.timeout(10000) |
232 | 236 | ||
233 | await setKeysOfServer(servers[0], servers[2], invalidKeys.publicKey, invalidKeys.privateKey) | 237 | await setKeysOfServer(sqlCommands[0], servers[2].url, invalidKeys.publicKey, invalidKeys.privateKey) |
234 | await setKeysOfServer(servers[2], servers[2], invalidKeys.publicKey, invalidKeys.privateKey) | 238 | await setKeysOfServer(sqlCommands[2], servers[2].url, invalidKeys.publicKey, invalidKeys.privateKey) |
235 | 239 | ||
236 | const body = getAnnounceWithoutContext(servers[1]) | 240 | const body = getAnnounceWithoutContext(servers[1]) |
237 | body.actor = servers[2].url + '/accounts/peertube' | 241 | body.actor = servers[2].url + '/accounts/peertube' |
@@ -252,8 +256,8 @@ describe('Test ActivityPub security', function () { | |||
252 | it('Should fail with an altered body', async function () { | 256 | it('Should fail with an altered body', async function () { |
253 | this.timeout(10000) | 257 | this.timeout(10000) |
254 | 258 | ||
255 | await setKeysOfServer(servers[0], servers[2], keys.publicKey, keys.privateKey) | 259 | await setKeysOfServer(sqlCommands[0], servers[2].url, keys.publicKey, keys.privateKey) |
256 | await setKeysOfServer(servers[0], servers[2], keys.publicKey, keys.privateKey) | 260 | await setKeysOfServer(sqlCommands[0], servers[2].url, keys.publicKey, keys.privateKey) |
257 | 261 | ||
258 | const body = getAnnounceWithoutContext(servers[1]) | 262 | const body = getAnnounceWithoutContext(servers[1]) |
259 | body.actor = servers[2].url + '/accounts/peertube' | 263 | body.actor = servers[2].url + '/accounts/peertube' |
@@ -296,7 +300,7 @@ describe('Test ActivityPub security', function () { | |||
296 | 300 | ||
297 | // Update keys of server 3 to invalid keys | 301 | // Update keys of server 3 to invalid keys |
298 | // Server 1 should refresh the actor and fail | 302 | // Server 1 should refresh the actor and fail |
299 | await setKeysOfServer(servers[2], servers[2], invalidKeys.publicKey, invalidKeys.privateKey) | 303 | await setKeysOfServer(sqlCommands[2], servers[2].url, invalidKeys.publicKey, invalidKeys.privateKey) |
300 | 304 | ||
301 | const body = getAnnounceWithoutContext(servers[1]) | 305 | const body = getAnnounceWithoutContext(servers[1]) |
302 | body.actor = servers[2].url + '/accounts/peertube' | 306 | body.actor = servers[2].url + '/accounts/peertube' |
@@ -316,7 +320,9 @@ describe('Test ActivityPub security', function () { | |||
316 | }) | 320 | }) |
317 | 321 | ||
318 | after(async function () { | 322 | after(async function () { |
319 | this.timeout(10000) | 323 | for (const sql of sqlCommands) { |
324 | await sql.cleanup() | ||
325 | } | ||
320 | 326 | ||
321 | await cleanupTests(servers) | 327 | await cleanupTests(servers) |
322 | }) | 328 | }) |
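The security test refactor also changes the shape of its local helpers: setKeysOfServer and setUpdatedAtOfServer now receive the SQLCommand of the server whose database is modified plus the URL of the server whose actor is overridden, instead of two PeerTubeServer objects. A condensed sketch of the new helper and a call site, using only names that appear in the hunks above (the overrideKeys wrapper is illustrative, not part of the commit):

import { SQLCommand } from '@server/tests/shared'
import { PeerTubeServer } from '@shared/server-commands'

// New signature: write through a SQL handle, identify the remote actor by URL
function setKeysOfServer (onServer: SQLCommand, ofServerUrl: string, publicKey: string, privateKey: string) {
  const url = ofServerUrl + '/accounts/peertube'

  return Promise.all([
    onServer.setActorField(url, 'publicKey', publicKey),
    onServer.setActorField(url, 'privateKey', privateKey)
  ])
}

// Illustrative call site, mirroring the before() hook: override server 1's view
// of server 2's keys, then server 2's own keys (keys come from the JSON fixture)
async function overrideKeys (servers: PeerTubeServer[], keys: { publicKey: string, privateKey: string }) {
  const sqlCommands = servers.map(s => new SQLCommand(s))

  await setKeysOfServer(sqlCommands[0], servers[1].url, keys.publicKey, null)
  await setKeysOfServer(sqlCommands[1], servers[1].url, keys.publicKey, keys.privateKey)

  for (const sql of sqlCommands) {
    await sql.cleanup()
  }
}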
diff --git a/server/tests/api/check-params/config.ts b/server/tests/api/check-params/config.ts
index f49a4b868..c5cda203e 100644
--- a/server/tests/api/check-params/config.ts
+++ b/server/tests/api/check-params/config.ts
@@ -103,6 +103,9 @@ describe('Test config API validators', function () { | |||
103 | }, | 103 | }, |
104 | transcoding: { | 104 | transcoding: { |
105 | enabled: true, | 105 | enabled: true, |
106 | remoteRunners: { | ||
107 | enabled: true | ||
108 | }, | ||
106 | allowAdditionalExtensions: true, | 109 | allowAdditionalExtensions: true, |
107 | allowAudioFiles: true, | 110 | allowAudioFiles: true, |
108 | concurrency: 1, | 111 | concurrency: 1, |
@@ -140,6 +143,9 @@ describe('Test config API validators', function () { | |||
140 | 143 | ||
141 | transcoding: { | 144 | transcoding: { |
142 | enabled: true, | 145 | enabled: true, |
146 | remoteRunners: { | ||
147 | enabled: true | ||
148 | }, | ||
143 | threads: 4, | 149 | threads: 4, |
144 | profile: 'live_profile', | 150 | profile: 'live_profile', |
145 | resolutions: { | 151 | resolutions: { |
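Both hunks add the same remoteRunners flag to the custom config payload used by the validators: once under the VOD transcoding settings and once under what appears to be the live transcoding settings (the 'live_profile' value suggests this, but the excerpt does not show the enclosing key). A sketch of the relevant fragment, with everything outside the hunks treated as an assumption:

// Fragment of the custom config payload sent by the validator tests.
// Only the keys visible in the hunks above are certain; the surrounding
// structure (e.g. the `live` wrapper) is inferred, not shown by the diff.
const partialCustomConfig = {
  transcoding: {
    enabled: true,
    remoteRunners: {
      enabled: true // new flag added by this commit
    },
    allowAdditionalExtensions: true,
    allowAudioFiles: true,
    concurrency: 1
  },
  live: {
    transcoding: {
      enabled: true,
      remoteRunners: {
        enabled: true // new flag added by this commit
      },
      threads: 4,
      profile: 'live_profile'
    }
  }
}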
diff --git a/server/tests/api/check-params/index.ts b/server/tests/api/check-params/index.ts
index ddbcb42f8..400d312d3 100644
--- a/server/tests/api/check-params/index.ts
+++ b/server/tests/api/check-params/index.ts
@@ -16,6 +16,7 @@ import './my-user' | |||
16 | import './plugins' | 16 | import './plugins' |
17 | import './redundancy' | 17 | import './redundancy' |
18 | import './registrations' | 18 | import './registrations' |
19 | import './runners' | ||
19 | import './search' | 20 | import './search' |
20 | import './services' | 21 | import './services' |
21 | import './transcoding' | 22 | import './transcoding' |
diff --git a/server/tests/api/check-params/runners.ts b/server/tests/api/check-params/runners.ts
new file mode 100644
index 000000000..4da6fd91d
--- /dev/null
+++ b/server/tests/api/check-params/runners.ts
@@ -0,0 +1,702 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | import { checkBadCountPagination, checkBadSortPagination, checkBadStartPagination } from '@server/tests/shared' | ||
3 | import { HttpStatusCode, RunnerJob, RunnerJobState, RunnerJobSuccessPayload, RunnerJobUpdatePayload, VideoPrivacy } from '@shared/models' | ||
4 | import { | ||
5 | cleanupTests, | ||
6 | createSingleServer, | ||
7 | makePostBodyRequest, | ||
8 | PeerTubeServer, | ||
9 | sendRTMPStream, | ||
10 | setAccessTokensToServers, | ||
11 | setDefaultVideoChannel, | ||
12 | stopFfmpeg, | ||
13 | waitJobs | ||
14 | } from '@shared/server-commands' | ||
15 | |||
16 | const badUUID = '910ec12a-d9e6-458b-a274-0abb655f9464' | ||
17 | |||
18 | describe('Test managing runners', function () { | ||
19 | let server: PeerTubeServer | ||
20 | |||
21 | let userToken: string | ||
22 | |||
23 | let registrationTokenId: number | ||
24 | let registrationToken: string | ||
25 | |||
26 | let runnerToken: string | ||
27 | let runnerToken2: string | ||
28 | |||
29 | let completedJobToken: string | ||
30 | let completedJobUUID: string | ||
31 | |||
32 | let cancelledJobUUID: string | ||
33 | |||
34 | before(async function () { | ||
35 | this.timeout(120000) | ||
36 | |||
37 | const config = { | ||
38 | rates_limit: { | ||
39 | api: { | ||
40 | max: 5000 | ||
41 | } | ||
42 | } | ||
43 | } | ||
44 | |||
45 | server = await createSingleServer(1, config) | ||
46 | await setAccessTokensToServers([ server ]) | ||
47 | await setDefaultVideoChannel([ server ]) | ||
48 | |||
49 | userToken = await server.users.generateUserAndToken('user1') | ||
50 | |||
51 | const { data } = await server.runnerRegistrationTokens.list() | ||
52 | registrationToken = data[0].registrationToken | ||
53 | registrationTokenId = data[0].id | ||
54 | |||
55 | await server.config.enableTranscoding(true, true) | ||
56 | await server.config.enableRemoteTranscoding() | ||
57 | runnerToken = await server.runners.autoRegisterRunner() | ||
58 | runnerToken2 = await server.runners.autoRegisterRunner() | ||
59 | |||
60 | { | ||
61 | await server.videos.quickUpload({ name: 'video 1' }) | ||
62 | await server.videos.quickUpload({ name: 'video 2' }) | ||
63 | |||
64 | await waitJobs([ server ]) | ||
65 | |||
66 | { | ||
67 | const job = await server.runnerJobs.autoProcessWebVideoJob(runnerToken) | ||
68 | completedJobToken = job.jobToken | ||
69 | completedJobUUID = job.uuid | ||
70 | } | ||
71 | |||
72 | { | ||
73 | const { job } = await server.runnerJobs.autoAccept({ runnerToken }) | ||
74 | cancelledJobUUID = job.uuid | ||
75 | await server.runnerJobs.cancelByAdmin({ jobUUID: cancelledJobUUID }) | ||
76 | } | ||
77 | } | ||
78 | }) | ||
79 | |||
80 | describe('Managing runner registration tokens', function () { | ||
81 | |||
82 | describe('Common', function () { | ||
83 | |||
84 | it('Should fail to generate, list or delete runner registration token without oauth token', async function () { | ||
85 | const expectedStatus = HttpStatusCode.UNAUTHORIZED_401 | ||
86 | |||
87 | await server.runnerRegistrationTokens.generate({ token: null, expectedStatus }) | ||
88 | await server.runnerRegistrationTokens.list({ token: null, expectedStatus }) | ||
89 | await server.runnerRegistrationTokens.delete({ token: null, id: registrationTokenId, expectedStatus }) | ||
90 | }) | ||
91 | |||
92 | it('Should fail to generate, list or delete runner registration token without admin rights', async function () { | ||
93 | const expectedStatus = HttpStatusCode.FORBIDDEN_403 | ||
94 | |||
95 | await server.runnerRegistrationTokens.generate({ token: userToken, expectedStatus }) | ||
96 | await server.runnerRegistrationTokens.list({ token: userToken, expectedStatus }) | ||
97 | await server.runnerRegistrationTokens.delete({ token: userToken, id: registrationTokenId, expectedStatus }) | ||
98 | }) | ||
99 | }) | ||
100 | |||
101 | describe('Delete', function () { | ||
102 | |||
103 | it('Should fail to delete with a bad id', async function () { | ||
104 | await server.runnerRegistrationTokens.delete({ id: 404, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
105 | }) | ||
106 | }) | ||
107 | |||
108 | describe('List', function () { | ||
109 | const path = '/api/v1/runners/registration-tokens' | ||
110 | |||
111 | it('Should fail to list with a bad start pagination', async function () { | ||
112 | await checkBadStartPagination(server.url, path, server.accessToken) | ||
113 | }) | ||
114 | |||
115 | it('Should fail to list with a bad count pagination', async function () { | ||
116 | await checkBadCountPagination(server.url, path, server.accessToken) | ||
117 | }) | ||
118 | |||
119 | it('Should fail to list with an incorrect sort', async function () { | ||
120 | await checkBadSortPagination(server.url, path, server.accessToken) | ||
121 | }) | ||
122 | |||
123 | it('Should succeed to list with the correct params', async function () { | ||
124 | await server.runnerRegistrationTokens.list({ start: 0, count: 5, sort: '-createdAt' }) | ||
125 | }) | ||
126 | }) | ||
127 | }) | ||
128 | |||
129 | describe('Managing runners', function () { | ||
130 | let toDeleteId: number | ||
131 | |||
132 | describe('Register', function () { | ||
133 | const name = 'runner name' | ||
134 | |||
135 | it('Should fail with a bad registration token', async function () { | ||
136 | const expectedStatus = HttpStatusCode.BAD_REQUEST_400 | ||
137 | |||
138 | await server.runners.register({ name, registrationToken: 'a'.repeat(4000), expectedStatus }) | ||
139 | await server.runners.register({ name, registrationToken: null, expectedStatus }) | ||
140 | }) | ||
141 | |||
142 | it('Should fail with an unknown registration token', async function () { | ||
143 | await server.runners.register({ name, registrationToken: 'aaa', expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
144 | }) | ||
145 | |||
146 | it('Should fail with a bad name', async function () { | ||
147 | const expectedStatus = HttpStatusCode.BAD_REQUEST_400 | ||
148 | |||
149 | await server.runners.register({ name: '', registrationToken, expectedStatus }) | ||
150 | await server.runners.register({ name: 'a'.repeat(200), registrationToken, expectedStatus }) | ||
151 | }) | ||
152 | |||
153 | it('Should fail with an invalid description', async function () { | ||
154 | const expectedStatus = HttpStatusCode.BAD_REQUEST_400 | ||
155 | |||
156 | await server.runners.register({ name, description: '', registrationToken, expectedStatus }) | ||
157 | await server.runners.register({ name, description: 'a'.repeat(5000), registrationToken, expectedStatus }) | ||
158 | }) | ||
159 | |||
160 | it('Should succeed with the correct params', async function () { | ||
161 | const { id } = await server.runners.register({ name, description: 'super description', registrationToken }) | ||
162 | |||
163 | toDeleteId = id | ||
164 | }) | ||
165 | }) | ||
166 | |||
167 | describe('Delete', function () { | ||
168 | |||
169 | it('Should fail without oauth token', async function () { | ||
170 | await server.runners.delete({ token: null, id: toDeleteId, expectedStatus: HttpStatusCode.UNAUTHORIZED_401 }) | ||
171 | }) | ||
172 | |||
173 | it('Should fail without admin rights', async function () { | ||
174 | await server.runners.delete({ token: userToken, id: toDeleteId, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | ||
175 | }) | ||
176 | |||
177 | it('Should fail with a bad id', async function () { | ||
178 | await server.runners.delete({ id: 'hi' as any, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
179 | }) | ||
180 | |||
181 | it('Should fail with an unknown id', async function () { | ||
182 | await server.runners.delete({ id: 404, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
183 | }) | ||
184 | |||
185 | it('Should succeed with the correct params', async function () { | ||
186 | await server.runners.delete({ id: toDeleteId }) | ||
187 | }) | ||
188 | }) | ||
189 | |||
190 | describe('List', function () { | ||
191 | const path = '/api/v1/runners' | ||
192 | |||
193 | it('Should fail without oauth token', async function () { | ||
194 | await server.runners.list({ token: null, expectedStatus: HttpStatusCode.UNAUTHORIZED_401 }) | ||
195 | }) | ||
196 | |||
197 | it('Should fail without admin rights', async function () { | ||
198 | await server.runners.list({ token: userToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | ||
199 | }) | ||
200 | |||
201 | it('Should fail to list with a bad start pagination', async function () { | ||
202 | await checkBadStartPagination(server.url, path, server.accessToken) | ||
203 | }) | ||
204 | |||
205 | it('Should fail to list with a bad count pagination', async function () { | ||
206 | await checkBadCountPagination(server.url, path, server.accessToken) | ||
207 | }) | ||
208 | |||
209 | it('Should fail to list with an incorrect sort', async function () { | ||
210 | await checkBadSortPagination(server.url, path, server.accessToken) | ||
211 | }) | ||
212 | |||
213 | it('Should succeed to list with the correct params', async function () { | ||
214 | await server.runners.list({ start: 0, count: 5, sort: '-createdAt' }) | ||
215 | }) | ||
216 | }) | ||
217 | |||
218 | }) | ||
219 | |||
220 | describe('Runner jobs by admin', function () { | ||
221 | |||
222 | describe('Cancel', function () { | ||
223 | let jobUUID: string | ||
224 | |||
225 | before(async function () { | ||
226 | this.timeout(60000) | ||
227 | |||
228 | await server.videos.quickUpload({ name: 'video' }) | ||
229 | await waitJobs([ server ]) | ||
230 | |||
231 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
232 | jobUUID = availableJobs[0].uuid | ||
233 | }) | ||
234 | |||
235 | it('Should fail without oauth token', async function () { | ||
236 | await server.runnerJobs.cancelByAdmin({ token: null, jobUUID, expectedStatus: HttpStatusCode.UNAUTHORIZED_401 }) | ||
237 | }) | ||
238 | |||
239 | it('Should fail without admin rights', async function () { | ||
240 | await server.runnerJobs.cancelByAdmin({ token: userToken, jobUUID, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | ||
241 | }) | ||
242 | |||
243 | it('Should fail with a bad job uuid', async function () { | ||
244 | await server.runnerJobs.cancelByAdmin({ jobUUID: 'hello', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
245 | }) | ||
246 | |||
247 | it('Should fail with an unknown job uuid', async function () { | ||
248 | const jobUUID = badUUID | ||
249 | await server.runnerJobs.cancelByAdmin({ jobUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
250 | }) | ||
251 | |||
252 | it('Should succeed with the correct params', async function () { | ||
253 | await server.runnerJobs.cancelByAdmin({ jobUUID }) | ||
254 | }) | ||
255 | }) | ||
256 | |||
257 | describe('List', function () { | ||
258 | const path = '/api/v1/runners/jobs' | ||
259 | |||
260 | it('Should fail without oauth token', async function () { | ||
261 | await server.runnerJobs.list({ token: null, expectedStatus: HttpStatusCode.UNAUTHORIZED_401 }) | ||
262 | }) | ||
263 | |||
264 | it('Should fail without admin rights', async function () { | ||
265 | await server.runnerJobs.list({ token: userToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | ||
266 | }) | ||
267 | |||
268 | it('Should fail to list with a bad start pagination', async function () { | ||
269 | await checkBadStartPagination(server.url, path, server.accessToken) | ||
270 | }) | ||
271 | |||
272 | it('Should fail to list with a bad count pagination', async function () { | ||
273 | await checkBadCountPagination(server.url, path, server.accessToken) | ||
274 | }) | ||
275 | |||
276 | it('Should fail to list with an incorrect sort', async function () { | ||
277 | await checkBadSortPagination(server.url, path, server.accessToken) | ||
278 | }) | ||
279 | |||
280 | it('Should succeed to list with the correct params', async function () { | ||
281 | await server.runnerJobs.list({ start: 0, count: 5, sort: '-createdAt' }) | ||
282 | }) | ||
283 | }) | ||
284 | |||
285 | }) | ||
286 | |||
287 | describe('Runner jobs by runners', function () { | ||
288 | let jobUUID: string | ||
289 | let jobToken: string | ||
290 | let videoUUID: string | ||
291 | |||
292 | let jobUUID2: string | ||
293 | let jobToken2: string | ||
294 | |||
295 | let videoUUID2: string | ||
296 | |||
297 | let pendingUUID: string | ||
298 | |||
299 | let liveAcceptedJob: RunnerJob & { jobToken: string } | ||
300 | |||
301 | async function fetchFiles (options: { | ||
302 | jobUUID: string | ||
303 | videoUUID: string | ||
304 | runnerToken: string | ||
305 | jobToken: string | ||
306 | expectedStatus: HttpStatusCode | ||
307 | }) { | ||
308 | const { jobUUID, expectedStatus, videoUUID, runnerToken, jobToken } = options | ||
309 | |||
310 | const basePath = '/api/v1/runners/jobs/' + jobUUID + '/files/videos/' + videoUUID | ||
311 | const paths = [ `${basePath}/max-quality`, `${basePath}/previews/max-quality` ] | ||
312 | |||
313 | for (const path of paths) { | ||
314 | await makePostBodyRequest({ url: server.url, path, fields: { runnerToken, jobToken }, expectedStatus }) | ||
315 | } | ||
316 | } | ||
317 | |||
318 | before(async function () { | ||
319 | this.timeout(120000) | ||
320 | |||
321 | { | ||
322 | await server.runnerJobs.cancelAllJobs({ state: RunnerJobState.PENDING }) | ||
323 | } | ||
324 | |||
325 | { | ||
326 | const { uuid } = await server.videos.quickUpload({ name: 'video' }) | ||
327 | videoUUID = uuid | ||
328 | |||
329 | await waitJobs([ server ]) | ||
330 | |||
331 | const { job } = await server.runnerJobs.autoAccept({ runnerToken }) | ||
332 | jobUUID = job.uuid | ||
333 | jobToken = job.jobToken | ||
334 | } | ||
335 | |||
336 | { | ||
337 | const { uuid } = await server.videos.quickUpload({ name: 'video' }) | ||
338 | videoUUID2 = uuid | ||
339 | |||
340 | await waitJobs([ server ]) | ||
341 | |||
342 | const { job } = await server.runnerJobs.autoAccept({ runnerToken: runnerToken2 }) | ||
343 | jobUUID2 = job.uuid | ||
344 | jobToken2 = job.jobToken | ||
345 | } | ||
346 | |||
347 | { | ||
348 | await server.videos.quickUpload({ name: 'video' }) | ||
349 | await waitJobs([ server ]) | ||
350 | |||
351 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
352 | pendingUUID = availableJobs[0].uuid | ||
353 | } | ||
354 | |||
355 | { | ||
356 | await server.config.enableLive({ | ||
357 | allowReplay: false, | ||
358 | resolutions: 'max', | ||
359 | transcoding: true | ||
360 | }) | ||
361 | |||
362 | const { live } = await server.live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.PUBLIC }) | ||
363 | |||
364 | const ffmpegCommand = sendRTMPStream({ rtmpBaseUrl: live.rtmpUrl, streamKey: live.streamKey }) | ||
365 | await waitJobs([ server ]) | ||
366 | |||
367 | await server.runnerJobs.requestLiveJob(runnerToken) | ||
368 | |||
369 | const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'live-rtmp-hls-transcoding' }) | ||
370 | liveAcceptedJob = job | ||
371 | |||
372 | await stopFfmpeg(ffmpegCommand) | ||
373 | } | ||
374 | }) | ||
375 | |||
376 | describe('Common runner tokens validations', function () { | ||
377 | |||
378 | async function testEndpoints (options: { | ||
379 | jobUUID: string | ||
380 | runnerToken: string | ||
381 | jobToken: string | ||
382 | expectedStatus: HttpStatusCode | ||
383 | }) { | ||
384 | await fetchFiles({ ...options, videoUUID }) | ||
385 | |||
386 | await server.runnerJobs.abort({ ...options, reason: 'reason' }) | ||
387 | await server.runnerJobs.update({ ...options }) | ||
388 | await server.runnerJobs.error({ ...options, message: 'message' }) | ||
389 | await server.runnerJobs.success({ ...options, payload: { videoFile: 'video_short.mp4' } }) | ||
390 | } | ||
391 | |||
392 | it('Should fail with an invalid job uuid', async function () { | ||
393 | await testEndpoints({ jobUUID: 'a', runnerToken, jobToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
394 | }) | ||
395 | |||
396 | it('Should fail with an unknown job uuid', async function () { | ||
397 | const jobUUID = badUUID | ||
398 | await testEndpoints({ jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
399 | }) | ||
400 | |||
401 | it('Should fail with an invalid runner token', async function () { | ||
402 | await testEndpoints({ jobUUID, runnerToken: '', jobToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
403 | }) | ||
404 | |||
405 | it('Should fail with an unknown runner token', async function () { | ||
406 | const runnerToken = badUUID | ||
407 | await testEndpoints({ jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
408 | }) | ||
409 | |||
410 | it('Should fail with an invalid job token job uuid', async function () { | ||
411 | await testEndpoints({ jobUUID, runnerToken, jobToken: '', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
412 | }) | ||
413 | |||
414 | it('Should fail with an unknown job token job uuid', async function () { | ||
415 | const jobToken = badUUID | ||
416 | await testEndpoints({ jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
417 | }) | ||
418 | |||
419 | it('Should fail with a runner token not associated to this job', async function () { | ||
420 | await testEndpoints({ jobUUID, runnerToken: runnerToken2, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
421 | }) | ||
422 | |||
423 | it('Should fail with a job uuid not associated to the job token', async function () { | ||
424 | await testEndpoints({ jobUUID: jobUUID2, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
425 | await testEndpoints({ jobUUID, runnerToken, jobToken: jobToken2, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
426 | }) | ||
427 | }) | ||
428 | |||
429 | describe('Unregister', function () { | ||
430 | |||
431 | it('Should fail without a runner token', async function () { | ||
432 | await server.runners.unregister({ runnerToken: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
433 | }) | ||
434 | |||
435 | it('Should fail with a bad runner token', async function () { ||
436 | await server.runners.unregister({ runnerToken: '', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
437 | }) | ||
438 | |||
439 | it('Should fail with an unknown runner token', async function () { | ||
440 | await server.runners.unregister({ runnerToken: badUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
441 | }) | ||
442 | }) | ||
443 | |||
444 | describe('Request', function () { | ||
445 | |||
446 | it('Should fail without a runner token', async function () { | ||
447 | await server.runnerJobs.request({ runnerToken: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
448 | }) | ||
449 | |||
450 | it('Should fail with a bad runner token', async function () { ||
451 | await server.runnerJobs.request({ runnerToken: '', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
452 | }) | ||
453 | |||
454 | it('Should fail with an unknown runner token', async function () { | ||
455 | await server.runnerJobs.request({ runnerToken: badUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
456 | }) | ||
457 | }) | ||
458 | |||
459 | describe('Accept', function () { | ||
460 | |||
461 | it('Should fail with a bad job uuid', async function () { ||
462 | await server.runnerJobs.accept({ jobUUID: '', runnerToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
463 | }) | ||
464 | |||
465 | it('Should fail with an unknown job uuid', async function () { | ||
466 | await server.runnerJobs.accept({ jobUUID: badUUID, runnerToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
467 | }) | ||
468 | |||
469 | it('Should fail with a job not in pending state', async function () { | ||
470 | await server.runnerJobs.accept({ jobUUID: completedJobUUID, runnerToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
471 | await server.runnerJobs.accept({ jobUUID: cancelledJobUUID, runnerToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
472 | }) | ||
473 | |||
474 | it('Should fail without a runner token', async function () { | ||
475 | await server.runnerJobs.accept({ jobUUID: pendingUUID, runnerToken: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
476 | }) | ||
477 | |||
478 | it('Should fail with a bad runner token', async function () { ||
479 | await server.runnerJobs.accept({ jobUUID: pendingUUID, runnerToken: '', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
480 | }) | ||
481 | |||
482 | it('Should fail with an unknown runner token', async function () { | ||
483 | await server.runnerJobs.accept({ jobUUID: pendingUUID, runnerToken: badUUID, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
484 | }) | ||
485 | }) | ||
486 | |||
487 | describe('Abort', function () { | ||
488 | |||
489 | it('Should fail without a reason', async function () { | ||
490 | await server.runnerJobs.abort({ jobUUID, jobToken, runnerToken, reason: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
491 | }) | ||
492 | |||
493 | it('Should fail with a bad reason', async function () { | ||
494 | const reason = 'reason'.repeat(5000) | ||
495 | await server.runnerJobs.abort({ jobUUID, jobToken, runnerToken, reason, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
496 | }) | ||
497 | |||
498 | it('Should fail with a job not in processing state', async function () { | ||
499 | await server.runnerJobs.abort({ | ||
500 | jobUUID: completedJobUUID, | ||
501 | jobToken: completedJobToken, | ||
502 | runnerToken, | ||
503 | reason: 'reason', | ||
504 | expectedStatus: HttpStatusCode.BAD_REQUEST_400 | ||
505 | }) | ||
506 | }) | ||
507 | }) | ||
508 | |||
509 | describe('Update', function () { | ||
510 | |||
511 | describe('Common', function () { | ||
512 | |||
513 | it('Should fail with an invalid progress', async function () { | ||
514 | await server.runnerJobs.update({ jobUUID, jobToken, runnerToken, progress: 101, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
515 | }) | ||
516 | |||
517 | it('Should fail with a job not in processing state', async function () { | ||
518 | await server.runnerJobs.update({ | ||
519 | jobUUID: completedJobUUID, | ||
520 | jobToken: completedJobToken, | ||
521 | runnerToken, | ||
522 | expectedStatus: HttpStatusCode.BAD_REQUEST_400 | ||
523 | }) | ||
524 | }) | ||
525 | }) | ||
526 | |||
527 | describe('Live RTMP to HLS', function () { | ||
528 | const base: RunnerJobUpdatePayload = { | ||
529 | masterPlaylistFile: 'live/master.m3u8', | ||
530 | resolutionPlaylistFilename: '0.m3u8', | ||
531 | resolutionPlaylistFile: 'live/1.m3u8', | ||
532 | type: 'add-chunk', | ||
533 | videoChunkFile: 'live/1-000069.ts', | ||
534 | videoChunkFilename: '1-000068.ts' | ||
535 | } | ||
536 | |||
537 | function testUpdate (payload: RunnerJobUpdatePayload) { | ||
538 | return server.runnerJobs.update({ | ||
539 | jobUUID: liveAcceptedJob.uuid, | ||
540 | jobToken: liveAcceptedJob.jobToken, | ||
541 | payload, | ||
542 | runnerToken, | ||
543 | expectedStatus: HttpStatusCode.BAD_REQUEST_400 | ||
544 | }) | ||
545 | } | ||
546 | |||
547 | it('Should fail with an invalid resolutionPlaylistFilename', async function () { | ||
548 | await testUpdate({ ...base, resolutionPlaylistFilename: undefined }) | ||
549 | await testUpdate({ ...base, resolutionPlaylistFilename: 'coucou/hello' }) | ||
550 | await testUpdate({ ...base, resolutionPlaylistFilename: 'hello' }) | ||
551 | }) | ||
552 | |||
553 | it('Should fail with an invalid videoChunkFilename', async function () { | ||
554 | await testUpdate({ ...base, videoChunkFilename: undefined }) ||
555 | await testUpdate({ ...base, videoChunkFilename: 'coucou/hello' }) ||
556 | await testUpdate({ ...base, videoChunkFilename: 'hello' }) ||
557 | }) | ||
558 | |||
559 | it('Should fail with an invalid type', async function () { | ||
560 | await testUpdate({ ...base, type: undefined }) | ||
561 | await testUpdate({ ...base, type: 'toto' as any }) | ||
562 | }) | ||
563 | |||
564 | it('Should succeed with the correct params', async function () { | ||
565 | await server.runnerJobs.update({ | ||
566 | jobUUID: liveAcceptedJob.uuid, | ||
567 | jobToken: liveAcceptedJob.jobToken, | ||
568 | payload: base, | ||
569 | runnerToken | ||
570 | }) | ||
571 | |||
572 | await server.runnerJobs.update({ | ||
573 | jobUUID: liveAcceptedJob.uuid, | ||
574 | jobToken: liveAcceptedJob.jobToken, | ||
575 | payload: { ...base, masterPlaylistFile: undefined }, | ||
576 | runnerToken | ||
577 | }) | ||
578 | }) | ||
579 | }) | ||
580 | }) | ||
581 | |||
582 | describe('Error', function () { | ||
583 | |||
584 | it('Should fail with a missing error message', async function () { | ||
585 | await server.runnerJobs.error({ jobUUID, jobToken, runnerToken, message: null, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
586 | }) | ||
587 | |||
588 | it('Should fail with an invalid error message', async function () { ||
589 | const message = 'a'.repeat(6000) | ||
590 | await server.runnerJobs.error({ jobUUID, jobToken, runnerToken, message, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
591 | }) | ||
592 | |||
593 | it('Should fail with a job not in processing state', async function () { | ||
594 | await server.runnerJobs.error({ | ||
595 | jobUUID: completedJobUUID, | ||
596 | jobToken: completedJobToken, | ||
597 | message: 'my message', | ||
598 | runnerToken, | ||
599 | expectedStatus: HttpStatusCode.BAD_REQUEST_400 | ||
600 | }) | ||
601 | }) | ||
602 | }) | ||
603 | |||
604 | describe('Success', function () { | ||
605 | let vodJobUUID: string | ||
606 | let vodJobToken: string | ||
607 | |||
608 | describe('Common', function () { | ||
609 | |||
610 | it('Should fail with a job not in processing state', async function () { | ||
611 | await server.runnerJobs.success({ | ||
612 | jobUUID: completedJobUUID, | ||
613 | jobToken: completedJobToken, | ||
614 | payload: { videoFile: 'video_short.mp4' }, | ||
615 | runnerToken, | ||
616 | expectedStatus: HttpStatusCode.BAD_REQUEST_400 | ||
617 | }) | ||
618 | }) | ||
619 | }) | ||
620 | |||
621 | describe('VOD', function () { | ||
622 | |||
623 | it('Should fail with an invalid vod web video payload', async function () { | ||
624 | const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'vod-web-video-transcoding' }) | ||
625 | |||
626 | await server.runnerJobs.success({ | ||
627 | jobUUID: job.uuid, | ||
628 | jobToken: job.jobToken, | ||
629 | payload: { hello: 'video_short.mp4' } as any, | ||
630 | runnerToken, | ||
631 | expectedStatus: HttpStatusCode.BAD_REQUEST_400 | ||
632 | }) | ||
633 | |||
634 | vodJobUUID = job.uuid | ||
635 | vodJobToken = job.jobToken | ||
636 | }) | ||
637 | |||
638 | it('Should fail with an invalid vod hls payload', async function () { | ||
639 | // To create HLS jobs | ||
640 | const payload: RunnerJobSuccessPayload = { videoFile: 'video_short.mp4' } | ||
641 | await server.runnerJobs.success({ runnerToken, jobUUID: vodJobUUID, jobToken: vodJobToken, payload }) | ||
642 | |||
643 | await waitJobs([ server ]) | ||
644 | |||
645 | const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'vod-hls-transcoding' }) | ||
646 | |||
647 | await server.runnerJobs.success({ | ||
648 | jobUUID: job.uuid, | ||
649 | jobToken: job.jobToken, | ||
650 | payload: { videoFile: 'video_short.mp4' } as any, | ||
651 | runnerToken, | ||
652 | expectedStatus: HttpStatusCode.BAD_REQUEST_400 | ||
653 | }) | ||
654 | }) | ||
655 | |||
656 | it('Should fail with an invalid vod audio merge payload', async function () { | ||
657 | const attributes = { name: 'audio_with_preview', previewfile: 'preview.jpg', fixture: 'sample.ogg' } | ||
658 | await server.videos.upload({ attributes, mode: 'legacy' }) | ||
659 | |||
660 | await waitJobs([ server ]) | ||
661 | |||
662 | const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'vod-audio-merge-transcoding' }) | ||
663 | |||
664 | await server.runnerJobs.success({ | ||
665 | jobUUID: job.uuid, | ||
666 | jobToken: job.jobToken, | ||
667 | payload: { hello: 'video_short.mp4' } as any, | ||
668 | runnerToken, | ||
669 | expectedStatus: HttpStatusCode.BAD_REQUEST_400 | ||
670 | }) | ||
671 | }) | ||
672 | }) | ||
673 | }) | ||
674 | |||
675 | describe('Job files', function () { | ||
676 | |||
677 | describe('Video files', function () { | ||
678 | |||
679 | it('Should fail with an invalid video id', async function () { | ||
680 | await fetchFiles({ videoUUID: 'a', jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | ||
681 | }) | ||
682 | |||
683 | it('Should fail with an unknown video id', async function () { | ||
684 | const videoUUID = '910ec12a-d9e6-458b-a274-0abb655f9464' | ||
685 | await fetchFiles({ videoUUID, jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
686 | }) | ||
687 | |||
688 | it('Should fail with a video id not associated to this job', async function () { | ||
689 | await fetchFiles({ videoUUID: videoUUID2, jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | ||
690 | }) | ||
691 | |||
692 | it('Should succeed with the correct params', async function () { | ||
693 | await fetchFiles({ videoUUID, jobUUID, runnerToken, jobToken, expectedStatus: HttpStatusCode.OK_200 }) | ||
694 | }) | ||
695 | }) | ||
696 | }) | ||
697 | }) | ||
698 | |||
699 | after(async function () { | ||
700 | await cleanupTests([ server ]) | ||
701 | }) | ||
702 | }) | ||
diff --git a/server/tests/api/check-params/video-blacklist.ts b/server/tests/api/check-params/video-blacklist.ts index 4dc84d3f2..8e9f61596 100644 --- a/server/tests/api/check-params/video-blacklist.ts +++ b/server/tests/api/check-params/video-blacklist.ts | |||
@@ -278,7 +278,7 @@ describe('Test video blacklist API validators', function () { | |||
278 | }) | 278 | }) |
279 | 279 | ||
280 | it('Should fail with an invalid type', async function () { | 280 | it('Should fail with an invalid type', async function () { |
281 | await servers[0].blacklist.list({ type: 0, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | 281 | await servers[0].blacklist.list({ type: 0 as any, expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) |
282 | }) | 282 | }) |
283 | 283 | ||
284 | it('Should succeed with the correct parameters', async function () { | 284 | it('Should succeed with the correct parameters', async function () { |
diff --git a/server/tests/api/check-params/video-playlists.ts b/server/tests/api/check-params/video-playlists.ts index 6cb34c8a1..8090897c1 100644 --- a/server/tests/api/check-params/video-playlists.ts +++ b/server/tests/api/check-params/video-playlists.ts | |||
@@ -239,7 +239,7 @@ describe('Test video playlists API validator', function () { | |||
239 | }) | 239 | }) |
240 | 240 | ||
241 | it('Should fail with an incorrect privacy', async function () { | 241 | it('Should fail with an incorrect privacy', async function () { |
242 | const params = getBase({ privacy: 45 }) | 242 | const params = getBase({ privacy: 45 as any }) |
243 | 243 | ||
244 | await command.create(params) | 244 | await command.create(params) |
245 | await command.update(getUpdate(params, playlist.shortUUID)) | 245 | await command.update(getUpdate(params, playlist.shortUUID)) |
diff --git a/server/tests/api/check-params/videos.ts b/server/tests/api/check-params/videos.ts index 2a83143e2..572ca8997 100644 --- a/server/tests/api/check-params/videos.ts +++ b/server/tests/api/check-params/videos.ts | |||
@@ -421,9 +421,9 @@ describe('Test videos API validator', function () { | |||
421 | const error = body as unknown as PeerTubeProblemDocument | 421 | const error = body as unknown as PeerTubeProblemDocument |
422 | 422 | ||
423 | if (mode === 'legacy') { | 423 | if (mode === 'legacy') { |
424 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/uploadLegacy') | 424 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/uploadLegacy') |
425 | } else { | 425 | } else { |
426 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/uploadResumableInit') | 426 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/uploadResumableInit') |
427 | } | 427 | } |
428 | 428 | ||
429 | expect(error.type).to.equal('about:blank') | 429 | expect(error.type).to.equal('about:blank') |
@@ -680,7 +680,7 @@ describe('Test videos API validator', function () { | |||
680 | const res = await makePutBodyRequest({ url: server.url, path: path + video.shortUUID, token: server.accessToken, fields }) | 680 | const res = await makePutBodyRequest({ url: server.url, path: path + video.shortUUID, token: server.accessToken, fields }) |
681 | const error = res.body as PeerTubeProblemDocument | 681 | const error = res.body as PeerTubeProblemDocument |
682 | 682 | ||
683 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/putVideo') | 683 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/putVideo') |
684 | 684 | ||
685 | expect(error.type).to.equal('about:blank') | 685 | expect(error.type).to.equal('about:blank') |
686 | expect(error.title).to.equal('Bad Request') | 686 | expect(error.title).to.equal('Bad Request') |
@@ -729,7 +729,7 @@ describe('Test videos API validator', function () { | |||
729 | const body = await server.videos.get({ id: 'hi', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | 729 | const body = await server.videos.get({ id: 'hi', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) |
730 | const error = body as unknown as PeerTubeProblemDocument | 730 | const error = body as unknown as PeerTubeProblemDocument |
731 | 731 | ||
732 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/getVideo') | 732 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/getVideo') |
733 | 733 | ||
734 | expect(error.type).to.equal('about:blank') | 734 | expect(error.type).to.equal('about:blank') |
735 | expect(error.title).to.equal('Bad Request') | 735 | expect(error.title).to.equal('Bad Request') |
@@ -835,7 +835,7 @@ describe('Test videos API validator', function () { | |||
835 | const body = await server.videos.remove({ id: 'hello', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) | 835 | const body = await server.videos.remove({ id: 'hello', expectedStatus: HttpStatusCode.BAD_REQUEST_400 }) |
836 | const error = body as PeerTubeProblemDocument | 836 | const error = body as PeerTubeProblemDocument |
837 | 837 | ||
838 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api/rest-reference.html#operation/delVideo') | 838 | expect(error.docs).to.equal('https://docs.joinpeertube.org/api-rest-reference.html#operation/delVideo') |
839 | 839 | ||
840 | expect(error.type).to.equal('about:blank') | 840 | expect(error.type).to.equal('about:blank') |
841 | expect(error.title).to.equal('Bad Request') | 841 | expect(error.title).to.equal('Bad Request') |
diff --git a/server/tests/api/index.ts b/server/tests/api/index.ts index 61352a134..ef0c83294 100644 --- a/server/tests/api/index.ts +++ b/server/tests/api/index.ts | |||
@@ -5,6 +5,7 @@ import './moderation' | |||
5 | import './object-storage' | 5 | import './object-storage' |
6 | import './notifications' | 6 | import './notifications' |
7 | import './redundancy' | 7 | import './redundancy' |
8 | import './runners' | ||
8 | import './search' | 9 | import './search' |
9 | import './server' | 10 | import './server' |
10 | import './transcoding' | 11 | import './transcoding' |
diff --git a/server/tests/api/live/live.ts b/server/tests/api/live/live.ts index ceb606af1..f9b0d257b 100644 --- a/server/tests/api/live/live.ts +++ b/server/tests/api/live/live.ts | |||
@@ -2,9 +2,9 @@ | |||
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { basename, join } from 'path' | 4 | import { basename, join } from 'path' |
5 | import { ffprobePromise, getVideoStream } from '@server/helpers/ffmpeg' | 5 | import { SQLCommand, testImage, testLiveVideoResolutions } from '@server/tests/shared' |
6 | import { testImage, testVideoResolutions } from '@server/tests/shared' | ||
7 | import { getAllFiles, wait } from '@shared/core-utils' | 6 | import { getAllFiles, wait } from '@shared/core-utils' |
7 | import { ffprobePromise, getVideoStream } from '@shared/ffmpeg' | ||
8 | import { | 8 | import { |
9 | HttpStatusCode, | 9 | HttpStatusCode, |
10 | LiveVideo, | 10 | LiveVideo, |
@@ -365,6 +365,7 @@ describe('Test live', function () { | |||
365 | 365 | ||
366 | describe('Live transcoding', function () { | 366 | describe('Live transcoding', function () { |
367 | let liveVideoId: string | 367 | let liveVideoId: string |
368 | let sqlCommandServer1: SQLCommand | ||
368 | 369 | ||
369 | async function createLiveWrapper (saveReplay: boolean) { | 370 | async function createLiveWrapper (saveReplay: boolean) { |
370 | const liveAttributes = { | 371 | const liveAttributes = { |
@@ -407,6 +408,8 @@ describe('Test live', function () { | |||
407 | 408 | ||
408 | before(async function () { | 409 | before(async function () { |
409 | await updateConf([]) | 410 | await updateConf([]) |
411 | |||
412 | sqlCommandServer1 = new SQLCommand(servers[0]) | ||
410 | }) | 413 | }) |
411 | 414 | ||
412 | it('Should enable transcoding without additional resolutions', async function () { | 415 | it('Should enable transcoding without additional resolutions', async function () { |
@@ -418,8 +421,9 @@ describe('Test live', function () { | |||
418 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) | 421 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) |
419 | await waitJobs(servers) | 422 | await waitJobs(servers) |
420 | 423 | ||
421 | await testVideoResolutions({ | 424 | await testLiveVideoResolutions({ |
422 | originServer: servers[0], | 425 | originServer: servers[0], |
426 | sqlCommand: sqlCommandServer1, | ||
423 | servers, | 427 | servers, |
424 | liveVideoId, | 428 | liveVideoId, |
425 | resolutions: [ 720 ], | 429 | resolutions: [ 720 ], |
@@ -453,8 +457,9 @@ describe('Test live', function () { | |||
453 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) | 457 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) |
454 | await waitJobs(servers) | 458 | await waitJobs(servers) |
455 | 459 | ||
456 | await testVideoResolutions({ | 460 | await testLiveVideoResolutions({ |
457 | originServer: servers[0], | 461 | originServer: servers[0], |
462 | sqlCommand: sqlCommandServer1, | ||
458 | servers, | 463 | servers, |
459 | liveVideoId, | 464 | liveVideoId, |
460 | resolutions: resolutions.concat([ 720 ]), | 465 | resolutions: resolutions.concat([ 720 ]), |
@@ -505,8 +510,9 @@ describe('Test live', function () { | |||
505 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) | 510 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) |
506 | await waitJobs(servers) | 511 | await waitJobs(servers) |
507 | 512 | ||
508 | await testVideoResolutions({ | 513 | await testLiveVideoResolutions({ |
509 | originServer: servers[0], | 514 | originServer: servers[0], |
515 | sqlCommand: sqlCommandServer1, | ||
510 | servers, | 516 | servers, |
511 | liveVideoId, | 517 | liveVideoId, |
512 | resolutions, | 518 | resolutions, |
@@ -601,8 +607,9 @@ describe('Test live', function () { | |||
601 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) | 607 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) |
602 | await waitJobs(servers) | 608 | await waitJobs(servers) |
603 | 609 | ||
604 | await testVideoResolutions({ | 610 | await testLiveVideoResolutions({ |
605 | originServer: servers[0], | 611 | originServer: servers[0], |
612 | sqlCommand: sqlCommandServer1, | ||
606 | servers, | 613 | servers, |
607 | liveVideoId, | 614 | liveVideoId, |
608 | resolutions, | 615 | resolutions, |
@@ -637,8 +644,9 @@ describe('Test live', function () { | |||
637 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) | 644 | await waitUntilLivePublishedOnAllServers(servers, liveVideoId) |
638 | await waitJobs(servers) | 645 | await waitJobs(servers) |
639 | 646 | ||
640 | await testVideoResolutions({ | 647 | await testLiveVideoResolutions({ |
641 | originServer: servers[0], | 648 | originServer: servers[0], |
649 | sqlCommand: sqlCommandServer1, | ||
642 | servers, | 650 | servers, |
643 | liveVideoId, | 651 | liveVideoId, |
644 | resolutions: [ 720 ], | 652 | resolutions: [ 720 ], |
@@ -661,6 +669,10 @@ describe('Test live', function () { | |||
661 | 669 | ||
662 | expect(hlsFiles[0].resolution.id).to.equal(720) | 670 | expect(hlsFiles[0].resolution.id).to.equal(720) |
663 | }) | 671 | }) |
672 | |||
673 | after(async function () { | ||
674 | await sqlCommandServer1.cleanup() | ||
675 | }) | ||
664 | }) | 676 | }) |
665 | 677 | ||
666 | describe('After a server restart', function () { | 678 | describe('After a server restart', function () { |
diff --git a/server/tests/api/notifications/admin-notifications.ts b/server/tests/api/notifications/admin-notifications.ts index 6f059f622..4824542c9 100644 --- a/server/tests/api/notifications/admin-notifications.ts +++ b/server/tests/api/notifications/admin-notifications.ts | |||
@@ -7,7 +7,8 @@ import { | |||
7 | checkNewPluginVersion, | 7 | checkNewPluginVersion, |
8 | MockJoinPeerTubeVersions, | 8 | MockJoinPeerTubeVersions, |
9 | MockSmtpServer, | 9 | MockSmtpServer, |
10 | prepareNotificationsTest | 10 | prepareNotificationsTest, |
11 | SQLCommand | ||
11 | } from '@server/tests/shared' | 12 | } from '@server/tests/shared' |
12 | import { wait } from '@shared/core-utils' | 13 | import { wait } from '@shared/core-utils' |
13 | import { PluginType, UserNotification, UserNotificationType } from '@shared/models' | 14 | import { PluginType, UserNotification, UserNotificationType } from '@shared/models' |
@@ -15,6 +16,7 @@ import { cleanupTests, PeerTubeServer } from '@shared/server-commands' | |||
15 | 16 | ||
16 | describe('Test admin notifications', function () { | 17 | describe('Test admin notifications', function () { |
17 | let server: PeerTubeServer | 18 | let server: PeerTubeServer |
19 | let sqlCommand: SQLCommand | ||
18 | let userNotifications: UserNotification[] = [] | 20 | let userNotifications: UserNotification[] = [] |
19 | let adminNotifications: UserNotification[] = [] | 21 | let adminNotifications: UserNotification[] = [] |
20 | let emails: object[] = [] | 22 | let emails: object[] = [] |
@@ -58,6 +60,8 @@ describe('Test admin notifications', function () { | |||
58 | 60 | ||
59 | await server.plugins.install({ npmName: 'peertube-plugin-hello-world' }) | 61 | await server.plugins.install({ npmName: 'peertube-plugin-hello-world' }) |
60 | await server.plugins.install({ npmName: 'peertube-theme-background-red' }) | 62 | await server.plugins.install({ npmName: 'peertube-theme-background-red' }) |
63 | |||
64 | sqlCommand = new SQLCommand(server) | ||
61 | }) | 65 | }) |
62 | 66 | ||
63 | describe('Latest PeerTube version notification', function () { | 67 | describe('Latest PeerTube version notification', function () { |
@@ -116,8 +120,8 @@ describe('Test admin notifications', function () { | |||
116 | it('Should send a notification to admins on new plugin version', async function () { | 120 | it('Should send a notification to admins on new plugin version', async function () { |
117 | this.timeout(30000) | 121 | this.timeout(30000) |
118 | 122 | ||
119 | await server.sql.setPluginVersion('hello-world', '0.0.1') | 123 | await sqlCommand.setPluginVersion('hello-world', '0.0.1') |
120 | await server.sql.setPluginLatestVersion('hello-world', '0.0.1') | 124 | await sqlCommand.setPluginLatestVersion('hello-world', '0.0.1') |
121 | await wait(6000) | 125 | await wait(6000) |
122 | 126 | ||
123 | await checkNewPluginVersion({ ...baseParams, pluginType: PluginType.PLUGIN, pluginName: 'hello-world', checkType: 'presence' }) | 127 | await checkNewPluginVersion({ ...baseParams, pluginType: PluginType.PLUGIN, pluginName: 'hello-world', checkType: 'presence' }) |
@@ -138,8 +142,8 @@ describe('Test admin notifications', function () { | |||
138 | it('Should send a new notification after a new plugin release', async function () { | 142 | it('Should send a new notification after a new plugin release', async function () { |
139 | this.timeout(30000) | 143 | this.timeout(30000) |
140 | 144 | ||
141 | await server.sql.setPluginVersion('hello-world', '0.0.1') | 145 | await sqlCommand.setPluginVersion('hello-world', '0.0.1') |
142 | await server.sql.setPluginLatestVersion('hello-world', '0.0.1') | 146 | await sqlCommand.setPluginLatestVersion('hello-world', '0.0.1') |
143 | await wait(6000) | 147 | await wait(6000) |
144 | 148 | ||
145 | expect(adminNotifications.filter(n => n.type === UserNotificationType.NEW_PEERTUBE_VERSION)).to.have.lengthOf(2) | 149 | expect(adminNotifications.filter(n => n.type === UserNotificationType.NEW_PEERTUBE_VERSION)).to.have.lengthOf(2) |
@@ -149,6 +153,7 @@ describe('Test admin notifications', function () { | |||
149 | after(async function () { | 153 | after(async function () { |
150 | MockSmtpServer.Instance.kill() | 154 | MockSmtpServer.Instance.kill() |
151 | 155 | ||
156 | await sqlCommand.cleanup() | ||
152 | await cleanupTests([ server ]) | 157 | await cleanupTests([ server ]) |
153 | }) | 158 | }) |
154 | }) | 159 | }) |
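The SQLCommand helper introduced throughout these tests follows one lifecycle: construct it once per server in a before() hook, run queries through it during the tests, and release it in the after() hook before cleanupTests(). A minimal sketch of that pattern, assuming the helper simply wraps a dedicated database connection (method names are the ones used in the hunks above; the wrapper function is illustrative only):

    import { SQLCommand } from '@server/tests/shared'
    import { PeerTubeServer } from '@shared/server-commands'

    async function simulatePluginUpdate (server: PeerTubeServer) {
      // before(): open one SQL command per server under test
      const sqlCommand = new SQLCommand(server)

      // during the tests: tweak database state through the shared helper
      await sqlCommand.setPluginVersion('hello-world', '0.0.1')
      await sqlCommand.setPluginLatestVersion('hello-world', '0.0.1')

      // after(): release the underlying connection before cleanupTests()
      await sqlCommand.cleanup()
    }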
diff --git a/server/tests/api/object-storage/live.ts b/server/tests/api/object-storage/live.ts index 588e0a8d7..c430cd0a0 100644 --- a/server/tests/api/object-storage/live.ts +++ b/server/tests/api/object-storage/live.ts | |||
@@ -1,7 +1,7 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { expectStartWith, MockObjectStorageProxy, testVideoResolutions } from '@server/tests/shared' | 4 | import { expectStartWith, MockObjectStorageProxy, SQLCommand, testLiveVideoResolutions } from '@server/tests/shared' |
5 | import { areMockObjectStorageTestsDisabled } from '@shared/core-utils' | 5 | import { areMockObjectStorageTestsDisabled } from '@shared/core-utils' |
6 | import { HttpStatusCode, LiveVideoCreate, VideoPrivacy } from '@shared/models' | 6 | import { HttpStatusCode, LiveVideoCreate, VideoPrivacy } from '@shared/models' |
7 | import { | 7 | import { |
@@ -79,6 +79,7 @@ describe('Object storage for lives', function () { | |||
79 | if (areMockObjectStorageTestsDisabled()) return | 79 | if (areMockObjectStorageTestsDisabled()) return |
80 | 80 | ||
81 | let servers: PeerTubeServer[] | 81 | let servers: PeerTubeServer[] |
82 | let sqlCommandServer1: SQLCommand | ||
82 | 83 | ||
83 | before(async function () { | 84 | before(async function () { |
84 | this.timeout(120000) | 85 | this.timeout(120000) |
@@ -92,6 +93,8 @@ describe('Object storage for lives', function () { | |||
92 | await doubleFollow(servers[0], servers[1]) | 93 | await doubleFollow(servers[0], servers[1]) |
93 | 94 | ||
94 | await servers[0].config.enableTranscoding() | 95 | await servers[0].config.enableTranscoding() |
96 | |||
97 | sqlCommandServer1 = new SQLCommand(servers[0]) | ||
95 | }) | 98 | }) |
96 | 99 | ||
97 | describe('Without live transcoding', function () { | 100 | describe('Without live transcoding', function () { |
@@ -109,8 +112,9 @@ describe('Object storage for lives', function () { | |||
109 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID }) | 112 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUID }) |
110 | await waitUntilLivePublishedOnAllServers(servers, videoUUID) | 113 | await waitUntilLivePublishedOnAllServers(servers, videoUUID) |
111 | 114 | ||
112 | await testVideoResolutions({ | 115 | await testLiveVideoResolutions({ |
113 | originServer: servers[0], | 116 | originServer: servers[0], |
117 | sqlCommand: sqlCommandServer1, | ||
114 | servers, | 118 | servers, |
115 | liveVideoId: videoUUID, | 119 | liveVideoId: videoUUID, |
116 | resolutions: [ 720 ], | 120 | resolutions: [ 720 ], |
@@ -155,8 +159,9 @@ describe('Object storage for lives', function () { | |||
155 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDNonPermanent }) | 159 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDNonPermanent }) |
156 | await waitUntilLivePublishedOnAllServers(servers, videoUUIDNonPermanent) | 160 | await waitUntilLivePublishedOnAllServers(servers, videoUUIDNonPermanent) |
157 | 161 | ||
158 | await testVideoResolutions({ | 162 | await testLiveVideoResolutions({ |
159 | originServer: servers[0], | 163 | originServer: servers[0], |
164 | sqlCommand: sqlCommandServer1, | ||
160 | servers, | 165 | servers, |
161 | liveVideoId: videoUUIDNonPermanent, | 166 | liveVideoId: videoUUIDNonPermanent, |
162 | resolutions, | 167 | resolutions, |
@@ -194,8 +199,9 @@ describe('Object storage for lives', function () { | |||
194 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDPermanent }) | 199 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDPermanent }) |
195 | await waitUntilLivePublishedOnAllServers(servers, videoUUIDPermanent) | 200 | await waitUntilLivePublishedOnAllServers(servers, videoUUIDPermanent) |
196 | 201 | ||
197 | await testVideoResolutions({ | 202 | await testLiveVideoResolutions({ |
198 | originServer: servers[0], | 203 | originServer: servers[0], |
204 | sqlCommand: sqlCommandServer1, | ||
199 | servers, | 205 | servers, |
200 | liveVideoId: videoUUIDPermanent, | 206 | liveVideoId: videoUUIDPermanent, |
201 | resolutions, | 207 | resolutions, |
@@ -266,8 +272,9 @@ describe('Object storage for lives', function () { | |||
266 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDPermanent }) | 272 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: videoUUIDPermanent }) |
267 | await waitUntilLivePublishedOnAllServers(servers, videoUUIDPermanent) | 273 | await waitUntilLivePublishedOnAllServers(servers, videoUUIDPermanent) |
268 | 274 | ||
269 | await testVideoResolutions({ | 275 | await testLiveVideoResolutions({ |
270 | originServer: servers[0], | 276 | originServer: servers[0], |
277 | sqlCommand: sqlCommandServer1, | ||
271 | servers, | 278 | servers, |
272 | liveVideoId: videoUUIDPermanent, | 279 | liveVideoId: videoUUIDPermanent, |
273 | resolutions: [ 720 ], | 280 | resolutions: [ 720 ], |
@@ -281,6 +288,8 @@ describe('Object storage for lives', function () { | |||
281 | }) | 288 | }) |
282 | 289 | ||
283 | after(async function () { | 290 | after(async function () { |
291 | await sqlCommandServer1.cleanup() | ||
292 | |||
284 | await killallServers(servers) | 293 | await killallServers(servers) |
285 | }) | 294 | }) |
286 | }) | 295 | }) |
diff --git a/server/tests/api/object-storage/video-static-file-privacy.ts b/server/tests/api/object-storage/video-static-file-privacy.ts index 930c88543..af9d681b2 100644 --- a/server/tests/api/object-storage/video-static-file-privacy.ts +++ b/server/tests/api/object-storage/video-static-file-privacy.ts | |||
@@ -2,7 +2,7 @@ | |||
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { basename } from 'path' | 4 | import { basename } from 'path' |
5 | import { checkVideoFileTokenReinjection, expectStartWith } from '@server/tests/shared' | 5 | import { checkVideoFileTokenReinjection, expectStartWith, SQLCommand } from '@server/tests/shared' |
6 | import { areScalewayObjectStorageTestsDisabled, getAllFiles, getHLS } from '@shared/core-utils' | 6 | import { areScalewayObjectStorageTestsDisabled, getAllFiles, getHLS } from '@shared/core-utils' |
7 | import { HttpStatusCode, LiveVideo, VideoDetails, VideoPrivacy } from '@shared/models' | 7 | import { HttpStatusCode, LiveVideo, VideoDetails, VideoPrivacy } from '@shared/models' |
8 | import { | 8 | import { |
@@ -30,6 +30,7 @@ describe('Object storage for video static file privacy', function () { | |||
30 | if (areScalewayObjectStorageTestsDisabled()) return | 30 | if (areScalewayObjectStorageTestsDisabled()) return |
31 | 31 | ||
32 | let server: PeerTubeServer | 32 | let server: PeerTubeServer |
33 | let sqlCommand: SQLCommand | ||
33 | let userToken: string | 34 | let userToken: string |
34 | 35 | ||
35 | // --------------------------------------------------------------------------- | 36 | // --------------------------------------------------------------------------- |
@@ -44,7 +45,7 @@ describe('Object storage for video static file privacy', function () { | |||
44 | } | 45 | } |
45 | 46 | ||
46 | for (const file of getAllFiles(video)) { | 47 | for (const file of getAllFiles(video)) { |
47 | const internalFileUrl = await server.sql.getInternalFileUrl(file.id) | 48 | const internalFileUrl = await sqlCommand.getInternalFileUrl(file.id) |
48 | expectStartWith(internalFileUrl, ObjectStorageCommand.getScalewayBaseUrl()) | 49 | expectStartWith(internalFileUrl, ObjectStorageCommand.getScalewayBaseUrl()) |
49 | await makeRawRequest({ url: internalFileUrl, token: server.accessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | 50 | await makeRawRequest({ url: internalFileUrl, token: server.accessToken, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) |
50 | } | 51 | } |
@@ -99,6 +100,8 @@ describe('Object storage for video static file privacy', function () { | |||
99 | await server.config.enableMinimumTranscoding() | 100 | await server.config.enableMinimumTranscoding() |
100 | 101 | ||
101 | userToken = await server.users.generateUserAndToken('user1') | 102 | userToken = await server.users.generateUserAndToken('user1') |
103 | |||
104 | sqlCommand = new SQLCommand(server) | ||
102 | }) | 105 | }) |
103 | 106 | ||
104 | describe('VOD', function () { | 107 | describe('VOD', function () { |
@@ -439,6 +442,7 @@ describe('Object storage for video static file privacy', function () { | |||
439 | await server.servers.waitUntilLog('Removed files of video ' + v.url) | 442 | await server.servers.waitUntilLog('Removed files of video ' + v.url) |
440 | } | 443 | } |
441 | 444 | ||
445 | await sqlCommand.cleanup() | ||
442 | await cleanupTests([ server ]) | 446 | await cleanupTests([ server ]) |
443 | }) | 447 | }) |
444 | }) | 448 | }) |
diff --git a/server/tests/api/object-storage/videos.ts b/server/tests/api/object-storage/videos.ts index 6aaf32c34..e90753d09 100644 --- a/server/tests/api/object-storage/videos.ts +++ b/server/tests/api/object-storage/videos.ts | |||
@@ -6,12 +6,15 @@ import { stat } from 'fs-extra' | |||
6 | import { merge } from 'lodash' | 6 | import { merge } from 'lodash' |
7 | import { | 7 | import { |
8 | checkTmpIsEmpty, | 8 | checkTmpIsEmpty, |
9 | checkWebTorrentWorks, | ||
9 | expectLogDoesNotContain, | 10 | expectLogDoesNotContain, |
10 | expectStartWith, | 11 | expectStartWith, |
11 | generateHighBitrateVideo, | 12 | generateHighBitrateVideo, |
12 | MockObjectStorageProxy | 13 | MockObjectStorageProxy, |
14 | SQLCommand | ||
13 | } from '@server/tests/shared' | 15 | } from '@server/tests/shared' |
14 | import { areMockObjectStorageTestsDisabled } from '@shared/core-utils' | 16 | import { areMockObjectStorageTestsDisabled } from '@shared/core-utils' |
17 | import { sha1 } from '@shared/extra-utils' | ||
15 | import { HttpStatusCode, VideoDetails } from '@shared/models' | 18 | import { HttpStatusCode, VideoDetails } from '@shared/models' |
16 | import { | 19 | import { |
17 | cleanupTests, | 20 | cleanupTests, |
@@ -23,14 +26,13 @@ import { | |||
23 | ObjectStorageCommand, | 26 | ObjectStorageCommand, |
24 | PeerTubeServer, | 27 | PeerTubeServer, |
25 | setAccessTokensToServers, | 28 | setAccessTokensToServers, |
26 | waitJobs, | 29 | waitJobs |
27 | webtorrentAdd | ||
28 | } from '@shared/server-commands' | 30 | } from '@shared/server-commands' |
29 | import { sha1 } from '@shared/extra-utils' | ||
30 | 31 | ||
31 | async function checkFiles (options: { | 32 | async function checkFiles (options: { |
32 | server: PeerTubeServer | 33 | server: PeerTubeServer |
33 | originServer: PeerTubeServer | 34 | originServer: PeerTubeServer |
35 | originSQLCommand: SQLCommand | ||
34 | 36 | ||
35 | video: VideoDetails | 37 | video: VideoDetails |
36 | 38 | ||
@@ -45,6 +47,7 @@ async function checkFiles (options: { | |||
45 | const { | 47 | const { |
46 | server, | 48 | server, |
47 | originServer, | 49 | originServer, |
50 | originSQLCommand, | ||
48 | video, | 51 | video, |
49 | playlistBucket, | 52 | playlistBucket, |
50 | webtorrentBucket, | 53 | webtorrentBucket, |
@@ -104,7 +107,7 @@ async function checkFiles (options: { | |||
104 | 107 | ||
105 | if (originServer.internalServerNumber === server.internalServerNumber) { | 108 | if (originServer.internalServerNumber === server.internalServerNumber) { |
106 | const infohash = sha1(`${2 + hls.playlistUrl}+V${i}`) | 109 | const infohash = sha1(`${2 + hls.playlistUrl}+V${i}`) |
107 | const dbInfohashes = await originServer.sql.getPlaylistInfohash(hls.id) | 110 | const dbInfohashes = await originSQLCommand.getPlaylistInfohash(hls.id) |
108 | 111 | ||
109 | expect(dbInfohashes).to.include(infohash) | 112 | expect(dbInfohashes).to.include(infohash) |
110 | } | 113 | } |
@@ -114,11 +117,7 @@ async function checkFiles (options: { | |||
114 | } | 117 | } |
115 | 118 | ||
116 | for (const file of allFiles) { | 119 | for (const file of allFiles) { |
117 | const torrent = await webtorrentAdd(file.magnetUri, true) | 120 | await checkWebTorrentWorks(file.magnetUri) |
118 | |||
119 | expect(torrent.files).to.be.an('array') | ||
120 | expect(torrent.files.length).to.equal(1) | ||
121 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
122 | 121 | ||
123 | const res = await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 }) | 122 | const res = await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 }) |
124 | expect(res.body).to.have.length.above(100) | 123 | expect(res.body).to.have.length.above(100) |
@@ -145,6 +144,7 @@ function runTestSuite (options: { | |||
145 | let baseMockUrl: string | 144 | let baseMockUrl: string |
146 | 145 | ||
147 | let servers: PeerTubeServer[] | 146 | let servers: PeerTubeServer[] |
147 | let sqlCommands: SQLCommand[] | ||
148 | 148 | ||
149 | let keptUrls: string[] = [] | 149 | let keptUrls: string[] = [] |
150 | 150 | ||
@@ -202,6 +202,8 @@ function runTestSuite (options: { | |||
202 | const files = await server.videos.listFiles({ id: uuid }) | 202 | const files = await server.videos.listFiles({ id: uuid }) |
203 | keptUrls = keptUrls.concat(files.map(f => f.fileUrl)) | 203 | keptUrls = keptUrls.concat(files.map(f => f.fileUrl)) |
204 | } | 204 | } |
205 | |||
206 | sqlCommands = servers.map(s => new SQLCommand(s)) | ||
205 | }) | 207 | }) |
206 | 208 | ||
207 | it('Should upload a video and move it to the object storage without transcoding', async function () { | 209 | it('Should upload a video and move it to the object storage without transcoding', async function () { |
@@ -214,7 +216,7 @@ function runTestSuite (options: { | |||
214 | 216 | ||
215 | for (const server of servers) { | 217 | for (const server of servers) { |
216 | const video = await server.videos.get({ id: uuid }) | 218 | const video = await server.videos.get({ id: uuid }) |
217 | const files = await checkFiles({ ...options, server, originServer: servers[0], video, baseMockUrl }) | 219 | const files = await checkFiles({ ...options, server, originServer: servers[0], originSQLCommand: sqlCommands[0], video, baseMockUrl }) |
218 | 220 | ||
219 | deletedUrls = deletedUrls.concat(files) | 221 | deletedUrls = deletedUrls.concat(files) |
220 | } | 222 | } |
@@ -230,7 +232,7 @@ function runTestSuite (options: { | |||
230 | 232 | ||
231 | for (const server of servers) { | 233 | for (const server of servers) { |
232 | const video = await server.videos.get({ id: uuid }) | 234 | const video = await server.videos.get({ id: uuid }) |
233 | const files = await checkFiles({ ...options, server, originServer: servers[0], video, baseMockUrl }) | 235 | const files = await checkFiles({ ...options, server, originServer: servers[0], originSQLCommand: sqlCommands[0], video, baseMockUrl }) |
234 | 236 | ||
235 | deletedUrls = deletedUrls.concat(files) | 237 | deletedUrls = deletedUrls.concat(files) |
236 | } | 238 | } |
@@ -274,6 +276,10 @@ function runTestSuite (options: { | |||
274 | after(async function () { | 276 | after(async function () { |
275 | await mockObjectStorageProxy.terminate() | 277 | await mockObjectStorageProxy.terminate() |
276 | 278 | ||
279 | for (const sqlCommand of sqlCommands) { | ||
280 | await sqlCommand.cleanup() | ||
281 | } | ||
282 | |||
277 | await cleanupTests(servers) | 283 | await cleanupTests(servers) |
278 | }) | 284 | }) |
279 | } | 285 | } |
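The checkWebTorrentWorks helper now replaces the torrent assertions that used to be inlined in checkFiles(). A plausible sketch of what it does, reconstructed from the removed lines (the webtorrentAdd call and its signature come from the old code; the helper's real location and extra options may differ):

    import { expect } from 'chai'
    import { webtorrentAdd } from '@shared/server-commands'

    // Assumed equivalent of the assertions previously inlined in checkFiles()
    export async function checkWebTorrentWorks (magnetUri: string) {
      const torrent = await webtorrentAdd(magnetUri, true)

      expect(torrent.files).to.be.an('array')
      expect(torrent.files.length).to.equal(1)
      expect(torrent.files[0].path).to.exist.and.to.not.equal('')
    }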
diff --git a/server/tests/api/runners/index.ts b/server/tests/api/runners/index.ts new file mode 100644 index 000000000..7f33ec8dd --- /dev/null +++ b/server/tests/api/runners/index.ts | |||
@@ -0,0 +1,4 @@ | |||
1 | export * from './runner-common' | ||
2 | export * from './runner-live-transcoding' | ||
3 | export * from './runner-socket' | ||
4 | export * from './runner-vod-transcoding' | ||
diff --git a/server/tests/api/runners/runner-common.ts b/server/tests/api/runners/runner-common.ts new file mode 100644 index 000000000..a2204753b --- /dev/null +++ b/server/tests/api/runners/runner-common.ts | |||
@@ -0,0 +1,662 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import { expect } from 'chai' | ||
4 | import { wait } from '@shared/core-utils' | ||
5 | import { HttpStatusCode, Runner, RunnerJob, RunnerJobAdmin, RunnerJobState, RunnerRegistrationToken } from '@shared/models' | ||
6 | import { | ||
7 | cleanupTests, | ||
8 | createSingleServer, | ||
9 | makePostBodyRequest, | ||
10 | PeerTubeServer, | ||
11 | setAccessTokensToServers, | ||
12 | setDefaultVideoChannel, | ||
13 | waitJobs | ||
14 | } from '@shared/server-commands' | ||
15 | |||
16 | describe('Test runner common actions', function () { | ||
17 | let server: PeerTubeServer | ||
18 | let registrationToken: string | ||
19 | let runnerToken: string | ||
20 | let jobMaxPriority: string | ||
21 | |||
22 | before(async function () { | ||
23 | this.timeout(120_000) | ||
24 | |||
25 | server = await createSingleServer(1, { | ||
26 | remote_runners: { | ||
27 | stalled_jobs: { | ||
28 | vod: '5 seconds' | ||
29 | } | ||
30 | } | ||
31 | }) | ||
32 | |||
33 | await setAccessTokensToServers([ server ]) | ||
34 | await setDefaultVideoChannel([ server ]) | ||
35 | |||
36 | await server.config.enableTranscoding(true, true) | ||
37 | await server.config.enableRemoteTranscoding() | ||
38 | }) | ||
39 | |||
40 | describe('Managing runner registration tokens', function () { | ||
41 | let base: RunnerRegistrationToken[] | ||
42 | let registrationTokenToDelete: RunnerRegistrationToken | ||
43 | |||
44 | it('Should have a default registration token', async function () { | ||
45 | const { total, data } = await server.runnerRegistrationTokens.list() | ||
46 | |||
47 | expect(total).to.equal(1) | ||
48 | expect(data).to.have.lengthOf(1) | ||
49 | |||
50 | const token = data[0] | ||
51 | expect(token.id).to.exist | ||
52 | expect(token.createdAt).to.exist | ||
53 | expect(token.updatedAt).to.exist | ||
54 | expect(token.registeredRunnersCount).to.equal(0) | ||
55 | expect(token.registrationToken).to.exist | ||
56 | }) | ||
57 | |||
58 | it('Should create other registration tokens', async function () { | ||
59 | await server.runnerRegistrationTokens.generate() | ||
60 | await server.runnerRegistrationTokens.generate() | ||
61 | |||
62 | const { total, data } = await server.runnerRegistrationTokens.list() | ||
63 | expect(total).to.equal(3) | ||
64 | expect(data).to.have.lengthOf(3) | ||
65 | }) | ||
66 | |||
67 | it('Should list registration tokens', async function () { | ||
68 | { | ||
69 | const { total, data } = await server.runnerRegistrationTokens.list({ sort: 'createdAt' }) | ||
70 | expect(total).to.equal(3) | ||
71 | expect(data).to.have.lengthOf(3) | ||
72 | expect(new Date(data[0].createdAt)).to.be.below(new Date(data[1].createdAt)) | ||
73 | expect(new Date(data[1].createdAt)).to.be.below(new Date(data[2].createdAt)) | ||
74 | |||
75 | base = data | ||
76 | |||
77 | registrationTokenToDelete = data[0] | ||
78 | registrationToken = data[1].registrationToken | ||
79 | } | ||
80 | |||
81 | { | ||
82 | const { total, data } = await server.runnerRegistrationTokens.list({ sort: '-createdAt', start: 2, count: 1 }) | ||
83 | expect(total).to.equal(3) | ||
84 | expect(data).to.have.lengthOf(1) | ||
85 | expect(data[0].registrationToken).to.equal(base[0].registrationToken) | ||
86 | } | ||
87 | }) | ||
88 | |||
89 | it('Should have appropriate registeredRunnersCount for registration tokens', async function () { | ||
90 | await server.runners.register({ name: 'to delete 1', registrationToken: registrationTokenToDelete.registrationToken }) | ||
91 | await server.runners.register({ name: 'to delete 2', registrationToken: registrationTokenToDelete.registrationToken }) | ||
92 | |||
93 | const { data } = await server.runnerRegistrationTokens.list() | ||
94 | |||
95 | for (const d of data) { | ||
96 | if (d.registrationToken === registrationTokenToDelete.registrationToken) { | ||
97 | expect(d.registeredRunnersCount).to.equal(2) | ||
98 | } else { | ||
99 | expect(d.registeredRunnersCount).to.equal(0) | ||
100 | } | ||
101 | } | ||
102 | |||
103 | const { data: runners } = await server.runners.list() | ||
104 | expect(runners).to.have.lengthOf(2) | ||
105 | }) | ||
106 | |||
107 | it('Should delete a registration token', async function () { | ||
108 | await server.runnerRegistrationTokens.delete({ id: registrationTokenToDelete.id }) | ||
109 | |||
110 | const { total, data } = await server.runnerRegistrationTokens.list({ sort: 'createdAt' }) | ||
111 | expect(total).to.equal(2) | ||
112 | expect(data).to.have.lengthOf(2) | ||
113 | |||
114 | for (const d of data) { | ||
115 | expect(d.registeredRunnersCount).to.equal(0) | ||
116 | expect(d.registrationToken).to.not.equal(registrationTokenToDelete.registrationToken) | ||
117 | } | ||
118 | }) | ||
119 | |||
120 | it('Should have removed runners of this registration token', async function () { | ||
121 | const { data: runners } = await server.runners.list() | ||
122 | expect(runners).to.have.lengthOf(0) | ||
123 | }) | ||
124 | }) | ||
125 | |||
126 | describe('Managing runners', function () { | ||
127 | let toDelete: Runner | ||
128 | |||
129 | it('Should not have runners available', async function () { | ||
130 | const { total, data } = await server.runners.list() | ||
131 | |||
132 | expect(data).to.have.lengthOf(0) | ||
133 | expect(total).to.equal(0) | ||
134 | }) | ||
135 | |||
136 | it('Should register runners', async function () { | ||
137 | const now = new Date() | ||
138 | |||
139 | const result = await server.runners.register({ | ||
140 | name: 'runner 1', | ||
141 | description: 'my super runner 1', | ||
142 | registrationToken | ||
143 | }) | ||
144 | expect(result.runnerToken).to.exist | ||
145 | runnerToken = result.runnerToken | ||
146 | |||
147 | await server.runners.register({ | ||
148 | name: 'runner 2', | ||
149 | registrationToken | ||
150 | }) | ||
151 | |||
152 | const { total, data } = await server.runners.list({ sort: 'createdAt' }) | ||
153 | expect(total).to.equal(2) | ||
154 | expect(data).to.have.lengthOf(2) | ||
155 | |||
156 | for (const d of data) { | ||
157 | expect(d.id).to.exist | ||
158 | expect(d.createdAt).to.exist | ||
159 | expect(d.updatedAt).to.exist | ||
160 | expect(new Date(d.createdAt)).to.be.above(now) | ||
161 | expect(new Date(d.updatedAt)).to.be.above(now) | ||
162 | expect(new Date(d.lastContact)).to.be.above(now) | ||
163 | expect(d.ip).to.exist | ||
164 | } | ||
165 | |||
166 | expect(data[0].name).to.equal('runner 1') | ||
167 | expect(data[0].description).to.equal('my super runner 1') | ||
168 | |||
169 | expect(data[1].name).to.equal('runner 2') | ||
170 | expect(data[1].description).to.be.null | ||
171 | |||
172 | toDelete = data[1] | ||
173 | }) | ||
174 | |||
175 | it('Should list runners', async function () { | ||
176 | const { total, data } = await server.runners.list({ sort: '-createdAt', start: 1, count: 1 }) | ||
177 | |||
178 | expect(total).to.equal(2) | ||
179 | expect(data).to.have.lengthOf(1) | ||
180 | expect(data[0].name).to.equal('runner 1') | ||
181 | }) | ||
182 | |||
183 | it('Should delete a runner', async function () { | ||
184 | await server.runners.delete({ id: toDelete.id }) | ||
185 | |||
186 | const { total, data } = await server.runners.list() | ||
187 | |||
188 | expect(total).to.equal(1) | ||
189 | expect(data).to.have.lengthOf(1) | ||
190 | expect(data[0].name).to.equal('runner 1') | ||
191 | }) | ||
192 | |||
193 | it('Should unregister a runner', async function () { | ||
194 | const registered = await server.runners.autoRegisterRunner() | ||
195 | |||
196 | { | ||
197 | const { total, data } = await server.runners.list() | ||
198 | expect(total).to.equal(2) | ||
199 | expect(data).to.have.lengthOf(2) | ||
200 | } | ||
201 | |||
202 | await server.runners.unregister({ runnerToken: registered }) | ||
203 | |||
204 | { | ||
205 | const { total, data } = await server.runners.list() | ||
206 | expect(total).to.equal(1) | ||
207 | expect(data).to.have.lengthOf(1) | ||
208 | expect(data[0].name).to.equal('runner 1') | ||
209 | } | ||
210 | }) | ||
211 | }) | ||
212 | |||
213 | describe('Managing runner jobs', function () { | ||
214 | let jobUUID: string | ||
215 | let jobToken: string | ||
216 | let lastRunnerContact: Date | ||
217 | let failedJob: RunnerJob | ||
218 | |||
219 | async function checkMainJobState ( | ||
220 | mainJobState: RunnerJobState, | ||
221 | otherJobStates: RunnerJobState[] = [ RunnerJobState.PENDING, RunnerJobState.WAITING_FOR_PARENT_JOB ] | ||
222 | ) { | ||
223 | const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' }) | ||
224 | |||
225 | for (const job of data) { | ||
226 | if (job.uuid === jobUUID) { | ||
227 | expect(job.state.id).to.equal(mainJobState) | ||
228 | } else { | ||
229 | expect(otherJobStates).to.include(job.state.id) | ||
230 | } | ||
231 | } | ||
232 | } | ||
233 | |||
234 | function getMainJob () { | ||
235 | return server.runnerJobs.getJob({ uuid: jobUUID }) | ||
236 | } | ||
237 | |||
238 | describe('List jobs', function () { | ||
239 | |||
240 | it('Should not have jobs', async function () { | ||
241 | const { total, data } = await server.runnerJobs.list() | ||
242 | |||
243 | expect(data).to.have.lengthOf(0) | ||
244 | expect(total).to.equal(0) | ||
245 | }) | ||
246 | |||
247 | it('Should upload a video and have available jobs', async function () { | ||
248 | await server.videos.quickUpload({ name: 'to transcode' }) | ||
249 | await waitJobs([ server ]) | ||
250 | |||
251 | const { total, data } = await server.runnerJobs.list() | ||
252 | |||
253 | expect(data).to.have.lengthOf(10) | ||
254 | expect(total).to.equal(10) | ||
255 | |||
256 | for (const job of data) { | ||
257 | expect(job.startedAt).to.not.exist | ||
258 | expect(job.finishedAt).to.not.exist | ||
259 | expect(job.payload).to.exist | ||
260 | expect(job.privatePayload).to.exist | ||
261 | } | ||
262 | |||
263 | const hlsJobs = data.filter(d => d.type === 'vod-hls-transcoding') | ||
264 | const webVideoJobs = data.filter(d => d.type === 'vod-web-video-transcoding') | ||
265 | |||
266 | expect(hlsJobs).to.have.lengthOf(5) | ||
267 | expect(webVideoJobs).to.have.lengthOf(5) | ||
268 | |||
269 | const pendingJobs = data.filter(d => d.state.id === RunnerJobState.PENDING) | ||
270 | const waitingJobs = data.filter(d => d.state.id === RunnerJobState.WAITING_FOR_PARENT_JOB) | ||
271 | |||
272 | expect(pendingJobs).to.have.lengthOf(1) | ||
273 | expect(waitingJobs).to.have.lengthOf(9) | ||
274 | }) | ||
275 | |||
276 | it('Should upload another video and list/sort jobs', async function () { | ||
277 | await server.videos.quickUpload({ name: 'to transcode 2' }) | ||
278 | await waitJobs([ server ]) | ||
279 | |||
280 | { | ||
281 | const { total, data } = await server.runnerJobs.list({ start: 0, count: 30 }) | ||
282 | |||
283 | expect(data).to.have.lengthOf(20) | ||
284 | expect(total).to.equal(20) | ||
285 | |||
286 | jobUUID = data[16].uuid | ||
287 | } | ||
288 | |||
289 | { | ||
290 | const { total, data } = await server.runnerJobs.list({ start: 3, count: 1, sort: 'createdAt' }) | ||
291 | expect(total).to.equal(20) | ||
292 | |||
293 | expect(data).to.have.lengthOf(1) | ||
294 | expect(data[0].uuid).to.equal(jobUUID) | ||
295 | } | ||
296 | |||
297 | { | ||
298 | let previousPriority = Infinity | ||
299 | const { total, data } = await server.runnerJobs.list({ start: 0, count: 100, sort: '-priority' }) | ||
300 | expect(total).to.equal(20) | ||
301 | |||
302 | for (const job of data) { | ||
303 | expect(job.priority).to.be.at.most(previousPriority) | ||
304 | previousPriority = job.priority | ||
305 | |||
306 | if (job.state.id === RunnerJobState.PENDING) { | ||
307 | jobMaxPriority = job.uuid | ||
308 | } | ||
309 | } | ||
310 | } | ||
311 | }) | ||
312 | |||
313 | it('Should search jobs', async function () { | ||
314 | { | ||
315 | const { total, data } = await server.runnerJobs.list({ search: jobUUID }) | ||
316 | |||
317 | expect(data).to.have.lengthOf(1) | ||
318 | expect(total).to.equal(1) | ||
319 | |||
320 | expect(data[0].uuid).to.equal(jobUUID) | ||
321 | } | ||
322 | |||
323 | { | ||
324 | const { total, data } = await server.runnerJobs.list({ search: 'toto' }) | ||
325 | |||
326 | expect(data).to.have.lengthOf(0) | ||
327 | expect(total).to.equal(0) | ||
328 | } | ||
329 | |||
330 | { | ||
331 | const { total, data } = await server.runnerJobs.list({ search: 'hls' }) | ||
332 | |||
333 | expect(data).to.not.have.lengthOf(0) | ||
334 | expect(total).to.not.equal(0) | ||
335 | } | ||
336 | }) | ||
337 | }) | ||
338 | |||
339 | describe('Accept/update/abort/process a job', function () { | ||
340 | |||
341 | it('Should request available jobs', async function () { | ||
342 | lastRunnerContact = new Date() | ||
343 | |||
344 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
345 | |||
346 | // Only the optimize (initial web video transcoding) jobs are available; the others wait for their parent job | ||
347 | expect(availableJobs).to.have.lengthOf(2) | ||
348 | |||
349 | for (const job of availableJobs) { | ||
350 | expect(job.uuid).to.exist | ||
351 | expect(job.payload.input).to.exist | ||
352 | expect(job.payload.output).to.exist | ||
353 | |||
354 | expect((job as RunnerJobAdmin).privatePayload).to.not.exist | ||
355 | } | ||
356 | |||
357 | const hlsJobs = availableJobs.filter(d => d.type === 'vod-hls-transcoding') | ||
358 | const webVideoJobs = availableJobs.filter(d => d.type === 'vod-web-video-transcoding') | ||
359 | |||
360 | expect(hlsJobs).to.have.lengthOf(0) | ||
361 | expect(webVideoJobs).to.have.lengthOf(2) | ||
362 | |||
363 | jobUUID = webVideoJobs[0].uuid | ||
364 | }) | ||
365 | |||
366 | it('Should have sorted available jobs by priority', async function () { | ||
367 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
368 | |||
369 | expect(availableJobs[0].uuid).to.equal(jobMaxPriority) | ||
370 | }) | ||
371 | |||
372 | it('Should have last runner contact updated', async function () { | ||
373 | await wait(1000) | ||
374 | |||
375 | const { data } = await server.runners.list({ sort: 'createdAt' }) | ||
376 | expect(new Date(data[0].lastContact)).to.be.above(lastRunnerContact) | ||
377 | }) | ||
378 | |||
379 | it('Should accept a job', async function () { | ||
380 | const startedAt = new Date() | ||
381 | |||
382 | const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID }) | ||
383 | jobToken = job.jobToken | ||
384 | |||
385 | const checkProcessingJob = (job: RunnerJob & { jobToken?: string }, fromAccept: boolean) => { | ||
386 | expect(job.uuid).to.equal(jobUUID) | ||
387 | |||
388 | expect(job.type).to.equal('vod-web-video-transcoding') | ||
389 | expect(job.state.label).to.equal('Processing') | ||
390 | expect(job.state.id).to.equal(RunnerJobState.PROCESSING) | ||
391 | |||
392 | expect(job.runner).to.exist | ||
393 | expect(job.runner.name).to.equal('runner 1') | ||
394 | expect(job.runner.description).to.equal('my super runner 1') | ||
395 | |||
396 | expect(job.progress).to.be.null | ||
397 | |||
398 | expect(job.startedAt).to.exist | ||
399 | expect(new Date(job.startedAt)).to.be.above(startedAt) | ||
400 | |||
401 | expect(job.finishedAt).to.not.exist | ||
402 | |||
403 | expect(job.failures).to.equal(0) | ||
404 | |||
405 | expect(job.payload).to.exist | ||
406 | |||
407 | if (fromAccept) { | ||
408 | expect(job.jobToken).to.exist | ||
409 | expect((job as RunnerJobAdmin).privatePayload).to.not.exist | ||
410 | } else { | ||
411 | expect(job.jobToken).to.not.exist | ||
412 | expect((job as RunnerJobAdmin).privatePayload).to.exist | ||
413 | } | ||
414 | } | ||
415 | |||
416 | checkProcessingJob(job, true) | ||
417 | |||
418 | const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' }) | ||
419 | |||
420 | const processingJob = data.find(j => j.uuid === jobUUID) | ||
421 | checkProcessingJob(processingJob, false) | ||
422 | |||
423 | await checkMainJobState(RunnerJobState.PROCESSING) | ||
424 | }) | ||
425 | |||
426 | it('Should update a job', async function () { | ||
427 | await server.runnerJobs.update({ runnerToken, jobUUID, jobToken, progress: 53 }) | ||
428 | |||
429 | const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' }) | ||
430 | |||
431 | for (const job of data) { | ||
432 | if (job.state.id === RunnerJobState.PROCESSING) { | ||
433 | expect(job.progress).to.equal(53) | ||
434 | } else { | ||
435 | expect(job.progress).to.be.null | ||
436 | } | ||
437 | } | ||
438 | }) | ||
439 | |||
440 | it('Should abort a job', async function () { | ||
441 | await server.runnerJobs.abort({ runnerToken, jobUUID, jobToken, reason: 'for tests' }) | ||
442 | |||
443 | await checkMainJobState(RunnerJobState.PENDING) | ||
444 | |||
445 | const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' }) | ||
446 | for (const job of data) { | ||
447 | expect(job.progress).to.be.null | ||
448 | } | ||
449 | }) | ||
450 | |||
451 | it('Should accept the same job again and post a success', async function () { | ||
452 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
453 | expect(availableJobs.find(j => j.uuid === jobUUID)).to.exist | ||
454 | |||
455 | const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID }) | ||
456 | jobToken = job.jobToken | ||
457 | |||
458 | await checkMainJobState(RunnerJobState.PROCESSING) | ||
459 | |||
460 | const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' }) | ||
461 | |||
462 | for (const job of data) { | ||
463 | expect(job.progress).to.be.null | ||
464 | } | ||
465 | |||
466 | const payload = { | ||
467 | videoFile: 'video_short.mp4' | ||
468 | } | ||
469 | |||
470 | await server.runnerJobs.success({ runnerToken, jobUUID, jobToken, payload }) | ||
471 | }) | ||
472 | |||
473 | it('Should not have available jobs anymore', async function () { | ||
474 | await checkMainJobState(RunnerJobState.COMPLETED) | ||
475 | |||
476 | const job = await getMainJob() | ||
477 | expect(job.finishedAt).to.exist | ||
478 | |||
479 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
480 | expect(availableJobs.find(j => j.uuid === jobUUID)).to.not.exist | ||
481 | }) | ||
482 | }) | ||
483 | |||
484 | describe('Error job', function () { | ||
485 | |||
486 | it('Should accept another job and post an error', async function () { | ||
487 | await server.runnerJobs.cancelAllJobs() | ||
488 | await server.videos.quickUpload({ name: 'video' }) | ||
489 | await waitJobs([ server ]) | ||
490 | |||
491 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
492 | jobUUID = availableJobs[0].uuid | ||
493 | |||
494 | const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID }) | ||
495 | jobToken = job.jobToken | ||
496 | |||
497 | await server.runnerJobs.error({ runnerToken, jobUUID, jobToken, message: 'Error' }) | ||
498 | }) | ||
499 | |||
500 | it('Should have job failures increased', async function () { | ||
501 | const job = await getMainJob() | ||
502 | expect(job.state.id).to.equal(RunnerJobState.PENDING) | ||
503 | expect(job.failures).to.equal(1) | ||
504 | expect(job.error).to.be.null | ||
505 | expect(job.progress).to.be.null | ||
506 | expect(job.finishedAt).to.not.exist | ||
507 | }) | ||
508 | |||
509 | it('Should error a job when the number of failed attempts is too high', async function () { | ||
510 | for (let i = 0; i < 4; i++) { | ||
511 | const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID }) | ||
512 | jobToken = job.jobToken | ||
513 | |||
514 | await server.runnerJobs.error({ runnerToken, jobUUID, jobToken, message: 'Error ' + i }) | ||
515 | } | ||
516 | |||
517 | const job = await getMainJob() | ||
518 | expect(job.failures).to.equal(5) | ||
519 | expect(job.state.id).to.equal(RunnerJobState.ERRORED) | ||
520 | expect(job.state.label).to.equal('Errored') | ||
521 | expect(job.error).to.equal('Error 3') | ||
522 | expect(job.progress).to.be.null | ||
523 | expect(job.finishedAt).to.exist | ||
524 | |||
525 | failedJob = job | ||
526 | }) | ||
527 | |||
528 | it('Should have failed children jobs too', async function () { | ||
529 | const { data } = await server.runnerJobs.list({ count: 50, sort: '-updatedAt' }) | ||
530 | |||
531 | const children = data.filter(j => j.parent?.uuid === failedJob.uuid) | ||
532 | expect(children).to.have.lengthOf(9) | ||
533 | |||
534 | for (const child of children) { | ||
535 | expect(child.parent.uuid).to.equal(failedJob.uuid) | ||
536 | expect(child.parent.type).to.equal(failedJob.type) | ||
537 | expect(child.parent.state.id).to.equal(failedJob.state.id) | ||
538 | expect(child.parent.state.label).to.equal(failedJob.state.label) | ||
539 | |||
540 | expect(child.state.id).to.equal(RunnerJobState.PARENT_ERRORED) | ||
541 | expect(child.state.label).to.equal('Parent job failed') | ||
542 | } | ||
543 | }) | ||
544 | }) | ||
545 | |||
546 | describe('Cancel', function () { | ||
547 | |||
548 | it('Should cancel a pending job', async function () { | ||
549 | await server.videos.quickUpload({ name: 'video' }) | ||
550 | await waitJobs([ server ]) | ||
551 | |||
552 | { | ||
553 | const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' }) | ||
554 | |||
555 | const pendingJob = data.find(j => j.state.id === RunnerJobState.PENDING) | ||
556 | jobUUID = pendingJob.uuid | ||
557 | |||
558 | await server.runnerJobs.cancelByAdmin({ jobUUID }) | ||
559 | } | ||
560 | |||
561 | { | ||
562 | const job = await getMainJob() | ||
563 | expect(job.state.id).to.equal(RunnerJobState.CANCELLED) | ||
564 | expect(job.state.label).to.equal('Cancelled') | ||
565 | } | ||
566 | |||
567 | { | ||
568 | const { data } = await server.runnerJobs.list({ count: 10, sort: '-updatedAt' }) | ||
569 | const children = data.filter(j => j.parent?.uuid === jobUUID) | ||
570 | expect(children).to.have.lengthOf(9) | ||
571 | |||
572 | for (const child of children) { | ||
573 | expect(child.state.id).to.equal(RunnerJobState.PARENT_CANCELLED) | ||
574 | } | ||
575 | } | ||
576 | }) | ||
577 | |||
578 | it('Should cancel an already accepted job and skip success/error', async function () { | ||
579 | await server.videos.quickUpload({ name: 'video' }) | ||
580 | await waitJobs([ server ]) | ||
581 | |||
582 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
583 | jobUUID = availableJobs[0].uuid | ||
584 | |||
585 | const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID }) | ||
586 | jobToken = job.jobToken | ||
587 | |||
588 | await server.runnerJobs.cancelByAdmin({ jobUUID }) | ||
589 | |||
590 | await server.runnerJobs.abort({ runnerToken, jobUUID, jobToken, reason: 'aborted', expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
591 | }) | ||
592 | }) | ||
593 | |||
594 | describe('Stalled jobs', function () { | ||
595 | |||
596 | it('Should abort stalled jobs', async function () { | ||
597 | this.timeout(60000) | ||
598 | |||
599 | await server.videos.quickUpload({ name: 'video' }) | ||
600 | await server.videos.quickUpload({ name: 'video' }) | ||
601 | await waitJobs([ server ]) | ||
602 | |||
603 | const { job: job1 } = await server.runnerJobs.autoAccept({ runnerToken }) | ||
604 | const { job: stalledJob } = await server.runnerJobs.autoAccept({ runnerToken }) | ||
605 | |||
606 | for (let i = 0; i < 6; i++) { | ||
607 | await wait(2000) | ||
608 | |||
609 | await server.runnerJobs.update({ runnerToken, jobToken: job1.jobToken, jobUUID: job1.uuid }) | ||
610 | } | ||
611 | |||
612 | const refreshedJob1 = await server.runnerJobs.getJob({ uuid: job1.uuid }) | ||
613 | const refreshedStalledJob = await server.runnerJobs.getJob({ uuid: stalledJob.uuid }) | ||
614 | |||
615 | expect(refreshedJob1.state.id).to.equal(RunnerJobState.PROCESSING) | ||
616 | expect(refreshedStalledJob.state.id).to.equal(RunnerJobState.PENDING) | ||
617 | }) | ||
618 | }) | ||
619 | |||
620 | describe('Rate limit', function () { | ||
621 | |||
622 | before(async function () { | ||
623 | this.timeout(60000) | ||
624 | |||
625 | await server.kill() | ||
626 | |||
627 | await server.run({ | ||
628 | rates_limit: { | ||
629 | api: { | ||
630 | max: 10 | ||
631 | } | ||
632 | } | ||
633 | }) | ||
634 | }) | ||
635 | |||
636 | it('Should rate limit an unknown runner', async function () { | ||
637 | const path = '/api/v1/ping' | ||
638 | const fields = { runnerToken: 'toto' } | ||
639 | |||
640 | for (let i = 0; i < 20; i++) { | ||
641 | try { | ||
642 | await makePostBodyRequest({ url: server.url, path, fields, expectedStatus: HttpStatusCode.OK_200 }) | ||
643 | } catch {} | ||
644 | } | ||
645 | |||
646 | await makePostBodyRequest({ url: server.url, path, fields, expectedStatus: HttpStatusCode.TOO_MANY_REQUESTS_429 }) | ||
647 | }) | ||
648 | |||
649 | it('Should not rate limit a registered runner', async function () { | ||
650 | const path = '/api/v1/ping' | ||
651 | |||
652 | for (let i = 0; i < 20; i++) { | ||
653 | await makePostBodyRequest({ url: server.url, path, fields: { runnerToken }, expectedStatus: HttpStatusCode.OK_200 }) | ||
654 | } | ||
655 | }) | ||
656 | }) | ||
657 | }) | ||
658 | |||
659 | after(async function () { | ||
660 | await cleanupTests([ server ]) | ||
661 | }) | ||
662 | }) | ||
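For context, the retry behaviour exercised above reduces to an accept/report loop on the runner side. A minimal sketch, assuming a hypothetical transcode() step; the accept/success/error calls are the same test helpers used above:

import { PeerTubeServer } from '@shared/server-commands'

declare function transcode (payload: unknown): Promise<void> // hypothetical, not a PeerTube API

// Sketch only: the server increments `failures` on each reported error and moves the
// job to ERRORED once the attempt limit is reached (5 attempts in the tests above).
async function processOnce (server: PeerTubeServer, runnerToken: string, jobUUID: string) {
  const { job } = await server.runnerJobs.accept({ runnerToken, jobUUID })

  try {
    await transcode(job.payload)
    await server.runnerJobs.success({ runnerToken, jobUUID, jobToken: job.jobToken, payload: {} })
  } catch (err: any) {
    // The job goes back to PENDING until the failure limit is exceeded
    await server.runnerJobs.error({ runnerToken, jobUUID, jobToken: job.jobToken, message: err.message })
  }
}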
diff --git a/server/tests/api/runners/runner-live-transcoding.ts b/server/tests/api/runners/runner-live-transcoding.ts new file mode 100644 index 000000000..b11d54039 --- /dev/null +++ b/server/tests/api/runners/runner-live-transcoding.ts | |||
@@ -0,0 +1,330 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import { expect } from 'chai' | ||
4 | import { FfmpegCommand } from 'fluent-ffmpeg' | ||
5 | import { readFile } from 'fs-extra' | ||
6 | import { buildAbsoluteFixturePath, wait } from '@shared/core-utils' | ||
7 | import { | ||
8 | HttpStatusCode, | ||
9 | LiveRTMPHLSTranscodingUpdatePayload, | ||
10 | LiveVideo, | ||
11 | LiveVideoError, | ||
12 | RunnerJob, | ||
13 | RunnerJobLiveRTMPHLSTranscodingPayload, | ||
14 | Video, | ||
15 | VideoPrivacy, | ||
16 | VideoState | ||
17 | } from '@shared/models' | ||
18 | import { | ||
19 | cleanupTests, | ||
20 | createSingleServer, | ||
21 | makeRawRequest, | ||
22 | PeerTubeServer, | ||
23 | sendRTMPStream, | ||
24 | setAccessTokensToServers, | ||
25 | setDefaultVideoChannel, | ||
26 | stopFfmpeg, | ||
27 | testFfmpegStreamError, | ||
28 | waitJobs | ||
29 | } from '@shared/server-commands' | ||
30 | |||
31 | describe('Test runner live transcoding', function () { | ||
32 | let server: PeerTubeServer | ||
33 | let runnerToken: string | ||
34 | let baseUrl: string | ||
35 | |||
36 | before(async function () { | ||
37 | this.timeout(120_000) | ||
38 | |||
39 | server = await createSingleServer(1) | ||
40 | |||
41 | await setAccessTokensToServers([ server ]) | ||
42 | await setDefaultVideoChannel([ server ]) | ||
43 | |||
44 | await server.config.enableRemoteTranscoding() | ||
45 | await server.config.enableTranscoding() | ||
46 | runnerToken = await server.runners.autoRegisterRunner() | ||
47 | |||
48 | baseUrl = server.url + '/static/streaming-playlists/hls' | ||
49 | }) | ||
50 | |||
51 | describe('Without transcoding enabled', function () { | ||
52 | |||
53 | before(async function () { | ||
54 | await server.config.enableLive({ | ||
55 | allowReplay: false, | ||
56 | resolutions: 'min', | ||
57 | transcoding: false | ||
58 | }) | ||
59 | }) | ||
60 | |||
61 | it('Should not have available jobs', async function () { | ||
62 | this.timeout(120000) | ||
63 | |||
64 | const { live, video } = await server.live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.PUBLIC }) | ||
65 | |||
66 | const ffmpegCommand = sendRTMPStream({ rtmpBaseUrl: live.rtmpUrl, streamKey: live.streamKey }) | ||
67 | await server.live.waitUntilPublished({ videoId: video.id }) | ||
68 | |||
69 | await waitJobs([ server ]) | ||
70 | |||
71 | const { availableJobs } = await server.runnerJobs.requestLive({ runnerToken }) | ||
72 | expect(availableJobs).to.have.lengthOf(0) | ||
73 | |||
74 | await stopFfmpeg(ffmpegCommand) | ||
75 | }) | ||
76 | }) | ||
77 | |||
78 | describe('With transcoding enabled on classic live', function () { | ||
79 | let live: LiveVideo | ||
80 | let video: Video | ||
81 | let ffmpegCommand: FfmpegCommand | ||
82 | let jobUUID: string | ||
83 | let acceptedJob: RunnerJob & { jobToken: string } | ||
84 | |||
85 | async function testPlaylistFile (fixture: string, expected: string) { | ||
86 | const text = await server.streamingPlaylists.get({ url: `${baseUrl}/${video.uuid}/${fixture}` }) | ||
87 | expect(await readFile(buildAbsoluteFixturePath(expected), 'utf-8')).to.equal(text) | ||
88 | |||
89 | } | ||
90 | |||
91 | async function testTSFile (fixture: string, expected: string) { | ||
92 | const { body } = await makeRawRequest({ url: `${baseUrl}/${video.uuid}/${fixture}`, expectedStatus: HttpStatusCode.OK_200 }) | ||
93 | expect(await readFile(buildAbsoluteFixturePath(expected))).to.deep.equal(body) | ||
94 | } | ||
95 | |||
96 | before(async function () { | ||
97 | await server.config.enableLive({ | ||
98 | allowReplay: true, | ||
99 | resolutions: 'max', | ||
100 | transcoding: true | ||
101 | }) | ||
102 | }) | ||
103 | |||
104 | it('Should publish a live and have available jobs', async function () { | ||
105 | this.timeout(120000) | ||
106 | |||
107 | const data = await server.live.quickCreate({ permanentLive: false, saveReplay: false, privacy: VideoPrivacy.PUBLIC }) | ||
108 | live = data.live | ||
109 | video = data.video | ||
110 | |||
111 | ffmpegCommand = sendRTMPStream({ rtmpBaseUrl: live.rtmpUrl, streamKey: live.streamKey }) | ||
112 | await waitJobs([ server ]) | ||
113 | |||
114 | const job = await server.runnerJobs.requestLiveJob(runnerToken) | ||
115 | jobUUID = job.uuid | ||
116 | |||
117 | expect(job.type).to.equal('live-rtmp-hls-transcoding') | ||
118 | expect(job.payload.input.rtmpUrl).to.exist | ||
119 | |||
120 | expect(job.payload.output.toTranscode).to.have.lengthOf(5) | ||
121 | |||
122 | for (const { resolution, fps } of job.payload.output.toTranscode) { | ||
123 | expect([ 720, 480, 360, 240, 144 ]).to.contain(resolution) | ||
124 | |||
125 | expect(fps).to.be.above(25) | ||
126 | expect(fps).to.be.below(70) | ||
127 | } | ||
128 | }) | ||
129 | |||
130 | it('Should update the live with a new chunk', async function () { | ||
131 | this.timeout(120000) | ||
132 | |||
133 | const { job } = await server.runnerJobs.accept<RunnerJobLiveRTMPHLSTranscodingPayload>({ jobUUID, runnerToken }) | ||
134 | acceptedJob = job | ||
135 | |||
136 | { | ||
137 | const payload: LiveRTMPHLSTranscodingUpdatePayload = { | ||
138 | masterPlaylistFile: 'live/master.m3u8', | ||
139 | resolutionPlaylistFile: 'live/0.m3u8', | ||
140 | resolutionPlaylistFilename: '0.m3u8', | ||
141 | type: 'add-chunk', | ||
142 | videoChunkFile: 'live/0-000067.ts', | ||
143 | videoChunkFilename: '0-000067.ts' | ||
144 | } | ||
145 | await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: job.jobToken, payload, progress: 50 }) | ||
146 | |||
147 | const updatedJob = await server.runnerJobs.getJob({ uuid: job.uuid }) | ||
148 | expect(updatedJob.progress).to.equal(50) | ||
149 | } | ||
150 | |||
151 | { | ||
152 | const payload: LiveRTMPHLSTranscodingUpdatePayload = { | ||
153 | resolutionPlaylistFile: 'live/1.m3u8', | ||
154 | resolutionPlaylistFilename: '1.m3u8', | ||
155 | type: 'add-chunk', | ||
156 | videoChunkFile: 'live/1-000068.ts', | ||
157 | videoChunkFilename: '1-000068.ts' | ||
158 | } | ||
159 | await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: job.jobToken, payload }) | ||
160 | } | ||
161 | |||
162 | await wait(1000) | ||
163 | |||
164 | await testPlaylistFile('master.m3u8', 'live/master.m3u8') | ||
165 | await testPlaylistFile('0.m3u8', 'live/0.m3u8') | ||
166 | await testPlaylistFile('1.m3u8', 'live/1.m3u8') | ||
167 | |||
168 | await testTSFile('0-000067.ts', 'live/0-000067.ts') | ||
169 | await testTSFile('1-000068.ts', 'live/1-000068.ts') | ||
170 | }) | ||
171 | |||
172 | it('Should replace existing m3u8 on update', async function () { | ||
173 | this.timeout(120000) | ||
174 | |||
175 | const payload: LiveRTMPHLSTranscodingUpdatePayload = { | ||
176 | masterPlaylistFile: 'live/1.m3u8', | ||
177 | resolutionPlaylistFilename: '0.m3u8', | ||
178 | resolutionPlaylistFile: 'live/1.m3u8', | ||
179 | type: 'add-chunk', | ||
180 | videoChunkFile: 'live/1-000069.ts', | ||
181 | videoChunkFilename: '1-000068.ts' | ||
182 | } | ||
183 | await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: acceptedJob.jobToken, payload }) | ||
184 | await wait(1000) | ||
185 | |||
186 | await testPlaylistFile('master.m3u8', 'live/1.m3u8') | ||
187 | await testPlaylistFile('0.m3u8', 'live/1.m3u8') | ||
188 | await testTSFile('1-000068.ts', 'live/1-000069.ts') | ||
189 | }) | ||
190 | |||
191 | it('Should update the live with removed chunks', async function () { | ||
192 | this.timeout(120000) | ||
193 | |||
194 | const payload: LiveRTMPHLSTranscodingUpdatePayload = { | ||
195 | resolutionPlaylistFile: 'live/0.m3u8', | ||
196 | resolutionPlaylistFilename: '0.m3u8', | ||
197 | type: 'remove-chunk', | ||
198 | videoChunkFilename: '1-000068.ts' | ||
199 | } | ||
200 | await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: acceptedJob.jobToken, payload }) | ||
201 | |||
202 | await wait(1000) | ||
203 | |||
204 | await server.streamingPlaylists.get({ url: `${baseUrl}/${video.uuid}/master.m3u8` }) | ||
205 | await server.streamingPlaylists.get({ url: `${baseUrl}/${video.uuid}/0.m3u8` }) | ||
206 | await server.streamingPlaylists.get({ url: `${baseUrl}/${video.uuid}/1.m3u8` }) | ||
207 | await makeRawRequest({ url: `${baseUrl}/${video.uuid}/0-000067.ts`, expectedStatus: HttpStatusCode.OK_200 }) | ||
208 | await makeRawRequest({ url: `${baseUrl}/${video.uuid}/1-000068.ts`, expectedStatus: HttpStatusCode.NOT_FOUND_404 }) | ||
209 | }) | ||
210 | |||
211 | it('Should complete the live and save the replay', async function () { | ||
212 | this.timeout(120000) | ||
213 | |||
214 | for (const segment of [ '0-000069.ts', '0-000070.ts' ]) { | ||
215 | const payload: LiveRTMPHLSTranscodingUpdatePayload = { | ||
216 | masterPlaylistFile: 'live/master.m3u8', | ||
217 | resolutionPlaylistFilename: '0.m3u8', | ||
218 | resolutionPlaylistFile: 'live/0.m3u8', | ||
219 | type: 'add-chunk', | ||
220 | videoChunkFile: 'live/' + segment, | ||
221 | videoChunkFilename: segment | ||
222 | } | ||
223 | await server.runnerJobs.update({ jobUUID, runnerToken, jobToken: acceptedJob.jobToken, payload }) | ||
224 | |||
225 | await wait(1000) | ||
226 | } | ||
227 | |||
228 | await waitJobs([ server ]) | ||
229 | |||
230 | { | ||
231 | const { state } = await server.videos.get({ id: video.uuid }) | ||
232 | expect(state.id).to.equal(VideoState.PUBLISHED) | ||
233 | } | ||
234 | |||
235 | await stopFfmpeg(ffmpegCommand) | ||
236 | |||
237 | await server.runnerJobs.success({ jobUUID, runnerToken, jobToken: acceptedJob.jobToken, payload: {} }) | ||
238 | |||
239 | await wait(1500) | ||
240 | await waitJobs([ server ]) | ||
241 | |||
242 | { | ||
243 | const { state } = await server.videos.get({ id: video.uuid }) | ||
244 | expect(state.id).to.equal(VideoState.LIVE_ENDED) | ||
245 | |||
246 | const session = await server.live.findLatestSession({ videoId: video.uuid }) | ||
247 | expect(session.error).to.be.null | ||
248 | } | ||
249 | }) | ||
250 | }) | ||
251 | |||
252 | describe('With transcoding enabled on cancelled/aborted/errored live', function () { | ||
253 | let live: LiveVideo | ||
254 | let video: Video | ||
255 | let ffmpegCommand: FfmpegCommand | ||
256 | |||
257 | async function prepare () { | ||
258 | ffmpegCommand = sendRTMPStream({ rtmpBaseUrl: live.rtmpUrl, streamKey: live.streamKey }) | ||
259 | await server.runnerJobs.requestLiveJob(runnerToken) | ||
260 | |||
261 | const { job } = await server.runnerJobs.autoAccept({ runnerToken, type: 'live-rtmp-hls-transcoding' }) | ||
262 | |||
263 | return job | ||
264 | } | ||
265 | |||
266 | async function checkSessionError (error: LiveVideoError) { | ||
267 | await wait(1500) | ||
268 | await waitJobs([ server ]) | ||
269 | |||
270 | const session = await server.live.findLatestSession({ videoId: video.uuid }) | ||
271 | expect(session.error).to.equal(error) | ||
272 | } | ||
273 | |||
274 | before(async function () { | ||
275 | await server.config.enableLive({ | ||
276 | allowReplay: true, | ||
277 | resolutions: 'max', | ||
278 | transcoding: true | ||
279 | }) | ||
280 | |||
281 | const data = await server.live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.PUBLIC }) | ||
282 | live = data.live | ||
283 | video = data.video | ||
284 | }) | ||
285 | |||
286 | it('Should abort a running live', async function () { | ||
287 | this.timeout(120000) | ||
288 | |||
289 | const job = await prepare() | ||
290 | |||
291 | await Promise.all([ | ||
292 | server.runnerJobs.abort({ jobUUID: job.uuid, runnerToken, jobToken: job.jobToken, reason: 'abort' }), | ||
293 | testFfmpegStreamError(ffmpegCommand, true) | ||
294 | ]) | ||
295 | |||
296 | // Abort is not supported for live jobs, so the session ends with a runner job error | ||
297 | await checkSessionError(LiveVideoError.RUNNER_JOB_ERROR) | ||
298 | }) | ||
299 | |||
300 | it('Should cancel a running live', async function () { | ||
301 | this.timeout(120000) | ||
302 | |||
303 | const job = await prepare() | ||
304 | |||
305 | await Promise.all([ | ||
306 | server.runnerJobs.cancelByAdmin({ jobUUID: job.uuid }), | ||
307 | testFfmpegStreamError(ffmpegCommand, true) | ||
308 | ]) | ||
309 | |||
310 | await checkSessionError(LiveVideoError.RUNNER_JOB_CANCEL) | ||
311 | }) | ||
312 | |||
313 | it('Should error a running live', async function () { | ||
314 | this.timeout(120000) | ||
315 | |||
316 | const job = await prepare() | ||
317 | |||
318 | await Promise.all([ | ||
319 | server.runnerJobs.error({ jobUUID: job.uuid, runnerToken, jobToken: job.jobToken, message: 'error' }), | ||
320 | testFfmpegStreamError(ffmpegCommand, true) | ||
321 | ]) | ||
322 | |||
323 | await checkSessionError(LiveVideoError.RUNNER_JOB_ERROR) | ||
324 | }) | ||
325 | }) | ||
326 | |||
327 | after(async function () { | ||
328 | await cleanupTests([ server ]) | ||
329 | }) | ||
330 | }) | ||
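The live test drives the add-chunk protocol by hand with fixture files. As a rough sketch of the same update loop on a real runner (the segment source is hypothetical; the payload fields and runnerJobs calls are the ones used above):

import { PeerTubeServer } from '@shared/server-commands'
import { LiveRTMPHLSTranscodingUpdatePayload, RunnerJob } from '@shared/models'

// Hypothetical descriptor of a transcoded HLS chunk produced locally by the runner
interface Segment {
  masterPlaylistPath: string
  playlistPath: string
  playlistName: string
  chunkPath: string
  chunkName: string
}

async function pushLiveChunks (
  server: PeerTubeServer,
  runnerToken: string,
  job: RunnerJob & { jobToken: string },
  segments: AsyncIterable<Segment> // hypothetical source of chunks
) {
  for await (const segment of segments) {
    const payload: LiveRTMPHLSTranscodingUpdatePayload = {
      masterPlaylistFile: segment.masterPlaylistPath,
      resolutionPlaylistFile: segment.playlistPath,
      resolutionPlaylistFilename: segment.playlistName,
      type: 'add-chunk',
      videoChunkFile: segment.chunkPath,
      videoChunkFilename: segment.chunkName
    }

    await server.runnerJobs.update({ jobUUID: job.uuid, runnerToken, jobToken: job.jobToken, payload })
  }

  // Once the stream ends, report success so the server can close the session cleanly
  await server.runnerJobs.success({ jobUUID: job.uuid, runnerToken, jobToken: job.jobToken, payload: {} })
}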
diff --git a/server/tests/api/runners/runner-socket.ts b/server/tests/api/runners/runner-socket.ts new file mode 100644 index 000000000..df640f99c --- /dev/null +++ b/server/tests/api/runners/runner-socket.ts | |||
@@ -0,0 +1,116 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import { expect } from 'chai' | ||
4 | import { wait } from '@shared/core-utils' | ||
5 | import { | ||
6 | cleanupTests, | ||
7 | createSingleServer, | ||
8 | PeerTubeServer, | ||
9 | setAccessTokensToServers, | ||
10 | setDefaultVideoChannel, | ||
11 | waitJobs | ||
12 | } from '@shared/server-commands' | ||
13 | |||
14 | describe('Test runner socket', function () { | ||
15 | let server: PeerTubeServer | ||
16 | let runnerToken: string | ||
17 | |||
18 | before(async function () { | ||
19 | this.timeout(120_000) | ||
20 | |||
21 | server = await createSingleServer(1) | ||
22 | |||
23 | await setAccessTokensToServers([ server ]) | ||
24 | await setDefaultVideoChannel([ server ]) | ||
25 | |||
26 | await server.config.enableTranscoding(true, true) | ||
27 | await server.config.enableRemoteTranscoding() | ||
28 | runnerToken = await server.runners.autoRegisterRunner() | ||
29 | }) | ||
30 | |||
31 | it('Should throw an error without runner token', function (done) { | ||
32 | const localSocket = server.socketIO.getRunnersSocket({ runnerToken: null }) | ||
33 | localSocket.on('connect_error', err => { | ||
34 | expect(err.message).to.contain('No runner token provided') | ||
35 | done() | ||
36 | }) | ||
37 | }) | ||
38 | |||
39 | it('Should throw an error with a bad runner token', function (done) { | ||
40 | const localSocket = server.socketIO.getRunnersSocket({ runnerToken: 'ergag' }) | ||
41 | localSocket.on('connect_error', err => { | ||
42 | expect(err.message).to.contain('Invalid runner token') | ||
43 | done() | ||
44 | }) | ||
45 | }) | ||
46 | |||
47 | it('Should not send a ping if there are no available jobs', async function () { | ||
48 | let pings = 0 | ||
49 | const localSocket = server.socketIO.getRunnersSocket({ runnerToken }) | ||
50 | localSocket.on('available-jobs', () => pings++) | ||
51 | |||
52 | expect(pings).to.equal(0) | ||
53 | }) | ||
54 | |||
55 | it('Should send a ping on available job', async function () { | ||
56 | let pings = 0 | ||
57 | const localSocket = server.socketIO.getRunnersSocket({ runnerToken }) | ||
58 | localSocket.on('available-jobs', () => pings++) | ||
59 | |||
60 | await server.videos.quickUpload({ name: 'video1' }) | ||
61 | |||
62 | // Wait for debounce | ||
63 | await wait(1000) | ||
64 | await waitJobs([ server ]) | ||
65 | expect(pings).to.equal(1) | ||
66 | |||
67 | await server.videos.quickUpload({ name: 'video2' }) | ||
68 | |||
69 | // Wait for debounce | ||
70 | await wait(1000) | ||
71 | await waitJobs([ server ]) | ||
72 | expect(pings).to.equal(2) | ||
73 | |||
74 | await server.runnerJobs.cancelAllJobs() | ||
75 | }) | ||
76 | |||
77 | it('Should send a ping when a child is ready', async function () { | ||
78 | let pings = 0 | ||
79 | const localSocket = server.socketIO.getRunnersSocket({ runnerToken }) | ||
80 | localSocket.on('available-jobs', () => pings++) | ||
81 | |||
82 | await server.videos.quickUpload({ name: 'video3' }) | ||
83 | // Wait for debounce | ||
84 | await wait(1000) | ||
85 | await waitJobs([ server ]) | ||
86 | |||
87 | expect(pings).to.equal(1) | ||
88 | |||
89 | await server.runnerJobs.autoProcessWebVideoJob(runnerToken) | ||
90 | // Wait for debounce | ||
91 | await wait(1000) | ||
92 | await waitJobs([ server ]) | ||
93 | |||
94 | expect(pings).to.equal(2) | ||
95 | }) | ||
96 | |||
97 | it('Should not send a ping if the ended job does not have a child', async function () { | ||
98 | let pings = 0 | ||
99 | const localSocket = server.socketIO.getRunnersSocket({ runnerToken }) | ||
100 | localSocket.on('available-jobs', () => pings++) | ||
101 | |||
102 | const { availableJobs } = await server.runnerJobs.request({ runnerToken }) | ||
103 | const job = availableJobs.find(j => j.type === 'vod-web-video-transcoding') | ||
104 | await server.runnerJobs.autoProcessWebVideoJob(runnerToken, job.uuid) | ||
105 | |||
106 | // Wait for debounce | ||
107 | await wait(1000) | ||
108 | await waitJobs([ server ]) | ||
109 | |||
110 | expect(pings).to.equal(0) | ||
111 | }) | ||
112 | |||
113 | after(async function () { | ||
114 | await cleanupTests([ server ]) | ||
115 | }) | ||
116 | }) | ||
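The socket tests above count 'available-jobs' pings. A minimal sketch of how a runner could react to that ping, reusing the same helpers (the processing step itself is elided):

import { PeerTubeServer } from '@shared/server-commands'

function listenForJobs (server: PeerTubeServer, runnerToken: string) {
  const socket = server.socketIO.getRunnersSocket({ runnerToken })

  socket.on('available-jobs', async () => {
    const { availableJobs } = await server.runnerJobs.request({ runnerToken })
    if (availableJobs.length === 0) return

    await server.runnerJobs.accept({ runnerToken, jobUUID: availableJobs[0].uuid })
    // ...process the job, then call runnerJobs.success() or runnerJobs.error()
  })
}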
diff --git a/server/tests/api/runners/runner-vod-transcoding.ts b/server/tests/api/runners/runner-vod-transcoding.ts new file mode 100644 index 000000000..92a47ac3b --- /dev/null +++ b/server/tests/api/runners/runner-vod-transcoding.ts | |||
@@ -0,0 +1,541 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import { expect } from 'chai' | ||
4 | import { readFile } from 'fs-extra' | ||
5 | import { completeCheckHlsPlaylist } from '@server/tests/shared' | ||
6 | import { buildAbsoluteFixturePath } from '@shared/core-utils' | ||
7 | import { | ||
8 | HttpStatusCode, | ||
9 | RunnerJobSuccessPayload, | ||
10 | RunnerJobVODAudioMergeTranscodingPayload, | ||
11 | RunnerJobVODHLSTranscodingPayload, | ||
12 | RunnerJobVODPayload, | ||
13 | RunnerJobVODWebVideoTranscodingPayload, | ||
14 | VideoState, | ||
15 | VODAudioMergeTranscodingSuccess, | ||
16 | VODHLSTranscodingSuccess, | ||
17 | VODWebVideoTranscodingSuccess | ||
18 | } from '@shared/models' | ||
19 | import { | ||
20 | cleanupTests, | ||
21 | createMultipleServers, | ||
22 | doubleFollow, | ||
23 | makeGetRequest, | ||
24 | makeRawRequest, | ||
25 | PeerTubeServer, | ||
26 | setAccessTokensToServers, | ||
27 | setDefaultVideoChannel, | ||
28 | waitJobs | ||
29 | } from '@shared/server-commands' | ||
30 | |||
31 | async function processAllJobs (server: PeerTubeServer, runnerToken: string) { | ||
32 | do { | ||
33 | const { availableJobs } = await server.runnerJobs.requestVOD({ runnerToken }) | ||
34 | if (availableJobs.length === 0) break | ||
35 | |||
36 | const { job } = await server.runnerJobs.accept<RunnerJobVODPayload>({ runnerToken, jobUUID: availableJobs[0].uuid }) | ||
37 | |||
38 | const payload: RunnerJobSuccessPayload = { | ||
39 | videoFile: `video_short_${job.payload.output.resolution}p.mp4`, | ||
40 | resolutionPlaylistFile: `video_short_${job.payload.output.resolution}p.m3u8` | ||
41 | } | ||
42 | await server.runnerJobs.success({ runnerToken, jobUUID: job.uuid, jobToken: job.jobToken, payload }) | ||
43 | } while (true) | ||
44 | |||
45 | await waitJobs([ server ]) | ||
46 | } | ||
47 | |||
48 | describe('Test runner VOD transcoding', function () { | ||
49 | let servers: PeerTubeServer[] = [] | ||
50 | let runnerToken: string | ||
51 | |||
52 | before(async function () { | ||
53 | this.timeout(120_000) | ||
54 | |||
55 | servers = await createMultipleServers(2) | ||
56 | |||
57 | await setAccessTokensToServers(servers) | ||
58 | await setDefaultVideoChannel(servers) | ||
59 | |||
60 | await doubleFollow(servers[0], servers[1]) | ||
61 | |||
62 | await servers[0].config.enableRemoteTranscoding() | ||
63 | runnerToken = await servers[0].runners.autoRegisterRunner() | ||
64 | }) | ||
65 | |||
66 | describe('Without transcoding', function () { | ||
67 | |||
68 | before(async function () { | ||
69 | this.timeout(60000) | ||
70 | |||
71 | await servers[0].config.disableTranscoding() | ||
72 | await servers[0].videos.quickUpload({ name: 'video' }) | ||
73 | |||
74 | await waitJobs(servers) | ||
75 | }) | ||
76 | |||
77 | it('Should not have available jobs', async function () { | ||
78 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
79 | expect(availableJobs).to.have.lengthOf(0) | ||
80 | }) | ||
81 | }) | ||
82 | |||
83 | describe('With classic transcoding enabled', function () { | ||
84 | |||
85 | before(async function () { | ||
86 | this.timeout(60000) | ||
87 | |||
88 | await servers[0].config.enableTranscoding(true, true) | ||
89 | }) | ||
90 | |||
91 | it('Should error a transcoding job', async function () { | ||
92 | this.timeout(60000) | ||
93 | |||
94 | await servers[0].runnerJobs.cancelAllJobs() | ||
95 | const { uuid } = await servers[0].videos.quickUpload({ name: 'video' }) | ||
96 | await waitJobs(servers) | ||
97 | |||
98 | const { availableJobs } = await servers[0].runnerJobs.request({ runnerToken }) | ||
99 | const jobUUID = availableJobs[0].uuid | ||
100 | |||
101 | const { job } = await servers[0].runnerJobs.accept({ runnerToken, jobUUID }) | ||
102 | const jobToken = job.jobToken | ||
103 | |||
104 | await servers[0].runnerJobs.error({ runnerToken, jobUUID, jobToken, message: 'Error' }) | ||
105 | |||
106 | const video = await servers[0].videos.get({ id: uuid }) | ||
107 | expect(video.state.id).to.equal(VideoState.TRANSCODING_FAILED) | ||
108 | }) | ||
109 | |||
110 | it('Should cancel a transcoding job', async function () { | ||
111 | await servers[0].runnerJobs.cancelAllJobs() | ||
112 | const { uuid } = await servers[0].videos.quickUpload({ name: 'video' }) | ||
113 | await waitJobs(servers) | ||
114 | |||
115 | const { availableJobs } = await servers[0].runnerJobs.request({ runnerToken }) | ||
116 | const jobUUID = availableJobs[0].uuid | ||
117 | |||
118 | await servers[0].runnerJobs.cancelByAdmin({ jobUUID }) | ||
119 | |||
120 | const video = await servers[0].videos.get({ id: uuid }) | ||
121 | expect(video.state.id).to.equal(VideoState.PUBLISHED) | ||
122 | }) | ||
123 | }) | ||
124 | |||
125 | describe('Web video transcoding only', function () { | ||
126 | let videoUUID: string | ||
127 | let jobToken: string | ||
128 | let jobUUID: string | ||
129 | |||
130 | before(async function () { | ||
131 | this.timeout(60000) | ||
132 | |||
133 | await servers[0].runnerJobs.cancelAllJobs() | ||
134 | await servers[0].config.enableTranscoding(true, false) | ||
135 | |||
136 | const { uuid } = await servers[0].videos.quickUpload({ name: 'web video', fixture: 'video_short.webm' }) | ||
137 | videoUUID = uuid | ||
138 | |||
139 | await waitJobs(servers) | ||
140 | }) | ||
141 | |||
142 | it('Should have jobs available for remote runners', async function () { | ||
143 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
144 | expect(availableJobs).to.have.lengthOf(1) | ||
145 | |||
146 | jobUUID = availableJobs[0].uuid | ||
147 | }) | ||
148 | |||
149 | it('Should have a valid first transcoding job', async function () { | ||
150 | const { job } = await servers[0].runnerJobs.accept<RunnerJobVODWebVideoTranscodingPayload>({ runnerToken, jobUUID }) | ||
151 | jobToken = job.jobToken | ||
152 | |||
153 | expect(job.type).to.equal('vod-web-video-transcoding') | ||
154 | expect(job.payload.input.videoFileUrl).to.exist | ||
155 | expect(job.payload.output.resolution).to.equal(720) | ||
156 | expect(job.payload.output.fps).to.equal(25) | ||
157 | |||
158 | const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken }) | ||
159 | const inputFile = await readFile(buildAbsoluteFixturePath('video_short.webm')) | ||
160 | |||
161 | expect(body).to.deep.equal(inputFile) | ||
162 | }) | ||
163 | |||
164 | it('Should transcode the max video resolution and send it back to the server', async function () { | ||
165 | this.timeout(60000) | ||
166 | |||
167 | const payload: VODWebVideoTranscodingSuccess = { | ||
168 | videoFile: 'video_short.mp4' | ||
169 | } | ||
170 | await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload }) | ||
171 | |||
172 | await waitJobs(servers) | ||
173 | }) | ||
174 | |||
175 | it('Should have the video updated', async function () { | ||
176 | for (const server of servers) { | ||
177 | const video = await server.videos.get({ id: videoUUID }) | ||
178 | expect(video.files).to.have.lengthOf(1) | ||
179 | expect(video.streamingPlaylists).to.have.lengthOf(0) | ||
180 | |||
181 | const { body } = await makeRawRequest({ url: video.files[0].fileUrl, expectedStatus: HttpStatusCode.OK_200 }) | ||
182 | expect(body).to.deep.equal(await readFile(buildAbsoluteFixturePath('video_short.mp4'))) | ||
183 | } | ||
184 | }) | ||
185 | |||
186 | it('Should have 4 lower resolutions to transcode', async function () { | ||
187 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
188 | expect(availableJobs).to.have.lengthOf(4) | ||
189 | |||
190 | for (const resolution of [ 480, 360, 240, 144 ]) { | ||
191 | const job = availableJobs.find(j => j.payload.output.resolution === resolution) | ||
192 | expect(job).to.exist | ||
193 | expect(job.type).to.equal('vod-web-video-transcoding') | ||
194 | |||
195 | if (resolution === 240) jobUUID = job.uuid | ||
196 | } | ||
197 | }) | ||
198 | |||
199 | it('Should process one of these transcoding jobs', async function () { | ||
200 | const { job } = await servers[0].runnerJobs.accept<RunnerJobVODWebVideoTranscodingPayload>({ runnerToken, jobUUID }) | ||
201 | jobToken = job.jobToken | ||
202 | |||
203 | const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken }) | ||
204 | const inputFile = await readFile(buildAbsoluteFixturePath('video_short.mp4')) | ||
205 | |||
206 | expect(body).to.deep.equal(inputFile) | ||
207 | |||
208 | const payload: VODWebVideoTranscodingSuccess = { videoFile: 'video_short_240p.mp4' } | ||
209 | await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload }) | ||
210 | }) | ||
211 | |||
212 | it('Should process all other jobs', async function () { | ||
213 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
214 | expect(availableJobs).to.have.lengthOf(3) | ||
215 | |||
216 | for (const resolution of [ 480, 360, 144 ]) { | ||
217 | const availableJob = availableJobs.find(j => j.payload.output.resolution === resolution) | ||
218 | expect(availableJob).to.exist | ||
219 | jobUUID = availableJob.uuid | ||
220 | |||
221 | const { job } = await servers[0].runnerJobs.accept<RunnerJobVODWebVideoTranscodingPayload>({ runnerToken, jobUUID }) | ||
222 | jobToken = job.jobToken | ||
223 | |||
224 | const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken }) | ||
225 | const inputFile = await readFile(buildAbsoluteFixturePath('video_short.mp4')) | ||
226 | expect(body).to.deep.equal(inputFile) | ||
227 | |||
228 | const payload: VODWebVideoTranscodingSuccess = { videoFile: `video_short_${resolution}p.mp4` } | ||
229 | await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload }) | ||
230 | } | ||
231 | }) | ||
232 | |||
233 | it('Should have the video updated', async function () { | ||
234 | for (const server of servers) { | ||
235 | const video = await server.videos.get({ id: videoUUID }) | ||
236 | expect(video.files).to.have.lengthOf(5) | ||
237 | expect(video.streamingPlaylists).to.have.lengthOf(0) | ||
238 | |||
239 | const { body } = await makeRawRequest({ url: video.files[0].fileUrl, expectedStatus: HttpStatusCode.OK_200 }) | ||
240 | expect(body).to.deep.equal(await readFile(buildAbsoluteFixturePath('video_short.mp4'))) | ||
241 | |||
242 | for (const file of video.files) { | ||
243 | await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 }) | ||
244 | await makeRawRequest({ url: file.torrentUrl, expectedStatus: HttpStatusCode.OK_200 }) | ||
245 | } | ||
246 | } | ||
247 | }) | ||
248 | |||
249 | it('Should not have available jobs anymore', async function () { | ||
250 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
251 | expect(availableJobs).to.have.lengthOf(0) | ||
252 | }) | ||
253 | }) | ||
254 | |||
255 | describe('HLS transcoding only', function () { | ||
256 | let videoUUID: string | ||
257 | let jobToken: string | ||
258 | let jobUUID: string | ||
259 | |||
260 | before(async function () { | ||
261 | this.timeout(60000) | ||
262 | |||
263 | await servers[0].config.enableTranscoding(false, true) | ||
264 | |||
265 | const { uuid } = await servers[0].videos.quickUpload({ name: 'hls video', fixture: 'video_short.webm' }) | ||
266 | videoUUID = uuid | ||
267 | |||
268 | await waitJobs(servers) | ||
269 | }) | ||
270 | |||
271 | it('Should run the optimize job', async function () { | ||
272 | this.timeout(60000) | ||
273 | |||
274 | await servers[0].runnerJobs.autoProcessWebVideoJob(runnerToken) | ||
275 | }) | ||
276 | |||
277 | it('Should have 5 HLS resolutions to transcode', async function () { | ||
278 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
279 | expect(availableJobs).to.have.lengthOf(5) | ||
280 | |||
281 | for (const resolution of [ 720, 480, 360, 240, 144 ]) { | ||
282 | const job = availableJobs.find(j => j.payload.output.resolution === resolution) | ||
283 | expect(job).to.exist | ||
284 | expect(job.type).to.equal('vod-hls-transcoding') | ||
285 | |||
286 | if (resolution === 480) jobUUID = job.uuid | ||
287 | } | ||
288 | }) | ||
289 | |||
290 | it('Should process one of these transcoding jobs', async function () { | ||
291 | this.timeout(60000) | ||
292 | |||
293 | const { job } = await servers[0].runnerJobs.accept<RunnerJobVODHLSTranscodingPayload>({ runnerToken, jobUUID }) | ||
294 | jobToken = job.jobToken | ||
295 | |||
296 | const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken }) | ||
297 | const inputFile = await readFile(buildAbsoluteFixturePath('video_short.mp4')) | ||
298 | |||
299 | expect(body).to.deep.equal(inputFile) | ||
300 | |||
301 | const payload: VODHLSTranscodingSuccess = { | ||
302 | videoFile: 'video_short_480p.mp4', | ||
303 | resolutionPlaylistFile: 'video_short_480p.m3u8' | ||
304 | } | ||
305 | await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload }) | ||
306 | |||
307 | await waitJobs(servers) | ||
308 | }) | ||
309 | |||
310 | it('Should have the video updated', async function () { | ||
311 | for (const server of servers) { | ||
312 | const video = await server.videos.get({ id: videoUUID }) | ||
313 | |||
314 | expect(video.files).to.have.lengthOf(1) | ||
315 | expect(video.streamingPlaylists).to.have.lengthOf(1) | ||
316 | |||
317 | const hls = video.streamingPlaylists[0] | ||
318 | expect(hls.files).to.have.lengthOf(1) | ||
319 | |||
320 | await completeCheckHlsPlaylist({ videoUUID, hlsOnly: false, servers, resolutions: [ 480 ] }) | ||
321 | } | ||
322 | }) | ||
323 | |||
324 | it('Should process all other jobs', async function () { | ||
325 | this.timeout(60000) | ||
326 | |||
327 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
328 | expect(availableJobs).to.have.lengthOf(4) | ||
329 | |||
330 | let maxQualityFile = 'video_short.mp4' | ||
331 | |||
332 | for (const resolution of [ 720, 360, 240, 144 ]) { | ||
333 | const availableJob = availableJobs.find(j => j.payload.output.resolution === resolution) | ||
334 | expect(availableJob).to.exist | ||
335 | jobUUID = availableJob.uuid | ||
336 | |||
337 | const { job } = await servers[0].runnerJobs.accept<RunnerJobVODHLSTranscodingPayload>({ runnerToken, jobUUID }) | ||
338 | jobToken = job.jobToken | ||
339 | |||
340 | const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken }) | ||
341 | const inputFile = await readFile(buildAbsoluteFixturePath(maxQualityFile)) | ||
342 | expect(body).to.deep.equal(inputFile) | ||
343 | |||
344 | const payload: VODHLSTranscodingSuccess = { | ||
345 | videoFile: `video_short_${resolution}p.mp4`, | ||
346 | resolutionPlaylistFile: `video_short_${resolution}p.m3u8` | ||
347 | } | ||
348 | await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload }) | ||
349 | |||
350 | if (resolution === 720) { | ||
351 | maxQualityFile = 'video_short_720p.mp4' | ||
352 | } | ||
353 | } | ||
354 | |||
355 | await waitJobs(servers) | ||
356 | }) | ||
357 | |||
358 | it('Should have the video updated', async function () { | ||
359 | for (const server of servers) { | ||
360 | const video = await server.videos.get({ id: videoUUID }) | ||
361 | |||
362 | expect(video.files).to.have.lengthOf(0) | ||
363 | expect(video.streamingPlaylists).to.have.lengthOf(1) | ||
364 | |||
365 | const hls = video.streamingPlaylists[0] | ||
366 | expect(hls.files).to.have.lengthOf(5) | ||
367 | |||
368 | await completeCheckHlsPlaylist({ videoUUID, hlsOnly: true, servers, resolutions: [ 720, 480, 360, 240, 144 ] }) | ||
369 | } | ||
370 | }) | ||
371 | |||
372 | it('Should not have available jobs anymore', async function () { | ||
373 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
374 | expect(availableJobs).to.have.lengthOf(0) | ||
375 | }) | ||
376 | }) | ||
377 | |||
378 | describe('Web video and HLS transcoding', function () { | ||
379 | |||
380 | before(async function () { | ||
381 | this.timeout(60000) | ||
382 | |||
383 | await servers[0].config.enableTranscoding(true, true) | ||
384 | |||
385 | await servers[0].videos.quickUpload({ name: 'web video and hls video', fixture: 'video_short.webm' }) | ||
386 | |||
387 | await waitJobs(servers) | ||
388 | }) | ||
389 | |||
390 | it('Should process the first optimize job', async function () { | ||
391 | this.timeout(60000) | ||
392 | |||
393 | await servers[0].runnerJobs.autoProcessWebVideoJob(runnerToken) | ||
394 | }) | ||
395 | |||
396 | it('Should have 9 jobs to process', async function () { | ||
397 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
398 | |||
399 | expect(availableJobs).to.have.lengthOf(9) | ||
400 | |||
401 | const webVideoJobs = availableJobs.filter(j => j.type === 'vod-web-video-transcoding') | ||
402 | const hlsJobs = availableJobs.filter(j => j.type === 'vod-hls-transcoding') | ||
403 | |||
404 | expect(webVideoJobs).to.have.lengthOf(4) | ||
405 | expect(hlsJobs).to.have.lengthOf(5) | ||
406 | }) | ||
407 | |||
408 | it('Should process all available jobs', async function () { | ||
409 | await processAllJobs(servers[0], runnerToken) | ||
410 | }) | ||
411 | }) | ||
412 | |||
413 | describe('Audio merge transcoding', function () { | ||
414 | let videoUUID: string | ||
415 | let jobToken: string | ||
416 | let jobUUID: string | ||
417 | |||
418 | before(async function () { | ||
419 | this.timeout(60000) | ||
420 | |||
421 | await servers[0].config.enableTranscoding(true, true) | ||
422 | |||
423 | const attributes = { name: 'audio_with_preview', previewfile: 'preview.jpg', fixture: 'sample.ogg' } | ||
424 | const { uuid } = await servers[0].videos.upload({ attributes, mode: 'legacy' }) | ||
425 | videoUUID = uuid | ||
426 | |||
427 | await waitJobs(servers) | ||
428 | }) | ||
429 | |||
430 | it('Should have an audio merge transcoding job', async function () { | ||
431 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
432 | expect(availableJobs).to.have.lengthOf(1) | ||
433 | |||
434 | expect(availableJobs[0].type).to.equal('vod-audio-merge-transcoding') | ||
435 | |||
436 | jobUUID = availableJobs[0].uuid | ||
437 | }) | ||
438 | |||
439 | it('Should have a valid remote audio merge transcoding job', async function () { | ||
440 | const { job } = await servers[0].runnerJobs.accept<RunnerJobVODAudioMergeTranscodingPayload>({ runnerToken, jobUUID }) | ||
441 | jobToken = job.jobToken | ||
442 | |||
443 | expect(job.type).to.equal('vod-audio-merge-transcoding') | ||
444 | expect(job.payload.input.audioFileUrl).to.exist | ||
445 | expect(job.payload.input.previewFileUrl).to.exist | ||
446 | expect(job.payload.output.resolution).to.equal(480) | ||
447 | |||
448 | { | ||
449 | const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.audioFileUrl, jobToken, runnerToken }) | ||
450 | const inputFile = await readFile(buildAbsoluteFixturePath('sample.ogg')) | ||
451 | expect(body).to.deep.equal(inputFile) | ||
452 | } | ||
453 | |||
454 | { | ||
455 | const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.previewFileUrl, jobToken, runnerToken }) | ||
456 | |||
457 | const video = await servers[0].videos.get({ id: videoUUID }) | ||
458 | const { body: inputFile } = await makeGetRequest({ | ||
459 | url: servers[0].url, | ||
460 | path: video.previewPath, | ||
461 | expectedStatus: HttpStatusCode.OK_200 | ||
462 | }) | ||
463 | |||
464 | expect(body).to.deep.equal(inputFile) | ||
465 | } | ||
466 | }) | ||
467 | |||
468 | it('Should merge the audio', async function () { | ||
469 | this.timeout(60000) | ||
470 | |||
471 | const payload: VODAudioMergeTranscodingSuccess = { videoFile: 'video_short_480p.mp4' } | ||
472 | await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload }) | ||
473 | |||
474 | await waitJobs(servers) | ||
475 | }) | ||
476 | |||
477 | it('Should have the video updated', async function () { | ||
478 | for (const server of servers) { | ||
479 | const video = await server.videos.get({ id: videoUUID }) | ||
480 | expect(video.files).to.have.lengthOf(1) | ||
481 | expect(video.streamingPlaylists).to.have.lengthOf(0) | ||
482 | |||
483 | const { body } = await makeRawRequest({ url: video.files[0].fileUrl, expectedStatus: HttpStatusCode.OK_200 }) | ||
484 | expect(body).to.deep.equal(await readFile(buildAbsoluteFixturePath('video_short_480p.mp4'))) | ||
485 | } | ||
486 | }) | ||
487 | |||
488 | it('Should have 7 lower resolutions to transcode', async function () { | ||
489 | const { availableJobs } = await servers[0].runnerJobs.requestVOD({ runnerToken }) | ||
490 | expect(availableJobs).to.have.lengthOf(7) | ||
491 | |||
492 | for (const resolution of [ 360, 240, 144 ]) { | ||
493 | const jobs = availableJobs.filter(j => j.payload.output.resolution === resolution) | ||
494 | expect(jobs).to.have.lengthOf(2) | ||
495 | } | ||
496 | |||
497 | jobUUID = availableJobs.find(j => j.payload.output.resolution === 480).uuid | ||
498 | }) | ||
499 | |||
500 | it('Should process one other job', async function () { | ||
501 | this.timeout(60000) | ||
502 | |||
503 | const { job } = await servers[0].runnerJobs.accept<RunnerJobVODHLSTranscodingPayload>({ runnerToken, jobUUID }) | ||
504 | jobToken = job.jobToken | ||
505 | |||
506 | const { body } = await servers[0].runnerJobs.getInputFile({ url: job.payload.input.videoFileUrl, jobToken, runnerToken }) | ||
507 | const inputFile = await readFile(buildAbsoluteFixturePath('video_short_480p.mp4')) | ||
508 | expect(body).to.deep.equal(inputFile) | ||
509 | |||
510 | const payload: VODHLSTranscodingSuccess = { | ||
511 | videoFile: 'video_short_480p.mp4', | ||
512 | resolutionPlaylistFile: 'video_short_480p.m3u8' | ||
513 | } | ||
514 | await servers[0].runnerJobs.success({ runnerToken, jobUUID, jobToken, payload }) | ||
515 | |||
516 | await waitJobs(servers) | ||
517 | }) | ||
518 | |||
519 | it('Should have the video updated', async function () { | ||
520 | for (const server of servers) { | ||
521 | const video = await server.videos.get({ id: videoUUID }) | ||
522 | |||
523 | expect(video.files).to.have.lengthOf(1) | ||
524 | expect(video.streamingPlaylists).to.have.lengthOf(1) | ||
525 | |||
526 | const hls = video.streamingPlaylists[0] | ||
527 | expect(hls.files).to.have.lengthOf(1) | ||
528 | |||
529 | await completeCheckHlsPlaylist({ videoUUID, hlsOnly: false, servers, resolutions: [ 480 ] }) | ||
530 | } | ||
531 | }) | ||
532 | |||
533 | it('Should process all available jobs', async function () { | ||
534 | await processAllJobs(servers[0], runnerToken) | ||
535 | }) | ||
536 | }) | ||
537 | |||
538 | after(async function () { | ||
539 | await cleanupTests(servers) | ||
540 | }) | ||
541 | }) | ||
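The different VOD job types above expect different success payloads: web video and audio merge jobs only return a video file, while HLS jobs also return a resolution playlist. A small sketch of that mapping, assuming the fixture-style output names the tests use:

import { RunnerJobSuccessPayload, RunnerJobVODPayload } from '@shared/models'

function buildSuccessPayload (type: string, payload: RunnerJobVODPayload): RunnerJobSuccessPayload {
  const resolution = payload.output.resolution

  if (type === 'vod-hls-transcoding') {
    return {
      videoFile: `video_short_${resolution}p.mp4`, // fixture names, as in the tests above
      resolutionPlaylistFile: `video_short_${resolution}p.m3u8`
    }
  }

  // 'vod-web-video-transcoding' and 'vod-audio-merge-transcoding' only need the video file
  return { videoFile: `video_short_${resolution}p.mp4` }
}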
diff --git a/server/tests/api/server/config.ts b/server/tests/api/server/config.ts index 3683c4ae1..54a40b994 100644 --- a/server/tests/api/server/config.ts +++ b/server/tests/api/server/config.ts | |||
@@ -63,6 +63,7 @@ function checkInitialConfig (server: PeerTubeServer, data: CustomConfig) { | |||
63 | expect(data.videoChannels.maxPerUser).to.equal(20) | 63 | expect(data.videoChannels.maxPerUser).to.equal(20) |
64 | 64 | ||
65 | expect(data.transcoding.enabled).to.be.false | 65 | expect(data.transcoding.enabled).to.be.false |
66 | expect(data.transcoding.remoteRunners.enabled).to.be.false | ||
66 | expect(data.transcoding.allowAdditionalExtensions).to.be.false | 67 | expect(data.transcoding.allowAdditionalExtensions).to.be.false |
67 | expect(data.transcoding.allowAudioFiles).to.be.false | 68 | expect(data.transcoding.allowAudioFiles).to.be.false |
68 | expect(data.transcoding.threads).to.equal(2) | 69 | expect(data.transcoding.threads).to.equal(2) |
@@ -87,6 +88,7 @@ function checkInitialConfig (server: PeerTubeServer, data: CustomConfig) { | |||
87 | expect(data.live.maxInstanceLives).to.equal(20) | 88 | expect(data.live.maxInstanceLives).to.equal(20) |
88 | expect(data.live.maxUserLives).to.equal(3) | 89 | expect(data.live.maxUserLives).to.equal(3) |
89 | expect(data.live.transcoding.enabled).to.be.false | 90 | expect(data.live.transcoding.enabled).to.be.false |
91 | expect(data.live.transcoding.remoteRunners.enabled).to.be.false | ||
90 | expect(data.live.transcoding.threads).to.equal(2) | 92 | expect(data.live.transcoding.threads).to.equal(2) |
91 | expect(data.live.transcoding.profile).to.equal('default') | 93 | expect(data.live.transcoding.profile).to.equal('default') |
92 | expect(data.live.transcoding.resolutions['144p']).to.be.false | 94 | expect(data.live.transcoding.resolutions['144p']).to.be.false |
@@ -172,6 +174,7 @@ function checkUpdatedConfig (data: CustomConfig) { | |||
172 | expect(data.videoChannels.maxPerUser).to.equal(24) | 174 | expect(data.videoChannels.maxPerUser).to.equal(24) |
173 | 175 | ||
174 | expect(data.transcoding.enabled).to.be.true | 176 | expect(data.transcoding.enabled).to.be.true |
177 | expect(data.transcoding.remoteRunners.enabled).to.be.true | ||
175 | expect(data.transcoding.threads).to.equal(1) | 178 | expect(data.transcoding.threads).to.equal(1) |
176 | expect(data.transcoding.concurrency).to.equal(3) | 179 | expect(data.transcoding.concurrency).to.equal(3) |
177 | expect(data.transcoding.allowAdditionalExtensions).to.be.true | 180 | expect(data.transcoding.allowAdditionalExtensions).to.be.true |
@@ -195,6 +198,7 @@ function checkUpdatedConfig (data: CustomConfig) { | |||
195 | expect(data.live.maxInstanceLives).to.equal(-1) | 198 | expect(data.live.maxInstanceLives).to.equal(-1) |
196 | expect(data.live.maxUserLives).to.equal(10) | 199 | expect(data.live.maxUserLives).to.equal(10) |
197 | expect(data.live.transcoding.enabled).to.be.true | 200 | expect(data.live.transcoding.enabled).to.be.true |
201 | expect(data.live.transcoding.remoteRunners.enabled).to.be.true | ||
198 | expect(data.live.transcoding.threads).to.equal(4) | 202 | expect(data.live.transcoding.threads).to.equal(4) |
199 | expect(data.live.transcoding.profile).to.equal('live_profile') | 203 | expect(data.live.transcoding.profile).to.equal('live_profile') |
200 | expect(data.live.transcoding.resolutions['144p']).to.be.true | 204 | expect(data.live.transcoding.resolutions['144p']).to.be.true |
@@ -313,6 +317,9 @@ const newCustomConfig: CustomConfig = { | |||
313 | }, | 317 | }, |
314 | transcoding: { | 318 | transcoding: { |
315 | enabled: true, | 319 | enabled: true, |
320 | remoteRunners: { | ||
321 | enabled: true | ||
322 | }, | ||
316 | allowAdditionalExtensions: true, | 323 | allowAdditionalExtensions: true, |
317 | allowAudioFiles: true, | 324 | allowAudioFiles: true, |
318 | threads: 1, | 325 | threads: 1, |
@@ -348,6 +355,9 @@ const newCustomConfig: CustomConfig = { | |||
348 | maxUserLives: 10, | 355 | maxUserLives: 10, |
349 | transcoding: { | 356 | transcoding: { |
350 | enabled: true, | 357 | enabled: true, |
358 | remoteRunners: { | ||
359 | enabled: true | ||
360 | }, | ||
351 | threads: 4, | 361 | threads: 4, |
352 | profile: 'live_profile', | 362 | profile: 'live_profile', |
353 | resolutions: { | 363 | resolutions: { |
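For reference, the new fields exercised by this config test are the remoteRunners toggles under both transcoding sections of the custom config. A trimmed excerpt of that shape (other required CustomConfig fields are omitted, so this is not a complete object):

const remoteRunnersConfig = {
  transcoding: {
    enabled: true,
    remoteRunners: {
      enabled: true
    }
  },
  live: {
    transcoding: {
      enabled: true,
      remoteRunners: {
        enabled: true
      }
    }
  }
}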
diff --git a/server/tests/api/server/follow-constraints.ts b/server/tests/api/server/follow-constraints.ts index 704d6fc96..ff5332858 100644 --- a/server/tests/api/server/follow-constraints.ts +++ b/server/tests/api/server/follow-constraints.ts | |||
@@ -146,7 +146,7 @@ describe('Test follow constraints', function () { | |||
146 | const body = await servers[0].videos.get({ id: video2UUID, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) | 146 | const body = await servers[0].videos.get({ id: video2UUID, expectedStatus: HttpStatusCode.FORBIDDEN_403 }) |
147 | const error = body as unknown as PeerTubeProblemDocument | 147 | const error = body as unknown as PeerTubeProblemDocument |
148 | 148 | ||
149 | const doc = 'https://docs.joinpeertube.org/api/rest-reference.html#section/Errors/does_not_respect_follow_constraints' | 149 | const doc = 'https://docs.joinpeertube.org/api-rest-reference.html#section/Errors/does_not_respect_follow_constraints' |
150 | expect(error.type).to.equal(doc) | 150 | expect(error.type).to.equal(doc) |
151 | expect(error.code).to.equal(ServerErrorCode.DOES_NOT_RESPECT_FOLLOW_CONSTRAINTS) | 151 | expect(error.code).to.equal(ServerErrorCode.DOES_NOT_RESPECT_FOLLOW_CONSTRAINTS) |
152 | 152 | ||
diff --git a/server/tests/api/server/follows.ts b/server/tests/api/server/follows.ts index 6a2cc2c43..ecec95bf8 100644 --- a/server/tests/api/server/follows.ts +++ b/server/tests/api/server/follows.ts | |||
@@ -2,7 +2,7 @@ | |||
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { completeVideoCheck, dateIsValid, expectAccountFollows, expectChannelsFollows, testCaptionFile } from '@server/tests/shared' | 4 | import { completeVideoCheck, dateIsValid, expectAccountFollows, expectChannelsFollows, testCaptionFile } from '@server/tests/shared' |
5 | import { VideoCreateResult, VideoPrivacy } from '@shared/models' | 5 | import { Video, VideoPrivacy } from '@shared/models' |
6 | import { cleanupTests, createMultipleServers, PeerTubeServer, setAccessTokensToServers, waitJobs } from '@shared/server-commands' | 6 | import { cleanupTests, createMultipleServers, PeerTubeServer, setAccessTokensToServers, waitJobs } from '@shared/server-commands' |
7 | 7 | ||
8 | describe('Test follows', function () { | 8 | describe('Test follows', function () { |
@@ -357,7 +357,7 @@ describe('Test follows', function () { | |||
357 | }) | 357 | }) |
358 | 358 | ||
359 | describe('Should propagate data on a new server follow', function () { | 359 | describe('Should propagate data on a new server follow', function () { |
360 | let video4: VideoCreateResult | 360 | let video4: Video |
361 | 361 | ||
362 | before(async function () { | 362 | before(async function () { |
363 | this.timeout(50000) | 363 | this.timeout(50000) |
@@ -372,19 +372,19 @@ describe('Test follows', function () { | |||
372 | 372 | ||
373 | await servers[2].videos.upload({ attributes: { name: 'server3-2' } }) | 373 | await servers[2].videos.upload({ attributes: { name: 'server3-2' } }) |
374 | await servers[2].videos.upload({ attributes: { name: 'server3-3' } }) | 374 | await servers[2].videos.upload({ attributes: { name: 'server3-3' } }) |
375 | video4 = await servers[2].videos.upload({ attributes: video4Attributes }) | 375 | const video4CreateResult = await servers[2].videos.upload({ attributes: video4Attributes }) |
376 | await servers[2].videos.upload({ attributes: { name: 'server3-5' } }) | 376 | await servers[2].videos.upload({ attributes: { name: 'server3-5' } }) |
377 | await servers[2].videos.upload({ attributes: { name: 'server3-6' } }) | 377 | await servers[2].videos.upload({ attributes: { name: 'server3-6' } }) |
378 | 378 | ||
379 | { | 379 | { |
380 | const userAccessToken = await servers[2].users.generateUserAndToken('captain') | 380 | const userAccessToken = await servers[2].users.generateUserAndToken('captain') |
381 | 381 | ||
382 | await servers[2].videos.rate({ id: video4.id, rating: 'like' }) | 382 | await servers[2].videos.rate({ id: video4CreateResult.id, rating: 'like' }) |
383 | await servers[2].videos.rate({ token: userAccessToken, id: video4.id, rating: 'dislike' }) | 383 | await servers[2].videos.rate({ token: userAccessToken, id: video4CreateResult.id, rating: 'dislike' }) |
384 | } | 384 | } |
385 | 385 | ||
386 | { | 386 | { |
387 | await servers[2].comments.createThread({ videoId: video4.id, text: 'my super first comment' }) | 387 | await servers[2].comments.createThread({ videoId: video4CreateResult.id, text: 'my super first comment' }) |
388 | 388 | ||
389 | await servers[2].comments.addReplyToLastThread({ text: 'my super answer to thread 1' }) | 389 | await servers[2].comments.addReplyToLastThread({ text: 'my super answer to thread 1' }) |
390 | await servers[2].comments.addReplyToLastReply({ text: 'my super answer to answer of thread 1' }) | 390 | await servers[2].comments.addReplyToLastReply({ text: 'my super answer to answer of thread 1' }) |
@@ -392,20 +392,20 @@ describe('Test follows', function () { | |||
392 | } | 392 | } |
393 | 393 | ||
394 | { | 394 | { |
395 | const { id: threadId } = await servers[2].comments.createThread({ videoId: video4.id, text: 'will be deleted' }) | 395 | const { id: threadId } = await servers[2].comments.createThread({ videoId: video4CreateResult.id, text: 'will be deleted' }) |
396 | await servers[2].comments.addReplyToLastThread({ text: 'answer to deleted' }) | 396 | await servers[2].comments.addReplyToLastThread({ text: 'answer to deleted' }) |
397 | 397 | ||
398 | const { id: replyId } = await servers[2].comments.addReplyToLastThread({ text: 'will also be deleted' }) | 398 | const { id: replyId } = await servers[2].comments.addReplyToLastThread({ text: 'will also be deleted' }) |
399 | 399 | ||
400 | await servers[2].comments.addReplyToLastReply({ text: 'my second answer to deleted' }) | 400 | await servers[2].comments.addReplyToLastReply({ text: 'my second answer to deleted' }) |
401 | 401 | ||
402 | await servers[2].comments.delete({ videoId: video4.id, commentId: threadId }) | 402 | await servers[2].comments.delete({ videoId: video4CreateResult.id, commentId: threadId }) |
403 | await servers[2].comments.delete({ videoId: video4.id, commentId: replyId }) | 403 | await servers[2].comments.delete({ videoId: video4CreateResult.id, commentId: replyId }) |
404 | } | 404 | } |
405 | 405 | ||
406 | await servers[2].captions.add({ | 406 | await servers[2].captions.add({ |
407 | language: 'ar', | 407 | language: 'ar', |
408 | videoId: video4.id, | 408 | videoId: video4CreateResult.id, |
409 | fixture: 'subtitle-good2.vtt' | 409 | fixture: 'subtitle-good2.vtt' |
410 | }) | 410 | }) |
411 | 411 | ||
@@ -479,7 +479,12 @@ describe('Test follows', function () { | |||
479 | } | 479 | } |
480 | ] | 480 | ] |
481 | } | 481 | } |
482 | await completeVideoCheck(servers[0], video4, checkAttributes) | 482 | await completeVideoCheck({ |
483 | server: servers[0], | ||
484 | originServer: servers[2], | ||
485 | videoUUID: video4.uuid, | ||
486 | attributes: checkAttributes | ||
487 | }) | ||
483 | }) | 488 | }) |
484 | 489 | ||
485 | it('Should have propagated comments', async function () { | 490 | it('Should have propagated comments', async function () { |
diff --git a/server/tests/api/server/handle-down.ts b/server/tests/api/server/handle-down.ts index 1fb4d18f9..0bbd9ef47 100644 --- a/server/tests/api/server/handle-down.ts +++ b/server/tests/api/server/handle-down.ts | |||
@@ -1,7 +1,7 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { completeVideoCheck } from '@server/tests/shared' | 4 | import { completeVideoCheck, SQLCommand } from '@server/tests/shared' |
5 | import { wait } from '@shared/core-utils' | 5 | import { wait } from '@shared/core-utils' |
6 | import { HttpStatusCode, JobState, VideoCreateResult, VideoPrivacy } from '@shared/models' | 6 | import { HttpStatusCode, JobState, VideoCreateResult, VideoPrivacy } from '@shared/models' |
7 | import { | 7 | import { |
@@ -16,6 +16,8 @@ import { | |||
16 | 16 | ||
17 | describe('Test handle downs', function () { | 17 | describe('Test handle downs', function () { |
18 | let servers: PeerTubeServer[] = [] | 18 | let servers: PeerTubeServer[] = [] |
19 | let sqlCommands: SQLCommand[] | ||
20 | |||
19 | let threadIdServer1: number | 21 | let threadIdServer1: number |
20 | let threadIdServer2: number | 22 | let threadIdServer2: number |
21 | let commentIdServer1: number | 23 | let commentIdServer1: number |
@@ -88,6 +90,8 @@ describe('Test handle downs', function () { | |||
88 | 90 | ||
89 | // Get the access tokens | 91 | // Get the access tokens |
90 | await setAccessTokensToServers(servers) | 92 | await setAccessTokensToServers(servers) |
93 | |||
94 | sqlCommands = servers.map(s => new SQLCommand(s)) | ||
91 | }) | 95 | }) |
92 | 96 | ||
93 | it('Should remove followers that are often down', async function () { | 97 | it('Should remove followers that are often down', async function () { |
@@ -209,7 +213,7 @@ describe('Test handle downs', function () { | |||
209 | 213 | ||
210 | // Check unlisted video | 214 | // Check unlisted video |
211 | const video = await servers[2].videos.get({ id: unlistedVideo.uuid }) | 215 | const video = await servers[2].videos.get({ id: unlistedVideo.uuid }) |
212 | await completeVideoCheck(servers[2], video, unlistedCheckAttributes) | 216 | await completeVideoCheck({ server: servers[2], originServer: servers[0], videoUUID: video.uuid, attributes: unlistedCheckAttributes }) |
213 | }) | 217 | }) |
214 | 218 | ||
215 | it('Should send comments on a video to server 3, and automatically fetch the video', async function () { | 219 | it('Should send comments on a video to server 3, and automatically fetch the video', async function () { |
@@ -292,7 +296,7 @@ describe('Test handle downs', function () { | |||
292 | } | 296 | } |
293 | 297 | ||
294 | await waitJobs(servers) | 298 | await waitJobs(servers) |
295 | await servers[1].sql.setActorFollowScores(20) | 299 | await sqlCommands[1].setActorFollowScores(20) |
296 | 300 | ||
297 | // Wait video expiration | 301 | // Wait video expiration |
298 | await wait(11000) | 302 | await wait(11000) |
@@ -325,6 +329,10 @@ describe('Test handle downs', function () { | |||
325 | }) | 329 | }) |
326 | 330 | ||
327 | after(async function () { | 331 | after(async function () { |
332 | for (const sqlCommand of sqlCommands) { | ||
333 | await sqlCommand.cleanup() | ||
334 | } | ||
335 | |||
328 | await cleanupTests(servers) | 336 | await cleanupTests(servers) |
329 | }) | 337 | }) |
330 | }) | 338 | }) |
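The recurring pattern in this changeset replaces the removed server.sql helper with an explicit SQLCommand instance per server, created once the servers are up and disposed of in the after() hook before cleanupTests(). A condensed sketch of that lifecycle, using only the constructor and methods that appear above:

import { SQLCommand } from '@server/tests/shared'

let sqlCommands: SQLCommand[] = []

before(async function () {
  // ... create servers and set access tokens first ...
  sqlCommands = servers.map(s => new SQLCommand(s))
})

it('Should lower follow scores', async function () {
  await sqlCommands[1].setActorFollowScores(20)
})

after(async function () {
  // Each SQLCommand is cleaned up explicitly, before the servers themselves
  for (const sqlCommand of sqlCommands) {
    await sqlCommand.cleanup()
  }

  await cleanupTests(servers)
})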
diff --git a/server/tests/api/server/plugins.ts b/server/tests/api/server/plugins.ts index 8ac7023eb..199d205c7 100644 --- a/server/tests/api/server/plugins.ts +++ b/server/tests/api/server/plugins.ts | |||
@@ -3,7 +3,7 @@ | |||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { pathExists, remove } from 'fs-extra' | 4 | import { pathExists, remove } from 'fs-extra' |
5 | import { join } from 'path' | 5 | import { join } from 'path' |
6 | import { testHelloWorldRegisteredSettings } from '@server/tests/shared' | 6 | import { SQLCommand, testHelloWorldRegisteredSettings } from '@server/tests/shared' |
7 | import { wait } from '@shared/core-utils' | 7 | import { wait } from '@shared/core-utils' |
8 | import { HttpStatusCode, PluginType } from '@shared/models' | 8 | import { HttpStatusCode, PluginType } from '@shared/models' |
9 | import { | 9 | import { |
@@ -17,7 +17,8 @@ import { | |||
17 | } from '@shared/server-commands' | 17 | } from '@shared/server-commands' |
18 | 18 | ||
19 | describe('Test plugins', function () { | 19 | describe('Test plugins', function () { |
20 | let server: PeerTubeServer = null | 20 | let server: PeerTubeServer |
21 | let sqlCommand: SQLCommand | ||
21 | let command: PluginsCommand | 22 | let command: PluginsCommand |
22 | 23 | ||
23 | before(async function () { | 24 | before(async function () { |
@@ -32,6 +33,8 @@ describe('Test plugins', function () { | |||
32 | await setAccessTokensToServers([ server ]) | 33 | await setAccessTokensToServers([ server ]) |
33 | 34 | ||
34 | command = server.plugins | 35 | command = server.plugins |
36 | |||
37 | sqlCommand = new SQLCommand(server) | ||
35 | }) | 38 | }) |
36 | 39 | ||
37 | it('Should list and search available plugins and themes', async function () { | 40 | it('Should list and search available plugins and themes', async function () { |
@@ -236,7 +239,7 @@ describe('Test plugins', function () { | |||
236 | 239 | ||
237 | async function testUpdate (type: 'plugin' | 'theme', name: string) { | 240 | async function testUpdate (type: 'plugin' | 'theme', name: string) { |
238 | // Fake update our plugin version | 241 | // Fake update our plugin version |
239 | await server.sql.setPluginVersion(name, '0.0.1') | 242 | await sqlCommand.setPluginVersion(name, '0.0.1') |
240 | 243 | ||
241 | // Fake update package.json | 244 | // Fake update package.json |
242 | const packageJSON = await command.getPackageJSON(`peertube-${type}-${name}`) | 245 | const packageJSON = await command.getPackageJSON(`peertube-${type}-${name}`) |
@@ -366,7 +369,7 @@ describe('Test plugins', function () { | |||
366 | }) | 369 | }) |
367 | 370 | ||
368 | const query = `UPDATE "application" SET "nodeABIVersion" = 1` | 371 | const query = `UPDATE "application" SET "nodeABIVersion" = 1` |
369 | await server.sql.updateQuery(query) | 372 | await sqlCommand.updateQuery(query) |
370 | 373 | ||
371 | const baseNativeModule = server.servers.buildDirectory(join('plugins', 'node_modules', 'a-native-example')) | 374 | const baseNativeModule = server.servers.buildDirectory(join('plugins', 'node_modules', 'a-native-example')) |
372 | 375 | ||
@@ -401,6 +404,8 @@ describe('Test plugins', function () { | |||
401 | }) | 404 | }) |
402 | 405 | ||
403 | after(async function () { | 406 | after(async function () { |
407 | await sqlCommand.cleanup() | ||
408 | |||
404 | await cleanupTests([ server ]) | 409 | await cleanupTests([ server ]) |
405 | }) | 410 | }) |
406 | }) | 411 | }) |
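Beyond the dedicated setters, SQLCommand also keeps the raw updateQuery() escape hatch that plugins.ts uses to fake an old Node ABI version. A short sketch on a single server; the 'hello-world' plugin name is only illustrative:

const sqlCommand = new SQLCommand(server)

// Pretend the installed plugin is outdated so the update path is exercised
await sqlCommand.setPluginVersion('hello-world', '0.0.1')

// Arbitrary SQL is still possible for cases without a dedicated helper
await sqlCommand.updateQuery(`UPDATE "application" SET "nodeABIVersion" = 1`)

await sqlCommand.cleanup()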
diff --git a/server/tests/api/transcoding/audio-only.ts b/server/tests/api/transcoding/audio-only.ts index b72f5fdbe..1e31418e7 100644 --- a/server/tests/api/transcoding/audio-only.ts +++ b/server/tests/api/transcoding/audio-only.ts | |||
@@ -1,7 +1,7 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { getAudioStream, getVideoStreamDimensionsInfo } from '@server/helpers/ffmpeg' | 4 | import { getAudioStream, getVideoStreamDimensionsInfo } from '@shared/ffmpeg' |
5 | import { | 5 | import { |
6 | cleanupTests, | 6 | cleanupTests, |
7 | createMultipleServers, | 7 | createMultipleServers, |
diff --git a/server/tests/api/transcoding/transcoder.ts b/server/tests/api/transcoding/transcoder.ts index c591f5f6f..fa78b58bb 100644 --- a/server/tests/api/transcoding/transcoder.ts +++ b/server/tests/api/transcoding/transcoder.ts | |||
@@ -1,18 +1,18 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { canDoQuickTranscode } from '@server/helpers/ffmpeg' | 4 | import { canDoQuickTranscode } from '@server/lib/transcoding/transcoding-quick-transcode' |
5 | import { generateHighBitrateVideo, generateVideoWithFramerate } from '@server/tests/shared' | 5 | import { checkWebTorrentWorks, generateHighBitrateVideo, generateVideoWithFramerate } from '@server/tests/shared' |
6 | import { buildAbsoluteFixturePath, getAllFiles, getMaxBitrate, getMinLimitBitrate, omit } from '@shared/core-utils' | 6 | import { buildAbsoluteFixturePath, getAllFiles, getMaxBitrate, getMinLimitBitrate, omit } from '@shared/core-utils' |
7 | import { | 7 | import { |
8 | buildFileMetadata, | 8 | ffprobePromise, |
9 | getAudioStream, | 9 | getAudioStream, |
10 | getVideoStreamBitrate, | 10 | getVideoStreamBitrate, |
11 | getVideoStreamDimensionsInfo, | 11 | getVideoStreamDimensionsInfo, |
12 | getVideoStreamFPS, | 12 | getVideoStreamFPS, |
13 | hasAudioStream | 13 | hasAudioStream |
14 | } from '@shared/extra-utils' | 14 | } from '@shared/ffmpeg' |
15 | import { HttpStatusCode, VideoState } from '@shared/models' | 15 | import { HttpStatusCode, VideoFileMetadata, VideoState } from '@shared/models' |
16 | import { | 16 | import { |
17 | cleanupTests, | 17 | cleanupTests, |
18 | createMultipleServers, | 18 | createMultipleServers, |
@@ -20,8 +20,7 @@ import { | |||
20 | makeGetRequest, | 20 | makeGetRequest, |
21 | PeerTubeServer, | 21 | PeerTubeServer, |
22 | setAccessTokensToServers, | 22 | setAccessTokensToServers, |
23 | waitJobs, | 23 | waitJobs |
24 | webtorrentAdd | ||
25 | } from '@shared/server-commands' | 24 | } from '@shared/server-commands' |
26 | 25 | ||
27 | function updateConfigForTranscoding (server: PeerTubeServer) { | 26 | function updateConfigForTranscoding (server: PeerTubeServer) { |
@@ -90,10 +89,7 @@ describe('Test video transcoding', function () { | |||
90 | const magnetUri = videoDetails.files[0].magnetUri | 89 | const magnetUri = videoDetails.files[0].magnetUri |
91 | expect(magnetUri).to.match(/\.webm/) | 90 | expect(magnetUri).to.match(/\.webm/) |
92 | 91 | ||
93 | const torrent = await webtorrentAdd(magnetUri, true) | 92 | await checkWebTorrentWorks(magnetUri, /\.webm$/) |
94 | expect(torrent.files).to.be.an('array') | ||
95 | expect(torrent.files.length).to.equal(1) | ||
96 | expect(torrent.files[0].path).match(/\.webm$/) | ||
97 | } | 93 | } |
98 | }) | 94 | }) |
99 | 95 | ||
@@ -120,10 +116,7 @@ describe('Test video transcoding', function () { | |||
120 | const magnetUri = videoDetails.files[0].magnetUri | 116 | const magnetUri = videoDetails.files[0].magnetUri |
121 | expect(magnetUri).to.match(/\.mp4/) | 117 | expect(magnetUri).to.match(/\.mp4/) |
122 | 118 | ||
123 | const torrent = await webtorrentAdd(magnetUri, true) | 119 | await checkWebTorrentWorks(magnetUri, /\.mp4$/) |
124 | expect(torrent.files).to.be.an('array') | ||
125 | expect(torrent.files.length).to.equal(1) | ||
126 | expect(torrent.files[0].path).match(/\.mp4$/) | ||
127 | } | 120 | } |
128 | }) | 121 | }) |
129 | 122 | ||
@@ -639,7 +632,9 @@ describe('Test video transcoding', function () { | |||
639 | const video = await servers[1].videos.get({ id: videoUUID }) | 632 | const video = await servers[1].videos.get({ id: videoUUID }) |
640 | const file = video.files.find(f => f.resolution.id === 240) | 633 | const file = video.files.find(f => f.resolution.id === 240) |
641 | const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl) | 634 | const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl) |
642 | const metadata = await buildFileMetadata(path) | 635 | |
636 | const probe = await ffprobePromise(path) | ||
637 | const metadata = new VideoFileMetadata(probe) | ||
643 | 638 | ||
644 | // expected format properties | 639 | // expected format properties |
645 | for (const p of [ | 640 | for (const p of [ |
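The metadata assertion above no longer relies on a buildFileMetadata() helper: the test probes the file itself and wraps the raw result. A minimal sketch combining the imports and the two-step construction from the hunks above; the final assertions are illustrative only:

import { expect } from 'chai'
import { ffprobePromise } from '@shared/ffmpeg'
import { VideoFileMetadata } from '@shared/models'

const path = servers[1].servers.buildWebTorrentFilePath(file.fileUrl)

const probe = await ffprobePromise(path)       // raw ffprobe result
const metadata = new VideoFileMetadata(probe)  // typed wrapper checked by the test

// Illustrative checks, assuming format/stream fields on the wrapper
expect(metadata.format).to.exist
expect(metadata.streams).to.be.an('array')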
diff --git a/server/tests/api/users/oauth.ts b/server/tests/api/users/oauth.ts index 6a3da5ea2..153615875 100644 --- a/server/tests/api/users/oauth.ts +++ b/server/tests/api/users/oauth.ts | |||
@@ -1,12 +1,14 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { SQLCommand } from '@server/tests/shared' | ||
4 | import { wait } from '@shared/core-utils' | 5 | import { wait } from '@shared/core-utils' |
5 | import { HttpStatusCode, OAuth2ErrorCode, PeerTubeProblemDocument } from '@shared/models' | 6 | import { HttpStatusCode, OAuth2ErrorCode, PeerTubeProblemDocument } from '@shared/models' |
6 | import { cleanupTests, createSingleServer, killallServers, PeerTubeServer, setAccessTokensToServers } from '@shared/server-commands' | 7 | import { cleanupTests, createSingleServer, killallServers, PeerTubeServer, setAccessTokensToServers } from '@shared/server-commands' |
7 | 8 | ||
8 | describe('Test oauth', function () { | 9 | describe('Test oauth', function () { |
9 | let server: PeerTubeServer | 10 | let server: PeerTubeServer |
11 | let sqlCommand: SQLCommand | ||
10 | 12 | ||
11 | before(async function () { | 13 | before(async function () { |
12 | this.timeout(30000) | 14 | this.timeout(30000) |
@@ -20,6 +22,8 @@ describe('Test oauth', function () { | |||
20 | }) | 22 | }) |
21 | 23 | ||
22 | await setAccessTokensToServers([ server ]) | 24 | await setAccessTokensToServers([ server ]) |
25 | |||
26 | sqlCommand = new SQLCommand(server) | ||
23 | }) | 27 | }) |
24 | 28 | ||
25 | describe('OAuth client', function () { | 29 | describe('OAuth client', function () { |
@@ -118,8 +122,8 @@ describe('Test oauth', function () { | |||
118 | it('Should have an expired access token', async function () { | 122 | it('Should have an expired access token', async function () { |
119 | this.timeout(60000) | 123 | this.timeout(60000) |
120 | 124 | ||
121 | await server.sql.setTokenField(server.accessToken, 'accessTokenExpiresAt', new Date().toISOString()) | 125 | await sqlCommand.setTokenField(server.accessToken, 'accessTokenExpiresAt', new Date().toISOString()) |
122 | await server.sql.setTokenField(server.accessToken, 'refreshTokenExpiresAt', new Date().toISOString()) | 126 | await sqlCommand.setTokenField(server.accessToken, 'refreshTokenExpiresAt', new Date().toISOString()) |
123 | 127 | ||
124 | await killallServers([ server ]) | 128 | await killallServers([ server ]) |
125 | await server.run() | 129 | await server.run() |
@@ -135,7 +139,7 @@ describe('Test oauth', function () { | |||
135 | this.timeout(50000) | 139 | this.timeout(50000) |
136 | 140 | ||
137 | const futureDate = new Date(new Date().getTime() + 1000 * 60).toISOString() | 141 | const futureDate = new Date(new Date().getTime() + 1000 * 60).toISOString() |
138 | await server.sql.setTokenField(server.accessToken, 'refreshTokenExpiresAt', futureDate) | 142 | await sqlCommand.setTokenField(server.accessToken, 'refreshTokenExpiresAt', futureDate) |
139 | 143 | ||
140 | await killallServers([ server ]) | 144 | await killallServers([ server ]) |
141 | await server.run() | 145 | await server.run() |
@@ -187,6 +191,7 @@ describe('Test oauth', function () { | |||
187 | }) | 191 | }) |
188 | 192 | ||
189 | after(async function () { | 193 | after(async function () { |
194 | await sqlCommand.cleanup() | ||
190 | await cleanupTests([ server ]) | 195 | await cleanupTests([ server ]) |
191 | }) | 196 | }) |
192 | }) | 197 | }) |
diff --git a/server/tests/api/videos/multiple-servers.ts b/server/tests/api/videos/multiple-servers.ts index ff730287a..a52a04e07 100644 --- a/server/tests/api/videos/multiple-servers.ts +++ b/server/tests/api/videos/multiple-servers.ts | |||
@@ -5,6 +5,7 @@ import request from 'supertest' | |||
5 | import { | 5 | import { |
6 | checkTmpIsEmpty, | 6 | checkTmpIsEmpty, |
7 | checkVideoFilesWereRemoved, | 7 | checkVideoFilesWereRemoved, |
8 | checkWebTorrentWorks, | ||
8 | completeVideoCheck, | 9 | completeVideoCheck, |
9 | dateIsValid, | 10 | dateIsValid, |
10 | saveVideoInServers, | 11 | saveVideoInServers, |
@@ -21,8 +22,7 @@ import { | |||
21 | setAccessTokensToServers, | 22 | setAccessTokensToServers, |
22 | setDefaultAccountAvatar, | 23 | setDefaultAccountAvatar, |
23 | setDefaultChannelAvatar, | 24 | setDefaultChannelAvatar, |
24 | waitJobs, | 25 | waitJobs |
25 | webtorrentAdd | ||
26 | } from '@shared/server-commands' | 26 | } from '@shared/server-commands' |
27 | 27 | ||
28 | describe('Test multiple servers', function () { | 28 | describe('Test multiple servers', function () { |
@@ -134,7 +134,7 @@ describe('Test multiple servers', function () { | |||
134 | expect(data.length).to.equal(1) | 134 | expect(data.length).to.equal(1) |
135 | const video = data[0] | 135 | const video = data[0] |
136 | 136 | ||
137 | await completeVideoCheck(server, video, checkAttributes) | 137 | await completeVideoCheck({ server, originServer: servers[0], videoUUID: video.uuid, attributes: checkAttributes }) |
138 | publishedAt = video.publishedAt as string | 138 | publishedAt = video.publishedAt as string |
139 | 139 | ||
140 | expect(video.channel.avatars).to.have.lengthOf(2) | 140 | expect(video.channel.avatars).to.have.lengthOf(2) |
@@ -238,7 +238,7 @@ describe('Test multiple servers', function () { | |||
238 | expect(data.length).to.equal(2) | 238 | expect(data.length).to.equal(2) |
239 | const video = data[1] | 239 | const video = data[1] |
240 | 240 | ||
241 | await completeVideoCheck(server, video, checkAttributes) | 241 | await completeVideoCheck({ server, originServer: servers[1], videoUUID: video.uuid, attributes: checkAttributes }) |
242 | } | 242 | } |
243 | }) | 243 | }) |
244 | 244 | ||
@@ -328,7 +328,7 @@ describe('Test multiple servers', function () { | |||
328 | } | 328 | } |
329 | ] | 329 | ] |
330 | } | 330 | } |
331 | await completeVideoCheck(server, video1, checkAttributesVideo1) | 331 | await completeVideoCheck({ server, originServer: servers[2], videoUUID: video1.uuid, attributes: checkAttributesVideo1 }) |
332 | 332 | ||
333 | const checkAttributesVideo2 = { | 333 | const checkAttributesVideo2 = { |
334 | name: 'my super name for server 3-2', | 334 | name: 'my super name for server 3-2', |
@@ -362,7 +362,7 @@ describe('Test multiple servers', function () { | |||
362 | } | 362 | } |
363 | ] | 363 | ] |
364 | } | 364 | } |
365 | await completeVideoCheck(server, video2, checkAttributesVideo2) | 365 | await completeVideoCheck({ server, originServer: servers[2], videoUUID: video2.uuid, attributes: checkAttributesVideo2 }) |
366 | } | 366 | } |
367 | }) | 367 | }) |
368 | }) | 368 | }) |
@@ -408,10 +408,8 @@ describe('Test multiple servers', function () { | |||
408 | toRemove.push(data[3]) | 408 | toRemove.push(data[3]) |
409 | 409 | ||
410 | const videoDetails = await servers[2].videos.get({ id: video.id }) | 410 | const videoDetails = await servers[2].videos.get({ id: video.id }) |
411 | const torrent = await webtorrentAdd(videoDetails.files[0].magnetUri, true) | 411 | |
412 | expect(torrent.files).to.be.an('array') | 412 | await checkWebTorrentWorks(videoDetails.files[0].magnetUri) |
413 | expect(torrent.files.length).to.equal(1) | ||
414 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
415 | }) | 413 | }) |
416 | 414 | ||
417 | it('Should add the file 2 by asking server 1', async function () { | 415 | it('Should add the file 2 by asking server 1', async function () { |
@@ -422,10 +420,7 @@ describe('Test multiple servers', function () { | |||
422 | const video = data[1] | 420 | const video = data[1] |
423 | const videoDetails = await servers[0].videos.get({ id: video.id }) | 421 | const videoDetails = await servers[0].videos.get({ id: video.id }) |
424 | 422 | ||
425 | const torrent = await webtorrentAdd(videoDetails.files[0].magnetUri, true) | 423 | await checkWebTorrentWorks(videoDetails.files[0].magnetUri) |
426 | expect(torrent.files).to.be.an('array') | ||
427 | expect(torrent.files.length).to.equal(1) | ||
428 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
429 | }) | 424 | }) |
430 | 425 | ||
431 | it('Should add the file 3 by asking server 2', async function () { | 426 | it('Should add the file 3 by asking server 2', async function () { |
@@ -436,10 +431,7 @@ describe('Test multiple servers', function () { | |||
436 | const video = data[2] | 431 | const video = data[2] |
437 | const videoDetails = await servers[1].videos.get({ id: video.id }) | 432 | const videoDetails = await servers[1].videos.get({ id: video.id }) |
438 | 433 | ||
439 | const torrent = await webtorrentAdd(videoDetails.files[0].magnetUri, true) | 434 | await checkWebTorrentWorks(videoDetails.files[0].magnetUri) |
440 | expect(torrent.files).to.be.an('array') | ||
441 | expect(torrent.files.length).to.equal(1) | ||
442 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
443 | }) | 435 | }) |
444 | 436 | ||
445 | it('Should add the file 3-2 by asking server 1', async function () { | 437 | it('Should add the file 3-2 by asking server 1', async function () { |
@@ -450,10 +442,7 @@ describe('Test multiple servers', function () { | |||
450 | const video = data[3] | 442 | const video = data[3] |
451 | const videoDetails = await servers[0].videos.get({ id: video.id }) | 443 | const videoDetails = await servers[0].videos.get({ id: video.id }) |
452 | 444 | ||
453 | const torrent = await webtorrentAdd(videoDetails.files[0].magnetUri) | 445 | await checkWebTorrentWorks(videoDetails.files[0].magnetUri) |
454 | expect(torrent.files).to.be.an('array') | ||
455 | expect(torrent.files.length).to.equal(1) | ||
456 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
457 | }) | 446 | }) |
458 | 447 | ||
459 | it('Should add the file 2 in 360p by asking server 1', async function () { | 448 | it('Should add the file 2 in 360p by asking server 1', async function () { |
@@ -467,10 +456,7 @@ describe('Test multiple servers', function () { | |||
467 | const file = videoDetails.files.find(f => f.resolution.id === 360) | 456 | const file = videoDetails.files.find(f => f.resolution.id === 360) |
468 | expect(file).not.to.be.undefined | 457 | expect(file).not.to.be.undefined |
469 | 458 | ||
470 | const torrent = await webtorrentAdd(file.magnetUri) | 459 | await checkWebTorrentWorks(file.magnetUri) |
471 | expect(torrent.files).to.be.an('array') | ||
472 | expect(torrent.files.length).to.equal(1) | ||
473 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
474 | }) | 460 | }) |
475 | }) | 461 | }) |
476 | 462 | ||
@@ -685,7 +671,7 @@ describe('Test multiple servers', function () { | |||
685 | thumbnailfile: 'thumbnail', | 671 | thumbnailfile: 'thumbnail', |
686 | previewfile: 'preview' | 672 | previewfile: 'preview' |
687 | } | 673 | } |
688 | await completeVideoCheck(server, videoUpdated, checkAttributes) | 674 | await completeVideoCheck({ server, originServer: servers[2], videoUUID: videoUpdated.uuid, attributes: checkAttributes }) |
689 | } | 675 | } |
690 | }) | 676 | }) |
691 | 677 | ||
@@ -1087,7 +1073,7 @@ describe('Test multiple servers', function () { | |||
1087 | } | 1073 | } |
1088 | ] | 1074 | ] |
1089 | } | 1075 | } |
1090 | await completeVideoCheck(server, video, checkAttributes) | 1076 | await completeVideoCheck({ server, originServer: servers[1], videoUUID: video.uuid, attributes: checkAttributes }) |
1091 | } | 1077 | } |
1092 | }) | 1078 | }) |
1093 | }) | 1079 | }) |
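checkWebTorrentWorks() folds the previous webtorrentAdd() boilerplate (add the magnet URI, assert exactly one file with a non-empty path) into a shared helper, with an optional regex when the extension matters. Both call shapes used in this changeset:

import { checkWebTorrentWorks } from '@server/tests/shared'

// Only verify the magnet URI resolves to a single downloadable file
await checkWebTorrentWorks(videoDetails.files[0].magnetUri)

// Additionally verify the served file name, e.g. after transcoding to mp4
await checkWebTorrentWorks(videoDetails.files[0].magnetUri, /\.mp4$/)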
diff --git a/server/tests/api/videos/resumable-upload.ts b/server/tests/api/videos/resumable-upload.ts index 0cf1e6675..a70a7258b 100644 --- a/server/tests/api/videos/resumable-upload.ts +++ b/server/tests/api/videos/resumable-upload.ts | |||
@@ -261,7 +261,7 @@ describe('Test resumable upload', function () { | |||
261 | pathUploadId: uploadId, | 261 | pathUploadId: uploadId, |
262 | token: server.accessToken, | 262 | token: server.accessToken, |
263 | digestBuilder: () => 'sha=' + 'a'.repeat(40), | 263 | digestBuilder: () => 'sha=' + 'a'.repeat(40), |
264 | expectedStatus: 460 | 264 | expectedStatus: 460 as any |
265 | }) | 265 | }) |
266 | }) | 266 | }) |
267 | 267 | ||
diff --git a/server/tests/api/videos/single-server.ts b/server/tests/api/videos/single-server.ts index e8e981e55..72f833ec2 100644 --- a/server/tests/api/videos/single-server.ts +++ b/server/tests/api/videos/single-server.ts | |||
@@ -164,14 +164,14 @@ describe('Test a single server', function () { | |||
164 | expect(data.length).to.equal(1) | 164 | expect(data.length).to.equal(1) |
165 | 165 | ||
166 | const video = data[0] | 166 | const video = data[0] |
167 | await completeVideoCheck(server, video, getCheckAttributes()) | 167 | await completeVideoCheck({ server, originServer: server, videoUUID: video.uuid, attributes: getCheckAttributes() }) |
168 | }) | 168 | }) |
169 | 169 | ||
170 | it('Should get the video by UUID', async function () { | 170 | it('Should get the video by UUID', async function () { |
171 | this.timeout(5000) | 171 | this.timeout(5000) |
172 | 172 | ||
173 | const video = await server.videos.get({ id: videoUUID }) | 173 | const video = await server.videos.get({ id: videoUUID }) |
174 | await completeVideoCheck(server, video, getCheckAttributes()) | 174 | await completeVideoCheck({ server, originServer: server, videoUUID: video.uuid, attributes: getCheckAttributes() }) |
175 | }) | 175 | }) |
176 | 176 | ||
177 | it('Should have the views updated', async function () { | 177 | it('Should have the views updated', async function () { |
@@ -360,7 +360,7 @@ describe('Test a single server', function () { | |||
360 | 360 | ||
361 | const video = await server.videos.get({ id: videoId }) | 361 | const video = await server.videos.get({ id: videoId }) |
362 | 362 | ||
363 | await completeVideoCheck(server, video, updateCheckAttributes()) | 363 | await completeVideoCheck({ server, originServer: server, videoUUID: video.uuid, attributes: updateCheckAttributes() }) |
364 | }) | 364 | }) |
365 | 365 | ||
366 | it('Should update only the tags of a video', async function () { | 366 | it('Should update only the tags of a video', async function () { |
@@ -371,7 +371,12 @@ describe('Test a single server', function () { | |||
371 | 371 | ||
372 | const video = await server.videos.get({ id: videoId }) | 372 | const video = await server.videos.get({ id: videoId }) |
373 | 373 | ||
374 | await completeVideoCheck(server, video, Object.assign(updateCheckAttributes(), attributes)) | 374 | await completeVideoCheck({ |
375 | server, | ||
376 | originServer: server, | ||
377 | videoUUID: video.uuid, | ||
378 | attributes: Object.assign(updateCheckAttributes(), attributes) | ||
379 | }) | ||
375 | }) | 380 | }) |
376 | 381 | ||
377 | it('Should update only the description of a video', async function () { | 382 | it('Should update only the description of a video', async function () { |
@@ -382,8 +387,12 @@ describe('Test a single server', function () { | |||
382 | 387 | ||
383 | const video = await server.videos.get({ id: videoId }) | 388 | const video = await server.videos.get({ id: videoId }) |
384 | 389 | ||
385 | const expectedAttributes = Object.assign(updateCheckAttributes(), { tags: [ 'supertag', 'tag1', 'tag2' ] }, attributes) | 390 | await completeVideoCheck({ |
386 | await completeVideoCheck(server, video, expectedAttributes) | 391 | server, |
392 | originServer: server, | ||
393 | videoUUID: video.uuid, | ||
394 | attributes: Object.assign(updateCheckAttributes(), { tags: [ 'supertag', 'tag1', 'tag2' ] }, attributes) | ||
395 | }) | ||
387 | }) | 396 | }) |
388 | 397 | ||
389 | it('Should like a video', async function () { | 398 | it('Should like a video', async function () { |
diff --git a/server/tests/api/videos/video-channel-syncs.ts b/server/tests/api/videos/video-channel-syncs.ts index dd483f95e..a31e48d1d 100644 --- a/server/tests/api/videos/video-channel-syncs.ts +++ b/server/tests/api/videos/video-channel-syncs.ts | |||
@@ -1,7 +1,7 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { FIXTURE_URLS } from '@server/tests/shared' | 4 | import { FIXTURE_URLS, SQLCommand } from '@server/tests/shared' |
5 | import { areHttpImportTestsDisabled } from '@shared/core-utils' | 5 | import { areHttpImportTestsDisabled } from '@shared/core-utils' |
6 | import { VideoChannelSyncState, VideoInclude, VideoPrivacy } from '@shared/models' | 6 | import { VideoChannelSyncState, VideoInclude, VideoPrivacy } from '@shared/models' |
7 | import { | 7 | import { |
@@ -23,6 +23,7 @@ describe('Test channel synchronizations', function () { | |||
23 | 23 | ||
24 | describe('Sync using ' + mode, function () { | 24 | describe('Sync using ' + mode, function () { |
25 | let servers: PeerTubeServer[] | 25 | let servers: PeerTubeServer[] |
26 | let sqlCommands: SQLCommand[] | ||
26 | 27 | ||
27 | let startTestDate: Date | 28 | let startTestDate: Date |
28 | 29 | ||
@@ -36,7 +37,7 @@ describe('Test channel synchronizations', function () { | |||
36 | } | 37 | } |
37 | 38 | ||
38 | async function changeDateForSync (channelSyncId: number, newDate: string) { | 39 | async function changeDateForSync (channelSyncId: number, newDate: string) { |
39 | await servers[0].sql.updateQuery( | 40 | await sqlCommands[0].updateQuery( |
40 | `UPDATE "videoChannelSync" ` + | 41 | `UPDATE "videoChannelSync" ` + |
41 | `SET "createdAt"='${newDate}', "lastSyncAt"='${newDate}' ` + | 42 | `SET "createdAt"='${newDate}', "lastSyncAt"='${newDate}' ` + |
42 | `WHERE id=${channelSyncId}` | 43 | `WHERE id=${channelSyncId}` |
@@ -82,6 +83,8 @@ describe('Test channel synchronizations', function () { | |||
82 | const { videoChannels } = await servers[0].users.getMyInfo({ token: userInfo.accessToken }) | 83 | const { videoChannels } = await servers[0].users.getMyInfo({ token: userInfo.accessToken }) |
83 | userInfo.channelId = videoChannels[0].id | 84 | userInfo.channelId = videoChannels[0].id |
84 | } | 85 | } |
86 | |||
87 | sqlCommands = servers.map(s => new SQLCommand(s)) | ||
85 | }) | 88 | }) |
86 | 89 | ||
87 | it('Should fetch the latest channel videos of a remote channel', async function () { | 90 | it('Should fetch the latest channel videos of a remote channel', async function () { |
@@ -302,6 +305,10 @@ describe('Test channel synchronizations', function () { | |||
302 | }) | 305 | }) |
303 | 306 | ||
304 | after(async function () { | 307 | after(async function () { |
308 | for (const sqlCommand of sqlCommands) { | ||
309 | await sqlCommand.cleanup() | ||
310 | } | ||
311 | |||
305 | await killallServers(servers) | 312 | await killallServers(servers) |
306 | }) | 313 | }) |
307 | }) | 314 | }) |
diff --git a/server/tests/api/videos/video-channels.ts b/server/tests/api/videos/video-channels.ts index 64bd4d9ae..c82ad6f16 100644 --- a/server/tests/api/videos/video-channels.ts +++ b/server/tests/api/videos/video-channels.ts | |||
@@ -3,7 +3,7 @@ | |||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { basename } from 'path' | 4 | import { basename } from 'path' |
5 | import { ACTOR_IMAGES_SIZE } from '@server/initializers/constants' | 5 | import { ACTOR_IMAGES_SIZE } from '@server/initializers/constants' |
6 | import { testFileExistsOrNot, testImage } from '@server/tests/shared' | 6 | import { SQLCommand, testFileExistsOrNot, testImage } from '@server/tests/shared' |
7 | import { wait } from '@shared/core-utils' | 7 | import { wait } from '@shared/core-utils' |
8 | import { ActorImageType, User, VideoChannel } from '@shared/models' | 8 | import { ActorImageType, User, VideoChannel } from '@shared/models' |
9 | import { | 9 | import { |
@@ -25,6 +25,8 @@ async function findChannel (server: PeerTubeServer, channelId: number) { | |||
25 | 25 | ||
26 | describe('Test video channels', function () { | 26 | describe('Test video channels', function () { |
27 | let servers: PeerTubeServer[] | 27 | let servers: PeerTubeServer[] |
28 | let sqlCommands: SQLCommand[] | ||
29 | |||
28 | let userInfo: User | 30 | let userInfo: User |
29 | let secondVideoChannelId: number | 31 | let secondVideoChannelId: number |
30 | let totoChannel: number | 32 | let totoChannel: number |
@@ -45,6 +47,8 @@ describe('Test video channels', function () { | |||
45 | await setDefaultAccountAvatar(servers) | 47 | await setDefaultAccountAvatar(servers) |
46 | 48 | ||
47 | await doubleFollow(servers[0], servers[1]) | 49 | await doubleFollow(servers[0], servers[1]) |
50 | |||
51 | sqlCommands = servers.map(s => new SQLCommand(s)) | ||
48 | }) | 52 | }) |
49 | 53 | ||
50 | it('Should have one video channel (created with root)', async () => { | 54 | it('Should have one video channel (created with root)', async () => { |
@@ -278,7 +282,9 @@ describe('Test video channels', function () { | |||
278 | 282 | ||
279 | await waitJobs(servers) | 283 | await waitJobs(servers) |
280 | 284 | ||
281 | for (const server of servers) { | 285 | for (let i = 0; i < servers.length; i++) { |
286 | const server = servers[i] | ||
287 | |||
282 | const videoChannel = await findChannel(server, secondVideoChannelId) | 288 | const videoChannel = await findChannel(server, secondVideoChannelId) |
283 | const expectedSizes = ACTOR_IMAGES_SIZE[ActorImageType.AVATAR] | 289 | const expectedSizes = ACTOR_IMAGES_SIZE[ActorImageType.AVATAR] |
284 | 290 | ||
@@ -289,7 +295,7 @@ describe('Test video channels', function () { | |||
289 | await testImage(server.url, `avatar-resized-${avatar.width}x${avatar.width}`, avatarPaths[server.port], '.png') | 295 | await testImage(server.url, `avatar-resized-${avatar.width}x${avatar.width}`, avatarPaths[server.port], '.png') |
290 | await testFileExistsOrNot(server, 'avatars', basename(avatarPaths[server.port]), true) | 296 | await testFileExistsOrNot(server, 'avatars', basename(avatarPaths[server.port]), true) |
291 | 297 | ||
292 | const row = await server.sql.getActorImage(basename(avatarPaths[server.port])) | 298 | const row = await sqlCommands[i].getActorImage(basename(avatarPaths[server.port])) |
293 | 299 | ||
294 | expect(expectedSizes.some(({ height, width }) => row.height === height && row.width === width)).to.equal(true) | 300 | expect(expectedSizes.some(({ height, width }) => row.height === height && row.width === width)).to.equal(true) |
295 | } | 301 | } |
@@ -309,14 +315,16 @@ describe('Test video channels', function () { | |||
309 | 315 | ||
310 | await waitJobs(servers) | 316 | await waitJobs(servers) |
311 | 317 | ||
312 | for (const server of servers) { | 318 | for (let i = 0; i < servers.length; i++) { |
319 | const server = servers[i] | ||
320 | |||
313 | const videoChannel = await server.channels.get({ channelName: 'second_video_channel@' + servers[0].host }) | 321 | const videoChannel = await server.channels.get({ channelName: 'second_video_channel@' + servers[0].host }) |
314 | 322 | ||
315 | bannerPaths[server.port] = videoChannel.banners[0].path | 323 | bannerPaths[server.port] = videoChannel.banners[0].path |
316 | await testImage(server.url, 'banner-resized', bannerPaths[server.port]) | 324 | await testImage(server.url, 'banner-resized', bannerPaths[server.port]) |
317 | await testFileExistsOrNot(server, 'avatars', basename(bannerPaths[server.port]), true) | 325 | await testFileExistsOrNot(server, 'avatars', basename(bannerPaths[server.port]), true) |
318 | 326 | ||
319 | const row = await server.sql.getActorImage(basename(bannerPaths[server.port])) | 327 | const row = await sqlCommands[i].getActorImage(basename(bannerPaths[server.port])) |
320 | expect(row.height).to.equal(ACTOR_IMAGES_SIZE[ActorImageType.BANNER][0].height) | 328 | expect(row.height).to.equal(ACTOR_IMAGES_SIZE[ActorImageType.BANNER][0].height) |
321 | expect(row.width).to.equal(ACTOR_IMAGES_SIZE[ActorImageType.BANNER][0].width) | 329 | expect(row.width).to.equal(ACTOR_IMAGES_SIZE[ActorImageType.BANNER][0].width) |
322 | } | 330 | } |
@@ -546,6 +554,10 @@ describe('Test video channels', function () { | |||
546 | }) | 554 | }) |
547 | 555 | ||
548 | after(async function () { | 556 | after(async function () { |
557 | for (const sqlCommand of sqlCommands) { | ||
558 | await sqlCommand.cleanup() | ||
559 | } | ||
560 | |||
549 | await cleanupTests(servers) | 561 | await cleanupTests(servers) |
550 | }) | 562 | }) |
551 | }) | 563 | }) |
diff --git a/server/tests/api/videos/video-static-file-privacy.ts b/server/tests/api/videos/video-static-file-privacy.ts index 2dcfbbc57..542848533 100644 --- a/server/tests/api/videos/video-static-file-privacy.ts +++ b/server/tests/api/videos/video-static-file-privacy.ts | |||
@@ -2,7 +2,7 @@ | |||
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { decode } from 'magnet-uri' | 4 | import { decode } from 'magnet-uri' |
5 | import { checkVideoFileTokenReinjection, expectStartWith } from '@server/tests/shared' | 5 | import { checkVideoFileTokenReinjection, expectStartWith, parseTorrentVideo } from '@server/tests/shared' |
6 | import { getAllFiles, wait } from '@shared/core-utils' | 6 | import { getAllFiles, wait } from '@shared/core-utils' |
7 | import { HttpStatusCode, LiveVideo, VideoDetails, VideoPrivacy } from '@shared/models' | 7 | import { HttpStatusCode, LiveVideo, VideoDetails, VideoPrivacy } from '@shared/models' |
8 | import { | 8 | import { |
@@ -10,7 +10,6 @@ import { | |||
10 | createSingleServer, | 10 | createSingleServer, |
11 | findExternalSavedVideo, | 11 | findExternalSavedVideo, |
12 | makeRawRequest, | 12 | makeRawRequest, |
13 | parseTorrentVideo, | ||
14 | PeerTubeServer, | 13 | PeerTubeServer, |
15 | sendRTMPStream, | 14 | sendRTMPStream, |
16 | setAccessTokensToServers, | 15 | setAccessTokensToServers, |
diff --git a/server/tests/api/views/videos-views-cleaner.ts b/server/tests/api/views/videos-views-cleaner.ts index 7c543a74a..fce2d538c 100644 --- a/server/tests/api/views/videos-views-cleaner.ts +++ b/server/tests/api/views/videos-views-cleaner.ts | |||
@@ -1,6 +1,7 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { SQLCommand } from '@server/tests/shared' | ||
4 | import { wait } from '@shared/core-utils' | 5 | import { wait } from '@shared/core-utils' |
5 | import { | 6 | import { |
6 | cleanupTests, | 7 | cleanupTests, |
@@ -14,6 +15,7 @@ import { | |||
14 | 15 | ||
15 | describe('Test video views cleaner', function () { | 16 | describe('Test video views cleaner', function () { |
16 | let servers: PeerTubeServer[] | 17 | let servers: PeerTubeServer[] |
18 | let sqlCommands: SQLCommand[] | ||
17 | 19 | ||
18 | let videoIdServer1: string | 20 | let videoIdServer1: string |
19 | let videoIdServer2: string | 21 | let videoIdServer2: string |
@@ -37,6 +39,8 @@ describe('Test video views cleaner', function () { | |||
37 | await servers[1].views.simulateView({ id: videoIdServer2 }) | 39 | await servers[1].views.simulateView({ id: videoIdServer2 }) |
38 | 40 | ||
39 | await waitJobs(servers) | 41 | await waitJobs(servers) |
42 | |||
43 | sqlCommands = servers.map(s => new SQLCommand(s)) | ||
40 | }) | 44 | }) |
41 | 45 | ||
42 | it('Should not clean old video views', async function () { | 46 | it('Should not clean old video views', async function () { |
@@ -50,18 +54,14 @@ describe('Test video views cleaner', function () { | |||
50 | 54 | ||
51 | // Should still have views | 55 | // Should still have views |
52 | 56 | ||
53 | { | 57 | for (let i = 0; i < servers.length; i++) { |
54 | for (const server of servers) { | 58 | const total = await sqlCommands[i].countVideoViewsOf(videoIdServer1) |
55 | const total = await server.sql.countVideoViewsOf(videoIdServer1) | 59 | expect(total).to.equal(2, 'Server ' + servers[i].serverNumber + ' does not have the correct amount of views') |
56 | expect(total).to.equal(2, 'Server ' + server.serverNumber + ' does not have the correct amount of views') | ||
57 | } | ||
58 | } | 60 | } |
59 | 61 | ||
60 | { | 62 | for (let i = 0; i < servers.length; i++) { |
61 | for (const server of servers) { | 63 | const total = await sqlCommands[i].countVideoViewsOf(videoIdServer2) |
62 | const total = await server.sql.countVideoViewsOf(videoIdServer2) | 64 | expect(total).to.equal(2, 'Server ' + servers[i].serverNumber + ' does not have the correct amount of views') |
63 | expect(total).to.equal(2, 'Server ' + server.serverNumber + ' does not have the correct amount of views') | ||
64 | } | ||
65 | } | 65 | } |
66 | }) | 66 | }) |
67 | 67 | ||
@@ -76,23 +76,23 @@ describe('Test video views cleaner', function () { | |||
76 | 76 | ||
77 | // Should still have views | 77 | // Should still have views |
78 | 78 | ||
79 | { | 79 | for (let i = 0; i < servers.length; i++) { |
80 | for (const server of servers) { | 80 | const total = await sqlCommands[i].countVideoViewsOf(videoIdServer1) |
81 | const total = await server.sql.countVideoViewsOf(videoIdServer1) | 81 | expect(total).to.equal(2) |
82 | expect(total).to.equal(2) | ||
83 | } | ||
84 | } | 82 | } |
85 | 83 | ||
86 | { | 84 | const totalServer1 = await sqlCommands[0].countVideoViewsOf(videoIdServer2) |
87 | const totalServer1 = await servers[0].sql.countVideoViewsOf(videoIdServer2) | 85 | expect(totalServer1).to.equal(0) |
88 | expect(totalServer1).to.equal(0) | ||
89 | 86 | ||
90 | const totalServer2 = await servers[1].sql.countVideoViewsOf(videoIdServer2) | 87 | const totalServer2 = await sqlCommands[1].countVideoViewsOf(videoIdServer2) |
91 | expect(totalServer2).to.equal(2) | 88 | expect(totalServer2).to.equal(2) |
92 | } | ||
93 | }) | 89 | }) |
94 | 90 | ||
95 | after(async function () { | 91 | after(async function () { |
92 | for (const sqlCommand of sqlCommands) { | ||
93 | await sqlCommand.cleanup() | ||
94 | } | ||
95 | |||
96 | await cleanupTests(servers) | 96 | await cleanupTests(servers) |
97 | }) | 97 | }) |
98 | }) | 98 | }) |
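Since the SQL helpers now live in a sqlCommands array built from servers, the loops above switch from for...of over servers to an index so each server stays paired with its own command. A small sketch of that pairing:

// sqlCommands[i] was constructed from servers[i], so an index keeps them in sync
for (let i = 0; i < servers.length; i++) {
  const total = await sqlCommands[i].countVideoViewsOf(videoIdServer1)

  expect(total).to.equal(2, 'Server ' + servers[i].serverNumber + ' does not have the correct amount of views')
}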
diff --git a/server/tests/cli/create-transcoding-job.ts b/server/tests/cli/create-transcoding-job.ts deleted file mode 100644 index 38b737829..000000000 --- a/server/tests/cli/create-transcoding-job.ts +++ /dev/null | |||
@@ -1,262 +0,0 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import { expect } from 'chai' | ||
4 | import { areMockObjectStorageTestsDisabled } from '@shared/core-utils' | ||
5 | import { HttpStatusCode, VideoFile } from '@shared/models' | ||
6 | import { | ||
7 | cleanupTests, | ||
8 | createMultipleServers, | ||
9 | doubleFollow, | ||
10 | makeRawRequest, | ||
11 | ObjectStorageCommand, | ||
12 | PeerTubeServer, | ||
13 | setAccessTokensToServers, | ||
14 | waitJobs | ||
15 | } from '@shared/server-commands' | ||
16 | import { checkResolutionsInMasterPlaylist, expectStartWith } from '../shared' | ||
17 | |||
18 | async function checkFilesInObjectStorage (files: VideoFile[], type: 'webtorrent' | 'playlist') { | ||
19 | for (const file of files) { | ||
20 | const shouldStartWith = type === 'webtorrent' | ||
21 | ? ObjectStorageCommand.getMockWebTorrentBaseUrl() | ||
22 | : ObjectStorageCommand.getMockPlaylistBaseUrl() | ||
23 | |||
24 | expectStartWith(file.fileUrl, shouldStartWith) | ||
25 | |||
26 | await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 }) | ||
27 | } | ||
28 | } | ||
29 | |||
30 | function runTests (objectStorage: boolean) { | ||
31 | let servers: PeerTubeServer[] = [] | ||
32 | const videosUUID: string[] = [] | ||
33 | const publishedAt: string[] = [] | ||
34 | |||
35 | before(async function () { | ||
36 | this.timeout(120000) | ||
37 | |||
38 | const config = objectStorage | ||
39 | ? ObjectStorageCommand.getDefaultMockConfig() | ||
40 | : {} | ||
41 | |||
42 | // Run server 2 to have transcoding enabled | ||
43 | servers = await createMultipleServers(2, config) | ||
44 | await setAccessTokensToServers(servers) | ||
45 | |||
46 | await servers[0].config.disableTranscoding() | ||
47 | |||
48 | await doubleFollow(servers[0], servers[1]) | ||
49 | |||
50 | if (objectStorage) await ObjectStorageCommand.prepareDefaultMockBuckets() | ||
51 | |||
52 | for (let i = 1; i <= 5; i++) { | ||
53 | const { uuid, shortUUID } = await servers[0].videos.upload({ attributes: { name: 'video' + i } }) | ||
54 | |||
55 | await waitJobs(servers) | ||
56 | |||
57 | const video = await servers[0].videos.get({ id: uuid }) | ||
58 | publishedAt.push(video.publishedAt as string) | ||
59 | |||
60 | if (i > 2) { | ||
61 | videosUUID.push(uuid) | ||
62 | } else { | ||
63 | videosUUID.push(shortUUID) | ||
64 | } | ||
65 | } | ||
66 | |||
67 | await waitJobs(servers) | ||
68 | }) | ||
69 | |||
70 | it('Should have two video files on each server', async function () { | ||
71 | this.timeout(30000) | ||
72 | |||
73 | for (const server of servers) { | ||
74 | const { data } = await server.videos.list() | ||
75 | expect(data).to.have.lengthOf(videosUUID.length) | ||
76 | |||
77 | for (const video of data) { | ||
78 | const videoDetail = await server.videos.get({ id: video.uuid }) | ||
79 | expect(videoDetail.files).to.have.lengthOf(1) | ||
80 | expect(videoDetail.streamingPlaylists).to.have.lengthOf(0) | ||
81 | } | ||
82 | } | ||
83 | }) | ||
84 | |||
85 | it('Should run a transcoding job on video 2', async function () { | ||
86 | this.timeout(60000) | ||
87 | |||
88 | await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[1]}`) | ||
89 | await waitJobs(servers) | ||
90 | |||
91 | for (const server of servers) { | ||
92 | const { data } = await server.videos.list() | ||
93 | |||
94 | let infoHashes: { [id: number]: string } | ||
95 | |||
96 | for (const video of data) { | ||
97 | const videoDetails = await server.videos.get({ id: video.uuid }) | ||
98 | |||
99 | if (video.shortUUID === videosUUID[1] || video.uuid === videosUUID[1]) { | ||
100 | expect(videoDetails.files).to.have.lengthOf(4) | ||
101 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) | ||
102 | |||
103 | if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') | ||
104 | |||
105 | if (!infoHashes) { | ||
106 | infoHashes = {} | ||
107 | |||
108 | for (const file of videoDetails.files) { | ||
109 | infoHashes[file.resolution.id.toString()] = file.magnetUri | ||
110 | } | ||
111 | } else { | ||
112 | for (const resolution of Object.keys(infoHashes)) { | ||
113 | const file = videoDetails.files.find(f => f.resolution.id.toString() === resolution) | ||
114 | expect(file.magnetUri).to.equal(infoHashes[resolution]) | ||
115 | } | ||
116 | } | ||
117 | } else { | ||
118 | expect(videoDetails.files).to.have.lengthOf(1) | ||
119 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) | ||
120 | } | ||
121 | } | ||
122 | } | ||
123 | }) | ||
124 | |||
125 | it('Should run a transcoding job on video 1 with resolution', async function () { | ||
126 | this.timeout(60000) | ||
127 | |||
128 | await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[0]} -r 480`) | ||
129 | |||
130 | await waitJobs(servers) | ||
131 | |||
132 | for (const server of servers) { | ||
133 | const { data } = await server.videos.list() | ||
134 | expect(data).to.have.lengthOf(videosUUID.length) | ||
135 | |||
136 | const videoDetails = await server.videos.get({ id: videosUUID[0] }) | ||
137 | |||
138 | expect(videoDetails.files).to.have.lengthOf(2) | ||
139 | expect(videoDetails.files[0].resolution.id).to.equal(720) | ||
140 | expect(videoDetails.files[1].resolution.id).to.equal(480) | ||
141 | |||
142 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(0) | ||
143 | |||
144 | if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') | ||
145 | } | ||
146 | }) | ||
147 | |||
148 | it('Should generate an HLS resolution', async function () { | ||
149 | this.timeout(120000) | ||
150 | |||
151 | await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[2]} --generate-hls -r 480`) | ||
152 | |||
153 | await waitJobs(servers) | ||
154 | |||
155 | for (const server of servers) { | ||
156 | const videoDetails = await server.videos.get({ id: videosUUID[2] }) | ||
157 | |||
158 | expect(videoDetails.files).to.have.lengthOf(1) | ||
159 | if (objectStorage) await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') | ||
160 | |||
161 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) | ||
162 | |||
163 | const hlsPlaylist = videoDetails.streamingPlaylists[0] | ||
164 | |||
165 | const files = hlsPlaylist.files | ||
166 | expect(files).to.have.lengthOf(1) | ||
167 | expect(files[0].resolution.id).to.equal(480) | ||
168 | |||
169 | if (objectStorage) { | ||
170 | await checkFilesInObjectStorage(files, 'playlist') | ||
171 | |||
172 | const resolutions = files.map(f => f.resolution.id) | ||
173 | await checkResolutionsInMasterPlaylist({ server, playlistUrl: hlsPlaylist.playlistUrl, resolutions }) | ||
174 | } | ||
175 | } | ||
176 | }) | ||
177 | |||
178 | it('Should not duplicate an HLS resolution', async function () { | ||
179 | this.timeout(120000) | ||
180 | |||
181 | await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[2]} --generate-hls -r 480`) | ||
182 | |||
183 | await waitJobs(servers) | ||
184 | |||
185 | for (const server of servers) { | ||
186 | const videoDetails = await server.videos.get({ id: videosUUID[2] }) | ||
187 | |||
188 | const files = videoDetails.streamingPlaylists[0].files | ||
189 | expect(files).to.have.lengthOf(1) | ||
190 | expect(files[0].resolution.id).to.equal(480) | ||
191 | |||
192 | if (objectStorage) await checkFilesInObjectStorage(files, 'playlist') | ||
193 | } | ||
194 | }) | ||
195 | |||
196 | it('Should generate all HLS resolutions', async function () { | ||
197 | this.timeout(120000) | ||
198 | |||
199 | await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[3]} --generate-hls`) | ||
200 | |||
201 | await waitJobs(servers) | ||
202 | |||
203 | for (const server of servers) { | ||
204 | const videoDetails = await server.videos.get({ id: videosUUID[3] }) | ||
205 | |||
206 | expect(videoDetails.files).to.have.lengthOf(1) | ||
207 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) | ||
208 | |||
209 | const files = videoDetails.streamingPlaylists[0].files | ||
210 | expect(files).to.have.lengthOf(4) | ||
211 | |||
212 | if (objectStorage) await checkFilesInObjectStorage(files, 'playlist') | ||
213 | } | ||
214 | }) | ||
215 | |||
216 | it('Should optimize the video file and generate HLS videos if enabled in config', async function () { | ||
217 | this.timeout(120000) | ||
218 | |||
219 | await servers[0].config.enableTranscoding() | ||
220 | await servers[0].cli.execWithEnv(`npm run create-transcoding-job -- -v ${videosUUID[4]}`) | ||
221 | |||
222 | await waitJobs(servers) | ||
223 | |||
224 | for (const server of servers) { | ||
225 | const videoDetails = await server.videos.get({ id: videosUUID[4] }) | ||
226 | |||
227 | expect(videoDetails.files).to.have.lengthOf(5) | ||
228 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) | ||
229 | expect(videoDetails.streamingPlaylists[0].files).to.have.lengthOf(5) | ||
230 | |||
231 | if (objectStorage) { | ||
232 | await checkFilesInObjectStorage(videoDetails.files, 'webtorrent') | ||
233 | await checkFilesInObjectStorage(videoDetails.streamingPlaylists[0].files, 'playlist') | ||
234 | } | ||
235 | } | ||
236 | }) | ||
237 | |||
238 | it('Should not have updated published at attributes', async function () { | ||
239 | for (const id of videosUUID) { | ||
240 | const video = await servers[0].videos.get({ id }) | ||
241 | |||
242 | expect(publishedAt.some(p => video.publishedAt === p)).to.be.true | ||
243 | } | ||
244 | }) | ||
245 | |||
246 | after(async function () { | ||
247 | await cleanupTests(servers) | ||
248 | }) | ||
249 | } | ||
250 | |||
251 | describe('Test create transcoding jobs', function () { | ||
252 | |||
253 | describe('On filesystem', function () { | ||
254 | runTests(false) | ||
255 | }) | ||
256 | |||
257 | describe('On object storage', function () { | ||
258 | if (areMockObjectStorageTestsDisabled()) return | ||
259 | |||
260 | runTests(true) | ||
261 | }) | ||
262 | }) | ||
diff --git a/server/tests/cli/index.ts b/server/tests/cli/index.ts index 6e0cbe58b..8579be39c 100644 --- a/server/tests/cli/index.ts +++ b/server/tests/cli/index.ts | |||
@@ -1,10 +1,8 @@ | |||
1 | // Order of the tests we want to execute | 1 | // Order of the tests we want to execute |
2 | import './create-import-video-file-job' | 2 | import './create-import-video-file-job' |
3 | import './create-transcoding-job' | ||
4 | import './create-move-video-storage-job' | 3 | import './create-move-video-storage-job' |
5 | import './peertube' | 4 | import './peertube' |
6 | import './plugins' | 5 | import './plugins' |
7 | import './print-transcode-command' | ||
8 | import './prune-storage' | 6 | import './prune-storage' |
9 | import './regenerate-thumbnails' | 7 | import './regenerate-thumbnails' |
10 | import './reset-password' | 8 | import './reset-password' |
diff --git a/server/tests/cli/print-transcode-command.ts b/server/tests/cli/print-transcode-command.ts deleted file mode 100644 index 33b6cd27c..000000000 --- a/server/tests/cli/print-transcode-command.ts +++ /dev/null | |||
@@ -1,31 +0,0 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import { expect } from 'chai' | ||
4 | import { buildAbsoluteFixturePath } from '@shared/core-utils' | ||
5 | import { CLICommand } from '@shared/server-commands' | ||
6 | import { VideoResolution } from '../../../shared/models/videos' | ||
7 | |||
8 | describe('Test print transcode jobs', function () { | ||
9 | |||
10 | it('Should print the correct command for each resolution', async function () { | ||
11 | const fixturePath = buildAbsoluteFixturePath('video_short.webm') | ||
12 | |||
13 | for (const resolution of [ | ||
14 | VideoResolution.H_720P, | ||
15 | VideoResolution.H_1080P | ||
16 | ]) { | ||
17 | const command = await CLICommand.exec(`npm run print-transcode-command -- ${fixturePath} -r ${resolution}`) | ||
18 | |||
19 | expect(command).to.includes(`-vf scale=w=-2:h=${resolution}`) | ||
20 | expect(command).to.includes(`-y -acodec aac -vcodec libx264`) | ||
21 | expect(command).to.includes('-f mp4') | ||
22 | expect(command).to.includes('-movflags faststart') | ||
23 | expect(command).to.includes('-b:a 256k') | ||
24 | expect(command).to.includes('-r 25') | ||
25 | expect(command).to.includes('-level:v 3.1') | ||
26 | expect(command).to.includes('-g:v 50') | ||
27 | expect(command).to.includes(`-maxrate:v `) | ||
28 | expect(command).to.includes(`-bufsize:v `) | ||
29 | } | ||
30 | }) | ||
31 | }) | ||
diff --git a/server/tests/cli/update-host.ts b/server/tests/cli/update-host.ts index 51257d3d3..386c384e6 100644 --- a/server/tests/cli/update-host.ts +++ b/server/tests/cli/update-host.ts | |||
@@ -7,11 +7,11 @@ import { | |||
7 | createSingleServer, | 7 | createSingleServer, |
8 | killallServers, | 8 | killallServers, |
9 | makeActivityPubGetRequest, | 9 | makeActivityPubGetRequest, |
10 | parseTorrentVideo, | ||
11 | PeerTubeServer, | 10 | PeerTubeServer, |
12 | setAccessTokensToServers, | 11 | setAccessTokensToServers, |
13 | waitJobs | 12 | waitJobs |
14 | } from '@shared/server-commands' | 13 | } from '@shared/server-commands' |
14 | import { parseTorrentVideo } from '../shared' | ||
15 | 15 | ||
16 | describe('Test update host scripts', function () { | 16 | describe('Test update host scripts', function () { |
17 | let server: PeerTubeServer | 17 | let server: PeerTubeServer |
diff --git a/server/tests/fixtures/live/0-000067.ts b/server/tests/fixtures/live/0-000067.ts new file mode 100644 index 000000000..a59f41a63 --- /dev/null +++ b/server/tests/fixtures/live/0-000067.ts | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/live/0-000068.ts b/server/tests/fixtures/live/0-000068.ts new file mode 100644 index 000000000..83dcbbb4c --- /dev/null +++ b/server/tests/fixtures/live/0-000068.ts | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/live/0-000069.ts b/server/tests/fixtures/live/0-000069.ts new file mode 100644 index 000000000..cafd4e978 --- /dev/null +++ b/server/tests/fixtures/live/0-000069.ts | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/live/0-000070.ts b/server/tests/fixtures/live/0-000070.ts new file mode 100644 index 000000000..0936199ea --- /dev/null +++ b/server/tests/fixtures/live/0-000070.ts | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/live/0.m3u8 b/server/tests/fixtures/live/0.m3u8 new file mode 100644 index 000000000..c3be19d26 --- /dev/null +++ b/server/tests/fixtures/live/0.m3u8 | |||
@@ -0,0 +1,14 @@ | |||
1 | #EXTM3U | ||
2 | #EXT-X-VERSION:6 | ||
3 | #EXT-X-TARGETDURATION:2 | ||
4 | #EXT-X-MEDIA-SEQUENCE:68 | ||
5 | #EXT-X-INDEPENDENT-SEGMENTS | ||
6 | #EXTINF:2.000000, | ||
7 | #EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:39.019+0200 | ||
8 | 0-000068.ts | ||
9 | #EXTINF:2.000000, | ||
10 | #EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:41.019+0200 | ||
11 | 0-000069.ts | ||
12 | #EXTINF:2.000000, | ||
13 | #EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:43.019+0200 | ||
14 | 0-000070.ts | ||
diff --git a/server/tests/fixtures/live/1-000067.ts b/server/tests/fixtures/live/1-000067.ts new file mode 100644 index 000000000..17db8f81e --- /dev/null +++ b/server/tests/fixtures/live/1-000067.ts | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/live/1-000068.ts b/server/tests/fixtures/live/1-000068.ts new file mode 100644 index 000000000..f7bb97040 --- /dev/null +++ b/server/tests/fixtures/live/1-000068.ts | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/live/1-000069.ts b/server/tests/fixtures/live/1-000069.ts new file mode 100644 index 000000000..64c791337 --- /dev/null +++ b/server/tests/fixtures/live/1-000069.ts | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/live/1-000070.ts b/server/tests/fixtures/live/1-000070.ts new file mode 100644 index 000000000..a5f04f109 --- /dev/null +++ b/server/tests/fixtures/live/1-000070.ts | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/live/1.m3u8 b/server/tests/fixtures/live/1.m3u8 new file mode 100644 index 000000000..26d7fa6b0 --- /dev/null +++ b/server/tests/fixtures/live/1.m3u8 | |||
@@ -0,0 +1,14 @@ | |||
1 | #EXTM3U | ||
2 | #EXT-X-VERSION:6 | ||
3 | #EXT-X-TARGETDURATION:2 | ||
4 | #EXT-X-MEDIA-SEQUENCE:68 | ||
5 | #EXT-X-INDEPENDENT-SEGMENTS | ||
6 | #EXTINF:2.000000, | ||
7 | #EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:39.019+0200 | ||
8 | 1-000068.ts | ||
9 | #EXTINF:2.000000, | ||
10 | #EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:41.019+0200 | ||
11 | 1-000069.ts | ||
12 | #EXTINF:2.000000, | ||
13 | #EXT-X-PROGRAM-DATE-TIME:2023-04-18T13:38:43.019+0200 | ||
14 | 1-000070.ts | ||
diff --git a/server/tests/fixtures/live/master.m3u8 b/server/tests/fixtures/live/master.m3u8 new file mode 100644 index 000000000..7e52f33cf --- /dev/null +++ b/server/tests/fixtures/live/master.m3u8 | |||
@@ -0,0 +1,8 @@ | |||
1 | #EXTM3U | ||
2 | #EXT-X-VERSION:6 | ||
3 | #EXT-X-STREAM-INF:BANDWIDTH=1287342,RESOLUTION=640x360,CODECS="avc1.64001f,mp4a.40.2" | ||
4 | 0.m3u8 | ||
5 | |||
6 | #EXT-X-STREAM-INF:BANDWIDTH=3051742,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2" | ||
7 | 1.m3u8 | ||
8 | |||
diff --git a/server/tests/fixtures/video_short_0p.mp4 b/server/tests/fixtures/video_short_0p.mp4 new file mode 100644 index 000000000..2069a49b8 --- /dev/null +++ b/server/tests/fixtures/video_short_0p.mp4 | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/video_short_144p.m3u8 b/server/tests/fixtures/video_short_144p.m3u8 new file mode 100644 index 000000000..96568625b --- /dev/null +++ b/server/tests/fixtures/video_short_144p.m3u8 | |||
@@ -0,0 +1,13 @@ | |||
1 | #EXTM3U | ||
2 | #EXT-X-VERSION:7 | ||
3 | #EXT-X-TARGETDURATION:4 | ||
4 | #EXT-X-MEDIA-SEQUENCE:0 | ||
5 | #EXT-X-PLAYLIST-TYPE:VOD | ||
6 | #EXT-X-MAP:URI="3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4",BYTERANGE="1375@0" | ||
7 | #EXTINF:4.000000, | ||
8 | #EXT-X-BYTERANGE:10518@1375 | ||
9 | 3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4 | ||
10 | #EXTINF:1.000000, | ||
11 | #EXT-X-BYTERANGE:3741@11893 | ||
12 | 3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4 | ||
13 | #EXT-X-ENDLIST | ||
diff --git a/server/tests/fixtures/video_short_144p.mp4 b/server/tests/fixtures/video_short_144p.mp4 new file mode 100644 index 000000000..047d43c17 --- /dev/null +++ b/server/tests/fixtures/video_short_144p.mp4 | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/video_short_240p.m3u8 b/server/tests/fixtures/video_short_240p.m3u8 new file mode 100644 index 000000000..96568625b --- /dev/null +++ b/server/tests/fixtures/video_short_240p.m3u8 | |||
@@ -0,0 +1,13 @@ | |||
1 | #EXTM3U | ||
2 | #EXT-X-VERSION:7 | ||
3 | #EXT-X-TARGETDURATION:4 | ||
4 | #EXT-X-MEDIA-SEQUENCE:0 | ||
5 | #EXT-X-PLAYLIST-TYPE:VOD | ||
6 | #EXT-X-MAP:URI="3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4",BYTERANGE="1375@0" | ||
7 | #EXTINF:4.000000, | ||
8 | #EXT-X-BYTERANGE:10518@1375 | ||
9 | 3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4 | ||
10 | #EXTINF:1.000000, | ||
11 | #EXT-X-BYTERANGE:3741@11893 | ||
12 | 3dd13e27-1ae1-441c-9b77-48c6b95603be-144-fragmented.mp4 | ||
13 | #EXT-X-ENDLIST | ||
diff --git a/server/tests/fixtures/video_short_240p.mp4 b/server/tests/fixtures/video_short_240p.mp4 index db074940b..46609e81a 100644 --- a/server/tests/fixtures/video_short_240p.mp4 +++ b/server/tests/fixtures/video_short_240p.mp4 | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/video_short_360p.m3u8 b/server/tests/fixtures/video_short_360p.m3u8 new file mode 100644 index 000000000..f7072dc6d --- /dev/null +++ b/server/tests/fixtures/video_short_360p.m3u8 | |||
@@ -0,0 +1,13 @@ | |||
1 | #EXTM3U | ||
2 | #EXT-X-VERSION:7 | ||
3 | #EXT-X-TARGETDURATION:4 | ||
4 | #EXT-X-MEDIA-SEQUENCE:0 | ||
5 | #EXT-X-PLAYLIST-TYPE:VOD | ||
6 | #EXT-X-MAP:URI="05c40acd-3e94-4d25-ade8-97f7ff2cf0ac-360-fragmented.mp4",BYTERANGE="1376@0" | ||
7 | #EXTINF:4.000000, | ||
8 | #EXT-X-BYTERANGE:19987@1376 | ||
9 | 05c40acd-3e94-4d25-ade8-97f7ff2cf0ac-360-fragmented.mp4 | ||
10 | #EXTINF:1.000000, | ||
11 | #EXT-X-BYTERANGE:9147@21363 | ||
12 | 05c40acd-3e94-4d25-ade8-97f7ff2cf0ac-360-fragmented.mp4 | ||
13 | #EXT-X-ENDLIST | ||
diff --git a/server/tests/fixtures/video_short_360p.mp4 b/server/tests/fixtures/video_short_360p.mp4 new file mode 100644 index 000000000..7a8189bbc --- /dev/null +++ b/server/tests/fixtures/video_short_360p.mp4 | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/video_short-480.webm b/server/tests/fixtures/video_short_480.webm index 3145105e1..3145105e1 100644 --- a/server/tests/fixtures/video_short-480.webm +++ b/server/tests/fixtures/video_short_480.webm | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/video_short_480p.m3u8 b/server/tests/fixtures/video_short_480p.m3u8 new file mode 100644 index 000000000..5ff30dfa7 --- /dev/null +++ b/server/tests/fixtures/video_short_480p.m3u8 | |||
@@ -0,0 +1,13 @@ | |||
1 | #EXTM3U | ||
2 | #EXT-X-VERSION:7 | ||
3 | #EXT-X-TARGETDURATION:4 | ||
4 | #EXT-X-MEDIA-SEQUENCE:0 | ||
5 | #EXT-X-PLAYLIST-TYPE:VOD | ||
6 | #EXT-X-MAP:URI="f9377e69-d8f2-4de8-8087-ddbca6629829-480-fragmented.mp4",BYTERANGE="1376@0" | ||
7 | #EXTINF:4.000000, | ||
8 | #EXT-X-BYTERANGE:26042@1376 | ||
9 | f9377e69-d8f2-4de8-8087-ddbca6629829-480-fragmented.mp4 | ||
10 | #EXTINF:1.000000, | ||
11 | #EXT-X-BYTERANGE:12353@27418 | ||
12 | f9377e69-d8f2-4de8-8087-ddbca6629829-480-fragmented.mp4 | ||
13 | #EXT-X-ENDLIST | ||
diff --git a/server/tests/fixtures/video_short_480p.mp4 b/server/tests/fixtures/video_short_480p.mp4 new file mode 100644 index 000000000..e05b58b6b --- /dev/null +++ b/server/tests/fixtures/video_short_480p.mp4 | |||
Binary files differ | |||
diff --git a/server/tests/fixtures/video_short_720p.m3u8 b/server/tests/fixtures/video_short_720p.m3u8 new file mode 100644 index 000000000..7cee94032 --- /dev/null +++ b/server/tests/fixtures/video_short_720p.m3u8 | |||
@@ -0,0 +1,13 @@ | |||
1 | #EXTM3U | ||
2 | #EXT-X-VERSION:7 | ||
3 | #EXT-X-TARGETDURATION:4 | ||
4 | #EXT-X-MEDIA-SEQUENCE:0 | ||
5 | #EXT-X-PLAYLIST-TYPE:VOD | ||
6 | #EXT-X-MAP:URI="c1014aa4-d1f4-4b66-927b-c23d283fcae0-720-fragmented.mp4",BYTERANGE="1356@0" | ||
7 | #EXTINF:4.000000, | ||
8 | #EXT-X-BYTERANGE:39260@1356 | ||
9 | c1014aa4-d1f4-4b66-927b-c23d283fcae0-720-fragmented.mp4 | ||
10 | #EXTINF:1.000000, | ||
11 | #EXT-X-BYTERANGE:18493@40616 | ||
12 | c1014aa4-d1f4-4b66-927b-c23d283fcae0-720-fragmented.mp4 | ||
13 | #EXT-X-ENDLIST | ||
diff --git a/server/tests/fixtures/video_short_720p.mp4 b/server/tests/fixtures/video_short_720p.mp4 new file mode 100644 index 000000000..35e8f69a7 --- /dev/null +++ b/server/tests/fixtures/video_short_720p.mp4 | |||
Binary files differ | |||
diff --git a/server/tests/index.ts b/server/tests/index.ts index 1718ac424..4ec1ebe67 100644 --- a/server/tests/index.ts +++ b/server/tests/index.ts | |||
@@ -4,6 +4,7 @@ import './misc-endpoints' | |||
4 | import './feeds/' | 4 | import './feeds/' |
5 | import './cli/' | 5 | import './cli/' |
6 | import './api/' | 6 | import './api/' |
7 | import './peertube-runner/' | ||
7 | import './plugins/' | 8 | import './plugins/' |
8 | import './helpers/' | 9 | import './helpers/' |
9 | import './lib/' | 10 | import './lib/' |
diff --git a/server/tests/lib/video-constant-registry-factory.ts b/server/tests/lib/video-constant-registry-factory.ts index e399ac5a5..c3480dc12 100644 --- a/server/tests/lib/video-constant-registry-factory.ts +++ b/server/tests/lib/video-constant-registry-factory.ts | |||
@@ -63,7 +63,7 @@ describe('VideoConstantManagerFactory', function () { | |||
63 | it('Should be able to add a video licence constant', () => { | 63 | it('Should be able to add a video licence constant', () => { |
64 | const successfullyAdded = videoLicenceManager.addConstant(42, 'European Union Public Licence') | 64 | const successfullyAdded = videoLicenceManager.addConstant(42, 'European Union Public Licence') |
65 | expect(successfullyAdded).to.be.true | 65 | expect(successfullyAdded).to.be.true |
66 | expect(videoLicenceManager.getConstantValue(42)).to.equal('European Union Public Licence') | 66 | expect(videoLicenceManager.getConstantValue(42 as any)).to.equal('European Union Public Licence') |
67 | }) | 67 | }) |
68 | 68 | ||
69 | it('Should be able to reset video licence constants', () => { | 69 | it('Should be able to reset video licence constants', () => { |
@@ -87,9 +87,9 @@ describe('VideoConstantManagerFactory', function () { | |||
87 | }) | 87 | }) |
88 | 88 | ||
89 | it('Should be able to add a video playlist privacy constant', () => { | 89 | it('Should be able to add a video playlist privacy constant', () => { |
90 | const successfullyAdded = playlistPrivacyManager.addConstant(42, 'Friends only') | 90 | const successfullyAdded = playlistPrivacyManager.addConstant(42 as any, 'Friends only') |
91 | expect(successfullyAdded).to.be.true | 91 | expect(successfullyAdded).to.be.true |
92 | expect(playlistPrivacyManager.getConstantValue(42)).to.equal('Friends only') | 92 | expect(playlistPrivacyManager.getConstantValue(42 as any)).to.equal('Friends only') |
93 | }) | 93 | }) |
94 | 94 | ||
95 | it('Should be able to reset video playlist privacy constants', () => { | 95 | it('Should be able to reset video playlist privacy constants', () => { |
@@ -113,9 +113,9 @@ describe('VideoConstantManagerFactory', function () { | |||
113 | }) | 113 | }) |
114 | 114 | ||
115 | it('Should be able to add a video privacy constant', () => { | 115 | it('Should be able to add a video privacy constant', () => { |
116 | const successfullyAdded = videoPrivacyManager.addConstant(42, 'Friends only') | 116 | const successfullyAdded = videoPrivacyManager.addConstant(42 as any, 'Friends only') |
117 | expect(successfullyAdded).to.be.true | 117 | expect(successfullyAdded).to.be.true |
118 | expect(videoPrivacyManager.getConstantValue(42)).to.equal('Friends only') | 118 | expect(videoPrivacyManager.getConstantValue(42 as any)).to.equal('Friends only') |
119 | }) | 119 | }) |
120 | 120 | ||
121 | it('Should be able to reset video privacy constants', () => { | 121 | it('Should be able to reset video privacy constants', () => { |
diff --git a/server/tests/peertube-runner/client-cli.ts b/server/tests/peertube-runner/client-cli.ts new file mode 100644 index 000000000..90bf73ef7 --- /dev/null +++ b/server/tests/peertube-runner/client-cli.ts | |||
@@ -0,0 +1,71 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | |||
3 | import { expect } from 'chai' | ||
4 | import { PeerTubeRunnerProcess } from '@server/tests/shared' | ||
5 | import { cleanupTests, createSingleServer, PeerTubeServer, setAccessTokensToServers, setDefaultVideoChannel } from '@shared/server-commands' | ||
6 | |||
7 | describe('Test peertube-runner program client CLI', function () { | ||
8 | let server: PeerTubeServer | ||
9 | let peertubeRunner: PeerTubeRunnerProcess | ||
10 | |||
11 | before(async function () { | ||
12 | this.timeout(120_000) | ||
13 | |||
14 | server = await createSingleServer(1) | ||
15 | |||
16 | await setAccessTokensToServers([ server ]) | ||
17 | await setDefaultVideoChannel([ server ]) | ||
18 | |||
19 | await server.config.enableRemoteTranscoding() | ||
20 | |||
21 | peertubeRunner = new PeerTubeRunnerProcess() | ||
22 | await peertubeRunner.runServer() | ||
23 | }) | ||
24 | |||
25 | it('Should not have PeerTube instance listed', async function () { | ||
26 | const data = await peertubeRunner.listRegisteredPeerTubeInstances() | ||
27 | |||
28 | expect(data).to.not.contain(server.url) | ||
29 | }) | ||
30 | |||
31 | it('Should register a new PeerTube instance', async function () { | ||
32 | const registrationToken = await server.runnerRegistrationTokens.getFirstRegistrationToken() | ||
33 | |||
34 | await peertubeRunner.registerPeerTubeInstance({ | ||
35 | server, | ||
36 | registrationToken, | ||
37 | runnerName: 'my super runner', | ||
38 | runnerDescription: 'super description' | ||
39 | }) | ||
40 | }) | ||
41 | |||
42 | it('Should list this new PeerTube instance', async function () { | ||
43 | const data = await peertubeRunner.listRegisteredPeerTubeInstances() | ||
44 | |||
45 | expect(data).to.contain(server.url) | ||
46 | expect(data).to.contain('my super runner') | ||
47 | expect(data).to.contain('super description') | ||
48 | }) | ||
49 | |||
50 | it('Should still have the configuration after a restart', async function () { | ||
51 | peertubeRunner.kill() | ||
52 | |||
53 | await peertubeRunner.runServer() | ||
54 | }) | ||
55 | |||
56 | it('Should unregister the PeerTube instance', async function () { | ||
57 | await peertubeRunner.unregisterPeerTubeInstance({ server }) | ||
58 | }) | ||
59 | |||
60 | it('Should not have PeerTube instance listed', async function () { | ||
61 | const data = await peertubeRunner.listRegisteredPeerTubeInstances() | ||
62 | |||
63 | expect(data).to.not.contain(server.url) | ||
64 | }) | ||
65 | |||
66 | after(async function () { | ||
67 | await cleanupTests([ server ]) | ||
68 | |||
69 | peertubeRunner.kill() | ||
70 | }) | ||
71 | }) | ||
diff --git a/server/tests/peertube-runner/index.ts b/server/tests/peertube-runner/index.ts new file mode 100644 index 000000000..6258d6eb2 --- /dev/null +++ b/server/tests/peertube-runner/index.ts | |||
@@ -0,0 +1,3 @@ | |||
1 | export * from './client-cli' | ||
2 | export * from './live-transcoding' | ||
3 | export * from './vod-transcoding' | ||
diff --git a/server/tests/peertube-runner/live-transcoding.ts b/server/tests/peertube-runner/live-transcoding.ts new file mode 100644 index 000000000..f58e920ba --- /dev/null +++ b/server/tests/peertube-runner/live-transcoding.ts | |||
@@ -0,0 +1,178 @@ | |||
1 | import { expect } from 'chai' | ||
2 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
3 | import { expectStartWith, PeerTubeRunnerProcess, SQLCommand, testLiveVideoResolutions } from '@server/tests/shared' | ||
4 | import { areMockObjectStorageTestsDisabled, wait } from '@shared/core-utils' | ||
5 | import { HttpStatusCode, VideoPrivacy } from '@shared/models' | ||
6 | import { | ||
7 | cleanupTests, | ||
8 | createMultipleServers, | ||
9 | doubleFollow, | ||
10 | findExternalSavedVideo, | ||
11 | makeRawRequest, | ||
12 | ObjectStorageCommand, | ||
13 | PeerTubeServer, | ||
14 | setAccessTokensToServers, | ||
15 | setDefaultVideoChannel, | ||
16 | stopFfmpeg, | ||
17 | waitJobs, | ||
18 | waitUntilLivePublishedOnAllServers, | ||
19 | waitUntilLiveWaitingOnAllServers | ||
20 | } from '@shared/server-commands' | ||
21 | |||
22 | describe('Test Live transcoding in peertube-runner program', function () { | ||
23 | let servers: PeerTubeServer[] = [] | ||
24 | let peertubeRunner: PeerTubeRunnerProcess | ||
25 | let sqlCommandServer1: SQLCommand | ||
26 | |||
27 | function runSuite (options: { | ||
28 | objectStorage: boolean | ||
29 | }) { | ||
30 | const { objectStorage } = options | ||
31 | |||
32 | it('Should enable transcoding without additional resolutions', async function () { | ||
33 | this.timeout(120000) | ||
34 | |||
35 | const { video } = await servers[0].live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.PUBLIC }) | ||
36 | |||
37 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: video.uuid }) | ||
38 | await waitUntilLivePublishedOnAllServers(servers, video.uuid) | ||
39 | await waitJobs(servers) | ||
40 | |||
41 | await testLiveVideoResolutions({ | ||
42 | originServer: servers[0], | ||
43 | sqlCommand: sqlCommandServer1, | ||
44 | servers, | ||
45 | liveVideoId: video.uuid, | ||
46 | resolutions: [ 720, 480, 360, 240, 144 ], | ||
47 | objectStorage, | ||
48 | transcoded: true | ||
49 | }) | ||
50 | |||
51 | await stopFfmpeg(ffmpegCommand) | ||
52 | |||
53 | await waitUntilLiveWaitingOnAllServers(servers, video.uuid) | ||
54 | await servers[0].videos.remove({ id: video.id }) | ||
55 | }) | ||
56 | |||
57 | it('Should transcode audio only RTMP stream', async function () { | ||
58 | this.timeout(120000) | ||
59 | |||
60 | const { video } = await servers[0].live.quickCreate({ permanentLive: true, saveReplay: false, privacy: VideoPrivacy.UNLISTED }) | ||
61 | |||
62 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: video.uuid, fixtureName: 'video_short_no_audio.mp4' }) | ||
63 | await waitUntilLivePublishedOnAllServers(servers, video.uuid) | ||
64 | await waitJobs(servers) | ||
65 | |||
66 | await stopFfmpeg(ffmpegCommand) | ||
67 | |||
68 | await waitUntilLiveWaitingOnAllServers(servers, video.uuid) | ||
69 | await servers[0].videos.remove({ id: video.id }) | ||
70 | }) | ||
71 | |||
72 | it('Should save a replay', async function () { | ||
73 | this.timeout(120000) | ||
74 | |||
75 | const { video } = await servers[0].live.quickCreate({ permanentLive: true, saveReplay: true }) | ||
76 | |||
77 | const ffmpegCommand = await servers[0].live.sendRTMPStreamInVideo({ videoId: video.uuid }) | ||
78 | await waitUntilLivePublishedOnAllServers(servers, video.uuid) | ||
79 | |||
80 | await testLiveVideoResolutions({ | ||
81 | originServer: servers[0], | ||
82 | sqlCommand: sqlCommandServer1, | ||
83 | servers, | ||
84 | liveVideoId: video.uuid, | ||
85 | resolutions: [ 720, 480, 360, 240, 144 ], | ||
86 | objectStorage, | ||
87 | transcoded: true | ||
88 | }) | ||
89 | |||
90 | await stopFfmpeg(ffmpegCommand) | ||
91 | |||
92 | await waitUntilLiveWaitingOnAllServers(servers, video.uuid) | ||
93 | await waitJobs(servers) | ||
94 | |||
95 | const session = await servers[0].live.findLatestSession({ videoId: video.uuid }) | ||
96 | expect(session.endingProcessed).to.be.true | ||
97 | expect(session.endDate).to.exist | ||
98 | expect(session.saveReplay).to.be.true | ||
99 | |||
100 | const videoLiveDetails = await servers[0].videos.get({ id: video.uuid }) | ||
101 | const replay = await findExternalSavedVideo(servers[0], videoLiveDetails) | ||
102 | |||
103 | for (const server of servers) { | ||
104 | const video = await server.videos.get({ id: replay.uuid }) | ||
105 | |||
106 | expect(video.files).to.have.lengthOf(0) | ||
107 | expect(video.streamingPlaylists).to.have.lengthOf(1) | ||
108 | |||
109 | const files = video.streamingPlaylists[0].files | ||
110 | expect(files).to.have.lengthOf(5) | ||
111 | |||
112 | for (const file of files) { | ||
113 | if (objectStorage) { | ||
114 | expectStartWith(file.fileUrl, ObjectStorageCommand.getMockPlaylistBaseUrl()) | ||
115 | } | ||
116 | |||
117 | await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 }) | ||
118 | } | ||
119 | } | ||
120 | }) | ||
121 | } | ||
122 | |||
123 | before(async function () { | ||
124 | this.timeout(120_000) | ||
125 | |||
126 | servers = await createMultipleServers(2) | ||
127 | |||
128 | await setAccessTokensToServers(servers) | ||
129 | await setDefaultVideoChannel(servers) | ||
130 | |||
131 | await doubleFollow(servers[0], servers[1]) | ||
132 | |||
133 | sqlCommandServer1 = new SQLCommand(servers[0]) | ||
134 | |||
135 | await servers[0].config.enableRemoteTranscoding() | ||
136 | await servers[0].config.enableTranscoding(true, true, true) | ||
137 | await servers[0].config.enableLive({ allowReplay: true, resolutions: 'max', transcoding: true }) | ||
138 | |||
139 | const registrationToken = await servers[0].runnerRegistrationTokens.getFirstRegistrationToken() | ||
140 | |||
141 | peertubeRunner = new PeerTubeRunnerProcess() | ||
142 | await peertubeRunner.runServer({ hideLogs: false }) | ||
143 | await peertubeRunner.registerPeerTubeInstance({ server: servers[0], registrationToken, runnerName: 'runner' }) | ||
144 | }) | ||
145 | |||
146 | describe('With lives on local filesystem storage', function () { | ||
147 | |||
148 | before(async function () { | ||
149 | await servers[0].config.enableTranscoding(true, false, true) | ||
150 | }) | ||
151 | |||
152 | runSuite({ objectStorage: false }) | ||
153 | }) | ||
154 | |||
155 | describe('With lives on object storage', function () { | ||
156 | if (areMockObjectStorageTestsDisabled()) return | ||
157 | |||
158 | before(async function () { | ||
159 | await ObjectStorageCommand.prepareDefaultMockBuckets() | ||
160 | |||
161 | await servers[0].kill() | ||
162 | |||
163 | await servers[0].run(ObjectStorageCommand.getDefaultMockConfig()) | ||
164 | |||
165 | // Wait for peertube runner socket reconnection | ||
166 | await wait(1500) | ||
167 | }) | ||
168 | |||
169 | runSuite({ objectStorage: true }) | ||
170 | }) | ||
171 | |||
172 | after(async function () { | ||
173 | await peertubeRunner.unregisterPeerTubeInstance({ server: servers[0] }) | ||
174 | peertubeRunner.kill() | ||
175 | |||
176 | await cleanupTests(servers) | ||
177 | }) | ||
178 | }) | ||
diff --git a/server/tests/peertube-runner/vod-transcoding.ts b/server/tests/peertube-runner/vod-transcoding.ts new file mode 100644 index 000000000..bdf798379 --- /dev/null +++ b/server/tests/peertube-runner/vod-transcoding.ts | |||
@@ -0,0 +1,330 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | ||
2 | import { expect } from 'chai' | ||
3 | import { completeCheckHlsPlaylist, completeWebVideoFilesCheck, PeerTubeRunnerProcess } from '@server/tests/shared' | ||
4 | import { areMockObjectStorageTestsDisabled, getAllFiles, wait } from '@shared/core-utils' | ||
5 | import { VideoPrivacy } from '@shared/models' | ||
6 | import { | ||
7 | cleanupTests, | ||
8 | createMultipleServers, | ||
9 | doubleFollow, | ||
10 | ObjectStorageCommand, | ||
11 | PeerTubeServer, | ||
12 | setAccessTokensToServers, | ||
13 | setDefaultVideoChannel, | ||
14 | waitJobs | ||
15 | } from '@shared/server-commands' | ||
16 | |||
17 | describe('Test VOD transcoding in peertube-runner program', function () { | ||
18 | let servers: PeerTubeServer[] = [] | ||
19 | let peertubeRunner: PeerTubeRunnerProcess | ||
20 | |||
21 | function runSuite (options: { | ||
22 | webtorrentEnabled: boolean | ||
23 | hlsEnabled: boolean | ||
24 | objectStorage: boolean | ||
25 | }) { | ||
26 | const { webtorrentEnabled, hlsEnabled, objectStorage } = options | ||
27 | |||
28 | const objectStorageBaseUrlWebTorrent = objectStorage | ||
29 | ? ObjectStorageCommand.getMockWebTorrentBaseUrl() | ||
30 | : undefined | ||
31 | |||
32 | const objectStorageBaseUrlHLS = objectStorage | ||
33 | ? ObjectStorageCommand.getMockPlaylistBaseUrl() | ||
34 | : undefined | ||
35 | |||
36 | it('Should upload a classic video mp4 and transcode it', async function () { | ||
37 | this.timeout(120000) | ||
38 | |||
39 | const { uuid } = await servers[0].videos.quickUpload({ name: 'mp4', fixture: 'video_short.mp4' }) | ||
40 | |||
41 | await waitJobs(servers, { runnerJobs: true }) | ||
42 | |||
43 | for (const server of servers) { | ||
44 | if (webtorrentEnabled) { | ||
45 | await completeWebVideoFilesCheck({ | ||
46 | server, | ||
47 | originServer: servers[0], | ||
48 | fixture: 'video_short.mp4', | ||
49 | videoUUID: uuid, | ||
50 | objectStorageBaseUrl: objectStorageBaseUrlWebTorrent, | ||
51 | files: [ | ||
52 | { resolution: 0 }, | ||
53 | { resolution: 144 }, | ||
54 | { resolution: 240 }, | ||
55 | { resolution: 360 }, | ||
56 | { resolution: 480 }, | ||
57 | { resolution: 720 } | ||
58 | ] | ||
59 | }) | ||
60 | } | ||
61 | |||
62 | if (hlsEnabled) { | ||
63 | await completeCheckHlsPlaylist({ | ||
64 | hlsOnly: !webtorrentEnabled, | ||
65 | servers, | ||
66 | videoUUID: uuid, | ||
67 | objectStorageBaseUrl: objectStorageBaseUrlHLS, | ||
68 | resolutions: [ 720, 480, 360, 240, 144, 0 ] | ||
69 | }) | ||
70 | } | ||
71 | } | ||
72 | }) | ||
73 | |||
74 | it('Should upload a webm video and transcode it', async function () { | ||
75 | this.timeout(120000) | ||
76 | |||
77 | const { uuid } = await servers[0].videos.quickUpload({ name: 'mp4', fixture: 'video_short.webm' }) | ||
78 | |||
79 | await waitJobs(servers, { runnerJobs: true }) | ||
80 | |||
81 | for (const server of servers) { | ||
82 | if (webtorrentEnabled) { | ||
83 | await completeWebVideoFilesCheck({ | ||
84 | server, | ||
85 | originServer: servers[0], | ||
86 | fixture: 'video_short.webm', | ||
87 | videoUUID: uuid, | ||
88 | objectStorageBaseUrl: objectStorageBaseUrlWebTorrent, | ||
89 | files: [ | ||
90 | { resolution: 0 }, | ||
91 | { resolution: 144 }, | ||
92 | { resolution: 240 }, | ||
93 | { resolution: 360 }, | ||
94 | { resolution: 480 }, | ||
95 | { resolution: 720 } | ||
96 | ] | ||
97 | }) | ||
98 | } | ||
99 | |||
100 | if (hlsEnabled) { | ||
101 | await completeCheckHlsPlaylist({ | ||
102 | hlsOnly: !webtorrentEnabled, | ||
103 | servers, | ||
104 | videoUUID: uuid, | ||
105 | objectStorageBaseUrl: objectStorageBaseUrlHLS, | ||
106 | resolutions: [ 720, 480, 360, 240, 144, 0 ] | ||
107 | }) | ||
108 | } | ||
109 | } | ||
110 | }) | ||
111 | |||
112 | it('Should upload an audio only video and transcode it', async function () { | ||
113 | this.timeout(120000) | ||
114 | |||
115 | const attributes = { name: 'audio_without_preview', fixture: 'sample.ogg' } | ||
116 | const { uuid } = await servers[0].videos.upload({ attributes, mode: 'resumable' }) | ||
117 | |||
118 | await waitJobs(servers, { runnerJobs: true }) | ||
119 | |||
120 | for (const server of servers) { | ||
121 | if (webtorrentEnabled) { | ||
122 | await completeWebVideoFilesCheck({ | ||
123 | server, | ||
124 | originServer: servers[0], | ||
125 | fixture: 'sample.ogg', | ||
126 | videoUUID: uuid, | ||
127 | objectStorageBaseUrl: objectStorageBaseUrlWebTorrent, | ||
128 | files: [ | ||
129 | { resolution: 0 }, | ||
130 | { resolution: 144 }, | ||
131 | { resolution: 240 }, | ||
132 | { resolution: 360 }, | ||
133 | { resolution: 480 } | ||
134 | ] | ||
135 | }) | ||
136 | } | ||
137 | |||
138 | if (hlsEnabled) { | ||
139 | await completeCheckHlsPlaylist({ | ||
140 | hlsOnly: !webtorrentEnabled, | ||
141 | servers, | ||
142 | videoUUID: uuid, | ||
143 | objectStorageBaseUrl: objectStorageBaseUrlHLS, | ||
144 | resolutions: [ 480, 360, 240, 144, 0 ] | ||
145 | }) | ||
146 | } | ||
147 | } | ||
148 | }) | ||
149 | |||
150 | it('Should upload a private video and transcode it', async function () { | ||
151 | this.timeout(120000) | ||
152 | |||
153 | const { uuid } = await servers[0].videos.quickUpload({ name: 'mp4', fixture: 'video_short.mp4', privacy: VideoPrivacy.PRIVATE }) | ||
154 | |||
155 | await waitJobs(servers, { runnerJobs: true }) | ||
156 | |||
157 | if (webtorrentEnabled) { | ||
158 | await completeWebVideoFilesCheck({ | ||
159 | server: servers[0], | ||
160 | originServer: servers[0], | ||
161 | fixture: 'video_short.mp4', | ||
162 | videoUUID: uuid, | ||
163 | objectStorageBaseUrl: objectStorageBaseUrlWebTorrent, | ||
164 | files: [ | ||
165 | { resolution: 0 }, | ||
166 | { resolution: 144 }, | ||
167 | { resolution: 240 }, | ||
168 | { resolution: 360 }, | ||
169 | { resolution: 480 }, | ||
170 | { resolution: 720 } | ||
171 | ] | ||
172 | }) | ||
173 | } | ||
174 | |||
175 | if (hlsEnabled) { | ||
176 | await completeCheckHlsPlaylist({ | ||
177 | hlsOnly: !webtorrentEnabled, | ||
178 | servers: [ servers[0] ], | ||
179 | videoUUID: uuid, | ||
180 | objectStorageBaseUrl: objectStorageBaseUrlHLS, | ||
181 | resolutions: [ 720, 480, 360, 240, 144, 0 ] | ||
182 | }) | ||
183 | } | ||
184 | }) | ||
185 | |||
186 | it('Should transcode videos on manual run', async function () { | ||
187 | this.timeout(120000) | ||
188 | |||
189 | await servers[0].config.disableTranscoding() | ||
190 | |||
191 | const { uuid } = await servers[0].videos.quickUpload({ name: 'manual transcoding', fixture: 'video_short.mp4' }) | ||
192 | await waitJobs(servers, { runnerJobs: true }) | ||
193 | |||
194 | { | ||
195 | const video = await servers[0].videos.get({ id: uuid }) | ||
196 | expect(getAllFiles(video)).to.have.lengthOf(1) | ||
197 | } | ||
198 | |||
199 | await servers[0].config.enableTranscoding(true, true, true) | ||
200 | |||
201 | await servers[0].videos.runTranscoding({ transcodingType: 'webtorrent', videoId: uuid }) | ||
202 | await waitJobs(servers, { runnerJobs: true }) | ||
203 | |||
204 | await completeWebVideoFilesCheck({ | ||
205 | server: servers[0], | ||
206 | originServer: servers[0], | ||
207 | fixture: 'video_short.mp4', | ||
208 | videoUUID: uuid, | ||
209 | objectStorageBaseUrl: objectStorageBaseUrlWebTorrent, | ||
210 | files: [ | ||
211 | { resolution: 0 }, | ||
212 | { resolution: 144 }, | ||
213 | { resolution: 240 }, | ||
214 | { resolution: 360 }, | ||
215 | { resolution: 480 }, | ||
216 | { resolution: 720 } | ||
217 | ] | ||
218 | }) | ||
219 | |||
220 | await servers[0].videos.runTranscoding({ transcodingType: 'hls', videoId: uuid }) | ||
221 | await waitJobs(servers, { runnerJobs: true }) | ||
222 | |||
223 | await completeCheckHlsPlaylist({ | ||
224 | hlsOnly: false, | ||
225 | servers: [ servers[0] ], | ||
226 | videoUUID: uuid, | ||
227 | objectStorageBaseUrl: objectStorageBaseUrlHLS, | ||
228 | resolutions: [ 720, 480, 360, 240, 144, 0 ] | ||
229 | }) | ||
230 | }) | ||
231 | } | ||
232 | |||
233 | before(async function () { | ||
234 | this.timeout(120_000) | ||
235 | |||
236 | servers = await createMultipleServers(2) | ||
237 | |||
238 | await setAccessTokensToServers(servers) | ||
239 | await setDefaultVideoChannel(servers) | ||
240 | |||
241 | await doubleFollow(servers[0], servers[1]) | ||
242 | |||
243 | await servers[0].config.enableRemoteTranscoding() | ||
244 | |||
245 | const registrationToken = await servers[0].runnerRegistrationTokens.getFirstRegistrationToken() | ||
246 | |||
247 | peertubeRunner = new PeerTubeRunnerProcess() | ||
248 | await peertubeRunner.runServer() | ||
249 | await peertubeRunner.registerPeerTubeInstance({ server: servers[0], registrationToken, runnerName: 'runner' }) | ||
250 | }) | ||
251 | |||
252 | describe('With videos on local filesystem storage', function () { | ||
253 | |||
254 | describe('Web video only enabled', function () { | ||
255 | |||
256 | before(async function () { | ||
257 | await servers[0].config.enableTranscoding(true, false, true) | ||
258 | }) | ||
259 | |||
260 | runSuite({ webtorrentEnabled: true, hlsEnabled: false, objectStorage: false }) | ||
261 | }) | ||
262 | |||
263 | describe('HLS videos only enabled', function () { | ||
264 | |||
265 | before(async function () { | ||
266 | await servers[0].config.enableTranscoding(false, true, true) | ||
267 | }) | ||
268 | |||
269 | runSuite({ webtorrentEnabled: false, hlsEnabled: true, objectStorage: false }) | ||
270 | }) | ||
271 | |||
272 | describe('Web video & HLS enabled', function () { | ||
273 | |||
274 | before(async function () { | ||
275 | await servers[0].config.enableTranscoding(true, true, true) | ||
276 | }) | ||
277 | |||
278 | runSuite({ webtorrentEnabled: true, hlsEnabled: true, objectStorage: false }) | ||
279 | }) | ||
280 | }) | ||
281 | |||
282 | describe('With videos on object storage', function () { | ||
283 | if (areMockObjectStorageTestsDisabled()) return | ||
284 | |||
285 | before(async function () { | ||
286 | await ObjectStorageCommand.prepareDefaultMockBuckets() | ||
287 | |||
288 | await servers[0].kill() | ||
289 | |||
290 | await servers[0].run(ObjectStorageCommand.getDefaultMockConfig()) | ||
291 | |||
292 | // Wait for peertube runner socket reconnection | ||
293 | await wait(1500) | ||
294 | }) | ||
295 | |||
296 | describe('Web video only enabled', function () { | ||
297 | |||
298 | before(async function () { | ||
299 | await servers[0].config.enableTranscoding(true, false, true) | ||
300 | }) | ||
301 | |||
302 | runSuite({ webtorrentEnabled: true, hlsEnabled: false, objectStorage: true }) | ||
303 | }) | ||
304 | |||
305 | describe('HLS videos only enabled', function () { | ||
306 | |||
307 | before(async function () { | ||
308 | await servers[0].config.enableTranscoding(false, true, true) | ||
309 | }) | ||
310 | |||
311 | runSuite({ webtorrentEnabled: false, hlsEnabled: true, objectStorage: true }) | ||
312 | }) | ||
313 | |||
314 | describe('Web video & HLS enabled', function () { | ||
315 | |||
316 | before(async function () { | ||
317 | await servers[0].config.enableTranscoding(true, true, true) | ||
318 | }) | ||
319 | |||
320 | runSuite({ webtorrentEnabled: true, hlsEnabled: true, objectStorage: true }) | ||
321 | }) | ||
322 | }) | ||
323 | |||
324 | after(async function () { | ||
325 | await peertubeRunner.unregisterPeerTubeInstance({ server: servers[0] }) | ||
326 | peertubeRunner.kill() | ||
327 | |||
328 | await cleanupTests(servers) | ||
329 | }) | ||
330 | }) | ||
diff --git a/server/tests/plugins/plugin-transcoding.ts b/server/tests/plugins/plugin-transcoding.ts index ce1047388..689eec5ac 100644 --- a/server/tests/plugins/plugin-transcoding.ts +++ b/server/tests/plugins/plugin-transcoding.ts | |||
@@ -1,7 +1,7 @@ | |||
1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ | 1 | /* eslint-disable @typescript-eslint/no-unused-expressions,@typescript-eslint/require-await */ |
2 | 2 | ||
3 | import { expect } from 'chai' | 3 | import { expect } from 'chai' |
4 | import { getAudioStream, getVideoStreamFPS, getVideoStream } from '@server/helpers/ffmpeg' | 4 | import { getAudioStream, getVideoStream, getVideoStreamFPS } from '@shared/ffmpeg' |
5 | import { VideoPrivacy } from '@shared/models' | 5 | import { VideoPrivacy } from '@shared/models' |
6 | import { | 6 | import { |
7 | cleanupTests, | 7 | cleanupTests, |
diff --git a/server/tests/shared/checks.ts b/server/tests/shared/checks.ts index c0098b293..d7eb25bb5 100644 --- a/server/tests/shared/checks.ts +++ b/server/tests/shared/checks.ts | |||
@@ -11,7 +11,7 @@ import { HttpStatusCode } from '@shared/models' | |||
11 | import { makeGetRequest, PeerTubeServer } from '@shared/server-commands' | 11 | import { makeGetRequest, PeerTubeServer } from '@shared/server-commands' |
12 | 12 | ||
13 | // Default interval -> 5 minutes | 13 | // Default interval -> 5 minutes |
14 | function dateIsValid (dateString: string, interval = 300000) { | 14 | function dateIsValid (dateString: string | Date, interval = 300000) { |
15 | const dateToCheck = new Date(dateString) | 15 | const dateToCheck = new Date(dateString) |
16 | const now = new Date() | 16 | const now = new Date() |
17 | 17 | ||
@@ -90,6 +90,8 @@ async function testFileExistsOrNot (server: PeerTubeServer, directory: string, f | |||
90 | expect(await pathExists(join(base, filePath))).to.equal(exist) | 90 | expect(await pathExists(join(base, filePath))).to.equal(exist) |
91 | } | 91 | } |
92 | 92 | ||
93 | // --------------------------------------------------------------------------- | ||
94 | |||
93 | function checkBadStartPagination (url: string, path: string, token?: string, query = {}) { | 95 | function checkBadStartPagination (url: string, path: string, token?: string, query = {}) { |
94 | return makeGetRequest({ | 96 | return makeGetRequest({ |
95 | url, | 97 | url, |
diff --git a/server/tests/shared/generate.ts b/server/tests/shared/generate.ts index 9a57084e4..b0c8dba66 100644 --- a/server/tests/shared/generate.ts +++ b/server/tests/shared/generate.ts | |||
@@ -3,7 +3,7 @@ import ffmpeg from 'fluent-ffmpeg' | |||
3 | import { ensureDir, pathExists } from 'fs-extra' | 3 | import { ensureDir, pathExists } from 'fs-extra' |
4 | import { dirname } from 'path' | 4 | import { dirname } from 'path' |
5 | import { buildAbsoluteFixturePath, getMaxBitrate } from '@shared/core-utils' | 5 | import { buildAbsoluteFixturePath, getMaxBitrate } from '@shared/core-utils' |
6 | import { getVideoStreamBitrate, getVideoStreamFPS, getVideoStreamDimensionsInfo } from '@shared/extra-utils' | 6 | import { getVideoStreamBitrate, getVideoStreamDimensionsInfo, getVideoStreamFPS } from '@shared/ffmpeg' |
7 | 7 | ||
8 | async function ensureHasTooBigBitrate (fixturePath: string) { | 8 | async function ensureHasTooBigBitrate (fixturePath: string) { |
9 | const bitrate = await getVideoStreamBitrate(fixturePath) | 9 | const bitrate = await getVideoStreamBitrate(fixturePath) |
diff --git a/server/tests/shared/index.ts b/server/tests/shared/index.ts index 963ef8fe6..eda24adb5 100644 --- a/server/tests/shared/index.ts +++ b/server/tests/shared/index.ts | |||
@@ -6,11 +6,14 @@ export * from './directories' | |||
6 | export * from './generate' | 6 | export * from './generate' |
7 | export * from './live' | 7 | export * from './live' |
8 | export * from './notifications' | 8 | export * from './notifications' |
9 | export * from './peertube-runner-process' | ||
9 | export * from './video-playlists' | 10 | export * from './video-playlists' |
10 | export * from './plugins' | 11 | export * from './plugins' |
11 | export * from './requests' | 12 | export * from './requests' |
13 | export * from './sql-command' | ||
12 | export * from './streaming-playlists' | 14 | export * from './streaming-playlists' |
13 | export * from './tests' | 15 | export * from './tests' |
14 | export * from './tracker' | 16 | export * from './tracker' |
15 | export * from './videos' | 17 | export * from './videos' |
16 | export * from './views' | 18 | export * from './views' |
19 | export * from './webtorrent' | ||
diff --git a/server/tests/shared/live.ts b/server/tests/shared/live.ts index ff0b2f226..31f92ef19 100644 --- a/server/tests/shared/live.ts +++ b/server/tests/shared/live.ts | |||
@@ -6,6 +6,7 @@ import { join } from 'path' | |||
6 | import { sha1 } from '@shared/extra-utils' | 6 | import { sha1 } from '@shared/extra-utils' |
7 | import { LiveVideo, VideoStreamingPlaylistType } from '@shared/models' | 7 | import { LiveVideo, VideoStreamingPlaylistType } from '@shared/models' |
8 | import { ObjectStorageCommand, PeerTubeServer } from '@shared/server-commands' | 8 | import { ObjectStorageCommand, PeerTubeServer } from '@shared/server-commands' |
9 | import { SQLCommand } from './sql-command' | ||
9 | import { checkLiveSegmentHash, checkResolutionsInMasterPlaylist } from './streaming-playlists' | 10 | import { checkLiveSegmentHash, checkResolutionsInMasterPlaylist } from './streaming-playlists' |
10 | 11 | ||
11 | async function checkLiveCleanup (options: { | 12 | async function checkLiveCleanup (options: { |
@@ -36,8 +37,10 @@ async function checkLiveCleanup (options: { | |||
36 | 37 | ||
37 | // --------------------------------------------------------------------------- | 38 | // --------------------------------------------------------------------------- |
38 | 39 | ||
39 | async function testVideoResolutions (options: { | 40 | async function testLiveVideoResolutions (options: { |
41 | sqlCommand: SQLCommand | ||
40 | originServer: PeerTubeServer | 42 | originServer: PeerTubeServer |
43 | |||
41 | servers: PeerTubeServer[] | 44 | servers: PeerTubeServer[] |
42 | liveVideoId: string | 45 | liveVideoId: string |
43 | resolutions: number[] | 46 | resolutions: number[] |
@@ -48,6 +51,7 @@ async function testVideoResolutions (options: { | |||
48 | }) { | 51 | }) { |
49 | const { | 52 | const { |
50 | originServer, | 53 | originServer, |
54 | sqlCommand, | ||
51 | servers, | 55 | servers, |
52 | liveVideoId, | 56 | liveVideoId, |
53 | resolutions, | 57 | resolutions, |
@@ -116,7 +120,7 @@ async function testVideoResolutions (options: { | |||
116 | 120 | ||
117 | if (originServer.internalServerNumber === server.internalServerNumber) { | 121 | if (originServer.internalServerNumber === server.internalServerNumber) { |
118 | const infohash = sha1(`${2 + hlsPlaylist.playlistUrl}+V${i}`) | 122 | const infohash = sha1(`${2 + hlsPlaylist.playlistUrl}+V${i}`) |
119 | const dbInfohashes = await originServer.sql.getPlaylistInfohash(hlsPlaylist.id) | 123 | const dbInfohashes = await sqlCommand.getPlaylistInfohash(hlsPlaylist.id) |
120 | 124 | ||
121 | expect(dbInfohashes).to.include(infohash) | 125 | expect(dbInfohashes).to.include(infohash) |
122 | } | 126 | } |
@@ -128,7 +132,7 @@ async function testVideoResolutions (options: { | |||
128 | 132 | ||
129 | export { | 133 | export { |
130 | checkLiveCleanup, | 134 | checkLiveCleanup, |
131 | testVideoResolutions | 135 | testLiveVideoResolutions |
132 | } | 136 | } |
133 | 137 | ||
134 | // --------------------------------------------------------------------------- | 138 | // --------------------------------------------------------------------------- |
diff --git a/server/tests/shared/peertube-runner-process.ts b/server/tests/shared/peertube-runner-process.ts new file mode 100644 index 000000000..84e2dc6df --- /dev/null +++ b/server/tests/shared/peertube-runner-process.ts | |||
@@ -0,0 +1,87 @@ | |||
1 | import { ChildProcess, fork } from 'child_process' | ||
2 | import execa from 'execa' | ||
3 | import { join } from 'path' | ||
4 | import { root } from '@shared/core-utils' | ||
5 | import { PeerTubeServer } from '@shared/server-commands' | ||
6 | |||
7 | export class PeerTubeRunnerProcess { | ||
8 | private app?: ChildProcess | ||
9 | |||
10 | runServer (options: { | ||
11 | hideLogs?: boolean // default true | ||
12 | } = {}) { | ||
13 | const { hideLogs = true } = options | ||
14 | |||
15 | return new Promise<void>((res, rej) => { | ||
16 | const args = [ 'server', '--verbose', '--id', 'test' ] | ||
17 | |||
18 | const forkOptions = { | ||
19 | detached: false, | ||
20 | silent: true | ||
21 | } | ||
22 | this.app = fork(this.getRunnerPath(), args, forkOptions) | ||
23 | |||
24 | this.app.stdout.on('data', data => { | ||
25 | const str = data.toString() as string | ||
26 | |||
27 | if (!hideLogs) { | ||
28 | console.log(str) | ||
29 | } | ||
30 | }) | ||
31 | |||
32 | res() | ||
33 | }) | ||
34 | } | ||
35 | |||
36 | registerPeerTubeInstance (options: { | ||
37 | server: PeerTubeServer | ||
38 | registrationToken: string | ||
39 | runnerName: string | ||
40 | runnerDescription?: string | ||
41 | }) { | ||
42 | const { server, registrationToken, runnerName, runnerDescription } = options | ||
43 | |||
44 | const args = [ | ||
45 | 'register', | ||
46 | '--url', server.url, | ||
47 | '--registration-token', registrationToken, | ||
48 | '--runner-name', runnerName, | ||
49 | '--id', 'test' | ||
50 | ] | ||
51 | |||
52 | if (runnerDescription) { | ||
53 | args.push('--runner-description') | ||
54 | args.push(runnerDescription) | ||
55 | } | ||
56 | |||
57 | return execa.node(this.getRunnerPath(), args) | ||
58 | } | ||
59 | |||
60 | unregisterPeerTubeInstance (options: { | ||
61 | server: PeerTubeServer | ||
62 | }) { | ||
63 | const { server } = options | ||
64 | |||
65 | const args = [ 'unregister', '--url', server.url, '--id', 'test' ] | ||
66 | return execa.node(this.getRunnerPath(), args) | ||
67 | } | ||
68 | |||
69 | async listRegisteredPeerTubeInstances () { | ||
70 | const args = [ 'list-registered', '--id', 'test' ] | ||
71 | const { stdout } = await execa.node(this.getRunnerPath(), args) | ||
72 | |||
73 | return stdout | ||
74 | } | ||
75 | |||
76 | kill () { | ||
77 | if (!this.app) return | ||
78 | |||
79 | process.kill(this.app.pid) | ||
80 | |||
81 | this.app = null | ||
82 | } | ||
83 | |||
84 | private getRunnerPath () { | ||
85 | return join(root(), 'packages', 'peertube-runner', 'dist', 'peertube-runner.js') | ||
86 | } | ||
87 | } | ||
diff --git a/server/tests/shared/sql-command.ts b/server/tests/shared/sql-command.ts new file mode 100644 index 000000000..5c53a8ac6 --- /dev/null +++ b/server/tests/shared/sql-command.ts | |||
@@ -0,0 +1,150 @@ | |||
1 | import { QueryTypes, Sequelize } from 'sequelize' | ||
2 | import { forceNumber } from '@shared/core-utils' | ||
3 | import { PeerTubeServer } from '@shared/server-commands' | ||
4 | |||
5 | export class SQLCommand { | ||
6 | private sequelize: Sequelize | ||
7 | |||
8 | constructor (private readonly server: PeerTubeServer) { | ||
9 | |||
10 | } | ||
11 | |||
12 | deleteAll (table: string) { | ||
13 | const seq = this.getSequelize() | ||
14 | |||
15 | const options = { type: QueryTypes.DELETE } | ||
16 | |||
17 | return seq.query(`DELETE FROM "${table}"`, options) | ||
18 | } | ||
19 | |||
20 | async getVideoShareCount () { | ||
21 | const [ { total } ] = await this.selectQuery<{ total: string }>(`SELECT COUNT(*) as total FROM "videoShare"`) | ||
22 | if (total === null) return 0 | ||
23 | |||
24 | return parseInt(total, 10) | ||
25 | } | ||
26 | |||
27 | async getInternalFileUrl (fileId: number) { | ||
28 | return this.selectQuery<{ fileUrl: string }>(`SELECT "fileUrl" FROM "videoFile" WHERE id = :fileId`, { fileId }) | ||
29 | .then(rows => rows[0].fileUrl) | ||
30 | } | ||
31 | |||
32 | setActorField (to: string, field: string, value: string) { | ||
33 | return this.updateQuery(`UPDATE actor SET ${this.escapeColumnName(field)} = :value WHERE url = :to`, { value, to }) | ||
34 | } | ||
35 | |||
36 | setVideoField (uuid: string, field: string, value: string) { | ||
37 | return this.updateQuery(`UPDATE video SET ${this.escapeColumnName(field)} = :value WHERE uuid = :uuid`, { value, uuid }) | ||
38 | } | ||
39 | |||
40 | setPlaylistField (uuid: string, field: string, value: string) { | ||
41 | return this.updateQuery(`UPDATE "videoPlaylist" SET ${this.escapeColumnName(field)} = :value WHERE uuid = :uuid`, { value, uuid }) | ||
42 | } | ||
43 | |||
44 | async countVideoViewsOf (uuid: string) { | ||
45 | const query = 'SELECT SUM("videoView"."views") AS "total" FROM "videoView" ' + | ||
46 | `INNER JOIN "video" ON "video"."id" = "videoView"."videoId" WHERE "video"."uuid" = :uuid` | ||
47 | |||
48 | const [ { total } ] = await this.selectQuery<{ total: number }>(query, { uuid }) | ||
49 | if (!total) return 0 | ||
50 | |||
51 | return forceNumber(total) | ||
52 | } | ||
53 | |||
54 | getActorImage (filename: string) { | ||
55 | return this.selectQuery<{ width: number, height: number }>(`SELECT * FROM "actorImage" WHERE filename = :filename`, { filename }) | ||
56 | .then(rows => rows[0]) | ||
57 | } | ||
58 | |||
59 | // --------------------------------------------------------------------------- | ||
60 | |||
61 | setPluginVersion (pluginName: string, newVersion: string) { | ||
62 | return this.setPluginField(pluginName, 'version', newVersion) | ||
63 | } | ||
64 | |||
65 | setPluginLatestVersion (pluginName: string, newVersion: string) { | ||
66 | return this.setPluginField(pluginName, 'latestVersion', newVersion) | ||
67 | } | ||
68 | |||
69 | setPluginField (pluginName: string, field: string, value: string) { | ||
70 | return this.updateQuery( | ||
71 | `UPDATE "plugin" SET ${this.escapeColumnName(field)} = :value WHERE "name" = :pluginName`, | ||
72 | { pluginName, value } | ||
73 | ) | ||
74 | } | ||
75 | |||
76 | // --------------------------------------------------------------------------- | ||
77 | |||
78 | selectQuery <T extends object> (query: string, replacements: { [id: string]: string | number } = {}) { | ||
79 | const seq = this.getSequelize() | ||
80 | const options = { | ||
81 | type: QueryTypes.SELECT as QueryTypes.SELECT, | ||
82 | replacements | ||
83 | } | ||
84 | |||
85 | return seq.query<T>(query, options) | ||
86 | } | ||
87 | |||
88 | updateQuery (query: string, replacements: { [id: string]: string | number } = {}) { | ||
89 | const seq = this.getSequelize() | ||
90 | const options = { type: QueryTypes.UPDATE as QueryTypes.UPDATE, replacements } | ||
91 | |||
92 | return seq.query(query, options) | ||
93 | } | ||
94 | |||
95 | // --------------------------------------------------------------------------- | ||
96 | |||
97 | async getPlaylistInfohash (playlistId: number) { | ||
98 | const query = 'SELECT "p2pMediaLoaderInfohashes" FROM "videoStreamingPlaylist" WHERE id = :playlistId' | ||
99 | |||
100 | const result = await this.selectQuery<{ p2pMediaLoaderInfohashes: string }>(query, { playlistId }) | ||
101 | if (!result || result.length === 0) return [] | ||
102 | |||
103 | return result[0].p2pMediaLoaderInfohashes | ||
104 | } | ||
105 | |||
106 | // --------------------------------------------------------------------------- | ||
107 | |||
108 | setActorFollowScores (newScore: number) { | ||
109 | return this.updateQuery(`UPDATE "actorFollow" SET "score" = :newScore`, { newScore }) | ||
110 | } | ||
111 | |||
112 | setTokenField (accessToken: string, field: string, value: string) { | ||
113 | return this.updateQuery( | ||
114 | `UPDATE "oAuthToken" SET ${this.escapeColumnName(field)} = :value WHERE "accessToken" = :accessToken`, | ||
115 | { value, accessToken } | ||
116 | ) | ||
117 | } | ||
118 | |||
119 | async cleanup () { | ||
120 | if (!this.sequelize) return | ||
121 | |||
122 | await this.sequelize.close() | ||
123 | this.sequelize = undefined | ||
124 | } | ||
125 | |||
126 | private getSequelize () { | ||
127 | if (this.sequelize) return this.sequelize | ||
128 | |||
129 | const dbname = 'peertube_test' + this.server.internalServerNumber | ||
130 | const username = 'peertube' | ||
131 | const password = 'peertube' | ||
132 | const host = '127.0.0.1' | ||
133 | const port = 5432 | ||
134 | |||
135 | this.sequelize = new Sequelize(dbname, username, password, { | ||
136 | dialect: 'postgres', | ||
137 | host, | ||
138 | port, | ||
139 | logging: false | ||
140 | }) | ||
141 | |||
142 | return this.sequelize | ||
143 | } | ||
144 | |||
145 | private escapeColumnName (columnName: string) { | ||
146 | return this.getSequelize().escape(columnName) | ||
147 | .replace(/^'/, '"') | ||
148 | .replace(/'$/, '"') | ||
149 | } | ||
150 | } | ||
diff --git a/server/tests/shared/streaming-playlists.ts b/server/tests/shared/streaming-playlists.ts index 1c38cb512..acfb2b408 100644 --- a/server/tests/shared/streaming-playlists.ts +++ b/server/tests/shared/streaming-playlists.ts | |||
@@ -4,10 +4,11 @@ import { expect } from 'chai' | |||
4 | import { basename, dirname, join } from 'path' | 4 | import { basename, dirname, join } from 'path' |
5 | import { removeFragmentedMP4Ext, uuidRegex } from '@shared/core-utils' | 5 | import { removeFragmentedMP4Ext, uuidRegex } from '@shared/core-utils' |
6 | import { sha256 } from '@shared/extra-utils' | 6 | import { sha256 } from '@shared/extra-utils' |
7 | import { HttpStatusCode, VideoStreamingPlaylist, VideoStreamingPlaylistType } from '@shared/models' | 7 | import { HttpStatusCode, VideoPrivacy, VideoResolution, VideoStreamingPlaylist, VideoStreamingPlaylistType } from '@shared/models' |
8 | import { makeRawRequest, PeerTubeServer, webtorrentAdd } from '@shared/server-commands' | 8 | import { makeRawRequest, PeerTubeServer } from '@shared/server-commands' |
9 | import { expectStartWith } from './checks' | 9 | import { expectStartWith } from './checks' |
10 | import { hlsInfohashExist } from './tracker' | 10 | import { hlsInfohashExist } from './tracker' |
11 | import { checkWebTorrentWorks } from './webtorrent' | ||
11 | 12 | ||
12 | async function checkSegmentHash (options: { | 13 | async function checkSegmentHash (options: { |
13 | server: PeerTubeServer | 14 | server: PeerTubeServer |
@@ -15,14 +16,15 @@ async function checkSegmentHash (options: { | |||
15 | baseUrlSegment: string | 16 | baseUrlSegment: string |
16 | resolution: number | 17 | resolution: number |
17 | hlsPlaylist: VideoStreamingPlaylist | 18 | hlsPlaylist: VideoStreamingPlaylist |
19 | token?: string | ||
18 | }) { | 20 | }) { |
19 | const { server, baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist } = options | 21 | const { server, baseUrlPlaylist, baseUrlSegment, resolution, hlsPlaylist, token } = options |
20 | const command = server.streamingPlaylists | 22 | const command = server.streamingPlaylists |
21 | 23 | ||
22 | const file = hlsPlaylist.files.find(f => f.resolution.id === resolution) | 24 | const file = hlsPlaylist.files.find(f => f.resolution.id === resolution) |
23 | const videoName = basename(file.fileUrl) | 25 | const videoName = basename(file.fileUrl) |
24 | 26 | ||
25 | const playlist = await command.get({ url: `${baseUrlPlaylist}/${removeFragmentedMP4Ext(videoName)}.m3u8` }) | 27 | const playlist = await command.get({ url: `${baseUrlPlaylist}/${removeFragmentedMP4Ext(videoName)}.m3u8`, token }) |
26 | 28 | ||
27 | const matches = /#EXT-X-BYTERANGE:(\d+)@(\d+)/.exec(playlist) | 29 | const matches = /#EXT-X-BYTERANGE:(\d+)@(\d+)/.exec(playlist) |
28 | 30 | ||
@@ -33,11 +35,12 @@ async function checkSegmentHash (options: { | |||
33 | const segmentBody = await command.getFragmentedSegment({ | 35 | const segmentBody = await command.getFragmentedSegment({ |
34 | url: `${baseUrlSegment}/${videoName}`, | 36 | url: `${baseUrlSegment}/${videoName}`, |
35 | expectedStatus: HttpStatusCode.PARTIAL_CONTENT_206, | 37 | expectedStatus: HttpStatusCode.PARTIAL_CONTENT_206, |
36 | range: `bytes=${range}` | 38 | range: `bytes=${range}`, |
39 | token | ||
37 | }) | 40 | }) |
38 | 41 | ||
39 | const shaBody = await command.getSegmentSha256({ url: hlsPlaylist.segmentsSha256Url }) | 42 | const shaBody = await command.getSegmentSha256({ url: hlsPlaylist.segmentsSha256Url, token }) |
40 | expect(sha256(segmentBody)).to.equal(shaBody[videoName][range]) | 43 | expect(sha256(segmentBody)).to.equal(shaBody[videoName][range], `Invalid sha256 result for ${videoName} range ${range}`) |
41 | } | 44 | } |
42 | 45 | ||
43 | // --------------------------------------------------------------------------- | 46 | // --------------------------------------------------------------------------- |
@@ -64,19 +67,24 @@ async function checkResolutionsInMasterPlaylist (options: { | |||
64 | server: PeerTubeServer | 67 | server: PeerTubeServer |
65 | playlistUrl: string | 68 | playlistUrl: string |
66 | resolutions: number[] | 69 | resolutions: number[] |
70 | token?: string | ||
67 | transcoded?: boolean // default true | 71 | transcoded?: boolean // default true |
68 | withRetry?: boolean // default false | 72 | withRetry?: boolean // default false |
69 | }) { | 73 | }) { |
70 | const { server, playlistUrl, resolutions, withRetry = false, transcoded = true } = options | 74 | const { server, playlistUrl, resolutions, token, withRetry = false, transcoded = true } = options |
71 | 75 | ||
72 | const masterPlaylist = await server.streamingPlaylists.get({ url: playlistUrl, withRetry }) | 76 | const masterPlaylist = await server.streamingPlaylists.get({ url: playlistUrl, token, withRetry }) |
73 | 77 | ||
74 | for (const resolution of resolutions) { | 78 | for (const resolution of resolutions) { |
75 | const reg = transcoded | 79 | const base = '#EXT-X-STREAM-INF:BANDWIDTH=\\d+,RESOLUTION=\\d+x' + resolution |
76 | ? new RegExp('#EXT-X-STREAM-INF:BANDWIDTH=\\d+,RESOLUTION=\\d+x' + resolution + ',(FRAME-RATE=\\d+,)?CODECS="avc1.64001f,mp4a.40.2"') | 80 | |
77 | : new RegExp('#EXT-X-STREAM-INF:BANDWIDTH=\\d+,RESOLUTION=\\d+x' + resolution + '') | 81 | if (resolution === VideoResolution.H_NOVIDEO) { |
78 | 82 | expect(masterPlaylist).to.match(new RegExp(`${base},CODECS="mp4a.40.2"`)) | |
79 | expect(masterPlaylist).to.match(reg) | 83 | } else if (transcoded) { |
84 | expect(masterPlaylist).to.match(new RegExp(`${base},(FRAME-RATE=\\d+,)?CODECS="avc1.64001f,mp4a.40.2"`)) | ||
85 | } else { | ||
86 | expect(masterPlaylist).to.match(new RegExp(`${base}`)) | ||
87 | } | ||
80 | } | 88 | } |
81 | 89 | ||
82 | const playlistsLength = masterPlaylist.split('\n').filter(line => line.startsWith('#EXT-X-STREAM-INF:BANDWIDTH=')) | 90 | const playlistsLength = masterPlaylist.split('\n').filter(line => line.startsWith('#EXT-X-STREAM-INF:BANDWIDTH=')) |
@@ -89,14 +97,23 @@ async function completeCheckHlsPlaylist (options: { | |||
89 | hlsOnly: boolean | 97 | hlsOnly: boolean |
90 | 98 | ||
91 | resolutions?: number[] | 99 | resolutions?: number[] |
92 | objectStorageBaseUrl: string | 100 | objectStorageBaseUrl?: string |
93 | }) { | 101 | }) { |
94 | const { videoUUID, hlsOnly, objectStorageBaseUrl } = options | 102 | const { videoUUID, hlsOnly, objectStorageBaseUrl } = options |
95 | 103 | ||
96 | const resolutions = options.resolutions ?? [ 240, 360, 480, 720 ] | 104 | const resolutions = options.resolutions ?? [ 240, 360, 480, 720 ] |
97 | 105 | ||
98 | for (const server of options.servers) { | 106 | for (const server of options.servers) { |
99 | const videoDetails = await server.videos.get({ id: videoUUID }) | 107 | const videoDetails = await server.videos.getWithToken({ id: videoUUID }) |
108 | const requiresAuth = videoDetails.privacy.id === VideoPrivacy.PRIVATE || videoDetails.privacy.id === VideoPrivacy.INTERNAL | ||
109 | |||
110 | const privatePath = requiresAuth | ||
111 | ? 'private/' | ||
112 | : '' | ||
113 | const token = requiresAuth | ||
114 | ? server.accessToken | ||
115 | : undefined | ||
116 | |||
100 | const baseUrl = `http://${videoDetails.account.host}` | 117 | const baseUrl = `http://${videoDetails.account.host}` |
101 | 118 | ||
102 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) | 119 | expect(videoDetails.streamingPlaylists).to.have.lengthOf(1) |
@@ -115,35 +132,55 @@ async function completeCheckHlsPlaylist (options: { | |||
115 | const file = hlsFiles.find(f => f.resolution.id === resolution) | 132 | const file = hlsFiles.find(f => f.resolution.id === resolution) |
116 | expect(file).to.not.be.undefined | 133 | expect(file).to.not.be.undefined |
117 | 134 | ||
118 | expect(file.magnetUri).to.have.lengthOf.above(2) | 135 | if (file.resolution.id === VideoResolution.H_NOVIDEO) { |
119 | expect(file.torrentUrl).to.match( | 136 | expect(file.resolution.label).to.equal('Audio') |
120 | new RegExp(`${server.url}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}-hls.torrent`) | ||
121 | ) | ||
122 | |||
123 | if (objectStorageBaseUrl) { | ||
124 | expectStartWith(file.fileUrl, objectStorageBaseUrl) | ||
125 | } else { | 137 | } else { |
126 | expect(file.fileUrl).to.match( | 138 | expect(file.resolution.label).to.equal(resolution + 'p') |
127 | new RegExp(`${baseUrl}/static/streaming-playlists/hls/${videoDetails.uuid}/${uuidRegex}-${file.resolution.id}-fragmented.mp4`) | ||
128 | ) | ||
129 | } | 139 | } |
130 | 140 | ||
131 | expect(file.resolution.label).to.equal(resolution + 'p') | 141 | expect(file.magnetUri).to.have.lengthOf.above(2) |
132 | 142 | await checkWebTorrentWorks(file.magnetUri) | |
133 | await makeRawRequest({ url: file.torrentUrl, expectedStatus: HttpStatusCode.OK_200 }) | 143 | |
134 | await makeRawRequest({ url: file.fileUrl, expectedStatus: HttpStatusCode.OK_200 }) | 144 | { |
145 | const nameReg = `${uuidRegex}-${file.resolution.id}` | ||
146 | |||
147 | expect(file.torrentUrl).to.match(new RegExp(`${server.url}/lazy-static/torrents/${nameReg}-hls.torrent`)) | ||
148 | |||
149 | if (objectStorageBaseUrl && requiresAuth) { | ||
150 | // eslint-disable-next-line max-len | ||
151 | expect(file.fileUrl).to.match(new RegExp(`${server.url}/object-storage-proxy/streaming-playlists/hls/${privatePath}${videoDetails.uuid}/${nameReg}-fragmented.mp4`)) | ||
152 | } else if (objectStorageBaseUrl) { | ||
153 | expectStartWith(file.fileUrl, objectStorageBaseUrl) | ||
154 | } else { | ||
155 | expect(file.fileUrl).to.match( | ||
156 | new RegExp(`${baseUrl}/static/streaming-playlists/hls/${privatePath}${videoDetails.uuid}/${nameReg}-fragmented.mp4`) | ||
157 | ) | ||
158 | } | ||
159 | } | ||
135 | 160 | ||
136 | const torrent = await webtorrentAdd(file.magnetUri, true) | 161 | { |
137 | expect(torrent.files).to.be.an('array') | 162 | await Promise.all([ |
138 | expect(torrent.files.length).to.equal(1) | 163 | makeRawRequest({ url: file.torrentUrl, token, expectedStatus: HttpStatusCode.OK_200 }), |
139 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | 164 | makeRawRequest({ url: file.torrentDownloadUrl, token, expectedStatus: HttpStatusCode.OK_200 }), |
165 | makeRawRequest({ url: file.metadataUrl, token, expectedStatus: HttpStatusCode.OK_200 }), | ||
166 | makeRawRequest({ url: file.fileUrl, token, expectedStatus: HttpStatusCode.OK_200 }), | ||
167 | |||
168 | makeRawRequest({ | ||
169 | url: file.fileDownloadUrl, | ||
170 | token, | ||
171 | expectedStatus: objectStorageBaseUrl | ||
172 | ? HttpStatusCode.FOUND_302 | ||
173 | : HttpStatusCode.OK_200 | ||
174 | }) | ||
175 | ]) | ||
176 | } | ||
140 | } | 177 | } |
141 | 178 | ||
142 | // Check master playlist | 179 | // Check master playlist |
143 | { | 180 | { |
144 | await checkResolutionsInMasterPlaylist({ server, playlistUrl: hlsPlaylist.playlistUrl, resolutions }) | 181 | await checkResolutionsInMasterPlaylist({ server, token, playlistUrl: hlsPlaylist.playlistUrl, resolutions }) |
145 | 182 | ||
146 | const masterPlaylist = await server.streamingPlaylists.get({ url: hlsPlaylist.playlistUrl }) | 183 | const masterPlaylist = await server.streamingPlaylists.get({ url: hlsPlaylist.playlistUrl, token }) |
147 | 184 | ||
148 | let i = 0 | 185 | let i = 0 |
149 | for (const resolution of resolutions) { | 186 | for (const resolution of resolutions) { |
@@ -163,11 +200,16 @@ async function completeCheckHlsPlaylist (options: { | |||
163 | const file = hlsFiles.find(f => f.resolution.id === resolution) | 200 | const file = hlsFiles.find(f => f.resolution.id === resolution) |
164 | const playlistName = removeFragmentedMP4Ext(basename(file.fileUrl)) + '.m3u8' | 201 | const playlistName = removeFragmentedMP4Ext(basename(file.fileUrl)) + '.m3u8' |
165 | 202 | ||
166 | const url = objectStorageBaseUrl | 203 | let url: string |
167 | ? `${objectStorageBaseUrl}hls/${videoUUID}/${playlistName}` | 204 | if (objectStorageBaseUrl && requiresAuth) { |
168 | : `${baseUrl}/static/streaming-playlists/hls/${videoUUID}/${playlistName}` | 205 | url = `${baseUrl}/object-storage-proxy/streaming-playlists/hls/${privatePath}${videoUUID}/${playlistName}` |
206 | } else if (objectStorageBaseUrl) { | ||
207 | url = `${objectStorageBaseUrl}hls/${videoUUID}/${playlistName}` | ||
208 | } else { | ||
209 | url = `${baseUrl}/static/streaming-playlists/hls/${privatePath}${videoUUID}/${playlistName}` | ||
210 | } | ||
169 | 211 | ||
170 | const subPlaylist = await server.streamingPlaylists.get({ url }) | 212 | const subPlaylist = await server.streamingPlaylists.get({ url, token }) |
171 | 213 | ||
172 | expect(subPlaylist).to.match(new RegExp(`${uuidRegex}-${resolution}-fragmented.mp4`)) | 214 | expect(subPlaylist).to.match(new RegExp(`${uuidRegex}-${resolution}-fragmented.mp4`)) |
173 | expect(subPlaylist).to.contain(basename(file.fileUrl)) | 215 | expect(subPlaylist).to.contain(basename(file.fileUrl)) |
@@ -175,13 +217,19 @@ async function completeCheckHlsPlaylist (options: { | |||
175 | } | 217 | } |
176 | 218 | ||
177 | { | 219 | { |
178 | const baseUrlAndPath = objectStorageBaseUrl | 220 | let baseUrlAndPath: string |
179 | ? objectStorageBaseUrl + 'hls/' + videoUUID | 221 | if (objectStorageBaseUrl && requiresAuth) { |
180 | : baseUrl + '/static/streaming-playlists/hls/' + videoUUID | 222 | baseUrlAndPath = `${baseUrl}/object-storage-proxy/streaming-playlists/hls/${privatePath}${videoUUID}` |
223 | } else if (objectStorageBaseUrl) { | ||
224 | baseUrlAndPath = `${objectStorageBaseUrl}hls/${videoUUID}` | ||
225 | } else { | ||
226 | baseUrlAndPath = `${baseUrl}/static/streaming-playlists/hls/${privatePath}${videoUUID}` | ||
227 | } | ||
181 | 228 | ||
182 | for (const resolution of resolutions) { | 229 | for (const resolution of resolutions) { |
183 | await checkSegmentHash({ | 230 | await checkSegmentHash({ |
184 | server, | 231 | server, |
232 | token, | ||
185 | baseUrlPlaylist: baseUrlAndPath, | 233 | baseUrlPlaylist: baseUrlAndPath, |
186 | baseUrlSegment: baseUrlAndPath, | 234 | baseUrlSegment: baseUrlAndPath, |
187 | resolution, | 235 | resolution, |
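Reviewer note: the hunks above thread an access token and a `private/` path segment through the HLS checks so that private/internal videos can be verified both on local storage and behind the object-storage proxy. A minimal usage sketch follows, assuming these helpers are imported from the shared test utilities; `servers`, `privateVideoUUID` and `masterPlaylistUrl` are placeholder fixtures, and the option names are taken from the signatures shown in the hunks above.

  // Sketch only: how the reworked checks might be invoked for a non-public video
  await completeCheckHlsPlaylist({
    servers,                              // PeerTubeServer[]
    videoUUID: privateVideoUUID,          // placeholder: UUID of a private or internal video
    hlsOnly: true,
    resolutions: [ 240, 360, 480, 720 ]
    // objectStorageBaseUrl is now optional; omit it for local-storage runs
  })

  await checkResolutionsInMasterPlaylist({
    server: servers[0],
    playlistUrl: masterPlaylistUrl,       // placeholder
    token: servers[0].accessToken,        // needed because the video is not public
    resolutions: [ 720 ]
  })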
diff --git a/server/tests/shared/videos.ts b/server/tests/shared/videos.ts index f8ec65752..856fabd11 100644 --- a/server/tests/shared/videos.ts +++ b/server/tests/shared/videos.ts | |||
@@ -4,16 +4,106 @@ import { expect } from 'chai' | |||
4 | import { pathExists, readdir } from 'fs-extra' | 4 | import { pathExists, readdir } from 'fs-extra' |
5 | import { basename, join } from 'path' | 5 | import { basename, join } from 'path' |
6 | import { loadLanguages, VIDEO_CATEGORIES, VIDEO_LANGUAGES, VIDEO_LICENCES, VIDEO_PRIVACIES } from '@server/initializers/constants' | 6 | import { loadLanguages, VIDEO_CATEGORIES, VIDEO_LANGUAGES, VIDEO_LICENCES, VIDEO_PRIVACIES } from '@server/initializers/constants' |
7 | import { getLowercaseExtension, uuidRegex } from '@shared/core-utils' | 7 | import { getLowercaseExtension, pick, uuidRegex } from '@shared/core-utils' |
8 | import { HttpStatusCode, VideoCaption, VideoDetails } from '@shared/models' | 8 | import { HttpStatusCode, VideoCaption, VideoDetails, VideoPrivacy, VideoResolution } from '@shared/models' |
9 | import { makeRawRequest, PeerTubeServer, VideoEdit, waitJobs, webtorrentAdd } from '@shared/server-commands' | 9 | import { makeRawRequest, PeerTubeServer, VideoEdit, waitJobs } from '@shared/server-commands' |
10 | import { dateIsValid, testImage } from './checks' | 10 | import { dateIsValid, expectStartWith, testImage } from './checks' |
11 | import { checkWebTorrentWorks } from './webtorrent' | ||
11 | 12 | ||
12 | loadLanguages() | 13 | loadLanguages() |
13 | 14 | ||
14 | async function completeVideoCheck ( | 15 | async function completeWebVideoFilesCheck (options: { |
15 | server: PeerTubeServer, | 16 | server: PeerTubeServer |
16 | video: any, | 17 | originServer: PeerTubeServer |
18 | videoUUID: string | ||
19 | fixture: string | ||
20 | files: { | ||
21 | resolution: number | ||
22 | size?: number | ||
23 | }[] | ||
24 | objectStorageBaseUrl?: string | ||
25 | }) { | ||
26 | const { originServer, server, videoUUID, files, fixture, objectStorageBaseUrl } = options | ||
27 | const video = await server.videos.getWithToken({ id: videoUUID }) | ||
28 | const serverConfig = await originServer.config.getConfig() | ||
29 | const requiresAuth = video.privacy.id === VideoPrivacy.PRIVATE || video.privacy.id === VideoPrivacy.INTERNAL | ||
30 | |||
31 | const transcodingEnabled = serverConfig.transcoding.webtorrent.enabled | ||
32 | |||
33 | for (const attributeFile of files) { | ||
34 | const file = video.files.find(f => f.resolution.id === attributeFile.resolution) | ||
35 | expect(file, `resolution ${attributeFile.resolution} does not exist`).not.to.be.undefined | ||
36 | |||
37 | let extension = getLowercaseExtension(fixture) | ||
38 | // Transcoding enabled: extension will always be .mp4 | ||
39 | if (transcodingEnabled) extension = '.mp4' | ||
40 | |||
41 | expect(file.id).to.exist | ||
42 | expect(file.magnetUri).to.have.lengthOf.above(2) | ||
43 | |||
44 | { | ||
45 | const privatePath = requiresAuth | ||
46 | ? 'private/' | ||
47 | : '' | ||
48 | const nameReg = `${uuidRegex}-${file.resolution.id}` | ||
49 | |||
50 | expect(file.torrentDownloadUrl).to.match(new RegExp(`${server.url}/download/torrents/${nameReg}.torrent`)) | ||
51 | expect(file.torrentUrl).to.match(new RegExp(`${server.url}/lazy-static/torrents/${nameReg}.torrent`)) | ||
52 | |||
53 | if (objectStorageBaseUrl && requiresAuth) { | ||
54 | expect(file.fileUrl).to.match(new RegExp(`${originServer.url}/object-storage-proxy/webseed/${privatePath}${nameReg}${extension}`)) | ||
55 | } else if (objectStorageBaseUrl) { | ||
56 | expectStartWith(file.fileUrl, objectStorageBaseUrl) | ||
57 | } else { | ||
58 | expect(file.fileUrl).to.match(new RegExp(`${originServer.url}/static/webseed/${privatePath}${nameReg}${extension}`)) | ||
59 | } | ||
60 | |||
61 | expect(file.fileDownloadUrl).to.match(new RegExp(`${originServer.url}/download/videos/${nameReg}${extension}`)) | ||
62 | } | ||
63 | |||
64 | { | ||
65 | const token = requiresAuth | ||
66 | ? server.accessToken | ||
67 | : undefined | ||
68 | |||
69 | await Promise.all([ | ||
70 | makeRawRequest({ url: file.torrentUrl, token, expectedStatus: HttpStatusCode.OK_200 }), | ||
71 | makeRawRequest({ url: file.torrentDownloadUrl, token, expectedStatus: HttpStatusCode.OK_200 }), | ||
72 | makeRawRequest({ url: file.metadataUrl, token, expectedStatus: HttpStatusCode.OK_200 }), | ||
73 | makeRawRequest({ url: file.fileUrl, token, expectedStatus: HttpStatusCode.OK_200 }), | ||
74 | makeRawRequest({ | ||
75 | url: file.fileDownloadUrl, | ||
76 | token, | ||
77 | expectedStatus: objectStorageBaseUrl ? HttpStatusCode.FOUND_302 : HttpStatusCode.OK_200 | ||
78 | }) | ||
79 | ]) | ||
80 | } | ||
81 | |||
82 | expect(file.resolution.id).to.equal(attributeFile.resolution) | ||
83 | |||
84 | if (file.resolution.id === VideoResolution.H_NOVIDEO) { | ||
85 | expect(file.resolution.label).to.equal('Audio') | ||
86 | } else { | ||
87 | expect(file.resolution.label).to.equal(attributeFile.resolution + 'p') | ||
88 | } | ||
89 | |||
90 | if (attributeFile.size) { | ||
91 | const minSize = attributeFile.size - ((10 * attributeFile.size) / 100) | ||
92 | const maxSize = attributeFile.size + ((10 * attributeFile.size) / 100) | ||
93 | expect( | ||
94 | file.size, | ||
95 | 'File size for resolution ' + file.resolution.label + ' outside confidence interval (' + minSize + '> size <' + maxSize + ')' | ||
96 | ).to.be.above(minSize).and.below(maxSize) | ||
97 | } | ||
98 | |||
99 | await checkWebTorrentWorks(file.magnetUri) | ||
100 | } | ||
101 | } | ||
102 | |||
103 | async function completeVideoCheck (options: { | ||
104 | server: PeerTubeServer | ||
105 | originServer: PeerTubeServer | ||
106 | videoUUID: string | ||
17 | attributes: { | 107 | attributes: { |
18 | name: string | 108 | name: string |
19 | category: number | 109 | category: number |
@@ -50,13 +140,14 @@ async function completeVideoCheck ( | |||
50 | thumbnailfile?: string | 140 | thumbnailfile?: string |
51 | previewfile?: string | 141 | previewfile?: string |
52 | } | 142 | } |
53 | ) { | 143 | }) { |
144 | const { attributes, originServer, server, videoUUID } = options | ||
145 | |||
146 | const video = await server.videos.get({ id: videoUUID }) | ||
147 | |||
54 | if (!attributes.likes) attributes.likes = 0 | 148 | if (!attributes.likes) attributes.likes = 0 |
55 | if (!attributes.dislikes) attributes.dislikes = 0 | 149 | if (!attributes.dislikes) attributes.dislikes = 0 |
56 | 150 | ||
57 | const host = new URL(server.url).host | ||
58 | const originHost = attributes.account.host | ||
59 | |||
60 | expect(video.name).to.equal(attributes.name) | 151 | expect(video.name).to.equal(attributes.name) |
61 | expect(video.category.id).to.equal(attributes.category) | 152 | expect(video.category.id).to.equal(attributes.category) |
62 | expect(video.category.label).to.equal(attributes.category !== null ? VIDEO_CATEGORIES[attributes.category] : 'Unknown') | 153 | expect(video.category.label).to.equal(attributes.category !== null ? VIDEO_CATEGORIES[attributes.category] : 'Unknown') |
@@ -77,7 +168,7 @@ async function completeVideoCheck ( | |||
77 | expect(video.dislikes).to.equal(attributes.dislikes) | 168 | expect(video.dislikes).to.equal(attributes.dislikes) |
78 | expect(video.isLocal).to.equal(attributes.isLocal) | 169 | expect(video.isLocal).to.equal(attributes.isLocal) |
79 | expect(video.duration).to.equal(attributes.duration) | 170 | expect(video.duration).to.equal(attributes.duration) |
80 | expect(video.url).to.contain(originHost) | 171 | expect(video.url).to.contain(originServer.host) |
81 | expect(dateIsValid(video.createdAt)).to.be.true | 172 | expect(dateIsValid(video.createdAt)).to.be.true |
82 | expect(dateIsValid(video.publishedAt)).to.be.true | 173 | expect(dateIsValid(video.publishedAt)).to.be.true |
83 | expect(dateIsValid(video.updatedAt)).to.be.true | 174 | expect(dateIsValid(video.updatedAt)).to.be.true |
@@ -92,67 +183,28 @@ async function completeVideoCheck ( | |||
92 | expect(video.originallyPublishedAt).to.be.null | 183 | expect(video.originallyPublishedAt).to.be.null |
93 | } | 184 | } |
94 | 185 | ||
95 | const videoDetails = await server.videos.get({ id: video.uuid }) | 186 | expect(video.files).to.have.lengthOf(attributes.files.length) |
96 | 187 | expect(video.tags).to.deep.equal(attributes.tags) | |
97 | expect(videoDetails.files).to.have.lengthOf(attributes.files.length) | 188 | expect(video.account.name).to.equal(attributes.account.name) |
98 | expect(videoDetails.tags).to.deep.equal(attributes.tags) | 189 | expect(video.account.host).to.equal(attributes.account.host) |
99 | expect(videoDetails.account.name).to.equal(attributes.account.name) | ||
100 | expect(videoDetails.account.host).to.equal(attributes.account.host) | ||
101 | expect(video.channel.displayName).to.equal(attributes.channel.displayName) | 190 | expect(video.channel.displayName).to.equal(attributes.channel.displayName) |
102 | expect(video.channel.name).to.equal(attributes.channel.name) | 191 | expect(video.channel.name).to.equal(attributes.channel.name) |
103 | expect(videoDetails.channel.host).to.equal(attributes.account.host) | 192 | expect(video.channel.host).to.equal(attributes.account.host) |
104 | expect(videoDetails.channel.isLocal).to.equal(attributes.channel.isLocal) | 193 | expect(video.channel.isLocal).to.equal(attributes.channel.isLocal) |
105 | expect(dateIsValid(videoDetails.channel.createdAt.toString())).to.be.true | 194 | expect(dateIsValid(video.channel.createdAt.toString())).to.be.true |
106 | expect(dateIsValid(videoDetails.channel.updatedAt.toString())).to.be.true | 195 | expect(dateIsValid(video.channel.updatedAt.toString())).to.be.true |
107 | expect(videoDetails.commentsEnabled).to.equal(attributes.commentsEnabled) | 196 | expect(video.commentsEnabled).to.equal(attributes.commentsEnabled) |
108 | expect(videoDetails.downloadEnabled).to.equal(attributes.downloadEnabled) | 197 | expect(video.downloadEnabled).to.equal(attributes.downloadEnabled) |
109 | |||
110 | for (const attributeFile of attributes.files) { | ||
111 | const file = videoDetails.files.find(f => f.resolution.id === attributeFile.resolution) | ||
112 | expect(file).not.to.be.undefined | ||
113 | |||
114 | let extension = getLowercaseExtension(attributes.fixture) | ||
115 | // Transcoding enabled: extension will always be .mp4 | ||
116 | if (attributes.files.length > 1) extension = '.mp4' | ||
117 | |||
118 | expect(file.id).to.exist | ||
119 | expect(file.magnetUri).to.have.lengthOf.above(2) | ||
120 | |||
121 | expect(file.torrentDownloadUrl).to.match(new RegExp(`http://${host}/download/torrents/${uuidRegex}-${file.resolution.id}.torrent`)) | ||
122 | expect(file.torrentUrl).to.match(new RegExp(`http://${host}/lazy-static/torrents/${uuidRegex}-${file.resolution.id}.torrent`)) | ||
123 | |||
124 | expect(file.fileUrl).to.match(new RegExp(`http://${originHost}/static/webseed/${uuidRegex}-${file.resolution.id}${extension}`)) | ||
125 | expect(file.fileDownloadUrl).to.match(new RegExp(`http://${originHost}/download/videos/${uuidRegex}-${file.resolution.id}${extension}`)) | ||
126 | 198 | ||
127 | await Promise.all([ | 199 | expect(video.thumbnailPath).to.exist |
128 | makeRawRequest({ url: file.torrentUrl, expectedStatus: HttpStatusCode.OK_200 }), | 200 | await testImage(server.url, attributes.thumbnailfile || attributes.fixture, video.thumbnailPath) |
129 | makeRawRequest({ url: file.torrentDownloadUrl, expectedStatus: HttpStatusCode.OK_200 }), | ||
130 | makeRawRequest({ url: file.metadataUrl, expectedStatus: HttpStatusCode.OK_200 }) | ||
131 | ]) | ||
132 | |||
133 | expect(file.resolution.id).to.equal(attributeFile.resolution) | ||
134 | expect(file.resolution.label).to.equal(attributeFile.resolution + 'p') | ||
135 | |||
136 | const minSize = attributeFile.size - ((10 * attributeFile.size) / 100) | ||
137 | const maxSize = attributeFile.size + ((10 * attributeFile.size) / 100) | ||
138 | expect( | ||
139 | file.size, | ||
140 | 'File size for resolution ' + file.resolution.label + ' outside confidence interval (' + minSize + '> size <' + maxSize + ')' | ||
141 | ).to.be.above(minSize).and.below(maxSize) | ||
142 | |||
143 | const torrent = await webtorrentAdd(file.magnetUri, true) | ||
144 | expect(torrent.files).to.be.an('array') | ||
145 | expect(torrent.files.length).to.equal(1) | ||
146 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
147 | } | ||
148 | |||
149 | expect(videoDetails.thumbnailPath).to.exist | ||
150 | await testImage(server.url, attributes.thumbnailfile || attributes.fixture, videoDetails.thumbnailPath) | ||
151 | 201 | ||
152 | if (attributes.previewfile) { | 202 | if (attributes.previewfile) { |
153 | expect(videoDetails.previewPath).to.exist | 203 | expect(video.previewPath).to.exist |
154 | await testImage(server.url, attributes.previewfile, videoDetails.previewPath) | 204 | await testImage(server.url, attributes.previewfile, video.previewPath) |
155 | } | 205 | } |
206 | |||
207 | await completeWebVideoFilesCheck({ server, originServer, videoUUID: video.uuid, ...pick(attributes, [ 'fixture', 'files' ]) }) | ||
156 | } | 208 | } |
157 | 209 | ||
158 | async function checkVideoFilesWereRemoved (options: { | 210 | async function checkVideoFilesWereRemoved (options: { |
@@ -245,6 +297,7 @@ async function uploadRandomVideoOnServers ( | |||
245 | 297 | ||
246 | export { | 298 | export { |
247 | completeVideoCheck, | 299 | completeVideoCheck, |
300 | completeWebVideoFilesCheck, | ||
248 | checkUploadVideoParam, | 301 | checkUploadVideoParam, |
249 | uploadRandomVideoOnServers, | 302 | uploadRandomVideoOnServers, |
250 | checkVideoFilesWereRemoved, | 303 | checkVideoFilesWereRemoved, |
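Reviewer note: `completeVideoCheck` now takes a single options object and delegates the per-file assertions to the new `completeWebVideoFilesCheck` export of server/tests/shared/videos.ts. A hedged sketch of calling the extracted helper directly; `servers` and `uuid` are assumed test fixtures and the fixture name is a placeholder, but the option names match the interface in the hunk above.

  await completeWebVideoFilesCheck({
    server: servers[1],                  // server whose API responses are being checked
    originServer: servers[0],            // server that owns the video; expected URLs are built against it
    videoUUID: uuid,                     // placeholder
    fixture: 'video_short.webm',         // placeholder fixture name
    files: [
      { resolution: 720, size: 218910 }  // size is optional and checked with a ±10% tolerance
    ]
    // pass objectStorageBaseUrl for object-storage runs; fileDownloadUrl is then expected to redirect (302)
  })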
diff --git a/server/tests/shared/webtorrent.ts b/server/tests/shared/webtorrent.ts new file mode 100644 index 000000000..d5bd86500 --- /dev/null +++ b/server/tests/shared/webtorrent.ts | |||
@@ -0,0 +1,58 @@ | |||
1 | import { expect } from 'chai' | ||
2 | import { readFile } from 'fs-extra' | ||
3 | import parseTorrent from 'parse-torrent' | ||
4 | import { basename, join } from 'path' | ||
5 | import * as WebTorrent from 'webtorrent' | ||
6 | import { VideoFile } from '@shared/models' | ||
7 | import { PeerTubeServer } from '@shared/server-commands' | ||
8 | |||
9 | let webtorrent: WebTorrent.Instance | ||
10 | |||
11 | export async function checkWebTorrentWorks (magnetUri: string, pathMatch?: RegExp) { | ||
12 | const torrent = await webtorrentAdd(magnetUri, true) | ||
13 | |||
14 | expect(torrent.files).to.be.an('array') | ||
15 | expect(torrent.files.length).to.equal(1) | ||
16 | expect(torrent.files[0].path).to.exist.and.to.not.equal('') | ||
17 | |||
18 | if (pathMatch) { | ||
19 | expect(torrent.files[0].path).match(pathMatch) | ||
20 | } | ||
21 | } | ||
22 | |||
23 | export async function parseTorrentVideo (server: PeerTubeServer, file: VideoFile) { | ||
24 | const torrentName = basename(file.torrentUrl) | ||
25 | const torrentPath = server.servers.buildDirectory(join('torrents', torrentName)) | ||
26 | |||
27 | const data = await readFile(torrentPath) | ||
28 | |||
29 | return parseTorrent(data) | ||
30 | } | ||
31 | |||
32 | // --------------------------------------------------------------------------- | ||
33 | // Private | ||
34 | // --------------------------------------------------------------------------- | ||
35 | |||
36 | function webtorrentAdd (torrentId: string, refreshWebTorrent = false) { | ||
37 | const WebTorrent = require('webtorrent') | ||
38 | |||
39 | if (webtorrent && refreshWebTorrent) webtorrent.destroy() | ||
40 | if (!webtorrent || refreshWebTorrent) webtorrent = new WebTorrent() | ||
41 | |||
42 | webtorrent.on('error', err => console.error('Error in webtorrent', err)) | ||
43 | |||
44 | return new Promise<WebTorrent.Torrent>(res => { | ||
45 | const torrent = webtorrent.add(torrentId, res) | ||
46 | |||
47 | torrent.on('error', err => console.error('Error in webtorrent torrent', err)) | ||
48 | torrent.on('warning', warn => { | ||
49 | const msg = typeof warn === 'string' | ||
50 | ? warn | ||
51 | : warn.message | ||
52 | |||
53 | if (msg.includes('Unsupported')) return | ||
54 | |||
55 | console.error('Warning in webtorrent torrent', warn) | ||
56 | }) | ||
57 | }) | ||
58 | } | ||
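Reviewer note: the new server/tests/shared/webtorrent.ts module centralises the ad-hoc `webtorrentAdd` assertions previously inlined in the specs. A short usage sketch, assuming `server` and `uuid` come from the usual test setup and that the helpers are imported from this shared module.

  const video = await server.videos.get({ id: uuid })
  const file = video.files[0]

  // Adds the magnet URI to a WebTorrent instance and asserts the torrent
  // exposes exactly one file with a non-empty path (optionally matched against a regexp)
  await checkWebTorrentWorks(file.magnetUri)

  // Reads the .torrent file from the server's "torrents" directory and returns the parsed torrent
  const parsed = await parseTorrentVideo(server, file)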
diff --git a/server/tsconfig.json b/server/tsconfig.json index 4be7ae2f4..240bd3bfe 100644 --- a/server/tsconfig.json +++ b/server/tsconfig.json | |||
@@ -7,6 +7,7 @@ | |||
7 | { "path": "../shared" } | 7 | { "path": "../shared" } |
8 | ], | 8 | ], |
9 | "exclude": [ | 9 | "exclude": [ |
10 | "tools/" | 10 | "tools/", |
11 | "tests/fixtures" | ||
11 | ] | 12 | ] |
12 | } | 13 | } |